Mirror of https://github.com/adamhathcock/sharpcompress.git (synced 2026-02-04 05:25:00 +00:00)

Compare commits: async-2 ... adam/aweso (517 commits)

.config/dotnet-tools.json

@@ -3,10 +3,11 @@
   "isRoot": true,
   "tools": {
     "csharpier": {
-      "version": "0.27.3",
+      "version": "1.1.2",
       "commands": [
-        "dotnet-csharpier"
-      ]
+        "csharpier"
+      ],
+      "rollForward": false
     }
   }
 }

7 .copilot-agent.yml Normal file

@@ -0,0 +1,7 @@
enabled: true
agent:
  name: copilot-coding-agent
allow:
  - paths: ["src/**/*", "tests/**/*", "README.md", "AGENTS.md"]
    actions: ["create", "modify"]
require_review_before_merge: true

.editorconfig

@@ -70,7 +70,7 @@ indent_style = tab
 
 [*.{cs,csx,cake,vb,vbx}]
 # Default Severity for all .NET Code Style rules below
-dotnet_analyzer_diagnostic.severity = warning
+dotnet_analyzer_diagnostic.severity = silent
 
 ##########################################
 # File Header (Uncomment to support file headers)
@@ -269,6 +269,8 @@ dotnet_diagnostic.CA1305.severity = suggestion
 dotnet_diagnostic.CA1307.severity = suggestion
 dotnet_diagnostic.CA1309.severity = suggestion
 dotnet_diagnostic.CA1310.severity = error
+dotnet_diagnostic.CA1507.severity = suggestion
+dotnet_diagnostic.CA1513.severity = suggestion
 dotnet_diagnostic.CA1707.severity = suggestion
 dotnet_diagnostic.CA1708.severity = suggestion
 dotnet_diagnostic.CA1711.severity = suggestion
@@ -286,6 +288,7 @@ dotnet_diagnostic.CA1834.severity = error
 dotnet_diagnostic.CA1845.severity = suggestion
 dotnet_diagnostic.CA1848.severity = suggestion
 dotnet_diagnostic.CA1852.severity = suggestion
+dotnet_diagnostic.CA1860.severity = silent
 dotnet_diagnostic.CA2016.severity = suggestion
 dotnet_diagnostic.CA2201.severity = error
 dotnet_diagnostic.CA2206.severity = error
@@ -303,13 +306,12 @@ dotnet_diagnostic.CS1998.severity = error
 dotnet_diagnostic.CS8602.severity = error
 dotnet_diagnostic.CS8604.severity = error
 dotnet_diagnostic.CS8618.severity = error
-dotnet_diagnostic.CS0618.severity = error
+dotnet_diagnostic.CS0618.severity = suggestion
-dotnet_diagnostic.CS1998.severity = error
 dotnet_diagnostic.CS4014.severity = error
 dotnet_diagnostic.CS8600.severity = error
 dotnet_diagnostic.CS8603.severity = error
 dotnet_diagnostic.CS8625.severity = error
 dotnet_diagnostic.CS8981.severity = suggestion
 
 dotnet_diagnostic.BL0005.severity = suggestion
 
@@ -318,7 +320,7 @@ dotnet_diagnostic.MVC1000.severity = suggestion
 dotnet_diagnostic.RZ10012.severity = error
 
 dotnet_diagnostic.IDE0004.severity = error # redundant cast
-dotnet_diagnostic.IDE0005.severity = error
+dotnet_diagnostic.IDE0005.severity = suggestion
 dotnet_diagnostic.IDE0007.severity = error # Use var
 dotnet_diagnostic.IDE0011.severity = error # Use braces on if statements
 dotnet_diagnostic.IDE0010.severity = silent # populate switch
@@ -329,7 +331,7 @@ dotnet_diagnostic.IDE0023.severity = suggestion # use expression body for operat
 dotnet_diagnostic.IDE0024.severity = silent # expression body for operators
 dotnet_diagnostic.IDE0025.severity = suggestion # use expression body for properties
 dotnet_diagnostic.IDE0027.severity = suggestion # Use expression body for accessors
-dotnet_diagnostic.IDE0028.severity = silent
+dotnet_diagnostic.IDE0028.severity = silent # expression body for accessors
 dotnet_diagnostic.IDE0032.severity = suggestion # Use auto property
 dotnet_diagnostic.IDE0033.severity = error # prefer tuple name
 dotnet_diagnostic.IDE0037.severity = suggestion # simplify anonymous type
@@ -337,7 +339,7 @@ dotnet_diagnostic.IDE0040.severity = error # modifiers required
 dotnet_diagnostic.IDE0041.severity = error # simplify null
 dotnet_diagnostic.IDE0042.severity = error # deconstruct variable
 dotnet_diagnostic.IDE0044.severity = suggestion # make field only when possible
-dotnet_diagnostic.IDE0047.severity = suggestion # paratemeter name
+dotnet_diagnostic.IDE0047.severity = suggestion # parameter name
 dotnet_diagnostic.IDE0051.severity = error # unused field
 dotnet_diagnostic.IDE0052.severity = error # unused member
 dotnet_diagnostic.IDE0053.severity = suggestion # lambda not needed
@@ -351,11 +353,20 @@ dotnet_diagnostic.IDE0066.severity = suggestion # switch expression
 dotnet_diagnostic.IDE0072.severity = suggestion # Populate switch - forces population of all cases even when default specified
 dotnet_diagnostic.IDE0078.severity = suggestion # use pattern matching
 dotnet_diagnostic.IDE0090.severity = suggestion # new can be simplified
-dotnet_diagnostic.IDE0130.severity = error # namespace folder structure
+dotnet_diagnostic.IDE0130.severity = suggestion # namespace folder structure
 dotnet_diagnostic.IDE0160.severity = silent # Use block namespaces ARE NOT required
 dotnet_diagnostic.IDE0161.severity = error # Please use file namespaces
 dotnet_diagnostic.IDE0200.severity = suggestion # lambda not needed
 dotnet_diagnostic.IDE1006.severity = suggestion # Naming rule violation: These words cannot contain lower case characters
+dotnet_diagnostic.IDE0260.severity = suggestion # Use pattern matching
+dotnet_diagnostic.IDE0270.severity = suggestion # Null check simplifcation
+dotnet_diagnostic.IDE0290.severity = error # Primary Constructor
+dotnet_diagnostic.IDE0300.severity = suggestion # Collection
+dotnet_diagnostic.IDE0305.severity = suggestion # Collection ToList
+
+dotnet_diagnostic.NX0001.severity = error
+dotnet_diagnostic.NX0002.severity = silent
+dotnet_diagnostic.NX0003.severity = silent
 
 ##########################################
 # Styles

15 .github/COPILOT_AGENT_README.md vendored Normal file

@@ -0,0 +1,15 @@
# Copilot Coding Agent Configuration

This repository includes a minimal opt-in configuration and CI workflow to allow the GitHub Copilot coding agent to open and validate PRs.

- .copilot-agent.yml: opt-in config for automated agents
- .github/agents/copilot-agent.yml: detailed agent policy configuration
- .github/workflows/dotnetcore.yml: CI runs on PRs touching the solution, source, or tests to validate changes
- AGENTS.md: general instructions for Copilot coding agent with project-specific guidelines

Maintainers can adjust the allowed paths or disable the agent by editing or removing .copilot-agent.yml.

Notes:
- The agent can create, modify, and delete files within the allowed paths (src, tests, README.md, AGENTS.md)
- All changes require review before merge
- If build/test paths are different, update the workflow accordingly; this workflow targets SharpCompress.sln and the SharpCompress.Test test project.

192 .github/agents/CSharpExpert.agent.md vendored Normal file

@@ -0,0 +1,192 @@
---
name: C# Expert
description: An agent designed to assist with software development tasks for .NET projects.
# version: 2025-10-27a
---
You are an expert C#/.NET developer. You help with .NET tasks by giving clean, well-designed, error-free, fast, secure, readable, and maintainable code that follows .NET conventions. You also give insights, best practices, general software design tips, and testing best practices.

When invoked:
- Understand the user's .NET task and context
- Propose clean, organized solutions that follow .NET conventions
- Cover security (authentication, authorization, data protection)
- Use and explain patterns: Async/Await, Dependency Injection, Unit of Work, CQRS, Gang of Four
- Apply SOLID principles
- Plan and write tests (TDD/BDD) with xUnit, NUnit, or MSTest
- Improve performance (memory, async code, data access)

# General C# Development

- Follow the project's own conventions first, then common C# conventions.
- Keep naming, formatting, and project structure consistent.

## Code Design Rules

- DON'T add interfaces/abstractions unless used for external dependencies or testing.
- Don't wrap existing abstractions.
- Don't default to `public`. Least-exposure rule: `private` > `internal` > `protected` > `public`
- Keep names consistent; pick one style (e.g., `WithHostPort` or `WithBrowserPort`) and stick to it.
- Don't edit auto-generated code (`/api/*.cs`, `*.g.cs`, `// <auto-generated>`).
- Comments explain **why**, not what.
- Don't add unused methods/params.
- When fixing one method, check siblings for the same issue.
- Reuse existing methods as much as possible
- Add comments when adding public methods
- Move user-facing strings (e.g., AnalyzeAndConfirmNuGetConfigChanges) into resource files. Keep error/help text localizable.

## Error Handling & Edge Cases
- **Null checks**: use `ArgumentNullException.ThrowIfNull(x)`; for strings use `string.IsNullOrWhiteSpace(x)`; guard early. Avoid blanket `!`.
- **Exceptions**: choose precise types (e.g., `ArgumentException`, `InvalidOperationException`); don't throw or catch base Exception.
- **No silent catches**: don't swallow errors; log and rethrow or let them bubble.

## Goals for .NET Applications

### Productivity
- Prefer modern C# (file-scoped ns, raw """ strings, switch expr, ranges/indices, async streams) when TFM allows.
- Keep diffs small; reuse code; avoid new layers unless needed.
- Be IDE-friendly (go-to-def, rename, quick fixes work).

### Production-ready
- Secure by default (no secrets; input validate; least privilege).
- Resilient I/O (timeouts; retry with backoff when it fits).
- Structured logging with scopes; useful context; no log spam.
- Use precise exceptions; don't swallow; keep cause/context.

### Performance
- Simple first; optimize hot paths when measured.
- Stream large payloads; avoid extra allocs.
- Use Span/Memory/pooling when it matters.
- Async end-to-end; no sync-over-async.

### Cloud-native / cloud-ready
- Cross-platform; guard OS-specific APIs.
- Diagnostics: health/ready when it fits; metrics + traces.
- Observability: ILogger + OpenTelemetry hooks.
- 12-factor: config from env; avoid stateful singletons.

# .NET quick checklist

## Do first

* Read TFM + C# version.
* Check `global.json` SDK.

## Initial check

* App type: web / desktop / console / lib.
* Packages (and multi-targeting).
* Nullable on? (`<Nullable>enable</Nullable>` / `#nullable enable`)
* Repo config: `Directory.Build.*`, `Directory.Packages.props`.

## C# version

* **Don't** set C# newer than TFM default.
* C# 14 (NET 10+): extension members; `field` accessor; implicit `Span<T>` conv; `?.=`; `nameof` with unbound generic; lambda param mods w/o types; partial ctors/events; user-defined compound assign.

## Build

* .NET 5+: `dotnet build`, `dotnet publish`.
* .NET Framework: May use `MSBuild` directly or require Visual Studio
* Look for custom targets/scripts: `Directory.Build.targets`, `build.cmd/.sh`, `Build.ps1`.

## Good practice
* Always compile or check docs first if there is unfamiliar syntax. Don't try to correct the syntax if code can compile.
* Don't change TFM, SDK, or `<LangVersion>` unless asked.

# Async Programming Best Practices

* **Naming:** all async methods end with `Async` (incl. CLI handlers).
* **Always await:** no fire-and-forget; if timing out, **cancel the work**.
* **Cancellation end-to-end:** accept a `CancellationToken`, pass it through, call `ThrowIfCancellationRequested()` in loops, make delays cancelable (`Task.Delay(ms, ct)`).
* **Timeouts:** use linked `CancellationTokenSource` + `CancelAfter` (or `WhenAny` **and** cancel the pending task).
* **Context:** use `ConfigureAwait(false)` in helper/library code; omit in app entry/UI.
* **Stream JSON:** `GetAsync(..., ResponseHeadersRead)` → `ReadAsStreamAsync` → `JsonDocument.ParseAsync`; avoid `ReadAsStringAsync` when large.
* **Exit code on cancel:** return non-zero (e.g., `130`).
* **`ValueTask`:** use only when measured to help; default to `Task`.
* **Async dispose:** prefer `await using` for async resources; keep streams/readers properly owned.
* **No pointless wrappers:** don't add `async/await` if you just return the task.

## Immutability
- Prefer records to classes for DTOs

# Testing best practices

## Test structure

- Separate test project: **`[ProjectName].Tests`**.
- Mirror classes: `CatDoor` -> `CatDoorTests`.
- Name tests by behavior: `WhenCatMeowsThenCatDoorOpens`.
- Follow existing naming conventions.
- Use **public instance** classes; avoid **static** fields.
- No branching/conditionals inside tests.

## Unit Tests

- One behavior per test;
- Avoid Unicode symbols.
- Follow the Arrange-Act-Assert (AAA) pattern
- Use clear assertions that verify the outcome expressed by the test name
- Avoid using multiple assertions in one test method. In this case, prefer multiple tests.
- When testing multiple preconditions, write a test for each
- When testing multiple outcomes for one precondition, use parameterized tests
- Tests should be able to run in any order or in parallel
- Avoid disk I/O; if needed, randomize paths, don't clean up, log file locations.
- Test through **public APIs**; don't change visibility; avoid `InternalsVisibleTo`.
- Require tests for new/changed **public APIs**.
- Assert specific values and edge cases, not vague outcomes.

## Test workflow

### Run Test Command
- Look for custom targets/scripts: `Directory.Build.targets`, `test.ps1/.cmd/.sh`
- .NET Framework: May use `vstest.console.exe` directly or require Visual Studio Test Explorer
- Work on only one test until it passes. Then run other tests to ensure nothing has been broken.

### Code coverage (dotnet-coverage)
* **Tool (one-time):**
  `dotnet tool install -g dotnet-coverage`
* **Run locally (every time add/modify tests):**
  `dotnet-coverage collect -f cobertura -o coverage.cobertura.xml dotnet test`

## Test framework-specific guidance

- **Use the framework already in the solution** (xUnit/NUnit/MSTest) for new tests.

### xUnit

* Packages: `Microsoft.NET.Test.Sdk`, `xunit`, `xunit.runner.visualstudio`
* No class attribute; use `[Fact]`
* Parameterized tests: `[Theory]` with `[InlineData]`
* Setup/teardown: constructor and `IDisposable`

### xUnit v3

* Packages: `xunit.v3`, `xunit.runner.visualstudio` 3.x, `Microsoft.NET.Test.Sdk`
* `ITestOutputHelper` and `[Theory]` are in `Xunit`

### NUnit

* Packages: `Microsoft.NET.Test.Sdk`, `NUnit`, `NUnit3TestAdapter`
* Class `[TestFixture]`, test `[Test]`
* Parameterized tests: **use `[TestCase]`**

### MSTest

* Class `[TestClass]`, test `[TestMethod]`
* Setup/teardown: `[TestInitialize]`, `[TestCleanup]`
* Parameterized tests: **use `[TestMethod]` + `[DataRow]`**

### Assertions

* If **FluentAssertions/AwesomeAssertions** are already used, prefer them.
* Otherwise, use the framework's asserts.
* Use `Throws/ThrowsAsync` (or MSTest `Assert.ThrowsException`) for exceptions.

## Mocking

- Avoid mocks/Fakes if possible
- External dependencies can be mocked. Never mock code whose implementation is part of the solution under test.
- Try to verify that the outputs (e.g. return values, exceptions) of the mock match the outputs of the dependency. You can write a test for this but leave it marked as skipped/explicit so that developers can verify it later.
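
To make the timeout guidance above concrete (linked `CancellationTokenSource` plus `CancelAfter`, and cancelling the work rather than abandoning it), here is a minimal sketch. `RunWithTimeoutAsync` and `DoWorkAsync` are invented names for illustration, not code from this repository:

```csharp
using System;
using System.Threading;
using System.Threading.Tasks;

// Minimal sketch of the linked-CTS timeout pattern described in the agent file above.
public static class TimeoutExample
{
    public static async Task<string> RunWithTimeoutAsync(TimeSpan timeout, CancellationToken outerToken)
    {
        // Link the caller's token so either cancellation source stops the work.
        using var cts = CancellationTokenSource.CreateLinkedTokenSource(outerToken);
        cts.CancelAfter(timeout);

        try
        {
            return await DoWorkAsync(cts.Token).ConfigureAwait(false);
        }
        catch (OperationCanceledException) when (!outerToken.IsCancellationRequested)
        {
            // Only the timeout fired; report it as a timeout rather than a user cancellation.
            throw new TimeoutException($"Operation did not complete within {timeout}.");
        }
    }

    private static async Task<string> DoWorkAsync(CancellationToken cancellationToken)
    {
        // A cancelable delay stands in for real work; real loops should also call
        // cancellationToken.ThrowIfCancellationRequested().
        await Task.Delay(TimeSpan.FromSeconds(1), cancellationToken).ConfigureAwait(false);
        return "done";
    }
}
```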

13 .github/dependabot.yml vendored

@@ -1,6 +1,13 @@
 version: 2
 updates:
-  - package-ecosystem: "github-actions" # search for actions - there are other options available
-    directory: "/" # search in .github/workflows under root `/`
+  - package-ecosystem: "github-actions"
+    directory: "/"
     schedule:
-      interval: "weekly" # check for action update every week
+      interval: "weekly"
+
+  - package-ecosystem: "nuget"
+    directory: "/" # change to "/src/YourProject" if .csproj files are in subfolders
+    schedule:
+      interval: "weekly"
+    open-pull-requests-limit: 5
+    # optional: target-branch: "master"

114 .github/instructions/csharp.instructions.md vendored Normal file

@@ -0,0 +1,114 @@
---
description: 'Guidelines for building C# applications'
applyTo: '**/*.cs'
---

# C# Development

## C# Instructions
- Always use the latest version C#, currently C# 14 features.
- Write clear and concise comments for each function.

## General Instructions
- Make only high confidence suggestions when reviewing code changes.
- Write code with good maintainability practices, including comments on why certain design decisions were made.
- Handle edge cases and write clear exception handling.
- For libraries or external dependencies, mention their usage and purpose in comments.

## Naming Conventions

- Follow PascalCase for component names, method names, and public members.
- Use camelCase for private fields and local variables.
- Prefix interface names with "I" (e.g., IUserService).

## Formatting

- Apply code-formatting style defined in `.editorconfig`.
- Prefer file-scoped namespace declarations and single-line using directives.
- Insert a newline before the opening curly brace of any code block (e.g., after `if`, `for`, `while`, `foreach`, `using`, `try`, etc.).
- Ensure that the final return statement of a method is on its own line.
- Use pattern matching and switch expressions wherever possible.
- Use `nameof` instead of string literals when referring to member names.
- Ensure that XML doc comments are created for any public APIs. When applicable, include `<example>` and `<code>` documentation in the comments.

## Project Setup and Structure

- Guide users through creating a new .NET project with the appropriate templates.
- Explain the purpose of each generated file and folder to build understanding of the project structure.
- Demonstrate how to organize code using feature folders or domain-driven design principles.
- Show proper separation of concerns with models, services, and data access layers.
- Explain the Program.cs and configuration system in ASP.NET Core 10 including environment-specific settings.

## Nullable Reference Types

- Declare variables non-nullable, and check for `null` at entry points.
- Always use `is null` or `is not null` instead of `== null` or `!= null`.
- Trust the C# null annotations and don't add null checks when the type system says a value cannot be null.

## Data Access Patterns

- Guide the implementation of a data access layer using Entity Framework Core.
- Explain different options (SQL Server, SQLite, In-Memory) for development and production.
- Demonstrate repository pattern implementation and when it's beneficial.
- Show how to implement database migrations and data seeding.
- Explain efficient query patterns to avoid common performance issues.

## Authentication and Authorization

- Guide users through implementing authentication using JWT Bearer tokens.
- Explain OAuth 2.0 and OpenID Connect concepts as they relate to ASP.NET Core.
- Show how to implement role-based and policy-based authorization.
- Demonstrate integration with Microsoft Entra ID (formerly Azure AD).
- Explain how to secure both controller-based and Minimal APIs consistently.

## Validation and Error Handling

- Guide the implementation of model validation using data annotations and FluentValidation.
- Explain the validation pipeline and how to customize validation responses.
- Demonstrate a global exception handling strategy using middleware.
- Show how to create consistent error responses across the API.
- Explain problem details (RFC 7807) implementation for standardized error responses.

## API Versioning and Documentation

- Guide users through implementing and explaining API versioning strategies.
- Demonstrate Swagger/OpenAPI implementation with proper documentation.
- Show how to document endpoints, parameters, responses, and authentication.
- Explain versioning in both controller-based and Minimal APIs.
- Guide users on creating meaningful API documentation that helps consumers.

## Logging and Monitoring

- Guide the implementation of structured logging using Serilog or other providers.
- Explain the logging levels and when to use each.
- Demonstrate integration with Application Insights for telemetry collection.
- Show how to implement custom telemetry and correlation IDs for request tracking.
- Explain how to monitor API performance, errors, and usage patterns.

## Testing

- Always include test cases for critical paths of the application.
- Guide users through creating unit tests.
- Do not emit "Act", "Arrange" or "Assert" comments.
- Copy existing style in nearby files for test method names and capitalization.
- Explain integration testing approaches for API endpoints.
- Demonstrate how to mock dependencies for effective testing.
- Show how to test authentication and authorization logic.
- Explain test-driven development principles as applied to API development.

## Performance Optimization

- Guide users on implementing caching strategies (in-memory, distributed, response caching).
- Explain asynchronous programming patterns and why they matter for API performance.
- Demonstrate pagination, filtering, and sorting for large data sets.
- Show how to implement compression and other performance optimizations.
- Explain how to measure and benchmark API performance.

## Deployment and DevOps

- Guide users through containerizing their API using .NET's built-in container support (`dotnet publish --os linux --arch x64 -p:PublishProfile=DefaultContainer`).
- Explain the differences between manual Dockerfile creation and .NET's container publishing features.
- Explain CI/CD pipelines for NET applications.
- Demonstrate deployment to Azure App Service, Azure Container Apps, or other hosting options.
- Show how to implement health checks and readiness probes.
- Explain environment-specific configurations for different deployment stages.
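
As a small illustration of the null-check and pattern-matching conventions listed in the instructions above (switch expressions, `is not null`, `nameof`), here is a minimal sketch; `OrderStatus` and `Describe` are invented for the example and are not part of this repository:

```csharp
using System;

// Illustrative only: shows `is not null`, a switch expression, and nameof,
// as recommended by the C# instructions above.
public enum OrderStatus { Pending, Shipped, Cancelled }

public static class OrderFormatter
{
    public static string Describe(OrderStatus? status)
    {
        if (status is not null)
        {
            // Switch expression instead of an if/else chain.
            return status switch
            {
                OrderStatus.Pending => "Waiting to ship",
                OrderStatus.Shipped => "On its way",
                OrderStatus.Cancelled => "Cancelled",
                _ => throw new ArgumentOutOfRangeException(nameof(status)),
            };
        }

        return "Unknown";
    }
}
```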

21 .github/prompts/create-readme.prompt.md vendored Normal file

@@ -0,0 +1,21 @@
---
mode: 'agent'
description: 'Create a README.md file for the project'
---

## Role

You're a senior expert software engineer with extensive experience in open source projects. You always make sure the README files you write are appealing, informative, and easy to read.

## Task

1. Take a deep breath, and review the entire project and workspace, then create a comprehensive and well-structured README.md file for the project.
2. Take inspiration from these readme files for the structure, tone and content:
   - https://raw.githubusercontent.com/Azure-Samples/serverless-chat-langchainjs/refs/heads/main/README.md
   - https://raw.githubusercontent.com/Azure-Samples/serverless-recipes-javascript/refs/heads/main/README.md
   - https://raw.githubusercontent.com/sinedied/run-on-output/refs/heads/main/README.md
   - https://raw.githubusercontent.com/sinedied/smoke/refs/heads/main/README.md
3. Do not overuse emojis, and keep the readme concise and to the point.
4. Do not include sections like "LICENSE", "CONTRIBUTING", "CHANGELOG", etc. There are dedicated files for those sections.
5. Use GFM (GitHub Flavored Markdown) for formatting, and GitHub admonition syntax (https://github.com/orgs/community/discussions/16925) where appropriate.
6. If you find a logo or icon for the project, use it in the readme's header.

127 .github/prompts/create-specification.prompt.md vendored Normal file

@@ -0,0 +1,127 @@
---
mode: 'agent'
description: 'Create a new specification file for the solution, optimized for Generative AI consumption.'
tools: ['changes', 'search/codebase', 'edit/editFiles', 'extensions', 'fetch', 'githubRepo', 'openSimpleBrowser', 'problems', 'runTasks', 'search', 'search/searchResults', 'runCommands/terminalLastCommand', 'runCommands/terminalSelection', 'testFailure', 'usages', 'vscodeAPI']
---
# Create Specification

Your goal is to create a new specification file for `${input:SpecPurpose}`.

The specification file must define the requirements, constraints, and interfaces for the solution components in a manner that is clear, unambiguous, and structured for effective use by Generative AIs. Follow established documentation standards and ensure the content is machine-readable and self-contained.

## Best Practices for AI-Ready Specifications

- Use precise, explicit, and unambiguous language.
- Clearly distinguish between requirements, constraints, and recommendations.
- Use structured formatting (headings, lists, tables) for easy parsing.
- Avoid idioms, metaphors, or context-dependent references.
- Define all acronyms and domain-specific terms.
- Include examples and edge cases where applicable.
- Ensure the document is self-contained and does not rely on external context.

The specification should be saved in the [/spec/](/spec/) directory and named according to the following convention: `spec-[a-z0-9-]+.md`, where the name should be descriptive of the specification's content and starting with the highlevel purpose, which is one of [schema, tool, data, infrastructure, process, architecture, or design].

The specification file must be formatted in well formed Markdown.

Specification files must follow the template below, ensuring that all sections are filled out appropriately. The front matter for the markdown should be structured correctly as per the example following:

```md
---
title: [Concise Title Describing the Specification's Focus]
version: [Optional: e.g., 1.0, Date]
date_created: [YYYY-MM-DD]
last_updated: [Optional: YYYY-MM-DD]
owner: [Optional: Team/Individual responsible for this spec]
tags: [Optional: List of relevant tags or categories, e.g., `infrastructure`, `process`, `design`, `app` etc]
---

# Introduction

[A short concise introduction to the specification and the goal it is intended to achieve.]

## 1. Purpose & Scope

[Provide a clear, concise description of the specification's purpose and the scope of its application. State the intended audience and any assumptions.]

## 2. Definitions

[List and define all acronyms, abbreviations, and domain-specific terms used in this specification.]

## 3. Requirements, Constraints & Guidelines

[Explicitly list all requirements, constraints, rules, and guidelines. Use bullet points or tables for clarity.]

- **REQ-001**: Requirement 1
- **SEC-001**: Security Requirement 1
- **[3 LETTERS]-001**: Other Requirement 1
- **CON-001**: Constraint 1
- **GUD-001**: Guideline 1
- **PAT-001**: Pattern to follow 1

## 4. Interfaces & Data Contracts

[Describe the interfaces, APIs, data contracts, or integration points. Use tables or code blocks for schemas and examples.]

## 5. Acceptance Criteria

[Define clear, testable acceptance criteria for each requirement using Given-When-Then format where appropriate.]

- **AC-001**: Given [context], When [action], Then [expected outcome]
- **AC-002**: The system shall [specific behavior] when [condition]
- **AC-003**: [Additional acceptance criteria as needed]

## 6. Test Automation Strategy

[Define the testing approach, frameworks, and automation requirements.]

- **Test Levels**: Unit, Integration, End-to-End
- **Frameworks**: MSTest, FluentAssertions, Moq (for .NET applications)
- **Test Data Management**: [approach for test data creation and cleanup]
- **CI/CD Integration**: [automated testing in GitHub Actions pipelines]
- **Coverage Requirements**: [minimum code coverage thresholds]
- **Performance Testing**: [approach for load and performance testing]

## 7. Rationale & Context

[Explain the reasoning behind the requirements, constraints, and guidelines. Provide context for design decisions.]

## 8. Dependencies & External Integrations

[Define the external systems, services, and architectural dependencies required for this specification. Focus on **what** is needed rather than **how** it's implemented. Avoid specific package or library versions unless they represent architectural constraints.]

### External Systems
- **EXT-001**: [External system name] - [Purpose and integration type]

### Third-Party Services
- **SVC-001**: [Service name] - [Required capabilities and SLA requirements]

### Infrastructure Dependencies
- **INF-001**: [Infrastructure component] - [Requirements and constraints]

### Data Dependencies
- **DAT-001**: [External data source] - [Format, frequency, and access requirements]

### Technology Platform Dependencies
- **PLT-001**: [Platform/runtime requirement] - [Version constraints and rationale]

### Compliance Dependencies
- **COM-001**: [Regulatory or compliance requirement] - [Impact on implementation]

**Note**: This section should focus on architectural and business dependencies, not specific package implementations. For example, specify "OAuth 2.0 authentication library" rather than "Microsoft.AspNetCore.Authentication.JwtBearer v6.0.1".

## 9. Examples & Edge Cases

```code
// Code snippet or data example demonstrating the correct application of the guidelines, including edge cases
```

## 10. Validation Criteria

[List the criteria or tests that must be satisfied for compliance with this specification.]

## 11. Related Specifications / Further Reading

[Link to related spec 1]
[Link to relevant external documentation]

```

50 .github/prompts/csharp-async.prompt.md vendored Normal file

@@ -0,0 +1,50 @@
---
mode: 'agent'
tools: ['changes', 'search/codebase', 'edit/editFiles', 'problems']
description: 'Get best practices for C# async programming'
---

# C# Async Programming Best Practices

Your goal is to help me follow best practices for asynchronous programming in C#.

## Naming Conventions

- Use the 'Async' suffix for all async methods
- Match method names with their synchronous counterparts when applicable (e.g., `GetDataAsync()` for `GetData()`)

## Return Types

- Return `Task<T>` when the method returns a value
- Return `Task` when the method doesn't return a value
- Consider `ValueTask<T>` for high-performance scenarios to reduce allocations
- Avoid returning `void` for async methods except for event handlers

## Exception Handling

- Use try/catch blocks around await expressions
- Avoid swallowing exceptions in async methods
- Use `ConfigureAwait(false)` when appropriate to prevent deadlocks in library code
- Propagate exceptions with `Task.FromException()` instead of throwing in async Task returning methods

## Performance

- Use `Task.WhenAll()` for parallel execution of multiple tasks
- Use `Task.WhenAny()` for implementing timeouts or taking the first completed task
- Avoid unnecessary async/await when simply passing through task results
- Consider cancellation tokens for long-running operations

## Common Pitfalls

- Never use `.Wait()`, `.Result`, or `.GetAwaiter().GetResult()` in async code
- Avoid mixing blocking and async code
- Don't create async void methods (except for event handlers)
- Always await Task-returning methods

## Implementation Patterns

- Implement the async command pattern for long-running operations
- Use async streams (IAsyncEnumerable<T>) for processing sequences asynchronously
- Consider the task-based asynchronous pattern (TAP) for public APIs

When reviewing my C# code, identify these issues and suggest improvements that follow these best practices.
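
To make a few of these points concrete, here is a minimal sketch showing the Async suffix, end-to-end cancellation, `Task.WhenAll`, and `ConfigureAwait(false)` in library-style code; `DownloadHelper` and `FetchAllAsync` are invented names for the example:

```csharp
using System.Collections.Generic;
using System.Linq;
using System.Net.Http;
using System.Threading;
using System.Threading.Tasks;

// Minimal sketch only; not code from this repository.
public static class DownloadHelper
{
    private static readonly HttpClient Client = new();

    // Async suffix, Task<T> return type, CancellationToken passed end-to-end.
    public static async Task<string[]> FetchAllAsync(
        IEnumerable<string> urls,
        CancellationToken cancellationToken)
    {
        // Task.WhenAll runs the downloads in parallel instead of awaiting one by one.
        var downloads = urls.Select(url => Client.GetStringAsync(url, cancellationToken));
        return await Task.WhenAll(downloads).ConfigureAwait(false);
    }
}
```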

69 .github/prompts/csharp-xunit.prompt.md vendored Normal file

@@ -0,0 +1,69 @@
---
mode: 'agent'
tools: ['changes', 'search/codebase', 'edit/editFiles', 'problems', 'search']
description: 'Get best practices for XUnit unit testing, including data-driven tests'
---

# XUnit Best Practices

Your goal is to help me write effective unit tests with XUnit, covering both standard and data-driven testing approaches.

## Project Setup

- Use a separate test project with naming convention `[ProjectName].Tests`
- Reference Microsoft.NET.Test.Sdk, xunit, and xunit.runner.visualstudio packages
- Create test classes that match the classes being tested (e.g., `CalculatorTests` for `Calculator`)
- Use .NET SDK test commands: `dotnet test` for running tests

## Test Structure

- No test class attributes required (unlike MSTest/NUnit)
- Use fact-based tests with `[Fact]` attribute for simple tests
- Follow the Arrange-Act-Assert (AAA) pattern
- Name tests using the pattern `MethodName_Scenario_ExpectedBehavior`
- Use constructor for setup and `IDisposable.Dispose()` for teardown
- Use `IClassFixture<T>` for shared context between tests in a class
- Use `ICollectionFixture<T>` for shared context between multiple test classes

## Standard Tests

- Keep tests focused on a single behavior
- Avoid testing multiple behaviors in one test method
- Use clear assertions that express intent
- Include only the assertions needed to verify the test case
- Make tests independent and idempotent (can run in any order)
- Avoid test interdependencies

## Data-Driven Tests

- Use `[Theory]` combined with data source attributes
- Use `[InlineData]` for inline test data
- Use `[MemberData]` for method-based test data
- Use `[ClassData]` for class-based test data
- Create custom data attributes by implementing `DataAttribute`
- Use meaningful parameter names in data-driven tests

## Assertions

- Use `Assert.Equal` for value equality
- Use `Assert.Same` for reference equality
- Use `Assert.True`/`Assert.False` for boolean conditions
- Use `Assert.Contains`/`Assert.DoesNotContain` for collections
- Use `Assert.Matches`/`Assert.DoesNotMatch` for regex pattern matching
- Use `Assert.Throws<T>` or `await Assert.ThrowsAsync<T>` to test exceptions
- Use fluent assertions library for more readable assertions

## Mocking and Isolation

- Consider using Moq or NSubstitute alongside XUnit
- Mock dependencies to isolate units under test
- Use interfaces to facilitate mocking
- Consider using a DI container for complex test setups

## Test Organization

- Group tests by feature or component
- Use `[Trait("Category", "CategoryName")]` for categorization
- Use collection fixtures to group tests with shared dependencies
- Consider output helpers (`ITestOutputHelper`) for test diagnostics
- Skip tests conditionally with `Skip = "reason"` in fact/theory attributes
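
A short sketch of the `[Fact]` and `[Theory]`/`[InlineData]` conventions described above, using the same illustrative `Calculator` name the prompt mentions (not a type in this repository):

```csharp
using Xunit;

// Illustrative class under test.
public class Calculator
{
    public int Add(int a, int b) => a + b;
}

public class CalculatorTests
{
    [Fact]
    public void Add_TwoPositiveNumbers_ReturnsSum()
    {
        var calculator = new Calculator();

        var result = calculator.Add(2, 3);

        Assert.Equal(5, result);
    }

    [Theory]
    [InlineData(0, 0, 0)]
    [InlineData(-1, 1, 0)]
    [InlineData(int.MaxValue, 0, int.MaxValue)]
    public void Add_VariousInputs_ReturnsExpectedSum(int a, int b, int expected)
    {
        var calculator = new Calculator();

        Assert.Equal(expected, calculator.Add(a, b));
    }
}
```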

84 .github/prompts/dotnet-best-practices.prompt.md vendored Normal file

@@ -0,0 +1,84 @@
---
mode: 'agent'
description: 'Ensure .NET/C# code meets best practices for the solution/project.'
---
# .NET/C# Best Practices

Your task is to ensure .NET/C# code in ${selection} meets the best practices specific to this solution/project. This includes:

## Documentation & Structure

- Create comprehensive XML documentation comments for all public classes, interfaces, methods, and properties
- Include parameter descriptions and return value descriptions in XML comments
- Follow the established namespace structure: {Core|Console|App|Service}.{Feature}

## Design Patterns & Architecture

- Use primary constructor syntax for dependency injection (e.g., `public class MyClass(IDependency dependency)`)
- Implement the Command Handler pattern with generic base classes (e.g., `CommandHandler<TOptions>`)
- Use interface segregation with clear naming conventions (prefix interfaces with 'I')
- Follow the Factory pattern for complex object creation.

## Dependency Injection & Services

- Use constructor dependency injection with null checks via ArgumentNullException
- Register services with appropriate lifetimes (Singleton, Scoped, Transient)
- Use Microsoft.Extensions.DependencyInjection patterns
- Implement service interfaces for testability

## Resource Management & Localization

- Use ResourceManager for localized messages and error strings
- Separate LogMessages and ErrorMessages resource files
- Access resources via `_resourceManager.GetString("MessageKey")`

## Async/Await Patterns

- Use async/await for all I/O operations and long-running tasks
- Return Task or Task<T> from async methods
- Use ConfigureAwait(false) where appropriate
- Handle async exceptions properly

## Testing Standards

- Use MSTest framework with FluentAssertions for assertions
- Follow AAA pattern (Arrange, Act, Assert)
- Use Moq for mocking dependencies
- Test both success and failure scenarios
- Include null parameter validation tests

## Configuration & Settings

- Use strongly-typed configuration classes with data annotations
- Implement validation attributes (Required, NotEmptyOrWhitespace)
- Use IConfiguration binding for settings
- Support appsettings.json configuration files

## Semantic Kernel & AI Integration

- Use Microsoft.SemanticKernel for AI operations
- Implement proper kernel configuration and service registration
- Handle AI model settings (ChatCompletion, Embedding, etc.)
- Use structured output patterns for reliable AI responses

## Error Handling & Logging

- Use structured logging with Microsoft.Extensions.Logging
- Include scoped logging with meaningful context
- Throw specific exceptions with descriptive messages
- Use try-catch blocks for expected failure scenarios

## Performance & Security

- Use C# 12+ features and .NET 8 optimizations where applicable
- Implement proper input validation and sanitization
- Use parameterized queries for database operations
- Follow secure coding practices for AI/ML operations

## Code Quality

- Ensure SOLID principles compliance
- Avoid code duplication through base classes and utilities
- Use meaningful names that reflect domain concepts
- Keep methods focused and cohesive
- Implement proper disposal patterns for resources
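
A tiny sketch of the primary-constructor dependency-injection style with explicit null checks described above; `IGreetingProvider` and `GreetingService` are invented names used only to illustrate the shape:

```csharp
using System;
using Microsoft.Extensions.Logging;

// Illustrative only: primary-constructor DI with ArgumentNullException guards
// and structured logging, per the checklist above.
public interface IGreetingProvider
{
    string GetGreeting(string name);
}

public class GreetingService(IGreetingProvider provider, ILogger<GreetingService> logger)
{
    private readonly IGreetingProvider _provider =
        provider ?? throw new ArgumentNullException(nameof(provider));
    private readonly ILogger<GreetingService> _logger =
        logger ?? throw new ArgumentNullException(nameof(logger));

    public string Greet(string name)
    {
        _logger.LogInformation("Greeting {Name}", name);
        return _provider.GetGreeting(name);
    }
}
```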

41 .github/prompts/dotnet-design-pattern-review.prompt.md vendored Normal file

@@ -0,0 +1,41 @@
---
mode: 'agent'
description: 'Review the C#/.NET code for design pattern implementation and suggest improvements.'
---
# .NET/C# Design Pattern Review

Review the C#/.NET code in ${selection} for design pattern implementation and suggest improvements for the solution/project. Do not make any changes to the code, just provide a review.

## Required Design Patterns

- **Command Pattern**: Generic base classes (`CommandHandler<TOptions>`), `ICommandHandler<TOptions>` interface, `CommandHandlerOptions` inheritance, static `SetupCommand(IHost host)` methods
- **Factory Pattern**: Complex object creation service provider integration
- **Dependency Injection**: Primary constructor syntax, `ArgumentNullException` null checks, interface abstractions, proper service lifetimes
- **Repository Pattern**: Async data access interfaces provider abstractions for connections
- **Provider Pattern**: External service abstractions (database, AI), clear contracts, configuration handling
- **Resource Pattern**: ResourceManager for localized messages, separate .resx files (LogMessages, ErrorMessages)

## Review Checklist

- **Design Patterns**: Identify patterns used. Are Command Handler, Factory, Provider, and Repository patterns correctly implemented? Missing beneficial patterns?
- **Architecture**: Follow namespace conventions (`{Core|Console|App|Service}.{Feature}`)? Proper separation between Core/Console projects? Modular and readable?
- **.NET Best Practices**: Primary constructors, async/await with Task returns, ResourceManager usage, structured logging, strongly-typed configuration?
- **GoF Patterns**: Command, Factory, Template Method, Strategy patterns correctly implemented?
- **SOLID Principles**: Single Responsibility, Open/Closed, Liskov Substitution, Interface Segregation, Dependency Inversion violations?
- **Performance**: Proper async/await, resource disposal, ConfigureAwait(false), parallel processing opportunities?
- **Maintainability**: Clear separation of concerns, consistent error handling, proper configuration usage?
- **Testability**: Dependencies abstracted via interfaces, mockable components, async testability, AAA pattern compatibility?
- **Security**: Input validation, secure credential handling, parameterized queries, safe exception handling?
- **Documentation**: XML docs for public APIs, parameter/return descriptions, resource file organization?
- **Code Clarity**: Meaningful names reflecting domain concepts, clear intent through patterns, self-explanatory structure?
- **Clean Code**: Consistent style, appropriate method/class size, minimal complexity, eliminated duplication?

## Improvement Focus Areas

- **Command Handlers**: Validation in base class, consistent error handling, proper resource management
- **Factories**: Dependency configuration, service provider integration, disposal patterns
- **Providers**: Connection management, async patterns, exception handling and logging
- **Configuration**: Data annotations, validation attributes, secure sensitive value handling
- **AI/ML Integration**: Semantic Kernel patterns, structured output handling, model configuration

Provide specific, actionable recommendations for improvements aligned with the project's architecture and .NET best practices.
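
For orientation, a bare-bones sketch of the `ICommandHandler<TOptions>` shape the checklist refers to. The prompt names the pattern but this diff contains no implementation, so every type below is illustrative rather than real project code:

```csharp
using System;
using System.Threading;
using System.Threading.Tasks;

// Illustrative only: one minimal way to shape the Command Handler pattern named above.
public abstract class CommandHandlerOptions
{
    public bool Verbose { get; init; }
}

public interface ICommandHandler<in TOptions>
    where TOptions : CommandHandlerOptions
{
    Task<int> HandleAsync(TOptions options, CancellationToken cancellationToken);
}

public abstract class CommandHandler<TOptions> : ICommandHandler<TOptions>
    where TOptions : CommandHandlerOptions
{
    public async Task<int> HandleAsync(TOptions options, CancellationToken cancellationToken)
    {
        // Validation and cancellation checks live in the base class so every
        // derived handler gets them for free.
        ArgumentNullException.ThrowIfNull(options);
        cancellationToken.ThrowIfCancellationRequested();
        return await ExecuteAsync(options, cancellationToken).ConfigureAwait(false);
    }

    // Derived handlers implement the command-specific work here.
    protected abstract Task<int> ExecuteAsync(TOptions options, CancellationToken cancellationToken);
}
```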

6 .github/workflows/dotnetcore.yml vendored

@@ -14,12 +14,12 @@ jobs:
         os: [windows-latest, ubuntu-latest]
 
     steps:
-      - uses: actions/checkout@v4
-      - uses: actions/setup-dotnet@v4
+      - uses: actions/checkout@v5
+      - uses: actions/setup-dotnet@v5
         with:
           dotnet-version: 8.0.x
       - run: dotnet run --project build/build.csproj
-      - uses: actions/upload-artifact@v4
+      - uses: actions/upload-artifact@v5
        with:
          name: ${{ matrix.os }}-sharpcompress.nupkg
          path: artifacts/*
3
.gitignore
vendored
@@ -11,6 +11,8 @@ TestResults/
|
||||
packages/*/
|
||||
project.lock.json
|
||||
tests/TestArchives/Scratch
|
||||
tests/TestArchives/*/Scratch
|
||||
tests/TestArchives/*/Scratch2
|
||||
.vs
|
||||
tools
|
||||
.vscode
|
||||
@@ -18,4 +20,3 @@ tools
|
||||
|
||||
.DS_Store
|
||||
*.snupkg
|
||||
/tests/TestArchives/6d23a38c-f064-4ef1-ad89-b942396f53b9/Scratch
|
||||
|
||||
118
AGENTS.md
Normal file
@@ -0,0 +1,118 @@
|
||||
---
|
||||
description: 'Guidelines for building SharpCompress - A C# compression library'
|
||||
applyTo: '**/*.cs'
|
||||
---
|
||||
|
||||
# SharpCompress Development
|
||||
|
||||
## About SharpCompress
|
||||
SharpCompress is a pure C# compression library supporting multiple archive formats (Zip, Tar, GZip, BZip2, 7Zip, Rar, LZip, XZ, ZStandard) for .NET Framework 4.6.2, .NET Standard 2.1, .NET 6.0, and .NET 8.0. The library provides both seekable Archive APIs and forward-only Reader/Writer APIs for streaming scenarios.
|
||||
|
||||
## C# Instructions
|
||||
- Always use the latest C# version and its features, currently C# 13.
|
||||
- Write clear and concise comments for each function.
|
||||
- Follow the existing code style and patterns in the codebase.
|
||||
|
||||
## General Instructions
|
||||
- Make only high confidence suggestions when reviewing code changes.
|
||||
- Write code with good maintainability practices, including comments on why certain design decisions were made.
|
||||
- Handle edge cases and write clear exception handling.
|
||||
- For libraries or external dependencies, mention their usage and purpose in comments.
|
||||
- Preserve backward compatibility when making changes to public APIs.
|
||||
|
||||
## Naming Conventions
|
||||
|
||||
- Follow PascalCase for component names, method names, and public members.
|
||||
- Use camelCase for private fields and local variables.
|
||||
- Prefix interface names with "I" (e.g., IUserService).
|
||||
|
||||
## Code Formatting
|
||||
|
||||
- Use CSharpier for code formatting to ensure consistent style across the project
|
||||
- CSharpier is configured as a local tool in `.config/dotnet-tools.json`
|
||||
- Restore tools with: `dotnet tool restore`
|
||||
- Format files from the project root with: `dotnet csharpier .`
|
||||
- **Run `dotnet csharpier .` from the project root after making code changes before committing**
|
||||
- Configure your IDE to format on save using CSharpier for the best experience
|
||||
- The project also uses `.editorconfig` for editor settings (indentation, encoding, etc.)
|
||||
- Let CSharpier handle code style while `.editorconfig` handles editor behavior
|
||||
|
||||
## Project Setup and Structure
|
||||
|
||||
- The project targets multiple frameworks: .NET Framework 4.6.2, .NET Standard 2.1, .NET 6.0, and .NET 8.0
|
||||
- Main library is in `src/SharpCompress/`
|
||||
- Tests are in `tests/SharpCompress.Test/`
|
||||
- Performance tests are in `tests/SharpCompress.Performance/`
|
||||
- Test archives are in `tests/TestArchives/`
|
||||
- Build project is in `build/`
|
||||
- Use `dotnet build` to build the solution
|
||||
- Use `dotnet test` to run tests
|
||||
- Solution file: `SharpCompress.sln`
|
||||
|
||||
## Nullable Reference Types
|
||||
|
||||
- Declare variables non-nullable, and check for `null` at entry points.
|
||||
- Always use `is null` or `is not null` instead of `== null` or `!= null`.
|
||||
- Trust the C# null annotations and don't add null checks when the type system says a value cannot be null.
|
||||
|
||||
## SharpCompress-Specific Guidelines
|
||||
|
||||
### Supported Formats
|
||||
SharpCompress supports multiple archive and compression formats:
|
||||
- **Archive Formats**: Zip, Tar, 7Zip, Rar (read-only)
|
||||
- **Compression**: DEFLATE, BZip2, LZMA/LZMA2, PPMd, ZStandard (decompress only), Deflate64 (decompress only)
|
||||
- **Combined Formats**: Tar.GZip, Tar.BZip2, Tar.LZip, Tar.XZ, Tar.ZStandard
|
||||
- See FORMATS.md for complete format support matrix
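
When the format is not known ahead of time, `ArchiveFactory` can detect it from the stream contents. A short illustrative sketch (file names are placeholders):

```csharp
using System;
using SharpCompress.Archives;

using (var archive = ArchiveFactory.Open("unknown-archive.bin"))
{
    Console.WriteLine(archive.Type); // e.g. ArchiveType.Zip, ArchiveType.Tar, ...
}
```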
|
||||
|
||||
### Stream Handling Rules
|
||||
- **Disposal**: As of version 0.21, SharpCompress closes wrapped streams by default
|
||||
- Use `ReaderOptions` or `WriterOptions` with `LeaveStreamOpen = true` to control stream disposal
|
||||
- Use `NonDisposingStream` wrapper when working with compression streams directly to prevent disposal
|
||||
- Always dispose of readers, writers, and archives in `using` blocks
|
||||
- For forward-only operations, use Reader/Writer APIs; for random access, use Archive APIs
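
A minimal sketch of these rules (illustrative only), using `ReaderOptions.LeaveStreamOpen` with the forward-only Reader API:

```csharp
using System.IO;
using SharpCompress.Readers;

// Keep the caller-owned stream open after the reader is disposed.
using (Stream stream = File.OpenRead("archive.zip"))
using (var reader = ReaderFactory.Open(stream, new ReaderOptions { LeaveStreamOpen = true }))
{
    while (reader.MoveToNextEntry())
    {
        // process reader.Entry here
    }
} // disposing the reader leaves `stream` open; the outer using still disposes it
```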
|
||||
|
||||
### Async/Await Patterns
|
||||
- All I/O operations support async/await with `CancellationToken`
|
||||
- Async methods follow the naming convention: `MethodNameAsync`
|
||||
- Key async methods:
|
||||
- `WriteEntryToAsync` - Extract entry asynchronously
|
||||
- `WriteAllToDirectoryAsync` - Extract all entries asynchronously
|
||||
- `WriteAsync` - Write entry asynchronously
|
||||
- `WriteAllAsync` - Write directory asynchronously
|
||||
- `OpenEntryStreamAsync` - Open entry stream asynchronously
|
||||
- Always provide `CancellationToken` parameter in async methods
|
||||
|
||||
### Archive APIs vs Reader/Writer APIs
|
||||
- **Archive API**: Use for random access with seekable streams (e.g., `ZipArchive`, `TarArchive`)
|
||||
- **Reader API**: Use for forward-only reading on non-seekable streams (e.g., `ZipReader`, `TarReader`)
|
||||
- **Writer API**: Use for forward-only writing on streams (e.g., `ZipWriter`, `TarWriter`)
|
||||
- 7Zip only supports Archive API due to format limitations
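
As an illustration of the split (a sketch, not taken from the library docs verbatim):

```csharp
using System;
using System.IO;
using SharpCompress.Archives.Zip;
using SharpCompress.Readers;

// Archive API: random access over a seekable stream or file.
using (var archive = ZipArchive.Open("archive.zip"))
{
    foreach (var entry in archive.Entries)
    {
        Console.WriteLine(entry.Key);
    }
}

// Reader API: forward-only over a stream that may not be seekable.
using (Stream stream = File.OpenRead("archive.zip"))
using (var reader = ReaderFactory.Open(stream))
{
    while (reader.MoveToNextEntry())
    {
        Console.WriteLine(reader.Entry.Key);
    }
}
```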
|
||||
|
||||
### Tar-Specific Considerations
|
||||
- Tar format requires file size in the header
|
||||
- If no size is specified to TarWriter and the stream is not seekable, an exception will be thrown
|
||||
- Tar combined with compression (GZip, BZip2, LZip, XZ) is supported
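
A sketch of how the size requirement is usually satisfied in practice: a seekable source stream exposes `Length`, so no explicit size is needed (illustrative only; file names are placeholders):

```csharp
using System;
using System.IO;
using SharpCompress.Common;
using SharpCompress.Writers;

using (Stream output = File.OpenWrite("backup.tar.gz"))
using (var writer = WriterFactory.Open(output, ArchiveType.Tar, new WriterOptions(CompressionType.GZip)))
using (Stream file = File.OpenRead("data.bin")) // seekable FileStream, so the entry size is known
{
    writer.Write("data.bin", file, DateTime.Now);
}
```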
|
||||
|
||||
### Zip-Specific Considerations
|
||||
- Supports Zip64 for large files (seekable streams only)
|
||||
- Supports PKWare and WinZip AES encryption
|
||||
- Multiple compression methods: None, Shrink, Reduce, Implode, DEFLATE, Deflate64, BZip2, LZMA, PPMd
|
||||
- Encrypted LZMA is not supported
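
An illustrative sketch of reading an encrypted zip (this assumes the `ReaderOptions.Password` property; verify against the current API surface):

```csharp
using System.Linq;
using SharpCompress.Archives;
using SharpCompress.Archives.Zip;
using SharpCompress.Common;
using SharpCompress.Readers;

using (var archive = ZipArchive.Open("secure.zip", new ReaderOptions { Password = "secret" }))
{
    foreach (var entry in archive.Entries.Where(e => !e.IsDirectory))
    {
        entry.WriteToDirectory(@"C:\output", new ExtractionOptions { ExtractFullPath = true, Overwrite = true });
    }
}
```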
|
||||
|
||||
### Performance Considerations
|
||||
- For large files, use Reader/Writer APIs with non-seekable streams to avoid loading entire file in memory
|
||||
- Leverage async I/O for better scalability
|
||||
- Consider compression level trade-offs (speed vs. size)
|
||||
- Use appropriate buffer sizes for stream operations
|
||||
|
||||
## Testing
|
||||
|
||||
- Always include test cases for critical paths of the application.
|
||||
- Test with multiple archive formats when making changes to core functionality.
|
||||
- Include tests for both Archive and Reader/Writer APIs when applicable.
|
||||
- Test async operations with cancellation tokens.
|
||||
- Do not emit "Act", "Arrange" or "Assert" comments.
|
||||
- Copy existing style in nearby files for test method names and capitalization.
|
||||
- Use test archives from `tests/TestArchives` directory for consistency.
|
||||
- Test stream disposal and `LeaveStreamOpen` behavior.
|
||||
- Test edge cases: empty archives, large files, corrupted archives, encrypted archives.
|
||||
@@ -10,5 +10,7 @@
|
||||
<AllowUnsafeBlocks>true</AllowUnsafeBlocks>
|
||||
<RunAnalyzersDuringLiveAnalysis>False</RunAnalyzersDuringLiveAnalysis>
|
||||
<RunAnalyzersDuringBuild>False</RunAnalyzersDuringBuild>
|
||||
<ManagePackageVersionsCentrally>true</ManagePackageVersionsCentrally>
|
||||
<RestorePackagesWithLockFile>true</RestorePackagesWithLockFile>
|
||||
</PropertyGroup>
|
||||
</Project>
|
||||
|
||||
21
Directory.Packages.props
Normal file
@@ -0,0 +1,21 @@
|
||||
<Project>
|
||||
<ItemGroup>
|
||||
<PackageVersion Include="Bullseye" Version="6.0.0" />
|
||||
<PackageVersion Include="AwesomeAssertions" Version="9.2.1" />
|
||||
<PackageVersion Include="Glob" Version="1.1.9" />
|
||||
<PackageVersion Include="JetBrains.Profiler.SelfApi" Version="2.5.14" />
|
||||
<PackageVersion Include="Microsoft.Bcl.AsyncInterfaces" Version="8.0.0" />
|
||||
<PackageVersion Include="Microsoft.NET.Test.Sdk" Version="18.0.0" />
|
||||
<PackageVersion Include="Mono.Posix.NETStandard" Version="1.0.0" />
|
||||
<PackageVersion Include="SimpleExec" Version="12.0.0" />
|
||||
<PackageVersion Include="System.Buffers" Version="4.6.1" />
|
||||
<PackageVersion Include="System.Memory" Version="4.6.3" />
|
||||
<PackageVersion Include="System.Text.Encoding.CodePages" Version="8.0.0" />
|
||||
<PackageVersion Include="xunit" Version="2.9.3" />
|
||||
<PackageVersion Include="xunit.runner.visualstudio" Version="3.1.5" />
|
||||
<PackageVersion Include="ZstdSharp.Port" Version="0.8.6" />
|
||||
<PackageVersion Include="Microsoft.NET.ILLink.Tasks" Version="8.0.21" />
|
||||
<PackageVersion Include="Microsoft.SourceLink.GitHub" Version="8.0.0" />
|
||||
<PackageVersion Include="Microsoft.NETFramework.ReferenceAssemblies" Version="1.0.3" />
|
||||
</ItemGroup>
|
||||
</Project>
|
||||
@@ -11,10 +11,11 @@
|
||||
| Archive Format | Compression Format(s) | Compress/Decompress | Archive API | Reader API | Writer API |
|
||||
| ---------------------- | ------------------------------------------------- | ------------------- | --------------- | ---------- | ------------- |
|
||||
| Rar | Rar | Decompress (1) | RarArchive | RarReader | N/A |
|
||||
| Zip (2) | None, DEFLATE, Deflate64, BZip2, LZMA/LZMA2, PPMd | Both | ZipArchive | ZipReader | ZipWriter |
|
||||
| Zip (2) | None, Shrink, Reduce, Implode, DEFLATE, Deflate64, BZip2, LZMA/LZMA2, PPMd | Both | ZipArchive | ZipReader | ZipWriter |
|
||||
| Tar | None | Both | TarArchive | TarReader | TarWriter (3) |
|
||||
| Tar.GZip | DEFLATE | Both | TarArchive | TarReader | TarWriter (3) |
|
||||
| Tar.BZip2 | BZip2 | Both | TarArchive | TarReader | TarWriter (3) |
|
||||
| Tar.Zstandard | ZStandard | Decompress | TarArchive | TarReader | N/A |
|
||||
| Tar.LZip | LZMA | Both | TarArchive | TarReader | TarWriter (3) |
|
||||
| Tar.XZ | LZMA2 | Decompress | TarArchive | TarReader | TarWriter (3) |
|
||||
| GZip (single file) | DEFLATE | Both | GZipArchive | GZipReader | GZipWriter |
|
||||
@@ -41,6 +42,7 @@ For those who want to directly compress/decompress bits. The single file formats
|
||||
| ADCStream | Decompress |
|
||||
| LZipStream | Both |
|
||||
| XZStream | Decompress |
|
||||
| ZStandardStream | Decompress |
|
||||
|
||||
## Archive Formats vs Compression
|
||||
|
||||
|
||||
9
NuGet.config
Normal file
@@ -0,0 +1,9 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<configuration>
|
||||
<packageSourceMapping>
|
||||
<!-- key value for <packageSource> should match key values from <packageSources> element -->
|
||||
<packageSource key="nuget.org">
|
||||
<package pattern="*" />
|
||||
</packageSource>
|
||||
</packageSourceMapping>
|
||||
</configuration>
|
||||
87
README.md
@@ -1,12 +1,14 @@
|
||||
# SharpCompress
|
||||
|
||||
SharpCompress is a compression library in pure C# for .NET Standard 2.0, 2.1, .NET Core 3.1 and .NET 5.0 that can unrar, un7zip, unzip, untar unbzip2, ungzip, unlzip with forward-only reading and file random access APIs. Write support for zip/tar/bzip2/gzip/lzip are implemented.
|
||||
SharpCompress is a compression library in pure C# for .NET Framework 4.6.2, .NET Standard 2.1, .NET 6.0 and .NET 8.0 that can unrar, un7zip, unzip, untar, unbzip2, ungzip, unlzip, unzstd, unarc and unarj with forward-only reading and file random access APIs. Write support for zip/tar/bzip2/gzip/lzip is implemented.
|
||||
|
||||
The major feature is support for non-seekable streams so large files can be processed on the fly (i.e. download stream).
|
||||
|
||||
**NEW:** All I/O operations now support async/await for improved performance and scalability. See the [Async Usage](#async-usage) section below.
|
||||
|
||||
GitHub Actions Build -
|
||||
[](https://github.com/adamhathcock/sharpcompress/actions/workflows/dotnetcore.yml)
|
||||
[](https://www.robiniadocs.com/d/sharpcompress/api/SharpCompress.html)
|
||||
[](https://dndocs.com/d/sharpcompress/api/index.html)
|
||||
|
||||
## Need Help?
|
||||
|
||||
@@ -20,16 +22,94 @@ In general, I recommend GZip (Deflate)/BZip2 (BZip)/LZip (LZMA) as the simplicit
|
||||
|
||||
Zip is okay, but it's a very hap-hazard format and the variation in headers and implementations makes it hard to get correct. Uses Deflate by default but supports a lot of compression methods.
|
||||
|
||||
RAR is not recommended as it's a propriatory format and the compression is closed source. Use Tar/LZip for LZMA
|
||||
RAR is not recommended as it's a proprietary format and the compression is closed source. Use Tar/LZip for LZMA
|
||||
|
||||
7Zip and XZ both are overly complicated. 7Zip does not support streamable formats. XZ has known holes explained here: (http://www.nongnu.org/lzip/xz_inadequate.html) Use Tar/LZip for LZMA compression instead.
|
||||
|
||||
ZStandard is an efficient format that works well for streaming with a flexible compression level to tweak the speed/performance trade off you are looking for. We currently only implement decompression for ZStandard but as we leverage the [ZstdSharp](https://github.com/oleg-st/ZstdSharp) library one could likely add compression support without much trouble (PRs are welcome!).
|
||||
|
||||
## A Simple Request
|
||||
|
||||
Hi everyone. I hope you're using SharpCompress and finding it useful. Please give me feedback on what you'd like to see changed especially as far as usability goes. New feature suggestions are always welcome as well. I would also like to know what projects SharpCompress is being used in. I like seeing how it is used to give me ideas for future versions. Thanks!
|
||||
|
||||
Please do not email me directly to ask for help. If you think there is a real issue, please report it here.
|
||||
|
||||
## Async Usage
|
||||
|
||||
SharpCompress now provides full async/await support for all I/O operations, allowing for better performance and scalability in modern applications.
|
||||
|
||||
### Async Reading Examples
|
||||
|
||||
Extract entries asynchronously:
|
||||
```csharp
|
||||
using (Stream stream = File.OpenRead("archive.zip"))
|
||||
using (var reader = ReaderFactory.Open(stream))
|
||||
{
|
||||
while (reader.MoveToNextEntry())
|
||||
{
|
||||
if (!reader.Entry.IsDirectory)
|
||||
{
|
||||
// Async extraction
|
||||
await reader.WriteEntryToDirectoryAsync(
|
||||
@"C:\temp",
|
||||
new ExtractionOptions() { ExtractFullPath = true, Overwrite = true },
|
||||
cancellationToken
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
Extract all entries to directory asynchronously:
|
||||
```csharp
|
||||
using (Stream stream = File.OpenRead("archive.tar.gz"))
|
||||
using (var reader = ReaderFactory.Open(stream))
|
||||
{
|
||||
await reader.WriteAllToDirectoryAsync(
|
||||
@"C:\temp",
|
||||
new ExtractionOptions() { ExtractFullPath = true, Overwrite = true },
|
||||
cancellationToken
|
||||
);
|
||||
}
|
||||
```
|
||||
|
||||
Open entry stream asynchronously:
|
||||
```csharp
|
||||
using (var archive = ZipArchive.Open("archive.zip"))
|
||||
{
|
||||
foreach (var entry in archive.Entries.Where(e => !e.IsDirectory))
|
||||
{
|
||||
using (var entryStream = await entry.OpenEntryStreamAsync(cancellationToken))
|
||||
{
|
||||
// Process stream asynchronously
|
||||
await entryStream.CopyToAsync(outputStream, cancellationToken);
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Async Writing Examples
|
||||
|
||||
Write files asynchronously:
|
||||
```csharp
|
||||
using (Stream stream = File.OpenWrite("output.zip"))
|
||||
using (var writer = WriterFactory.Open(stream, ArchiveType.Zip, CompressionType.Deflate))
|
||||
{
|
||||
await writer.WriteAsync("file1.txt", fileStream, DateTime.Now, cancellationToken);
|
||||
}
|
||||
```
|
||||
|
||||
Write all files from directory asynchronously:
|
||||
```csharp
|
||||
using (Stream stream = File.OpenWrite("output.tar.gz"))
|
||||
using (var writer = WriterFactory.Open(stream, ArchiveType.Tar, new WriterOptions(CompressionType.GZip)))
|
||||
{
|
||||
await writer.WriteAllAsync(@"D:\files", "*", SearchOption.AllDirectories, cancellationToken);
|
||||
}
|
||||
```
|
||||
|
||||
All async methods support `CancellationToken` for graceful cancellation of long-running operations.
|
||||
|
||||
## Want to contribute?
|
||||
|
||||
I'm always looking for help or ideas. Please submit code or email with ideas. Unfortunately, just letting me know you'd like to help is not enough because I really have no overall plan of what needs to be done. I'll definitely accept code submissions and add you as a member of the project!
|
||||
@@ -40,6 +120,7 @@ I'm always looking for help or ideas. Please submit code or email with ideas. Un
|
||||
* 7Zip writing
|
||||
* Zip64 (Need writing and extend Reading)
|
||||
* Multi-volume Zip support.
|
||||
* ZStandard writing
|
||||
|
||||
## Version Log
|
||||
|
||||
|
||||
@@ -17,8 +17,18 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Config", "Config", "{CDB425
|
||||
ProjectSection(SolutionItems) = preProject
|
||||
Directory.Build.props = Directory.Build.props
|
||||
global.json = global.json
|
||||
.editorconfig = .editorconfig
|
||||
Directory.Packages.props = Directory.Packages.props
|
||||
NuGet.config = NuGet.config
|
||||
.github\workflows\dotnetcore.yml = .github\workflows\dotnetcore.yml
|
||||
USAGE.md = USAGE.md
|
||||
README.md = README.md
|
||||
FORMATS.md = FORMATS.md
|
||||
AGENTS.md = AGENTS.md
|
||||
EndProjectSection
|
||||
EndProject
|
||||
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "SharpCompress.Performance", "tests\SharpCompress.Performance\SharpCompress.Performance.csproj", "{5BDE6DBC-9E5F-4E21-AB71-F138A3E72B17}"
|
||||
EndProject
|
||||
Global
|
||||
GlobalSection(SolutionConfigurationPlatforms) = preSolution
|
||||
Debug|Any CPU = Debug|Any CPU
|
||||
@@ -37,6 +47,10 @@ Global
|
||||
{D4D613CB-5E94-47FB-85BE-B8423D20C545}.Debug|Any CPU.Build.0 = Debug|Any CPU
|
||||
{D4D613CB-5E94-47FB-85BE-B8423D20C545}.Release|Any CPU.ActiveCfg = Release|Any CPU
|
||||
{D4D613CB-5E94-47FB-85BE-B8423D20C545}.Release|Any CPU.Build.0 = Release|Any CPU
|
||||
{5BDE6DBC-9E5F-4E21-AB71-F138A3E72B17}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
|
||||
{5BDE6DBC-9E5F-4E21-AB71-F138A3E72B17}.Debug|Any CPU.Build.0 = Debug|Any CPU
|
||||
{5BDE6DBC-9E5F-4E21-AB71-F138A3E72B17}.Release|Any CPU.ActiveCfg = Release|Any CPU
|
||||
{5BDE6DBC-9E5F-4E21-AB71-F138A3E72B17}.Release|Any CPU.Build.0 = Release|Any CPU
|
||||
EndGlobalSection
|
||||
GlobalSection(SolutionProperties) = preSolution
|
||||
HideSolutionNode = FALSE
|
||||
@@ -44,5 +58,6 @@ Global
|
||||
GlobalSection(NestedProjects) = preSolution
|
||||
{FD19DDD8-72B2-4024-8665-0D1F7A2AA998} = {3C5BE746-03E5-4895-9988-0B57F162F86C}
|
||||
{F2B1A1EB-0FA6-40D0-8908-E13247C7226F} = {0F0901FF-E8D9-426A-B5A2-17C7F47C1529}
|
||||
{5BDE6DBC-9E5F-4E21-AB71-F138A3E72B17} = {0F0901FF-E8D9-426A-B5A2-17C7F47C1529}
|
||||
EndGlobalSection
|
||||
EndGlobal
|
||||
|
||||
@@ -15,17 +15,17 @@
|
||||
|
||||
<s:String x:Key="/Default/CodeStyle/CodeCleanup/SilentCleanupProfile/@EntryValue">Basic Clean</s:String>
|
||||
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpCodeStyle/APPLY_ON_COMPLETION/@EntryValue">True</s:Boolean>
|
||||
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpCodeStyle/ARGUMENTS_NAMED/@EntryValue">Named</s:String>
|
||||
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpCodeStyle/ARGUMENTS_NAMED/@EntryValue">Positional</s:String>
|
||||
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpCodeStyle/BRACES_FOR_FOR/@EntryValue">Required</s:String>
|
||||
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpCodeStyle/BRACES_FOR_FOREACH/@EntryValue">Required</s:String>
|
||||
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpCodeStyle/BRACES_FOR_IFELSE/@EntryValue">Required</s:String>
|
||||
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpCodeStyle/BRACES_FOR_WHILE/@EntryValue">Required</s:String>
|
||||
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_FIRST_ARG_BY_PAREN/@EntryValue">True</s:Boolean>
|
||||
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_FIRST_ARG_BY_PAREN/@EntryValue">False</s:Boolean>
|
||||
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_LINQ_QUERY/@EntryValue">True</s:Boolean>
|
||||
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_MULTILINE_ARGUMENT/@EntryValue">True</s:Boolean>
|
||||
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_MULTILINE_ARRAY_AND_OBJECT_INITIALIZER/@EntryValue">True</s:Boolean>
|
||||
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_MULTILINE_ARRAY_AND_OBJECT_INITIALIZER/@EntryValue">False</s:Boolean>
|
||||
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_MULTILINE_CALLS_CHAIN/@EntryValue">True</s:Boolean>
|
||||
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_MULTILINE_EXPRESSION/@EntryValue">True</s:Boolean>
|
||||
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_MULTILINE_EXPRESSION/@EntryValue">False</s:Boolean>
|
||||
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_MULTILINE_EXTENDS_LIST/@EntryValue">True</s:Boolean>
|
||||
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_MULTILINE_FOR_STMT/@EntryValue">True</s:Boolean>
|
||||
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_MULTILINE_PARAMETER/@EntryValue">True</s:Boolean>
|
||||
@@ -42,7 +42,7 @@
|
||||
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/FORCE_IFELSE_BRACES_STYLE/@EntryValue">ALWAYS_ADD</s:String>
|
||||
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/FORCE_USING_BRACES_STYLE/@EntryValue">ALWAYS_ADD</s:String>
|
||||
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/FORCE_WHILE_BRACES_STYLE/@EntryValue">ALWAYS_ADD</s:String>
|
||||
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/INDENT_ANONYMOUS_METHOD_BLOCK/@EntryValue">True</s:Boolean>
|
||||
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/INDENT_ANONYMOUS_METHOD_BLOCK/@EntryValue">False</s:Boolean>
|
||||
<s:Int64 x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/KEEP_BLANK_LINES_IN_CODE/@EntryValue">1</s:Int64>
|
||||
<s:Int64 x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/KEEP_BLANK_LINES_IN_DECLARATIONS/@EntryValue">1</s:Int64>
|
||||
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/PLACE_ACCESSOR_ATTRIBUTE_ON_SAME_LINE_EX/@EntryValue">NEVER</s:String>
|
||||
@@ -50,12 +50,12 @@
|
||||
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/PLACE_CONSTRUCTOR_INITIALIZER_ON_SAME_LINE/@EntryValue">False</s:Boolean>
|
||||
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/PLACE_FIELD_ATTRIBUTE_ON_SAME_LINE/@EntryValue">False</s:Boolean>
|
||||
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/PLACE_FIELD_ATTRIBUTE_ON_SAME_LINE_EX/@EntryValue">NEVER</s:String>
|
||||
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/PLACE_SIMPLE_ACCESSORHOLDER_ON_SINGLE_LINE/@EntryValue">True</s:Boolean>
|
||||
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/PLACE_SIMPLE_ACCESSORHOLDER_ON_SINGLE_LINE/@EntryValue">False</s:Boolean>
|
||||
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/PLACE_SIMPLE_ACCESSOR_ATTRIBUTE_ON_SAME_LINE/@EntryValue">False</s:Boolean>
|
||||
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/PLACE_SIMPLE_EMBEDDED_STATEMENT_ON_SAME_LINE/@EntryValue">NEVER</s:String>
|
||||
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/PLACE_SIMPLE_INITIALIZER_ON_SINGLE_LINE/@EntryValue">True</s:Boolean>
|
||||
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/PLACE_SIMPLE_INITIALIZER_ON_SINGLE_LINE/@EntryValue">False</s:Boolean>
|
||||
|
||||
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/PLACE_WHILE_ON_NEW_LINE/@EntryValue">True</s:Boolean>
|
||||
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/PLACE_WHILE_ON_NEW_LINE/@EntryValue">False</s:Boolean>
|
||||
|
||||
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/SIMPLE_EMBEDDED_STATEMENT_STYLE/@EntryValue">LINE_BREAK</s:String>
|
||||
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/SPACE_AFTER_TYPECAST_PARENTHESES/@EntryValue">False</s:Boolean>
|
||||
@@ -67,18 +67,22 @@
|
||||
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/SPACE_BEFORE_TYPEOF_PARENTHESES/@EntryValue">False</s:Boolean>
|
||||
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/STICK_COMMENT/@EntryValue">False</s:Boolean>
|
||||
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/WRAP_ARGUMENTS_STYLE/@EntryValue">CHOP_IF_LONG</s:String>
|
||||
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/WRAP_ARRAY_INITIALIZER_STYLE/@EntryValue">CHOP_IF_LONG</s:String>
|
||||
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/WRAP_ARRAY_INITIALIZER_STYLE/@EntryValue">CHOP_ALWAYS</s:String>
|
||||
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/WRAP_EXTENDS_LIST_STYLE/@EntryValue">CHOP_IF_LONG</s:String>
|
||||
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/WRAP_LINES/@EntryValue">False</s:Boolean>
|
||||
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/WRAP_PARAMETERS_STYLE/@EntryValue">CHOP_IF_LONG</s:String>
|
||||
<s:String x:Key="/Default/CodeStyle/CSharpVarKeywordUsage/ForBuiltInTypes/@EntryValue">UseVarWhenEvident</s:String>
|
||||
<s:String x:Key="/Default/CodeStyle/CSharpVarKeywordUsage/ForOtherTypes/@EntryValue">UseVarWhenEvident</s:String>
|
||||
<s:String x:Key="/Default/CodeStyle/CSharpVarKeywordUsage/ForSimpleTypes/@EntryValue">UseVarWhenEvident</s:String>
|
||||
<s:String x:Key="/Default/CodeStyle/CSharpVarKeywordUsage/ForBuiltInTypes/@EntryValue">UseVar</s:String>
|
||||
<s:String x:Key="/Default/CodeStyle/CSharpVarKeywordUsage/ForOtherTypes/@EntryValue">UseVar</s:String>
|
||||
<s:String x:Key="/Default/CodeStyle/CSharpVarKeywordUsage/ForSimpleTypes/@EntryValue">UseVar</s:String>
|
||||
|
||||
<s:String x:Key="/Default/CodeStyle/Naming/CSharpNaming/PredefinedNamingRules/=PrivateInstanceFields/@EntryIndexedValue"><Policy Inspect="True" Prefix="_" Suffix="" Style="aaBb" /></s:String>
|
||||
<s:String x:Key="/Default/CodeStyle/Naming/CSharpNaming/PredefinedNamingRules/=PrivateStaticFields/@EntryIndexedValue"><Policy Inspect="True" Prefix="" Suffix="" Style="AA_BB" /></s:String>
|
||||
<s:String x:Key="/Default/CodeStyle/Naming/CSharpNaming/PredefinedNamingRules/=PrivateStaticReadonly/@EntryIndexedValue"><Policy Inspect="True" Prefix="" Suffix="" Style="AA_BB" /></s:String>
|
||||
<s:String x:Key="/Default/CodeStyle/Naming/CSharpNaming/PredefinedNamingRules/=StaticReadonly/@EntryIndexedValue"><Policy Inspect="True" Prefix="" Suffix="" Style="AA_BB" /></s:String>
|
||||
<s:String x:Key="/Default/CodeStyle/Naming/CSharpNaming/UserRules/=15b5b1f1_002D457c_002D4ca6_002Db278_002D5615aedc07d3/@EntryIndexedValue"><Policy><Descriptor Staticness="Static" AccessRightKinds="Private" Description="Static readonly fields (private)"><ElementKinds><Kind Name="READONLY_FIELD" /></ElementKinds></Descriptor><Policy Inspect="True" Prefix="" Suffix="" Style="AA_BB" /></Policy></s:String>
|
||||
<s:String x:Key="/Default/CodeStyle/Naming/CSharpNaming/UserRules/=4a98fdf6_002D7d98_002D4f5a_002Dafeb_002Dea44ad98c70c/@EntryIndexedValue"><Policy><Descriptor Staticness="Instance" AccessRightKinds="Private" Description="Instance fields (private)"><ElementKinds><Kind Name="FIELD" /><Kind Name="READONLY_FIELD" /></ElementKinds></Descriptor><Policy Inspect="True" Prefix="_" Suffix="" Style="aaBb" /></Policy></s:String>
|
||||
<s:String x:Key="/Default/CodeStyle/Naming/CSharpNaming/UserRules/=c873eafb_002Dd57f_002D481d_002D8c93_002D77f6863c2f88/@EntryIndexedValue"><Policy><Descriptor Staticness="Static" AccessRightKinds="Protected, ProtectedInternal, Internal, Public, PrivateProtected" Description="Static readonly fields (not private)"><ElementKinds><Kind Name="READONLY_FIELD" /></ElementKinds></Descriptor><Policy Inspect="True" Prefix="" Suffix="" Style="AA_BB" /></Policy></s:String>
|
||||
<s:String x:Key="/Default/CodeStyle/Naming/CSharpNaming/UserRules/=f9fce829_002De6f4_002D4cb2_002D80f1_002D5497c44f51df/@EntryIndexedValue"><Policy><Descriptor Staticness="Static" AccessRightKinds="Private" Description="Static fields (private)"><ElementKinds><Kind Name="FIELD" /></ElementKinds></Descriptor><Policy Inspect="True" Prefix="" Suffix="" Style="AA_BB" /></Policy></s:String>
|
||||
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=JS_005FBLOCK_005FSCOPE_005FCONSTANT/@EntryIndexedValue"><Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /></s:String>
|
||||
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=JS_005FBLOCK_005FSCOPE_005FFUNCTION/@EntryIndexedValue"><Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /></s:String>
|
||||
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=JS_005FBLOCK_005FSCOPE_005FVARIABLE/@EntryIndexedValue"><Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /></s:String>
|
||||
@@ -118,6 +122,7 @@
|
||||
<s:String x:Key="/Default/CodeStyle/Naming/XamlNaming/UserRules/=NAMESPACE_005FALIAS/@EntryIndexedValue"><Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /></s:String>
|
||||
<s:String x:Key="/Default/CodeStyle/Naming/XamlNaming/UserRules/=XAML_005FFIELD/@EntryIndexedValue"><Policy Inspect="True" Prefix="" Suffix="" Style="AaBb" /></s:String>
|
||||
<s:String x:Key="/Default/CodeStyle/Naming/XamlNaming/UserRules/=XAML_005FRESOURCE/@EntryIndexedValue"><Policy Inspect="True" Prefix="" Suffix="" Style="AaBb" /></s:String>
|
||||
<s:String x:Key="/Default/CustomTools/CustomToolsData/@EntryValue"></s:String>
|
||||
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ECSharpAttributeForSingleLineMethodUpgrade/@EntryIndexedValue">True</s:Boolean>
|
||||
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ECSharpKeepExistingMigration/@EntryIndexedValue">True</s:Boolean>
|
||||
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ECSharpPlaceEmbeddedOnSameLineMigration/@EntryIndexedValue">True</s:Boolean>
|
||||
@@ -127,6 +132,7 @@
|
||||
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ESettingsUpgrade_002ECSharpPlaceAttributeOnSameLineMigration/@EntryIndexedValue">True</s:Boolean>
|
||||
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ESettingsUpgrade_002EMigrateBlankLinesAroundFieldToBlankLinesAroundProperty/@EntryIndexedValue">True</s:Boolean>
|
||||
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ESettingsUpgrade_002EMigrateThisQualifierSettings/@EntryIndexedValue">True</s:Boolean>
|
||||
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ESettingsUpgrade_002EPredefinedNamingRulesToUserRulesUpgrade/@EntryIndexedValue">True</s:Boolean>
|
||||
<s:String x:Key="/Default/Environment/UnitTesting/UnitTestSessionStore/Sessions/=6af8f80e_002D9fdd_002D4223_002D8e02_002D473db916f9b2/@EntryIndexedValue"><SessionState ContinuousTestingIsOn="False" ContinuousTestingMode="0" FrameworkVersion="{x:Null}" IsLocked="False" Name="All tests from Solution" PlatformMonoPreference="{x:Null}" PlatformType="{x:Null}" xmlns="urn:schemas-jetbrains-com:jetbrains-ut-session" xmlns:x="http://schemas.microsoft.com/winfx/2006/xaml">
|
||||
<Solution />
|
||||
</SessionState></s:String></wpf:ResourceDictionary>
|
||||
|
||||
173
USAGE.md
@@ -1,5 +1,18 @@
|
||||
# SharpCompress Usage
|
||||
|
||||
## Async/Await Support
|
||||
|
||||
SharpCompress now provides full async/await support for all I/O operations. All `Read`, `Write`, and extraction operations have async equivalents ending in `Async` that accept an optional `CancellationToken`. This enables better performance and scalability for I/O-bound operations.
|
||||
|
||||
**Key Async Methods:**
|
||||
- `reader.WriteEntryToAsync(stream, cancellationToken)` - Extract entry asynchronously
|
||||
- `reader.WriteAllToDirectoryAsync(path, options, cancellationToken)` - Extract all asynchronously
|
||||
- `writer.WriteAsync(filename, stream, modTime, cancellationToken)` - Write entry asynchronously
|
||||
- `writer.WriteAllAsync(directory, pattern, searchOption, cancellationToken)` - Write directory asynchronously
|
||||
- `entry.OpenEntryStreamAsync(cancellationToken)` - Open entry stream asynchronously
|
||||
|
||||
See [Async Examples](#async-examples) section below for usage patterns.
|
||||
|
||||
## Stream Rules (changed with 0.21)
|
||||
|
||||
When dealing with Streams, the rule should be that you don't close a stream you didn't create. This, in effect, should mean you should always put a Stream in a using block to dispose it.
|
||||
@@ -27,7 +40,7 @@ To deal with the "correct" rules as well as the expectations of users, I've deci
|
||||
|
||||
To be explicit though, consider always using the overloads that use `ReaderOptions` or `WriterOptions` and explicitly set `LeaveStreamOpen` the way you want.
|
||||
|
||||
If using Compression Stream classes directly and you don't want the wrapped stream to be closed. Use the `NonDisposingStream` as a wrapped to prevent the stream being disposed. The change in 0.21 simplified a lot even though the usage is a bit more convoluted.
|
||||
If you are using the compression stream classes directly and don't want the wrapped stream to be closed, use `NonDisposingStream` as a wrapper to prevent the stream being disposed. The change in 0.21 simplified a lot even though the usage is a bit more convoluted.
|
||||
|
||||
## Samples
|
||||
|
||||
@@ -71,18 +84,34 @@ using (var archive = ZipArchive.Create())
|
||||
memoryStream.Position = 0;
|
||||
```
|
||||
|
||||
### Extract all files from a Rar file to a directory using RarArchive
|
||||
### Extract all files from a rar file to a directory using RarArchive
|
||||
|
||||
Note: Extracting a solid rar or 7z file needs to be done in sequential order to get acceptable decompression speed.
|
||||
It is explicitly recommended to use `ExtractAllEntries` when extracting an entire `IArchive` instead of iterating over all its `Entries`.
|
||||
Alternatively, use `IArchive.WriteToDirectory`.
|
||||
|
||||
```C#
|
||||
using (var archive = RarArchive.Open("Test.rar"))
|
||||
{
|
||||
using (var reader = archive.ExtractAllEntries())
|
||||
{
|
||||
reader.WriteAllToDirectory(@"D:\temp", new ExtractionOptions()
|
||||
{
|
||||
ExtractFullPath = true,
|
||||
Overwrite = true
|
||||
});
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Iterate over all files from a Rar file using RarArchive
|
||||
|
||||
```C#
|
||||
using (var archive = RarArchive.Open("Test.rar"))
|
||||
{
|
||||
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
|
||||
{
|
||||
entry.WriteToDirectory("D:\\temp", new ExtractionOptions()
|
||||
{
|
||||
ExtractFullPath = true,
|
||||
Overwrite = true
|
||||
});
|
||||
Console.WriteLine($"{entry.Key}: {entry.Size} bytes");
|
||||
}
|
||||
}
|
||||
```
|
||||
@@ -156,3 +185,133 @@ foreach(var entry in tr.Entries)
|
||||
Console.WriteLine($"{entry.Key}");
|
||||
}
|
||||
```
|
||||
|
||||
## Async Examples
|
||||
|
||||
### Async Reader Examples
|
||||
|
||||
**Extract single entry asynchronously:**
|
||||
```C#
|
||||
using (Stream stream = File.OpenRead("archive.zip"))
|
||||
using (var reader = ReaderFactory.Open(stream))
|
||||
{
|
||||
while (reader.MoveToNextEntry())
|
||||
{
|
||||
if (!reader.Entry.IsDirectory)
|
||||
{
|
||||
using (var entryStream = reader.OpenEntryStream())
|
||||
{
|
||||
using (var outputStream = File.Create("output.bin"))
|
||||
{
|
||||
await reader.WriteEntryToAsync(outputStream, cancellationToken);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
**Extract all entries asynchronously:**
|
||||
```C#
|
||||
using (Stream stream = File.OpenRead("archive.tar.gz"))
|
||||
using (var reader = ReaderFactory.Open(stream))
|
||||
{
|
||||
await reader.WriteAllToDirectoryAsync(
|
||||
@"D:\temp",
|
||||
new ExtractionOptions()
|
||||
{
|
||||
ExtractFullPath = true,
|
||||
Overwrite = true
|
||||
},
|
||||
cancellationToken
|
||||
);
|
||||
}
|
||||
```
|
||||
|
||||
**Open and process entry stream asynchronously:**
|
||||
```C#
|
||||
using (var archive = ZipArchive.Open("archive.zip"))
|
||||
{
|
||||
foreach (var entry in archive.Entries.Where(e => !e.IsDirectory))
|
||||
{
|
||||
using (var entryStream = await entry.OpenEntryStreamAsync(cancellationToken))
|
||||
{
|
||||
// Process the decompressed stream asynchronously
|
||||
await ProcessStreamAsync(entryStream, cancellationToken);
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Async Writer Examples
|
||||
|
||||
**Write single file asynchronously:**
|
||||
```C#
|
||||
using (Stream archiveStream = File.OpenWrite("output.zip"))
|
||||
using (var writer = WriterFactory.Open(archiveStream, ArchiveType.Zip, CompressionType.Deflate))
|
||||
{
|
||||
using (Stream fileStream = File.OpenRead("input.txt"))
|
||||
{
|
||||
await writer.WriteAsync("entry.txt", fileStream, DateTime.Now, cancellationToken);
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
**Write entire directory asynchronously:**
|
||||
```C#
|
||||
using (Stream stream = File.OpenWrite("backup.tar.gz"))
|
||||
using (var writer = WriterFactory.Open(stream, ArchiveType.Tar, new WriterOptions(CompressionType.GZip)))
|
||||
{
|
||||
await writer.WriteAllAsync(
|
||||
@"D:\files",
|
||||
"*",
|
||||
SearchOption.AllDirectories,
|
||||
cancellationToken
|
||||
);
|
||||
}
|
||||
```
|
||||
|
||||
**Write with cancellation support (e.g., a timeout):**
|
||||
```C#
|
||||
var cts = new CancellationTokenSource();
|
||||
|
||||
// Set timeout or cancel from UI
|
||||
cts.CancelAfter(TimeSpan.FromMinutes(5));
|
||||
|
||||
using (Stream stream = File.OpenWrite("archive.zip"))
|
||||
using (var writer = WriterFactory.Open(stream, ArchiveType.Zip, CompressionType.Deflate))
|
||||
{
|
||||
try
|
||||
{
|
||||
await writer.WriteAllAsync(@"D:\data", "*", SearchOption.AllDirectories, cts.Token);
|
||||
}
|
||||
catch (OperationCanceledException)
|
||||
{
|
||||
Console.WriteLine("Operation was cancelled");
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Archive Async Examples
|
||||
|
||||
**Extract from archive asynchronously:**
|
||||
```C#
|
||||
using (var archive = ZipArchive.Open("archive.zip"))
|
||||
{
|
||||
using (var reader = archive.ExtractAllEntries())
|
||||
{
|
||||
await reader.WriteAllToDirectoryAsync(
|
||||
@"C:\output",
|
||||
new ExtractionOptions() { ExtractFullPath = true, Overwrite = true },
|
||||
cancellationToken
|
||||
);
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
**Benefits of Async Operations:**
|
||||
- Non-blocking I/O for better application responsiveness
|
||||
- Improved scalability for server applications
|
||||
- Support for cancellation via CancellationToken
|
||||
- Better resource utilization in async/await contexts
|
||||
- Compatible with modern .NET async patterns
|
||||
|
||||
@@ -15,7 +15,7 @@ const string Publish = "publish";
|
||||
|
||||
Target(
|
||||
Clean,
|
||||
ForEach("**/bin", "**/obj"),
|
||||
["**/bin", "**/obj"],
|
||||
dir =>
|
||||
{
|
||||
IEnumerable<string> GetDirectories(string d)
|
||||
@@ -44,14 +44,14 @@ Target(
|
||||
() =>
|
||||
{
|
||||
Run("dotnet", "tool restore");
|
||||
Run("dotnet", "csharpier --check .");
|
||||
Run("dotnet", "csharpier check .");
|
||||
}
|
||||
);
|
||||
Target(Restore, DependsOn(Format), () => Run("dotnet", "restore"));
|
||||
Target(Restore, [Format], () => Run("dotnet", "restore"));
|
||||
|
||||
Target(
|
||||
Build,
|
||||
DependsOn(Restore),
|
||||
[Restore],
|
||||
() =>
|
||||
{
|
||||
Run("dotnet", "build src/SharpCompress/SharpCompress.csproj -c Release --no-restore");
|
||||
@@ -60,8 +60,8 @@ Target(
|
||||
|
||||
Target(
|
||||
Test,
|
||||
DependsOn(Build),
|
||||
ForEach("net8.0", "net462"),
|
||||
[Build],
|
||||
["net8.0", "net48"],
|
||||
framework =>
|
||||
{
|
||||
IEnumerable<string> GetFiles(string d)
|
||||
@@ -69,7 +69,7 @@ Target(
|
||||
return Glob.Files(".", d);
|
||||
}
|
||||
|
||||
if (!RuntimeInformation.IsOSPlatform(OSPlatform.Windows) && framework == "net462")
|
||||
if (!RuntimeInformation.IsOSPlatform(OSPlatform.Windows) && framework == "net48")
|
||||
{
|
||||
return;
|
||||
}
|
||||
@@ -83,13 +83,13 @@ Target(
|
||||
|
||||
Target(
|
||||
Publish,
|
||||
DependsOn(Test),
|
||||
[Test],
|
||||
() =>
|
||||
{
|
||||
Run("dotnet", "pack src/SharpCompress/SharpCompress.csproj -c Release -o artifacts/");
|
||||
}
|
||||
);
|
||||
|
||||
Target("default", DependsOn(Publish), () => Console.WriteLine("Done!"));
|
||||
Target("default", [Publish], () => Console.WriteLine("Done!"));
|
||||
|
||||
await RunTargetsAndExitAsync(args);
|
||||
|
||||
@@ -1,14 +1,11 @@
|
||||
<Project Sdk="Microsoft.NET.Sdk">
|
||||
|
||||
<PropertyGroup>
|
||||
<OutputType>Exe</OutputType>
|
||||
<TargetFramework>net7.0</TargetFramework>
|
||||
<TargetFramework>net8.0</TargetFramework>
|
||||
</PropertyGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<PackageReference Include="Bullseye" Version="4.2.1" />
|
||||
<PackageReference Include="Glob" Version="1.1.9" />
|
||||
<PackageReference Include="SimpleExec" Version="11.0.0" />
|
||||
<PackageReference Include="Bullseye" />
|
||||
<PackageReference Include="Glob" />
|
||||
<PackageReference Include="SimpleExec" />
|
||||
</ItemGroup>
|
||||
|
||||
</Project>
|
||||
|
||||
25
build/packages.lock.json
Normal file
@@ -0,0 +1,25 @@
|
||||
{
|
||||
"version": 2,
|
||||
"dependencies": {
|
||||
"net8.0": {
|
||||
"Bullseye": {
|
||||
"type": "Direct",
|
||||
"requested": "[6.0.0, )",
|
||||
"resolved": "6.0.0",
|
||||
"contentHash": "vgwwXfzs7jJrskWH7saHRMgPzziq/e86QZNWY1MnMxd7e+De7E7EX4K3C7yrvaK9y02SJoLxNxcLG/q5qUAghw=="
|
||||
},
|
||||
"Glob": {
|
||||
"type": "Direct",
|
||||
"requested": "[1.1.9, )",
|
||||
"resolved": "1.1.9",
|
||||
"contentHash": "AfK5+ECWYTP7G3AAdnU8IfVj+QpGjrh9GC2mpdcJzCvtQ4pnerAGwHsxJ9D4/RnhDUz2DSzd951O/lQjQby2Sw=="
|
||||
},
|
||||
"SimpleExec": {
|
||||
"type": "Direct",
|
||||
"requested": "[12.0.0, )",
|
||||
"resolved": "12.0.0",
|
||||
"contentHash": "ptxlWtxC8vM6Y6e3h9ZTxBBkOWnWrm/Sa1HT+2i1xcXY3Hx2hmKDZP5RShPf8Xr9D+ivlrXNy57ktzyH8kyt+Q=="
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -141,7 +141,7 @@ internal static class Adler32 // From https://github.com/SixLabors/ImageSharp/bl
|
||||
4,
|
||||
3,
|
||||
2,
|
||||
1 // tap2
|
||||
1, // tap2
|
||||
};
|
||||
#endif
|
||||
|
||||
|
||||
@@ -12,39 +12,35 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IArchiveExtra
|
||||
where TEntry : IArchiveEntry
|
||||
where TVolume : IVolume
|
||||
{
|
||||
private readonly LazyReadOnlyCollection<TVolume> lazyVolumes;
|
||||
private readonly LazyReadOnlyCollection<TEntry> lazyEntries;
|
||||
private readonly LazyReadOnlyCollection<TVolume> _lazyVolumes;
|
||||
private readonly LazyReadOnlyCollection<TEntry> _lazyEntries;
|
||||
private bool _disposed;
|
||||
private readonly SourceStream? _sourceStream;
|
||||
|
||||
public event EventHandler<ArchiveExtractionEventArgs<IArchiveEntry>>? EntryExtractionBegin;
|
||||
public event EventHandler<ArchiveExtractionEventArgs<IArchiveEntry>>? EntryExtractionEnd;
|
||||
|
||||
public event EventHandler<CompressedBytesReadEventArgs>? CompressedBytesRead;
|
||||
public event EventHandler<FilePartExtractionBeginEventArgs>? FilePartExtractionBegin;
|
||||
|
||||
protected ReaderOptions ReaderOptions { get; }
|
||||
|
||||
private bool disposed;
|
||||
protected SourceStream SrcStream;
|
||||
|
||||
internal AbstractArchive(ArchiveType type, SourceStream srcStream)
|
||||
internal AbstractArchive(ArchiveType type, SourceStream sourceStream)
|
||||
{
|
||||
Type = type;
|
||||
ReaderOptions = srcStream.ReaderOptions;
|
||||
SrcStream = srcStream;
|
||||
lazyVolumes = new LazyReadOnlyCollection<TVolume>(LoadVolumes(SrcStream));
|
||||
lazyEntries = new LazyReadOnlyCollection<TEntry>(LoadEntries(Volumes));
|
||||
ReaderOptions = sourceStream.ReaderOptions;
|
||||
_sourceStream = sourceStream;
|
||||
_lazyVolumes = new LazyReadOnlyCollection<TVolume>(LoadVolumes(_sourceStream));
|
||||
_lazyEntries = new LazyReadOnlyCollection<TEntry>(LoadEntries(Volumes));
|
||||
}
|
||||
|
||||
#nullable disable
|
||||
internal AbstractArchive(ArchiveType type)
|
||||
{
|
||||
Type = type;
|
||||
lazyVolumes = new LazyReadOnlyCollection<TVolume>(Enumerable.Empty<TVolume>());
|
||||
lazyEntries = new LazyReadOnlyCollection<TEntry>(Enumerable.Empty<TEntry>());
|
||||
ReaderOptions = new();
|
||||
_lazyVolumes = new LazyReadOnlyCollection<TVolume>(Enumerable.Empty<TVolume>());
|
||||
_lazyEntries = new LazyReadOnlyCollection<TEntry>(Enumerable.Empty<TEntry>());
|
||||
}
|
||||
|
||||
#nullable enable
|
||||
|
||||
public ArchiveType Type { get; }
|
||||
|
||||
void IArchiveExtractionListener.FireEntryExtractionBegin(IArchiveEntry entry) =>
|
||||
@@ -57,7 +53,7 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IArchiveExtra
|
||||
{
|
||||
if (!stream.CanSeek || !stream.CanRead)
|
||||
{
|
||||
throw new ArgumentException("Archive streams must be Readable and Seekable");
|
||||
throw new ArchiveException("Archive streams must be Readable and Seekable");
|
||||
}
|
||||
return stream;
|
||||
}
|
||||
@@ -65,12 +61,12 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IArchiveExtra
|
||||
/// <summary>
|
||||
/// Returns an ReadOnlyCollection of all the RarArchiveEntries across the one or many parts of the RarArchive.
|
||||
/// </summary>
|
||||
public virtual ICollection<TEntry> Entries => lazyEntries;
|
||||
public virtual ICollection<TEntry> Entries => _lazyEntries;
|
||||
|
||||
/// <summary>
|
||||
/// Returns an ReadOnlyCollection of all the RarArchiveVolumes across the one or many parts of the RarArchive.
|
||||
/// </summary>
|
||||
public ICollection<TVolume> Volumes => lazyVolumes;
|
||||
public ICollection<TVolume> Volumes => _lazyVolumes;
|
||||
|
||||
/// <summary>
|
||||
/// The total size of the files compressed in the archive.
|
||||
@@ -84,29 +80,29 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IArchiveExtra
|
||||
public virtual long TotalUncompressSize =>
|
||||
Entries.Aggregate(0L, (total, cf) => total + cf.Size);
|
||||
|
||||
protected abstract IEnumerable<TVolume> LoadVolumes(SourceStream srcStream);
|
||||
protected abstract IEnumerable<TVolume> LoadVolumes(SourceStream sourceStream);
|
||||
protected abstract IEnumerable<TEntry> LoadEntries(IEnumerable<TVolume> volumes);
|
||||
|
||||
IEnumerable<IArchiveEntry> IArchive.Entries => Entries.Cast<IArchiveEntry>();
|
||||
|
||||
IEnumerable<IVolume> IArchive.Volumes => lazyVolumes.Cast<IVolume>();
|
||||
IEnumerable<IVolume> IArchive.Volumes => _lazyVolumes.Cast<IVolume>();
|
||||
|
||||
public virtual void Dispose()
|
||||
{
|
||||
if (!disposed)
|
||||
if (!_disposed)
|
||||
{
|
||||
lazyVolumes.ForEach(v => v.Dispose());
|
||||
lazyEntries.GetLoaded().Cast<Entry>().ForEach(x => x.Close());
|
||||
SrcStream?.Dispose();
|
||||
_lazyVolumes.ForEach(v => v.Dispose());
|
||||
_lazyEntries.GetLoaded().Cast<Entry>().ForEach(x => x.Close());
|
||||
_sourceStream?.Dispose();
|
||||
|
||||
disposed = true;
|
||||
_disposed = true;
|
||||
}
|
||||
}
|
||||
|
||||
void IArchiveExtractionListener.EnsureEntriesLoaded()
|
||||
{
|
||||
lazyEntries.EnsureFullyLoaded();
|
||||
lazyVolumes.EnsureFullyLoaded();
|
||||
_lazyEntries.EnsureFullyLoaded();
|
||||
_lazyVolumes.EnsureFullyLoaded();
|
||||
}
|
||||
|
||||
void IExtractionListener.FireCompressedBytesRead(
|
||||
@@ -148,6 +144,12 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IArchiveExtra
|
||||
/// <returns></returns>
|
||||
public IReader ExtractAllEntries()
|
||||
{
|
||||
if (!IsSolid && Type != ArchiveType.SevenZip)
|
||||
{
|
||||
throw new InvalidOperationException(
|
||||
"ExtractAllEntries can only be used on solid archives or 7Zip archives (which require random access)."
|
||||
);
|
||||
}
|
||||
((IArchiveExtractionListener)this).EnsureEntriesLoaded();
|
||||
return CreateReaderForSolidExtraction();
|
||||
}
|
||||
|
||||
@@ -2,6 +2,8 @@ using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.IO;
|
||||
using SharpCompress.Writers;
|
||||
@@ -41,8 +43,8 @@ public abstract class AbstractWritableArchive<TEntry, TVolume>
|
||||
internal AbstractWritableArchive(ArchiveType type)
|
||||
: base(type) { }
|
||||
|
||||
internal AbstractWritableArchive(ArchiveType type, SourceStream srcStream)
|
||||
: base(type, srcStream) { }
|
||||
internal AbstractWritableArchive(ArchiveType type, SourceStream sourceStream)
|
||||
: base(type, sourceStream) { }
|
||||
|
||||
public override ICollection<TEntry> Entries
|
||||
{
|
||||
@@ -94,6 +96,9 @@ public abstract class AbstractWritableArchive<TEntry, TVolume>
|
||||
DateTime? modified
|
||||
) => AddEntry(key, source, closeStream, size, modified);
|
||||
|
||||
IArchiveEntry IWritableArchive.AddDirectoryEntry(string key, DateTime? modified) =>
|
||||
AddDirectoryEntry(key, modified);
|
||||
|
||||
public TEntry AddEntry(
|
||||
string key,
|
||||
Stream source,
|
||||
@@ -120,6 +125,10 @@ public abstract class AbstractWritableArchive<TEntry, TVolume>
|
||||
{
|
||||
foreach (var path in Entries.Select(x => x.Key))
|
||||
{
|
||||
if (path is null)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
var p = path.Replace('/', '\\');
|
||||
if (p.Length > 0 && p[0] == '\\')
|
||||
{
|
||||
@@ -130,6 +139,22 @@ public abstract class AbstractWritableArchive<TEntry, TVolume>
|
||||
return false;
|
||||
}
|
||||
|
||||
public TEntry AddDirectoryEntry(string key, DateTime? modified = null)
|
||||
{
|
||||
if (key.Length > 0 && key[0] is '/' or '\\')
|
||||
{
|
||||
key = key.Substring(1);
|
||||
}
|
||||
if (DoesKeyMatchExisting(key))
|
||||
{
|
||||
throw new ArchiveException("Cannot add entry with duplicate key: " + key);
|
||||
}
|
||||
var entry = CreateDirectoryEntry(key, modified);
|
||||
newEntries.Add(entry);
|
||||
RebuildModifiedCollection();
|
||||
return entry;
|
||||
}
|
||||
|
||||
public void SaveTo(Stream stream, WriterOptions options)
|
||||
{
|
||||
//reset streams of new entries
|
||||
@@ -137,6 +162,18 @@ public abstract class AbstractWritableArchive<TEntry, TVolume>
|
||||
SaveTo(stream, options, OldEntries, newEntries);
|
||||
}
|
||||
|
||||
public async Task SaveToAsync(
|
||||
Stream stream,
|
||||
WriterOptions options,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
//reset streams of new entries
|
||||
newEntries.Cast<IWritableArchiveEntry>().ForEach(x => x.Stream.Seek(0, SeekOrigin.Begin));
|
||||
await SaveToAsync(stream, options, OldEntries, newEntries, cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
}
|
||||
|
||||
protected TEntry CreateEntry(
|
||||
string key,
|
||||
Stream source,
|
||||
@@ -147,7 +184,7 @@ public abstract class AbstractWritableArchive<TEntry, TVolume>
|
||||
{
|
||||
if (!source.CanRead || !source.CanSeek)
|
||||
{
|
||||
throw new ArgumentException(
|
||||
throw new ArchiveException(
|
||||
"Streams must be readable and seekable to use the Writing Archive API"
|
||||
);
|
||||
}
|
||||
@@ -162,6 +199,8 @@ public abstract class AbstractWritableArchive<TEntry, TVolume>
|
||||
bool closeStream
|
||||
);
|
||||
|
||||
protected abstract TEntry CreateDirectoryEntry(string key, DateTime? modified);
|
||||
|
||||
protected abstract void SaveTo(
|
||||
Stream stream,
|
||||
WriterOptions options,
|
||||
@@ -169,6 +208,14 @@ public abstract class AbstractWritableArchive<TEntry, TVolume>
|
||||
IEnumerable<TEntry> newEntries
|
||||
);
|
||||
|
||||
protected abstract Task SaveToAsync(
|
||||
Stream stream,
|
||||
WriterOptions options,
|
||||
IEnumerable<TEntry> oldEntries,
|
||||
IEnumerable<TEntry> newEntries,
|
||||
CancellationToken cancellationToken = default
|
||||
);
|
||||
|
||||
public override void Dispose()
|
||||
{
|
||||
base.Dispose();
|
||||
|
||||
@@ -4,6 +4,7 @@ using System.IO;
|
||||
using System.Linq;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.Factories;
|
||||
using SharpCompress.IO;
|
||||
using SharpCompress.Readers;
|
||||
|
||||
namespace SharpCompress.Archives;
|
||||
@@ -19,7 +20,7 @@ public static class ArchiveFactory
|
||||
public static IArchive Open(Stream stream, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
readerOptions ??= new ReaderOptions();
|
||||
|
||||
stream = SharpCompressStream.Create(stream, bufferSize: readerOptions.BufferSize);
|
||||
return FindFactory<IArchiveFactory>(stream).Open(stream, readerOptions);
|
||||
}
|
||||
|
||||
@@ -44,7 +45,7 @@ public static class ArchiveFactory
|
||||
/// <param name="options"></param>
|
||||
public static IArchive Open(string filePath, ReaderOptions? options = null)
|
||||
{
|
||||
filePath.CheckNotNullOrEmpty(nameof(filePath));
|
||||
filePath.NotNullOrEmpty(nameof(filePath));
|
||||
return Open(new FileInfo(filePath), options);
|
||||
}
|
||||
|
||||
@@ -67,7 +68,7 @@ public static class ArchiveFactory
|
||||
/// <param name="options"></param>
|
||||
public static IArchive Open(IEnumerable<FileInfo> fileInfos, ReaderOptions? options = null)
|
||||
{
|
||||
fileInfos.CheckNotNull(nameof(fileInfos));
|
||||
fileInfos.NotNull(nameof(fileInfos));
|
||||
var filesArray = fileInfos.ToArray();
|
||||
if (filesArray.Length == 0)
|
||||
{
|
||||
@@ -80,7 +81,7 @@ public static class ArchiveFactory
|
||||
return Open(fileInfo, options);
|
||||
}
|
||||
|
||||
fileInfo.CheckNotNull(nameof(fileInfo));
|
||||
fileInfo.NotNull(nameof(fileInfo));
|
||||
options ??= new ReaderOptions { LeaveStreamOpen = false };
|
||||
|
||||
return FindFactory<IMultiArchiveFactory>(fileInfo).Open(filesArray, options);
|
||||
@@ -93,7 +94,7 @@ public static class ArchiveFactory
|
||||
/// <param name="options"></param>
|
||||
public static IArchive Open(IEnumerable<Stream> streams, ReaderOptions? options = null)
|
||||
{
|
||||
streams.CheckNotNull(nameof(streams));
|
||||
streams.NotNull(nameof(streams));
|
||||
var streamsArray = streams.ToArray();
|
||||
if (streamsArray.Length == 0)
|
||||
{
|
||||
@@ -106,7 +107,7 @@ public static class ArchiveFactory
|
||||
return Open(firstStream, options);
|
||||
}
|
||||
|
||||
firstStream.CheckNotNull(nameof(firstStream));
|
||||
firstStream.NotNull(nameof(firstStream));
|
||||
options ??= new ReaderOptions();
|
||||
|
||||
return FindFactory<IMultiArchiveFactory>(firstStream).Open(streamsArray, options);
|
||||
@@ -122,16 +123,13 @@ public static class ArchiveFactory
|
||||
)
|
||||
{
|
||||
using var archive = Open(sourceArchive);
|
||||
foreach (var entry in archive.Entries)
|
||||
{
|
||||
entry.WriteToDirectory(destinationDirectory, options);
|
||||
}
|
||||
archive.WriteToDirectory(destinationDirectory, options);
|
||||
}
|
||||
|
||||
private static T FindFactory<T>(FileInfo finfo)
|
||||
where T : IFactory
|
||||
{
|
||||
finfo.CheckNotNull(nameof(finfo));
|
||||
finfo.NotNull(nameof(finfo));
|
||||
using Stream stream = finfo.OpenRead();
|
||||
return FindFactory<T>(stream);
|
||||
}
|
||||
@@ -139,7 +137,7 @@ public static class ArchiveFactory
|
||||
private static T FindFactory<T>(Stream stream)
|
||||
where T : IFactory
|
||||
{
|
||||
stream.CheckNotNull(nameof(stream));
|
||||
stream.NotNull(nameof(stream));
|
||||
if (!stream.CanRead || !stream.CanSeek)
|
||||
{
|
||||
throw new ArgumentException("Stream should be readable and seekable");
|
||||
@@ -168,17 +166,25 @@ public static class ArchiveFactory
        );
    }

    public static bool IsArchive(string filePath, out ArchiveType? type)
    public static bool IsArchive(
        string filePath,
        out ArchiveType? type,
        int bufferSize = ReaderOptions.DefaultBufferSize
    )
    {
        filePath.CheckNotNullOrEmpty(nameof(filePath));
        filePath.NotNullOrEmpty(nameof(filePath));
        using Stream s = File.OpenRead(filePath);
        return IsArchive(s, out type);
        return IsArchive(s, out type, bufferSize);
    }

    public static bool IsArchive(Stream stream, out ArchiveType? type)
    public static bool IsArchive(
        Stream stream,
        out ArchiveType? type,
        int bufferSize = ReaderOptions.DefaultBufferSize
    )
    {
        type = null;
        stream.CheckNotNull(nameof(stream));
        stream.NotNull(nameof(stream));

        if (!stream.CanRead || !stream.CanSeek)
        {
@@ -189,9 +195,10 @@ public static class ArchiveFactory

        foreach (var factory in Factory.Factories)
        {
            var isArchive = factory.IsArchive(stream);
            stream.Position = startPosition;

            if (factory.IsArchive(stream, null))
            if (isArchive)
            {
                type = factory.KnownArchiveType;
                return true;
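A short usage sketch of the widened detection API shown above; the path and explicit buffer size are examples:

    if (ArchiveFactory.IsArchive("backup.rar", out ArchiveType? detectedType, bufferSize: 81920))
    {
        Console.WriteLine($"Detected archive type: {detectedType}");
    }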
@@ -208,7 +215,7 @@ public static class ArchiveFactory
|
||||
/// <returns></returns>
|
||||
public static IEnumerable<string> GetFileParts(string part1)
|
||||
{
|
||||
part1.CheckNotNullOrEmpty(nameof(part1));
|
||||
part1.NotNullOrEmpty(nameof(part1));
|
||||
return GetFileParts(new FileInfo(part1)).Select(a => a.FullName);
|
||||
}
|
||||
|
||||
@@ -219,7 +226,7 @@ public static class ArchiveFactory
|
||||
/// <returns></returns>
|
||||
public static IEnumerable<FileInfo> GetFileParts(FileInfo part1)
|
||||
{
|
||||
part1.CheckNotNull(nameof(part1));
|
||||
part1.NotNull(nameof(part1));
|
||||
yield return part1;
|
||||
|
||||
foreach (var factory in Factory.Factories.OfType<IFactory>())
|
||||
@@ -239,4 +246,6 @@ public static class ArchiveFactory
            }
        }
    }

    public static IArchiveFactory AutoFactory { get; } = new AutoArchiveFactory();
}

src/SharpCompress/Archives/AutoArchiveFactory.cs (new file, 30 lines)
@@ -0,0 +1,30 @@
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common;
using SharpCompress.Readers;

namespace SharpCompress.Archives;

class AutoArchiveFactory : IArchiveFactory
{
    public string Name => nameof(AutoArchiveFactory);

    public ArchiveType? KnownArchiveType => null;

    public IEnumerable<string> GetSupportedExtensions() => throw new NotSupportedException();

    public bool IsArchive(
        Stream stream,
        string? password = null,
        int bufferSize = ReaderOptions.DefaultBufferSize
    ) => throw new NotSupportedException();

    public FileInfo? GetFilePart(int index, FileInfo part1) => throw new NotSupportedException();

    public IArchive Open(Stream stream, ReaderOptions? readerOptions = null) =>
        ArchiveFactory.Open(stream, readerOptions);

    public IArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null) =>
        ArchiveFactory.Open(fileInfo, readerOptions);
}
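AutoArchiveFactory simply delegates back to ArchiveFactory, so ArchiveFactory.AutoFactory can be passed wherever an IArchiveFactory is expected without committing to a concrete format. A minimal sketch; ProcessArchive and the file name are hypothetical:

    static void ProcessArchive(IArchiveFactory factory, Stream input)
    {
        // The caller does not need to know the archive format up front.
        using var archive = factory.Open(input);
        foreach (var entry in archive.Entries)
        {
            Console.WriteLine(entry.Key);
        }
    }

    ProcessArchive(ArchiveFactory.AutoFactory, File.OpenRead("unknown-format.archive"));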
@@ -2,6 +2,8 @@ using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.Common.GZip;
|
||||
using SharpCompress.IO;
|
||||
@@ -21,7 +23,7 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
|
||||
/// <param name="readerOptions"></param>
|
||||
public static GZipArchive Open(string filePath, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
filePath.CheckNotNullOrEmpty(nameof(filePath));
|
||||
filePath.NotNullOrEmpty(nameof(filePath));
|
||||
return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
|
||||
}
|
||||
|
||||
@@ -32,7 +34,7 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
|
||||
/// <param name="readerOptions"></param>
|
||||
public static GZipArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
fileInfo.CheckNotNull(nameof(fileInfo));
|
||||
fileInfo.NotNull(nameof(fileInfo));
|
||||
return new GZipArchive(
|
||||
new SourceStream(
|
||||
fileInfo,
|
||||
@@ -52,7 +54,7 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
|
||||
ReaderOptions? readerOptions = null
|
||||
)
|
||||
{
|
||||
fileInfos.CheckNotNull(nameof(fileInfos));
|
||||
fileInfos.NotNull(nameof(fileInfos));
|
||||
var files = fileInfos.ToArray();
|
||||
return new GZipArchive(
|
||||
new SourceStream(
|
||||
@@ -70,7 +72,7 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
|
||||
/// <param name="readerOptions"></param>
|
||||
public static GZipArchive Open(IEnumerable<Stream> streams, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
streams.CheckNotNull(nameof(streams));
|
||||
streams.NotNull(nameof(streams));
|
||||
var strms = streams.ToArray();
|
||||
return new GZipArchive(
|
||||
new SourceStream(
|
||||
@@ -88,9 +90,15 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
|
||||
/// <param name="readerOptions"></param>
|
||||
public static GZipArchive Open(Stream stream, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
stream.CheckNotNull(nameof(stream));
|
||||
stream.NotNull(nameof(stream));
|
||||
|
||||
if (stream is not { CanSeek: true })
|
||||
{
|
||||
throw new ArgumentException("Stream must be seekable", nameof(stream));
|
||||
}
|
||||
|
||||
return new GZipArchive(
|
||||
new SourceStream(stream, i => null, readerOptions ?? new ReaderOptions())
|
||||
new SourceStream(stream, _ => null, readerOptions ?? new ReaderOptions())
|
||||
);
|
||||
}
|
||||
|
||||
@@ -99,16 +107,14 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
|
||||
/// <summary>
|
||||
/// Constructor with a SourceStream able to handle FileInfo and Streams.
|
||||
/// </summary>
|
||||
/// <param name="srcStream"></param>
|
||||
/// <param name="options"></param>
|
||||
internal GZipArchive(SourceStream srcStream)
|
||||
: base(ArchiveType.Tar, srcStream) { }
|
||||
/// <param name="sourceStream"></param>
|
||||
private GZipArchive(SourceStream sourceStream)
|
||||
: base(ArchiveType.GZip, sourceStream) { }
|
||||
|
||||
protected override IEnumerable<GZipVolume> LoadVolumes(SourceStream srcStream)
|
||||
protected override IEnumerable<GZipVolume> LoadVolumes(SourceStream sourceStream)
|
||||
{
|
||||
srcStream.LoadAllParts();
|
||||
var idx = 0;
|
||||
return srcStream.Streams.Select(a => new GZipVolume(a, ReaderOptions, idx++));
|
||||
sourceStream.LoadAllParts();
|
||||
return sourceStream.Streams.Select(a => new GZipVolume(a, ReaderOptions, 0));
|
||||
}
|
||||
|
||||
public static bool IsGZipFile(string filePath) => IsGZipFile(new FileInfo(filePath));
|
||||
@@ -132,6 +138,16 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
        SaveTo(stream, new WriterOptions(CompressionType.GZip));
    }

    public Task SaveToAsync(string filePath, CancellationToken cancellationToken = default) =>
        SaveToAsync(new FileInfo(filePath), cancellationToken);

    public async Task SaveToAsync(FileInfo fileInfo, CancellationToken cancellationToken = default)
    {
        using var stream = fileInfo.Open(FileMode.Create, FileAccess.Write);
        await SaveToAsync(stream, new WriterOptions(CompressionType.GZip), cancellationToken)
            .ConfigureAwait(false);
    }

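A hedged sketch of the new asynchronous save path; it assumes the existing GZipArchive.Create and AddEntry APIs, runs in an async context, and the file names are examples:

    using var archive = GZipArchive.Create();
    // GZip only allows a single entry, so add exactly one.
    archive.AddEntry("data.txt", File.OpenRead("data.txt"), closeStream: true);
    await archive.SaveToAsync("data.txt.gz", CancellationToken.None);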
public static bool IsGZipFile(Stream stream)
|
||||
{
|
||||
// read the header on the first read
|
||||
@@ -164,11 +180,16 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
|
||||
{
|
||||
if (Entries.Any())
|
||||
{
|
||||
throw new InvalidOperationException("Only one entry is allowed in a GZip Archive");
|
||||
throw new InvalidFormatException("Only one entry is allowed in a GZip Archive");
|
||||
}
|
||||
return new GZipWritableArchiveEntry(this, source, filePath, size, modified, closeStream);
|
||||
}
|
||||
|
||||
protected override GZipArchiveEntry CreateDirectoryEntry(
|
||||
string directoryPath,
|
||||
DateTime? modified
|
||||
) => throw new NotSupportedException("GZip archives do not support directory entries.");
|
||||
|
||||
protected override void SaveTo(
|
||||
Stream stream,
|
||||
WriterOptions options,
|
||||
@@ -178,13 +199,39 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
|
||||
{
|
||||
if (Entries.Count > 1)
|
||||
{
|
||||
throw new InvalidOperationException("Only one entry is allowed in a GZip Archive");
|
||||
throw new InvalidFormatException("Only one entry is allowed in a GZip Archive");
|
||||
}
|
||||
using var writer = new GZipWriter(stream, new GZipWriterOptions(options));
|
||||
foreach (var entry in oldEntries.Concat(newEntries).Where(x => !x.IsDirectory))
|
||||
{
|
||||
using var entryStream = entry.OpenEntryStream();
|
||||
writer.Write(entry.Key, entryStream, entry.LastModifiedTime);
|
||||
writer.Write(
|
||||
entry.Key.NotNull("Entry Key is null"),
|
||||
entryStream,
|
||||
entry.LastModifiedTime
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
protected override async Task SaveToAsync(
|
||||
Stream stream,
|
||||
WriterOptions options,
|
||||
IEnumerable<GZipArchiveEntry> oldEntries,
|
||||
IEnumerable<GZipArchiveEntry> newEntries,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
if (Entries.Count > 1)
|
||||
{
|
||||
throw new InvalidFormatException("Only one entry is allowed in a GZip Archive");
|
||||
}
|
||||
using var writer = new GZipWriter(stream, new GZipWriterOptions(options));
|
||||
foreach (var entry in oldEntries.Concat(newEntries).Where(x => !x.IsDirectory))
|
||||
{
|
||||
using var entryStream = entry.OpenEntryStream();
|
||||
await writer
|
||||
.WriteAsync(entry.Key.NotNull("Entry Key is null"), entryStream, cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1,23 +1,32 @@
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common.GZip;
|
||||
|
||||
namespace SharpCompress.Archives.GZip;
|
||||
|
||||
public class GZipArchiveEntry : GZipEntry, IArchiveEntry
|
||||
{
|
||||
internal GZipArchiveEntry(GZipArchive archive, GZipFilePart part)
|
||||
internal GZipArchiveEntry(GZipArchive archive, GZipFilePart? part)
|
||||
: base(part) => Archive = archive;
|
||||
|
||||
public virtual Stream OpenEntryStream()
|
||||
{
|
||||
//this is to reset the stream to be read multiple times
|
||||
var part = (GZipFilePart)Parts.Single();
|
||||
if (part.GetRawStream().Position != part.EntryStartPosition)
|
||||
var rawStream = part.GetRawStream();
|
||||
if (rawStream.CanSeek && rawStream.Position != part.EntryStartPosition)
|
||||
{
|
||||
part.GetRawStream().Position = part.EntryStartPosition;
|
||||
rawStream.Position = part.EntryStartPosition;
|
||||
}
|
||||
return Parts.Single().GetCompressedStream();
|
||||
return Parts.Single().GetCompressedStream().NotNull();
|
||||
}
|
||||
|
||||
public virtual Task<Stream> OpenEntryStreamAsync(CancellationToken cancellationToken = default)
|
||||
{
|
||||
// GZip synchronous implementation is fast enough, just wrap it
|
||||
return Task.FromResult(OpenEntryStream());
|
||||
}
|
||||
|
||||
#region IArchiveEntry Members
|
||||
|
||||
@@ -1,5 +1,3 @@
|
||||
#nullable disable
|
||||
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
@@ -32,7 +30,7 @@ internal sealed class GZipWritableArchiveEntry : GZipArchiveEntry, IWritableArch
|
||||
|
||||
public override long Crc => 0;
|
||||
|
||||
public override string Key { get; }
|
||||
public override string? Key { get; }
|
||||
|
||||
public override long CompressedSize => 0;
|
||||
|
||||
@@ -60,7 +58,7 @@ internal sealed class GZipWritableArchiveEntry : GZipArchiveEntry, IWritableArch
|
||||
{
|
||||
//ensure new stream is at the start, this could be reset
|
||||
stream.Seek(0, SeekOrigin.Begin);
|
||||
return NonDisposingStream.Create(stream);
|
||||
return SharpCompressStream.Create(stream, leaveOpen: true);
|
||||
}
|
||||
|
||||
internal override void Close()
|
||||
|
||||
@@ -1,4 +1,6 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;

namespace SharpCompress.Archives;
@@ -11,6 +13,12 @@ public interface IArchiveEntry : IEntry
    /// </summary>
    Stream OpenEntryStream();

    /// <summary>
    /// Opens the current entry as a stream that will decompress as it is read asynchronously.
    /// Read the entire stream or use SkipEntry on EntryStream.
    /// </summary>
    Task<Stream> OpenEntryStreamAsync(CancellationToken cancellationToken = default);

    /// <summary>
    /// The archive can find all the parts of the archive needed to extract this entry.
    /// </summary>

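A minimal sketch of consuming the new async entry API; it assumes `archive` (an IArchive) and `cancellationToken` are in scope, along with `using System.IO; using System.Linq;`, and the output directory is an example:

    foreach (var entry in archive.Entries.Where(e => !e.IsDirectory))
    {
        using var output = File.Create(Path.Combine("out", entry.Key!));
        using var input = await entry.OpenEntryStreamAsync(cancellationToken);
        // Buffered copy overload for broad framework compatibility.
        await input.CopyToAsync(output, 81920, cancellationToken);
    }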
@@ -1,4 +1,6 @@
|
||||
using System.IO;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.IO;
|
||||
|
||||
@@ -17,19 +19,43 @@ public static class IArchiveEntryExtensions
|
||||
streamListener.EnsureEntriesLoaded();
|
||||
streamListener.FireEntryExtractionBegin(archiveEntry);
|
||||
streamListener.FireFilePartExtractionBegin(
|
||||
archiveEntry.Key,
|
||||
archiveEntry.Key ?? "Key",
|
||||
archiveEntry.Size,
|
||||
archiveEntry.CompressedSize
|
||||
);
|
||||
var entryStream = archiveEntry.OpenEntryStream();
|
||||
if (entryStream is null)
|
||||
{
|
||||
return;
|
||||
}
|
||||
using (entryStream)
|
||||
{
|
||||
using Stream s = new ListeningStream(streamListener, entryStream);
|
||||
s.TransferTo(streamToWriteTo);
|
||||
s.CopyTo(streamToWriteTo);
|
||||
}
|
||||
streamListener.FireEntryExtractionEnd(archiveEntry);
|
||||
}
|
||||
|
||||
public static async Task WriteToAsync(
|
||||
this IArchiveEntry archiveEntry,
|
||||
Stream streamToWriteTo,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
if (archiveEntry.IsDirectory)
|
||||
{
|
||||
throw new ExtractionException("Entry is a file directory and cannot be extracted.");
|
||||
}
|
||||
|
||||
var streamListener = (IArchiveExtractionListener)archiveEntry.Archive;
|
||||
streamListener.EnsureEntriesLoaded();
|
||||
streamListener.FireEntryExtractionBegin(archiveEntry);
|
||||
streamListener.FireFilePartExtractionBegin(
|
||||
archiveEntry.Key ?? "Key",
|
||||
archiveEntry.Size,
|
||||
archiveEntry.CompressedSize
|
||||
);
|
||||
var entryStream = archiveEntry.OpenEntryStream();
|
||||
using (entryStream)
|
||||
{
|
||||
using Stream s = new ListeningStream(streamListener, entryStream);
|
||||
await s.CopyToAsync(streamToWriteTo, 81920, cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
streamListener.FireEntryExtractionEnd(archiveEntry);
|
||||
}
|
||||
@@ -49,6 +75,23 @@ public static class IArchiveEntryExtensions
|
||||
entry.WriteToFile
|
||||
);
|
||||
|
||||
/// <summary>
|
||||
/// Extract to specific directory asynchronously, retaining filename
|
||||
/// </summary>
|
||||
public static Task WriteToDirectoryAsync(
|
||||
this IArchiveEntry entry,
|
||||
string destinationDirectory,
|
||||
ExtractionOptions? options = null,
|
||||
CancellationToken cancellationToken = default
|
||||
) =>
|
||||
ExtractionMethods.WriteEntryToDirectoryAsync(
|
||||
entry,
|
||||
destinationDirectory,
|
||||
options,
|
||||
(x, opt) => entry.WriteToFileAsync(x, opt, cancellationToken),
|
||||
cancellationToken
|
||||
);
|
||||
|
||||
/// <summary>
|
||||
/// Extract to specific file
|
||||
/// </summary>
|
||||
@@ -67,4 +110,24 @@ public static class IArchiveEntryExtensions
|
||||
entry.WriteTo(fs);
|
||||
}
|
||||
);
|
||||
|
||||
/// <summary>
|
||||
/// Extract to specific file asynchronously
|
||||
/// </summary>
|
||||
public static Task WriteToFileAsync(
|
||||
this IArchiveEntry entry,
|
||||
string destinationFileName,
|
||||
ExtractionOptions? options = null,
|
||||
CancellationToken cancellationToken = default
|
||||
) =>
|
||||
ExtractionMethods.WriteEntryToFileAsync(
|
||||
entry,
|
||||
destinationFileName,
|
||||
options,
|
||||
async (x, fm) =>
|
||||
{
|
||||
using var fs = File.Open(destinationFileName, fm);
|
||||
await entry.WriteToAsync(fs, cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
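A short usage sketch of the per-entry async extraction extension added above; the destination path is an example and `entry`/`cancellationToken` are assumed in scope:

    await entry.WriteToFileAsync(
        Path.Combine("out", entry.Key!),
        new ExtractionOptions { Overwrite = true },
        cancellationToken
    );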
@@ -3,8 +3,8 @@ using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.Readers;
|
||||
|
||||
namespace SharpCompress.Archives;
|
||||
|
||||
@@ -19,10 +19,8 @@ public static class IArchiveExtensions
        ExtractionOptions? options = null
    )
    {
        foreach (var entry in archive.Entries.Where(x => !x.IsDirectory))
        {
            entry.WriteToDirectory(destinationDirectory, options);
        }
        using var reader = archive.ExtractAllEntries();
        reader.WriteAllToDirectory(destinationDirectory, options);
    }

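For reference, a hedged sketch of whole-archive extraction through this rewritten path; the archive and destination paths are examples, and ExtractFullPath/Overwrite are existing ExtractionOptions members:

    using var archive = ArchiveFactory.Open("release.7z");
    archive.WriteToDirectory(
        "output",
        new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
    );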
/// <summary>
|
||||
@@ -47,27 +45,37 @@ public static class IArchiveExtensions
|
||||
var seenDirectories = new HashSet<string>();
|
||||
|
||||
// Extract
|
||||
var entries = archive.ExtractAllEntries();
|
||||
while (entries.MoveToNextEntry())
|
||||
foreach (var entry in archive.Entries)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
|
||||
var entry = entries.Entry;
|
||||
if (entry.IsDirectory)
|
||||
{
|
||||
var dirPath = Path.Combine(destination, entry.Key.NotNull("Entry Key is null"));
|
||||
if (
|
||||
Path.GetDirectoryName(dirPath + "/") is { } emptyDirectory
|
||||
&& seenDirectories.Add(dirPath)
|
||||
)
|
||||
{
|
||||
Directory.CreateDirectory(emptyDirectory);
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
// Create each directory
|
||||
var path = Path.Combine(destination, entry.Key);
|
||||
if (Path.GetDirectoryName(path) is { } directory && seenDirectories.Add(path))
|
||||
// Create each directory if not already created
|
||||
var path = Path.Combine(destination, entry.Key.NotNull("Entry Key is null"));
|
||||
if (Path.GetDirectoryName(path) is { } directory)
|
||||
{
|
||||
Directory.CreateDirectory(directory);
|
||||
if (!Directory.Exists(directory) && !seenDirectories.Contains(directory))
|
||||
{
|
||||
Directory.CreateDirectory(directory);
|
||||
seenDirectories.Add(directory);
|
||||
}
|
||||
}
|
||||
|
||||
// Write file
|
||||
using var fs = File.OpenWrite(path);
|
||||
entries.WriteEntryTo(fs);
|
||||
entry.WriteTo(fs);
|
||||
|
||||
// Update progress
|
||||
bytesRead += entry.Size;
|
||||
|
||||
@@ -1,5 +1,7 @@
|
||||
using System;
|
||||
using System.IO;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Writers;
|
||||
|
||||
namespace SharpCompress.Archives;
|
||||
@@ -16,8 +18,16 @@ public interface IWritableArchive : IArchive
|
||||
DateTime? modified = null
|
||||
);
|
||||
|
||||
IArchiveEntry AddDirectoryEntry(string key, DateTime? modified = null);
|
||||
|
||||
void SaveTo(Stream stream, WriterOptions options);
|
||||
|
||||
Task SaveToAsync(
|
||||
Stream stream,
|
||||
WriterOptions options,
|
||||
CancellationToken cancellationToken = default
|
||||
);
|
||||
|
||||
/// <summary>
|
||||
/// Use this to pause entry rebuilding when adding large collections of entries. Dispose when complete. A using statement is recommended.
|
||||
/// </summary>
|
||||
|
||||
@@ -1,5 +1,7 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Writers;

namespace SharpCompress.Archives;
@@ -42,6 +44,24 @@ public static class IWritableArchiveExtensions
        writableArchive.SaveTo(stream, options);
    }

    public static Task SaveToAsync(
        this IWritableArchive writableArchive,
        string filePath,
        WriterOptions options,
        CancellationToken cancellationToken = default
    ) => writableArchive.SaveToAsync(new FileInfo(filePath), options, cancellationToken);

    public static async Task SaveToAsync(
        this IWritableArchive writableArchive,
        FileInfo fileInfo,
        WriterOptions options,
        CancellationToken cancellationToken = default
    )
    {
        using var stream = fileInfo.Open(FileMode.Create, FileAccess.Write);
        await writableArchive.SaveToAsync(stream, options, cancellationToken).ConfigureAwait(false);
    }

    public static void AddAllFromDirectory(
        this IWritableArchive writableArchive,
        string filePath,

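A hedged end-to-end sketch using the new SaveToAsync extension; it assumes the existing ZipArchive.Create and AddAllFromDirectory APIs, an async context, and example paths:

    using var archive = ZipArchive.Create();
    archive.AddAllFromDirectory(@"C:\data");
    await archive.SaveToAsync(
        "data.zip",
        new WriterOptions(CompressionType.Deflate),
        CancellationToken.None
    );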
@@ -1,4 +1,5 @@
|
||||
using System.Collections.Generic;
|
||||
using System.Collections.ObjectModel;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using SharpCompress.Common.Rar;
|
||||
@@ -13,7 +14,7 @@ namespace SharpCompress.Archives.Rar;
|
||||
/// </summary>
|
||||
internal class FileInfoRarArchiveVolume : RarVolume
|
||||
{
|
||||
internal FileInfoRarArchiveVolume(FileInfo fileInfo, ReaderOptions options, int index = 0)
|
||||
internal FileInfoRarArchiveVolume(FileInfo fileInfo, ReaderOptions options, int index)
|
||||
: base(StreamingMode.Seekable, fileInfo.OpenRead(), FixOptions(options), index)
|
||||
{
|
||||
FileInfo = fileInfo;
|
||||
|
||||
@@ -14,6 +14,7 @@ namespace SharpCompress.Archives.Rar;
|
||||
|
||||
public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
|
||||
{
|
||||
private bool _disposed;
|
||||
internal Lazy<IRarUnpack> UnpackV2017 { get; } =
|
||||
new(() => new Compressors.Rar.UnpackV2017.Unpack());
|
||||
internal Lazy<IRarUnpack> UnpackV1 { get; } = new(() => new Compressors.Rar.UnpackV1.Unpack());
|
||||
@@ -21,39 +22,61 @@ public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
|
||||
/// <summary>
|
||||
/// Constructor with a SourceStream able to handle FileInfo and Streams.
|
||||
/// </summary>
|
||||
/// <param name="srcStream"></param>
|
||||
/// <param name="options"></param>
|
||||
internal RarArchive(SourceStream srcStream)
|
||||
: base(ArchiveType.Rar, srcStream) { }
|
||||
/// <param name="sourceStream"></param>
|
||||
private RarArchive(SourceStream sourceStream)
|
||||
: base(ArchiveType.Rar, sourceStream) { }
|
||||
|
||||
public override void Dispose()
|
||||
{
|
||||
if (!_disposed)
|
||||
{
|
||||
if (UnpackV1.IsValueCreated && UnpackV1.Value is IDisposable unpackV1)
|
||||
{
|
||||
unpackV1.Dispose();
|
||||
}
|
||||
|
||||
_disposed = true;
|
||||
base.Dispose();
|
||||
}
|
||||
}
|
||||
|
||||
protected override IEnumerable<RarArchiveEntry> LoadEntries(IEnumerable<RarVolume> volumes) =>
|
||||
RarArchiveEntryFactory.GetEntries(this, volumes, ReaderOptions);
|
||||
|
||||
protected override IEnumerable<RarVolume> LoadVolumes(SourceStream srcStream)
|
||||
protected override IEnumerable<RarVolume> LoadVolumes(SourceStream sourceStream)
|
||||
{
|
||||
SrcStream.LoadAllParts(); //request all streams
|
||||
var streams = SrcStream.Streams.ToArray();
|
||||
var idx = 0;
|
||||
sourceStream.LoadAllParts(); //request all streams
|
||||
var streams = sourceStream.Streams.ToArray();
|
||||
var i = 0;
|
||||
if (streams.Length > 1 && IsRarFile(streams[1], ReaderOptions)) //test part 2 - true = multipart not split
|
||||
{
|
||||
SrcStream.IsVolumes = true;
|
||||
sourceStream.IsVolumes = true;
|
||||
streams[1].Position = 0;
|
||||
SrcStream.Position = 0;
|
||||
sourceStream.Position = 0;
|
||||
|
||||
return srcStream.Streams.Select(a => new StreamRarArchiveVolume(
|
||||
return sourceStream.Streams.Select(a => new StreamRarArchiveVolume(
|
||||
a,
|
||||
ReaderOptions,
|
||||
idx++
|
||||
i++
|
||||
));
|
||||
}
|
||||
else //split mode or single file
|
||||
{
|
||||
return new StreamRarArchiveVolume(SrcStream, ReaderOptions, idx++).AsEnumerable();
|
||||
}
|
||||
|
||||
//split mode or single file
|
||||
return new StreamRarArchiveVolume(sourceStream, ReaderOptions, i++).AsEnumerable();
|
||||
}
|
||||
|
||||
protected override IReader CreateReaderForSolidExtraction()
|
||||
{
|
||||
if (this.IsMultipartVolume())
|
||||
{
|
||||
var streams = Volumes.Select(volume =>
|
||||
{
|
||||
volume.Stream.Position = 0;
|
||||
return volume.Stream;
|
||||
});
|
||||
return RarReader.Open(streams, ReaderOptions);
|
||||
}
|
||||
|
||||
var stream = Volumes.First().Stream;
|
||||
stream.Position = 0;
|
||||
return RarReader.Open(stream, ReaderOptions);
|
||||
@@ -72,7 +95,7 @@ public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
|
||||
/// <param name="options"></param>
|
||||
public static RarArchive Open(string filePath, ReaderOptions? options = null)
|
||||
{
|
||||
filePath.CheckNotNullOrEmpty(nameof(filePath));
|
||||
filePath.NotNullOrEmpty(nameof(filePath));
|
||||
var fileInfo = new FileInfo(filePath);
|
||||
return new RarArchive(
|
||||
new SourceStream(
|
||||
@@ -90,7 +113,7 @@ public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
|
||||
/// <param name="options"></param>
|
||||
public static RarArchive Open(FileInfo fileInfo, ReaderOptions? options = null)
|
||||
{
|
||||
fileInfo.CheckNotNull(nameof(fileInfo));
|
||||
fileInfo.NotNull(nameof(fileInfo));
|
||||
return new RarArchive(
|
||||
new SourceStream(
|
||||
fileInfo,
|
||||
@@ -107,8 +130,14 @@ public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
|
||||
/// <param name="options"></param>
|
||||
public static RarArchive Open(Stream stream, ReaderOptions? options = null)
|
||||
{
|
||||
stream.CheckNotNull(nameof(stream));
|
||||
return new RarArchive(new SourceStream(stream, i => null, options ?? new ReaderOptions()));
|
||||
stream.NotNull(nameof(stream));
|
||||
|
||||
if (stream is not { CanSeek: true })
|
||||
{
|
||||
throw new ArgumentException("Stream must be seekable", nameof(stream));
|
||||
}
|
||||
|
||||
return new RarArchive(new SourceStream(stream, _ => null, options ?? new ReaderOptions()));
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
@@ -121,7 +150,7 @@ public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
|
||||
ReaderOptions? readerOptions = null
|
||||
)
|
||||
{
|
||||
fileInfos.CheckNotNull(nameof(fileInfos));
|
||||
fileInfos.NotNull(nameof(fileInfos));
|
||||
var files = fileInfos.ToArray();
|
||||
return new RarArchive(
|
||||
new SourceStream(
|
||||
@@ -139,7 +168,7 @@ public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
|
||||
/// <param name="readerOptions"></param>
|
||||
public static RarArchive Open(IEnumerable<Stream> streams, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
streams.CheckNotNull(nameof(streams));
|
||||
streams.NotNull(nameof(streams));
|
||||
var strms = streams.ToArray();
|
||||
return new RarArchive(
|
||||
new SourceStream(
|
||||
|
||||
@@ -2,6 +2,8 @@ using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.Common.Rar;
|
||||
using SharpCompress.Common.Rar.Headers;
|
||||
@@ -42,7 +44,7 @@ public class RarArchiveEntry : RarEntry, IArchiveEntry
|
||||
{
|
||||
CheckIncomplete();
|
||||
return BitConverter.ToUInt32(
|
||||
parts.Select(fp => fp.FileHeader).Single(fh => !fh.IsSplitAfter).FileCrc,
|
||||
parts.Select(fp => fp.FileHeader).Single(fh => !fh.IsSplitAfter).FileCrc.NotNull(),
|
||||
0
|
||||
);
|
||||
}
|
||||
@@ -68,20 +70,50 @@ public class RarArchiveEntry : RarEntry, IArchiveEntry
|
||||
|
||||
public Stream OpenEntryStream()
|
||||
{
|
||||
RarStream stream;
|
||||
if (IsRarV3)
|
||||
{
|
||||
return new RarStream(
|
||||
stream = new RarStream(
|
||||
archive.UnpackV1.Value,
|
||||
FileHeader,
|
||||
new MultiVolumeReadOnlyStream(Parts.Cast<RarFilePart>(), archive)
|
||||
);
|
||||
}
|
||||
else
|
||||
{
|
||||
stream = new RarStream(
|
||||
archive.UnpackV2017.Value,
|
||||
FileHeader,
|
||||
new MultiVolumeReadOnlyStream(Parts.Cast<RarFilePart>(), archive)
|
||||
);
|
||||
}
|
||||
|
||||
return new RarStream(
|
||||
archive.UnpackV2017.Value,
|
||||
FileHeader,
|
||||
new MultiVolumeReadOnlyStream(Parts.Cast<RarFilePart>(), archive)
|
||||
);
|
||||
stream.Initialize();
|
||||
return stream;
|
||||
}
|
||||
|
||||
public async Task<Stream> OpenEntryStreamAsync(CancellationToken cancellationToken = default)
|
||||
{
|
||||
RarStream stream;
|
||||
if (IsRarV3)
|
||||
{
|
||||
stream = new RarStream(
|
||||
archive.UnpackV1.Value,
|
||||
FileHeader,
|
||||
new MultiVolumeReadOnlyStream(Parts.Cast<RarFilePart>(), archive)
|
||||
);
|
||||
}
|
||||
else
|
||||
{
|
||||
stream = new RarStream(
|
||||
archive.UnpackV2017.Value,
|
||||
FileHeader,
|
||||
new MultiVolumeReadOnlyStream(Parts.Cast<RarFilePart>(), archive)
|
||||
);
|
||||
}
|
||||
|
||||
await stream.InitializeAsync(cancellationToken);
|
||||
return stream;
|
||||
}
|
||||
|
||||
public bool IsComplete
|
||||
|
||||
@@ -6,8 +6,8 @@ namespace SharpCompress.Archives.Rar;
|
||||
|
||||
internal class SeekableFilePart : RarFilePart
|
||||
{
|
||||
private readonly Stream stream;
|
||||
private readonly string? password;
|
||||
private readonly Stream _stream;
|
||||
private readonly string? _password;
|
||||
|
||||
internal SeekableFilePart(
|
||||
MarkHeader mh,
|
||||
@@ -18,27 +18,27 @@ internal class SeekableFilePart : RarFilePart
|
||||
)
|
||||
: base(mh, fh, index)
|
||||
{
|
||||
this.stream = stream;
|
||||
this.password = password;
|
||||
_stream = stream;
|
||||
_password = password;
|
||||
}
|
||||
|
||||
internal override Stream GetCompressedStream()
|
||||
{
|
||||
stream.Position = FileHeader.DataStartPosition;
|
||||
_stream.Position = FileHeader.DataStartPosition;
|
||||
|
||||
if (FileHeader.R4Salt != null)
|
||||
{
|
||||
var cryptKey = new CryptKey3(password!);
|
||||
return new RarCryptoWrapper(stream, FileHeader.R4Salt, cryptKey);
|
||||
var cryptKey = new CryptKey3(_password!);
|
||||
return new RarCryptoWrapper(_stream, FileHeader.R4Salt, cryptKey);
|
||||
}
|
||||
|
||||
if (FileHeader.Rar5CryptoInfo != null)
|
||||
{
|
||||
var cryptKey = new CryptKey5(password!, FileHeader.Rar5CryptoInfo);
|
||||
return new RarCryptoWrapper(stream, FileHeader.Rar5CryptoInfo.Salt, cryptKey);
|
||||
var cryptKey = new CryptKey5(_password!, FileHeader.Rar5CryptoInfo);
|
||||
return new RarCryptoWrapper(_stream, FileHeader.Rar5CryptoInfo.Salt, cryptKey);
|
||||
}
|
||||
|
||||
return stream;
|
||||
return _stream;
|
||||
}
|
||||
|
||||
internal override string FilePartName => "Unknown Stream - File Entry: " + FileHeader.FileName;
|
||||
|
||||
@@ -9,7 +9,7 @@ namespace SharpCompress.Archives.Rar;
|
||||
|
||||
internal class StreamRarArchiveVolume : RarVolume
|
||||
{
|
||||
internal StreamRarArchiveVolume(Stream stream, ReaderOptions options, int index = 0)
|
||||
internal StreamRarArchiveVolume(Stream stream, ReaderOptions options, int index)
|
||||
: base(StreamingMode.Seekable, stream, options, index) { }
|
||||
|
||||
internal override IEnumerable<RarFilePart> ReadFileParts() => GetVolumeFileParts();
|
||||
|
||||
@@ -1,5 +1,3 @@
|
||||
#nullable disable
|
||||
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
@@ -14,16 +12,16 @@ namespace SharpCompress.Archives.SevenZip;
|
||||
|
||||
public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVolume>
|
||||
{
|
||||
private ArchiveDatabase database;
|
||||
private ArchiveDatabase? _database;
|
||||
|
||||
/// <summary>
|
||||
/// Constructor expects a filepath to an existing file.
|
||||
/// </summary>
|
||||
/// <param name="filePath"></param>
|
||||
/// <param name="readerOptions"></param>
|
||||
public static SevenZipArchive Open(string filePath, ReaderOptions readerOptions = null)
|
||||
public static SevenZipArchive Open(string filePath, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
filePath.CheckNotNullOrEmpty("filePath");
|
||||
filePath.NotNullOrEmpty("filePath");
|
||||
return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
|
||||
}
|
||||
|
||||
@@ -32,9 +30,9 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
|
||||
/// </summary>
|
||||
/// <param name="fileInfo"></param>
|
||||
/// <param name="readerOptions"></param>
|
||||
public static SevenZipArchive Open(FileInfo fileInfo, ReaderOptions readerOptions = null)
|
||||
public static SevenZipArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
fileInfo.CheckNotNull("fileInfo");
|
||||
fileInfo.NotNull("fileInfo");
|
||||
return new SevenZipArchive(
|
||||
new SourceStream(
|
||||
fileInfo,
|
||||
@@ -51,10 +49,10 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
|
||||
/// <param name="readerOptions"></param>
|
||||
public static SevenZipArchive Open(
|
||||
IEnumerable<FileInfo> fileInfos,
|
||||
ReaderOptions readerOptions = null
|
||||
ReaderOptions? readerOptions = null
|
||||
)
|
||||
{
|
||||
fileInfos.CheckNotNull(nameof(fileInfos));
|
||||
fileInfos.NotNull(nameof(fileInfos));
|
||||
var files = fileInfos.ToArray();
|
||||
return new SevenZipArchive(
|
||||
new SourceStream(
|
||||
@@ -72,10 +70,10 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
|
||||
/// <param name="readerOptions"></param>
|
||||
public static SevenZipArchive Open(
|
||||
IEnumerable<Stream> streams,
|
||||
ReaderOptions readerOptions = null
|
||||
ReaderOptions? readerOptions = null
|
||||
)
|
||||
{
|
||||
streams.CheckNotNull(nameof(streams));
|
||||
streams.NotNull(nameof(streams));
|
||||
var strms = streams.ToArray();
|
||||
return new SevenZipArchive(
|
||||
new SourceStream(
|
||||
@@ -91,27 +89,31 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
|
||||
/// </summary>
|
||||
/// <param name="stream"></param>
|
||||
/// <param name="readerOptions"></param>
|
||||
public static SevenZipArchive Open(Stream stream, ReaderOptions readerOptions = null)
|
||||
public static SevenZipArchive Open(Stream stream, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
stream.CheckNotNull("stream");
|
||||
stream.NotNull("stream");
|
||||
|
||||
if (stream is not { CanSeek: true })
|
||||
{
|
||||
throw new ArgumentException("Stream must be seekable", nameof(stream));
|
||||
}
|
||||
|
||||
return new SevenZipArchive(
|
||||
new SourceStream(stream, i => null, readerOptions ?? new ReaderOptions())
|
||||
new SourceStream(stream, _ => null, readerOptions ?? new ReaderOptions())
|
||||
);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Constructor with a SourceStream able to handle FileInfo and Streams.
|
||||
/// </summary>
|
||||
/// <param name="srcStream"></param>
|
||||
/// <param name="options"></param>
|
||||
internal SevenZipArchive(SourceStream srcStream)
|
||||
: base(ArchiveType.SevenZip, srcStream) { }
|
||||
/// <param name="sourceStream"></param>
|
||||
private SevenZipArchive(SourceStream sourceStream)
|
||||
: base(ArchiveType.SevenZip, sourceStream) { }
|
||||
|
||||
protected override IEnumerable<SevenZipVolume> LoadVolumes(SourceStream srcStream)
|
||||
protected override IEnumerable<SevenZipVolume> LoadVolumes(SourceStream sourceStream)
|
||||
{
|
||||
SrcStream.LoadAllParts(); //request all streams
|
||||
var idx = 0;
|
||||
return new SevenZipVolume(srcStream, ReaderOptions, idx++).AsEnumerable(); //simple single volume or split, multivolume not supported
|
||||
sourceStream.NotNull("SourceStream is null").LoadAllParts(); //request all streams
|
||||
return new SevenZipVolume(sourceStream, ReaderOptions, 0).AsEnumerable(); //simple single volume or split, multivolume not supported
|
||||
}
|
||||
|
||||
public static bool IsSevenZipFile(string filePath) => IsSevenZipFile(new FileInfo(filePath));
|
||||
@@ -135,13 +137,17 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
|
||||
{
|
||||
var stream = volumes.Single().Stream;
|
||||
LoadFactory(stream);
|
||||
var entries = new SevenZipArchiveEntry[database._files.Count];
|
||||
for (var i = 0; i < database._files.Count; i++)
|
||||
if (_database is null)
|
||||
{
|
||||
var file = database._files[i];
|
||||
return Enumerable.Empty<SevenZipArchiveEntry>();
|
||||
}
|
||||
var entries = new SevenZipArchiveEntry[_database._files.Count];
|
||||
for (var i = 0; i < _database._files.Count; i++)
|
||||
{
|
||||
var file = _database._files[i];
|
||||
entries[i] = new SevenZipArchiveEntry(
|
||||
this,
|
||||
new SevenZipFilePart(stream, database, i, file, ReaderOptions.ArchiveEncoding)
|
||||
new SevenZipFilePart(stream, _database, i, file, ReaderOptions.ArchiveEncoding)
|
||||
);
|
||||
}
|
||||
foreach (var group in entries.Where(x => !x.IsDirectory).GroupBy(x => x.FilePart.Folder))
|
||||
@@ -159,12 +165,12 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
|
||||
|
||||
private void LoadFactory(Stream stream)
|
||||
{
|
||||
if (database is null)
|
||||
if (_database is null)
|
||||
{
|
||||
stream.Position = 0;
|
||||
var reader = new ArchiveReader();
|
||||
reader.Open(stream);
|
||||
database = reader.ReadDatabase(new PasswordProvider(ReaderOptions.Password));
|
||||
reader.Open(stream, lookForHeader: ReaderOptions.LookForHeader);
|
||||
_database = reader.ReadDatabase(new PasswordProvider(ReaderOptions.Password));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -180,46 +186,43 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
|
||||
}
|
||||
}
|
||||
|
||||
private static ReadOnlySpan<byte> SIGNATURE =>
|
||||
private static ReadOnlySpan<byte> Signature =>
|
||||
new byte[] { (byte)'7', (byte)'z', 0xBC, 0xAF, 0x27, 0x1C };
|
||||
|
||||
private static bool SignatureMatch(Stream stream)
|
||||
{
|
||||
var reader = new BinaryReader(stream);
|
||||
ReadOnlySpan<byte> signatureBytes = reader.ReadBytes(6);
|
||||
return signatureBytes.SequenceEqual(SIGNATURE);
|
||||
return signatureBytes.SequenceEqual(Signature);
|
||||
}
|
||||
|
||||
protected override IReader CreateReaderForSolidExtraction() =>
|
||||
new SevenZipReader(ReaderOptions, this);
|
||||
|
||||
public override bool IsSolid =>
|
||||
Entries.Where(x => !x.IsDirectory).GroupBy(x => x.FilePart.Folder).Count() > 1;
|
||||
Entries
|
||||
.Where(x => !x.IsDirectory)
|
||||
.GroupBy(x => x.FilePart.Folder)
|
||||
.Any(folder => folder.Count() > 1);
|
||||
|
||||
public override long TotalSize
|
||||
{
|
||||
get
|
||||
{
|
||||
var i = Entries.Count;
|
||||
return database._packSizes.Aggregate(0L, (total, packSize) => total + packSize);
|
||||
}
|
||||
}
|
||||
public override long TotalSize =>
|
||||
_database?._packSizes.Aggregate(0L, (total, packSize) => total + packSize) ?? 0;
|
||||
|
||||
private sealed class SevenZipReader : AbstractReader<SevenZipEntry, SevenZipVolume>
|
||||
{
|
||||
private readonly SevenZipArchive archive;
|
||||
private CFolder currentFolder;
|
||||
private Stream currentStream;
|
||||
private CFileItem currentItem;
|
||||
private readonly SevenZipArchive _archive;
|
||||
private CFolder? _currentFolder;
|
||||
private Stream? _currentStream;
|
||||
private CFileItem? _currentItem;
|
||||
|
||||
internal SevenZipReader(ReaderOptions readerOptions, SevenZipArchive archive)
|
||||
: base(readerOptions, ArchiveType.SevenZip) => this.archive = archive;
|
||||
: base(readerOptions, ArchiveType.SevenZip) => this._archive = archive;
|
||||
|
||||
public override SevenZipVolume Volume => archive.Volumes.Single();
|
||||
public override SevenZipVolume Volume => _archive.Volumes.Single();
|
||||
|
||||
protected override IEnumerable<SevenZipEntry> GetEntries(Stream stream)
|
||||
{
|
||||
var entries = archive.Entries.ToList();
|
||||
var entries = _archive.Entries.ToList();
|
||||
stream.Position = 0;
|
||||
foreach (var dir in entries.Where(x => x.IsDirectory))
|
||||
{
|
||||
@@ -229,37 +232,42 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
|
||||
var group in entries.Where(x => !x.IsDirectory).GroupBy(x => x.FilePart.Folder)
|
||||
)
|
||||
{
|
||||
currentFolder = group.Key;
|
||||
_currentFolder = group.Key;
|
||||
if (group.Key is null)
|
||||
{
|
||||
currentStream = Stream.Null;
|
||||
_currentStream = Stream.Null;
|
||||
}
|
||||
else
|
||||
{
|
||||
currentStream = archive.database.GetFolderStream(
|
||||
_currentStream = _archive._database?.GetFolderStream(
|
||||
stream,
|
||||
currentFolder,
|
||||
_currentFolder,
|
||||
new PasswordProvider(Options.Password)
|
||||
);
|
||||
}
|
||||
foreach (var entry in group)
|
||||
{
|
||||
currentItem = entry.FilePart.Header;
|
||||
_currentItem = entry.FilePart.Header;
|
||||
yield return entry;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
protected override EntryStream GetEntryStream() =>
|
||||
CreateEntryStream(new ReadOnlySubStream(currentStream, currentItem.Size));
|
||||
CreateEntryStream(
|
||||
new ReadOnlySubStream(
|
||||
_currentStream.NotNull("currentStream is not null"),
|
||||
_currentItem?.Size ?? 0
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
private class PasswordProvider : IPasswordProvider
|
||||
{
|
||||
private readonly string _password;
|
||||
private readonly string? _password;
|
||||
|
||||
public PasswordProvider(string password) => _password = password;
|
||||
public PasswordProvider(string? password) => _password = password;
|
||||
|
||||
public string CryptoGetTextPassword() => _password;
|
||||
public string? CryptoGetTextPassword() => _password;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,4 +1,6 @@
|
||||
using System.IO;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common.SevenZip;
|
||||
|
||||
namespace SharpCompress.Archives.SevenZip;
|
||||
@@ -10,6 +12,9 @@ public class SevenZipArchiveEntry : SevenZipEntry, IArchiveEntry
|
||||
|
||||
public Stream OpenEntryStream() => FilePart.GetCompressedStream();
|
||||
|
||||
public Task<Stream> OpenEntryStreamAsync(CancellationToken cancellationToken = default) =>
|
||||
Task.FromResult(OpenEntryStream());
|
||||
|
||||
public IArchive Archive { get; }
|
||||
|
||||
public bool IsComplete => true;
|
||||
|
||||
@@ -2,6 +2,8 @@ using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.Common.Tar;
|
||||
using SharpCompress.Common.Tar.Headers;
|
||||
@@ -22,7 +24,7 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
|
||||
/// <param name="readerOptions"></param>
|
||||
public static TarArchive Open(string filePath, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
filePath.CheckNotNullOrEmpty(nameof(filePath));
|
||||
filePath.NotNullOrEmpty(nameof(filePath));
|
||||
return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
|
||||
}
|
||||
|
||||
@@ -33,7 +35,7 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
|
||||
/// <param name="readerOptions"></param>
|
||||
public static TarArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
fileInfo.CheckNotNull(nameof(fileInfo));
|
||||
fileInfo.NotNull(nameof(fileInfo));
|
||||
return new TarArchive(
|
||||
new SourceStream(
|
||||
fileInfo,
|
||||
@@ -53,7 +55,7 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
|
||||
ReaderOptions? readerOptions = null
|
||||
)
|
||||
{
|
||||
fileInfos.CheckNotNull(nameof(fileInfos));
|
||||
fileInfos.NotNull(nameof(fileInfos));
|
||||
var files = fileInfos.ToArray();
|
||||
return new TarArchive(
|
||||
new SourceStream(
|
||||
@@ -71,7 +73,7 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
|
||||
/// <param name="readerOptions"></param>
|
||||
public static TarArchive Open(IEnumerable<Stream> streams, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
streams.CheckNotNull(nameof(streams));
|
||||
streams.NotNull(nameof(streams));
|
||||
var strms = streams.ToArray();
|
||||
return new TarArchive(
|
||||
new SourceStream(
|
||||
@@ -89,7 +91,13 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
|
||||
/// <param name="readerOptions"></param>
|
||||
public static TarArchive Open(Stream stream, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
stream.CheckNotNull(nameof(stream));
|
||||
stream.NotNull(nameof(stream));
|
||||
|
||||
if (stream is not { CanSeek: true })
|
||||
{
|
||||
throw new ArgumentException("Stream must be seekable", nameof(stream));
|
||||
}
|
||||
|
||||
return new TarArchive(
|
||||
new SourceStream(stream, i => null, readerOptions ?? new ReaderOptions())
|
||||
);
|
||||
@@ -114,7 +122,7 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
|
||||
var tarHeader = new TarHeader(new ArchiveEncoding());
|
||||
var readSucceeded = tarHeader.Read(new BinaryReader(stream));
|
||||
var isEmptyArchive =
|
||||
tarHeader.Name.Length == 0
|
||||
tarHeader.Name?.Length == 0
|
||||
&& tarHeader.Size == 0
|
||||
&& Enum.IsDefined(typeof(EntryType), tarHeader.EntryType);
|
||||
return readSucceeded || isEmptyArchive;
|
||||
@@ -123,22 +131,20 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
|
||||
return false;
|
||||
}
|
||||
|
||||
protected override IEnumerable<TarVolume> LoadVolumes(SourceStream srcStream)
|
||||
protected override IEnumerable<TarVolume> LoadVolumes(SourceStream sourceStream)
|
||||
{
|
||||
SrcStream.LoadAllParts(); //request all streams
|
||||
var idx = 0;
|
||||
return new TarVolume(srcStream, ReaderOptions, idx++).AsEnumerable(); //simple single volume or split, multivolume not supported
|
||||
sourceStream.NotNull("SourceStream is null").LoadAllParts(); //request all streams
|
||||
return new TarVolume(sourceStream, ReaderOptions, 1).AsEnumerable(); //simple single volume or split, multivolume not supported
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Constructor with a SourceStream able to handle FileInfo and Streams.
|
||||
/// </summary>
|
||||
/// <param name="srcStream"></param>
|
||||
/// <param name="options"></param>
|
||||
internal TarArchive(SourceStream srcStream)
|
||||
: base(ArchiveType.Tar, srcStream) { }
|
||||
/// <param name="sourceStream"></param>
|
||||
private TarArchive(SourceStream sourceStream)
|
||||
: base(ArchiveType.Tar, sourceStream) { }
|
||||
|
||||
internal TarArchive()
|
||||
private TarArchive()
|
||||
: base(ArchiveType.Tar) { }
|
||||
|
||||
protected override IEnumerable<TarArchiveEntry> LoadEntries(IEnumerable<TarVolume> volumes)
|
||||
@@ -174,7 +180,7 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
|
||||
using (var entryStream = entry.OpenEntryStream())
|
||||
{
|
||||
using var memoryStream = new MemoryStream();
|
||||
entryStream.TransferTo(memoryStream);
|
||||
entryStream.CopyTo(memoryStream);
|
||||
memoryStream.Position = 0;
|
||||
var bytes = memoryStream.ToArray();
|
||||
|
||||
@@ -192,6 +198,10 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
|
||||
);
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
throw new IncompleteArchiveException("Failed to read TAR header");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -214,6 +224,11 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
|
||||
closeStream
|
||||
);
|
||||
|
||||
protected override TarArchiveEntry CreateDirectoryEntry(
|
||||
string directoryPath,
|
||||
DateTime? modified
|
||||
) => new TarWritableArchiveEntry(this, directoryPath, modified);
|
||||
|
||||
protected override void SaveTo(
|
||||
Stream stream,
|
||||
WriterOptions options,
|
||||
@@ -222,10 +237,62 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
|
||||
)
|
||||
{
|
||||
using var writer = new TarWriter(stream, new TarWriterOptions(options));
|
||||
foreach (var entry in oldEntries.Concat(newEntries).Where(x => !x.IsDirectory))
|
||||
foreach (var entry in oldEntries.Concat(newEntries))
|
||||
{
|
||||
using var entryStream = entry.OpenEntryStream();
|
||||
writer.Write(entry.Key, entryStream, entry.LastModifiedTime, entry.Size);
|
||||
if (entry.IsDirectory)
|
||||
{
|
||||
writer.WriteDirectory(
|
||||
entry.Key.NotNull("Entry Key is null"),
|
||||
entry.LastModifiedTime
|
||||
);
|
||||
}
|
||||
else
|
||||
{
|
||||
using var entryStream = entry.OpenEntryStream();
|
||||
writer.Write(
|
||||
entry.Key.NotNull("Entry Key is null"),
|
||||
entryStream,
|
||||
entry.LastModifiedTime,
|
||||
entry.Size
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
protected override async Task SaveToAsync(
|
||||
Stream stream,
|
||||
WriterOptions options,
|
||||
IEnumerable<TarArchiveEntry> oldEntries,
|
||||
IEnumerable<TarArchiveEntry> newEntries,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
using var writer = new TarWriter(stream, new TarWriterOptions(options));
|
||||
foreach (var entry in oldEntries.Concat(newEntries))
|
||||
{
|
||||
if (entry.IsDirectory)
|
||||
{
|
||||
await writer
|
||||
.WriteDirectoryAsync(
|
||||
entry.Key.NotNull("Entry Key is null"),
|
||||
entry.LastModifiedTime,
|
||||
cancellationToken
|
||||
)
|
||||
.ConfigureAwait(false);
|
||||
}
|
||||
else
|
||||
{
|
||||
using var entryStream = entry.OpenEntryStream();
|
||||
await writer
|
||||
.WriteAsync(
|
||||
entry.Key.NotNull("Entry Key is null"),
|
||||
entryStream,
|
||||
entry.LastModifiedTime,
|
||||
entry.Size,
|
||||
cancellationToken
|
||||
)
|
||||
.ConfigureAwait(false);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
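Since the TAR writer now persists directory entries, a brief sketch of the combined flow; it assumes the existing TarArchive.Create API and the new AddDirectoryEntry shown earlier, with example names:

    using var archive = TarArchive.Create();
    archive.AddDirectoryEntry("logs/");
    archive.AddEntry("logs/app.log", File.OpenRead("app.log"), closeStream: true);
    await archive.SaveToAsync("logs.tar", new WriterOptions(CompressionType.None), CancellationToken.None);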
@@ -1,5 +1,7 @@
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.Common.Tar;
|
||||
|
||||
@@ -7,10 +9,14 @@ namespace SharpCompress.Archives.Tar;
|
||||
|
||||
public class TarArchiveEntry : TarEntry, IArchiveEntry
|
||||
{
|
||||
internal TarArchiveEntry(TarArchive archive, TarFilePart part, CompressionType compressionType)
|
||||
internal TarArchiveEntry(TarArchive archive, TarFilePart? part, CompressionType compressionType)
|
||||
: base(part, compressionType) => Archive = archive;
|
||||
|
||||
public virtual Stream OpenEntryStream() => Parts.Single().GetCompressedStream();
|
||||
public virtual Stream OpenEntryStream() => Parts.Single().GetCompressedStream().NotNull();
|
||||
|
||||
public virtual Task<Stream> OpenEntryStreamAsync(
|
||||
CancellationToken cancellationToken = default
|
||||
) => Task.FromResult(OpenEntryStream());
|
||||
|
||||
#region IArchiveEntry Members
|
||||
|
||||
|
||||
@@ -1,5 +1,3 @@
|
||||
#nullable disable
|
||||
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
@@ -11,7 +9,8 @@ namespace SharpCompress.Archives.Tar;
|
||||
internal sealed class TarWritableArchiveEntry : TarArchiveEntry, IWritableArchiveEntry
|
||||
{
|
||||
private readonly bool closeStream;
|
||||
private readonly Stream stream;
|
||||
private readonly Stream? stream;
|
||||
private readonly bool isDirectory;
|
||||
|
||||
internal TarWritableArchiveEntry(
|
||||
TarArchive archive,
|
||||
@@ -29,6 +28,22 @@ internal sealed class TarWritableArchiveEntry : TarArchiveEntry, IWritableArchiv
|
||||
Size = size;
|
||||
LastModifiedTime = lastModified;
|
||||
this.closeStream = closeStream;
|
||||
isDirectory = false;
|
||||
}
|
||||
|
||||
internal TarWritableArchiveEntry(
|
||||
TarArchive archive,
|
||||
string directoryPath,
|
||||
DateTime? lastModified
|
||||
)
|
||||
: base(archive, null, CompressionType.None)
|
||||
{
|
||||
stream = null;
|
||||
Key = directoryPath;
|
||||
Size = 0;
|
||||
LastModifiedTime = lastModified;
|
||||
closeStream = false;
|
||||
isDirectory = true;
|
||||
}
|
||||
|
||||
public override long Crc => 0;
|
||||
@@ -49,23 +64,27 @@ internal sealed class TarWritableArchiveEntry : TarArchiveEntry, IWritableArchiv
|
||||
|
||||
public override bool IsEncrypted => false;
|
||||
|
||||
public override bool IsDirectory => false;
|
||||
public override bool IsDirectory => isDirectory;
|
||||
|
||||
public override bool IsSplitAfter => false;
|
||||
|
||||
internal override IEnumerable<FilePart> Parts => throw new NotImplementedException();
|
||||
Stream IWritableArchiveEntry.Stream => stream;
|
||||
Stream IWritableArchiveEntry.Stream => stream ?? Stream.Null;
|
||||
|
||||
public override Stream OpenEntryStream()
|
||||
{
|
||||
if (stream is null)
|
||||
{
|
||||
return Stream.Null;
|
||||
}
|
||||
//ensure new stream is at the start, this could be reset
|
||||
stream.Seek(0, SeekOrigin.Begin);
|
||||
return NonDisposingStream.Create(stream);
|
||||
return SharpCompressStream.Create(stream, leaveOpen: true);
|
||||
}
|
||||
|
||||
internal override void Close()
|
||||
{
|
||||
if (closeStream)
|
||||
if (closeStream && stream is not null)
|
||||
{
|
||||
stream.Dispose();
|
||||
}
|
||||
|
||||
@@ -2,6 +2,8 @@ using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.Common.Zip;
|
||||
using SharpCompress.Common.Zip.Headers;
|
||||
@@ -16,10 +18,7 @@ namespace SharpCompress.Archives.Zip;
|
||||
|
||||
public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
|
||||
{
|
||||
#nullable disable
|
||||
private readonly SeekableZipHeaderFactory headerFactory;
|
||||
|
||||
#nullable enable
|
||||
private readonly SeekableZipHeaderFactory? headerFactory;
|
||||
|
||||
/// <summary>
|
||||
/// Gets or sets the compression level applied to files added to the archive,
|
||||
@@ -30,13 +29,13 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
|
||||
/// <summary>
|
||||
/// Constructor with a SourceStream able to handle FileInfo and Streams.
|
||||
/// </summary>
|
||||
/// <param name="srcStream"></param>
|
||||
/// <param name="sourceStream"></param>
|
||||
/// <param name="options"></param>
|
||||
internal ZipArchive(SourceStream srcStream)
|
||||
: base(ArchiveType.Zip, srcStream) =>
|
||||
internal ZipArchive(SourceStream sourceStream)
|
||||
: base(ArchiveType.Zip, sourceStream) =>
|
||||
headerFactory = new SeekableZipHeaderFactory(
|
||||
srcStream.ReaderOptions.Password,
|
||||
srcStream.ReaderOptions.ArchiveEncoding
|
||||
sourceStream.ReaderOptions.Password,
|
||||
sourceStream.ReaderOptions.ArchiveEncoding
|
||||
);
|
||||
|
||||
/// <summary>
|
||||
@@ -46,7 +45,7 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
|
||||
/// <param name="readerOptions"></param>
|
||||
public static ZipArchive Open(string filePath, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
filePath.CheckNotNullOrEmpty(nameof(filePath));
|
||||
filePath.NotNullOrEmpty(nameof(filePath));
|
||||
return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
|
||||
}
|
||||
|
||||
@@ -57,7 +56,7 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
|
||||
/// <param name="readerOptions"></param>
|
||||
public static ZipArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
fileInfo.CheckNotNull(nameof(fileInfo));
|
||||
fileInfo.NotNull(nameof(fileInfo));
|
||||
return new ZipArchive(
|
||||
new SourceStream(
|
||||
fileInfo,
|
||||
@@ -77,7 +76,7 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
|
||||
ReaderOptions? readerOptions = null
|
||||
)
|
||||
{
|
||||
fileInfos.CheckNotNull(nameof(fileInfos));
|
||||
fileInfos.NotNull(nameof(fileInfos));
|
||||
var files = fileInfos.ToArray();
|
||||
return new ZipArchive(
|
||||
new SourceStream(
|
||||
@@ -95,7 +94,7 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
|
||||
/// <param name="readerOptions"></param>
|
||||
public static ZipArchive Open(IEnumerable<Stream> streams, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
streams.CheckNotNull(nameof(streams));
|
||||
streams.NotNull(nameof(streams));
|
||||
var strms = streams.ToArray();
|
||||
return new ZipArchive(
|
||||
new SourceStream(
|
||||
@@ -113,30 +112,52 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
|
||||
/// <param name="readerOptions"></param>
|
||||
public static ZipArchive Open(Stream stream, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
stream.CheckNotNull(nameof(stream));
|
||||
stream.NotNull(nameof(stream));
|
||||
|
||||
if (stream is not { CanSeek: true })
|
||||
{
|
||||
throw new ArgumentException("Stream must be seekable", nameof(stream));
|
||||
}
|
||||
|
||||
return new ZipArchive(
|
||||
new SourceStream(stream, i => null, readerOptions ?? new ReaderOptions())
|
||||
);
|
||||
}
|
||||
|
||||
public static bool IsZipFile(string filePath, string? password = null) =>
|
||||
IsZipFile(new FileInfo(filePath), password);
|
||||
public static bool IsZipFile(
|
||||
string filePath,
|
||||
string? password = null,
|
||||
int bufferSize = ReaderOptions.DefaultBufferSize
|
||||
) => IsZipFile(new FileInfo(filePath), password, bufferSize);
|
||||
|
||||
public static bool IsZipFile(FileInfo fileInfo, string? password = null)
|
||||
public static bool IsZipFile(
|
||||
FileInfo fileInfo,
|
||||
string? password = null,
|
||||
int bufferSize = ReaderOptions.DefaultBufferSize
|
||||
)
|
||||
{
|
||||
if (!fileInfo.Exists)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
using Stream stream = fileInfo.OpenRead();
|
||||
return IsZipFile(stream, password);
|
||||
return IsZipFile(stream, password, bufferSize);
|
||||
}
|
||||
|
||||
public static bool IsZipFile(Stream stream, string? password = null)
|
||||
public static bool IsZipFile(
|
||||
Stream stream,
|
||||
string? password = null,
|
||||
int bufferSize = ReaderOptions.DefaultBufferSize
|
||||
)
|
||||
{
|
||||
var headerFactory = new StreamingZipHeaderFactory(password, new ArchiveEncoding(), null);
|
||||
try
|
||||
{
|
||||
if (stream is not SharpCompressStream)
|
||||
{
|
||||
stream = new SharpCompressStream(stream, bufferSize: bufferSize);
|
||||
}
|
||||
|
||||
var header = headerFactory
|
||||
.ReadStreamHeader(stream)
|
||||
.FirstOrDefault(x => x.ZipHeaderType != ZipHeaderType.Split);
|
||||
@@ -156,11 +177,20 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
|
||||
}
|
||||
}
|
||||
|
||||
public static bool IsZipMulti(Stream stream, string? password = null)
|
||||
public static bool IsZipMulti(
|
||||
Stream stream,
|
||||
string? password = null,
|
||||
int bufferSize = ReaderOptions.DefaultBufferSize
|
||||
)
|
||||
{
|
||||
var headerFactory = new StreamingZipHeaderFactory(password, new ArchiveEncoding(), null);
|
||||
try
|
||||
{
|
||||
if (stream is not SharpCompressStream)
|
||||
{
|
||||
stream = new SharpCompressStream(stream, bufferSize: bufferSize);
|
||||
}
|
||||
|
||||
var header = headerFactory
|
||||
.ReadStreamHeader(stream)
|
||||
.FirstOrDefault(x => x.ZipHeaderType != ZipHeaderType.Split);
|
||||
@@ -189,21 +219,21 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
|
||||
}
|
||||
}
|
||||
|
||||
protected override IEnumerable<ZipVolume> LoadVolumes(SourceStream srcStream)
|
||||
protected override IEnumerable<ZipVolume> LoadVolumes(SourceStream stream)
|
||||
{
|
||||
SrcStream.LoadAllParts(); //request all streams
|
||||
SrcStream.Position = 0;
|
||||
stream.LoadAllParts(); //request all streams
|
||||
stream.Position = 0;
|
||||
|
||||
var streams = SrcStream.Streams.ToList();
|
||||
var streams = stream.Streams.ToList();
|
||||
var idx = 0;
|
||||
if (streams.Count > 1) //test part 2 - true = multipart not split
|
||||
if (streams.Count() > 1) //test part 2 - true = multipart not split
|
||||
{
|
||||
streams[1].Position += 4; //skip the POST_DATA_DESCRIPTOR to prevent an exception
|
||||
var isZip = IsZipFile(streams[1], ReaderOptions.Password);
|
||||
var isZip = IsZipFile(streams[1], ReaderOptions.Password, ReaderOptions.BufferSize);
|
||||
streams[1].Position -= 4;
|
||||
if (isZip)
|
||||
{
|
||||
SrcStream.IsVolumes = true;
|
||||
stream.IsVolumes = true;
|
||||
|
||||
var tmp = streams[0]; //arcs as zip, z01 ... swap the zip the end
|
||||
streams.RemoveAt(0);
|
||||
@@ -215,7 +245,7 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
|
||||
}
|
||||
|
||||
//split mode or single file
|
||||
return new ZipVolume(SrcStream, ReaderOptions, idx++).AsEnumerable();
|
||||
return new ZipVolume(stream, ReaderOptions, idx++).AsEnumerable();
|
||||
}
|
||||
|
||||
internal ZipArchive()
|
||||
@@ -224,14 +254,13 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
|
||||
protected override IEnumerable<ZipArchiveEntry> LoadEntries(IEnumerable<ZipVolume> volumes)
|
||||
{
|
||||
var vols = volumes.ToArray();
|
||||
foreach (var h in headerFactory.ReadSeekableHeader(vols.Last().Stream))
|
||||
foreach (var h in headerFactory.NotNull().ReadSeekableHeader(vols.Last().Stream))
|
||||
{
|
||||
if (h != null)
|
||||
{
|
||||
switch (h.ZipHeaderType)
|
||||
{
|
||||
case ZipHeaderType.DirectoryEntry:
|
||||
|
||||
{
|
||||
var deh = (DirectoryEntryHeader)h;
|
||||
Stream s;
|
||||
@@ -254,14 +283,14 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
|
||||
|
||||
yield return new ZipArchiveEntry(
|
||||
this,
|
||||
new SeekableZipFilePart(headerFactory, deh, s)
|
||||
new SeekableZipFilePart(headerFactory.NotNull(), deh, s)
|
||||
);
|
||||
}
|
||||
break;
|
||||
case ZipHeaderType.DirectoryEnd:
|
||||
{
|
||||
var bytes = ((DirectoryEndHeader)h).Comment ?? Array.Empty<byte>();
|
||||
volumes.Last().Comment = ReaderOptions.ArchiveEncoding.Decode(bytes);
|
||||
vols.Last().Comment = ReaderOptions.ArchiveEncoding.Decode(bytes);
|
||||
yield break;
|
||||
}
|
||||
}
|
||||
@@ -279,10 +308,59 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
|
||||
)
|
||||
{
|
||||
using var writer = new ZipWriter(stream, new ZipWriterOptions(options));
|
||||
foreach (var entry in oldEntries.Concat(newEntries).Where(x => !x.IsDirectory))
|
||||
foreach (var entry in oldEntries.Concat(newEntries))
|
||||
{
|
||||
using var entryStream = entry.OpenEntryStream();
|
||||
writer.Write(entry.Key, entryStream, entry.LastModifiedTime);
|
||||
if (entry.IsDirectory)
|
||||
{
|
||||
writer.WriteDirectory(
|
||||
entry.Key.NotNull("Entry Key is null"),
|
||||
entry.LastModifiedTime
|
||||
);
|
||||
}
|
||||
else
|
||||
{
|
||||
using var entryStream = entry.OpenEntryStream();
|
||||
writer.Write(
|
||||
entry.Key.NotNull("Entry Key is null"),
|
||||
entryStream,
|
||||
entry.LastModifiedTime
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
protected override async Task SaveToAsync(
|
||||
Stream stream,
|
||||
WriterOptions options,
|
||||
IEnumerable<ZipArchiveEntry> oldEntries,
|
||||
IEnumerable<ZipArchiveEntry> newEntries,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
using var writer = new ZipWriter(stream, new ZipWriterOptions(options));
|
||||
foreach (var entry in oldEntries.Concat(newEntries))
|
||||
{
|
||||
if (entry.IsDirectory)
|
||||
{
|
||||
await writer
|
||||
.WriteDirectoryAsync(
|
||||
entry.Key.NotNull("Entry Key is null"),
|
||||
entry.LastModifiedTime,
|
||||
cancellationToken
|
||||
)
|
||||
.ConfigureAwait(false);
|
||||
}
|
||||
else
|
||||
{
|
||||
using var entryStream = entry.OpenEntryStream();
|
||||
await writer
|
||||
.WriteAsync(
|
||||
entry.Key.NotNull("Entry Key is null"),
|
||||
entryStream,
|
||||
cancellationToken
|
||||
)
|
||||
.ConfigureAwait(false);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -294,12 +372,17 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
|
||||
bool closeStream
|
||||
) => new ZipWritableArchiveEntry(this, source, filePath, size, modified, closeStream);
|
||||
|
||||
protected override ZipArchiveEntry CreateDirectoryEntry(
|
||||
string directoryPath,
|
||||
DateTime? modified
|
||||
) => new ZipWritableArchiveEntry(this, directoryPath, modified);
|
||||
|
||||
public static ZipArchive Create() => new();
|
||||
|
||||
protected override IReader CreateReaderForSolidExtraction()
|
||||
{
|
||||
var stream = Volumes.Single().Stream;
|
||||
stream.Position = 0;
|
||||
((IStreamStack)stream).StackSeek(0);
|
||||
return ZipReader.Open(stream, ReaderOptions, Entries);
|
||||
}
|
||||
}
|
||||
|
||||
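A hedged usage sketch of the updated ZipArchive entry points above; the file name and option values are illustrative, only the signatures (including the new bufferSize parameter) come from this diff.

using System;
using System.Linq;
using SharpCompress.Archives.Zip;
using SharpCompress.Readers;

if (ZipArchive.IsZipFile("backup.zip", password: null, bufferSize: ReaderOptions.DefaultBufferSize))
{
    using var archive = ZipArchive.Open("backup.zip", new ReaderOptions());
    foreach (var entry in archive.Entries.Where(e => !e.IsDirectory))
    {
        Console.WriteLine($"{entry.Key}: {entry.Size} bytes");
    }
}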
@@ -1,5 +1,7 @@
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Zip;

namespace SharpCompress.Archives.Zip;
@@ -9,7 +11,11 @@ public class ZipArchiveEntry : ZipEntry, IArchiveEntry
    internal ZipArchiveEntry(ZipArchive archive, SeekableZipFilePart? part)
        : base(part) => Archive = archive;

    public virtual Stream OpenEntryStream() => Parts.Single().GetCompressedStream();
    public virtual Stream OpenEntryStream() => Parts.Single().GetCompressedStream().NotNull();

    public virtual Task<Stream> OpenEntryStreamAsync(
        CancellationToken cancellationToken = default
    ) => Task.FromResult(OpenEntryStream());

    #region IArchiveEntry Members

@@ -18,6 +24,4 @@ public class ZipArchiveEntry : ZipEntry, IArchiveEntry
    public bool IsComplete => true;

    #endregion

    public string? Comment => ((SeekableZipFilePart)Parts.Single()).Comment;
}

@@ -9,7 +9,8 @@ namespace SharpCompress.Archives.Zip;
|
||||
internal class ZipWritableArchiveEntry : ZipArchiveEntry, IWritableArchiveEntry
|
||||
{
|
||||
private readonly bool closeStream;
|
||||
private readonly Stream stream;
|
||||
private readonly Stream? stream;
|
||||
private readonly bool isDirectory;
|
||||
private bool isDisposed;
|
||||
|
||||
internal ZipWritableArchiveEntry(
|
||||
@@ -27,6 +28,22 @@ internal class ZipWritableArchiveEntry : ZipArchiveEntry, IWritableArchiveEntry
|
||||
Size = size;
|
||||
LastModifiedTime = lastModified;
|
||||
this.closeStream = closeStream;
|
||||
isDirectory = false;
|
||||
}
|
||||
|
||||
internal ZipWritableArchiveEntry(
|
||||
ZipArchive archive,
|
||||
string directoryPath,
|
||||
DateTime? lastModified
|
||||
)
|
||||
: base(archive, null)
|
||||
{
|
||||
stream = null;
|
||||
Key = directoryPath;
|
||||
Size = 0;
|
||||
LastModifiedTime = lastModified;
|
||||
closeStream = false;
|
||||
isDirectory = true;
|
||||
}
|
||||
|
||||
public override long Crc => 0;
|
||||
@@ -47,24 +64,28 @@ internal class ZipWritableArchiveEntry : ZipArchiveEntry, IWritableArchiveEntry
|
||||
|
||||
public override bool IsEncrypted => false;
|
||||
|
||||
public override bool IsDirectory => false;
|
||||
public override bool IsDirectory => isDirectory;
|
||||
|
||||
public override bool IsSplitAfter => false;
|
||||
|
||||
internal override IEnumerable<FilePart> Parts => throw new NotImplementedException();
|
||||
|
||||
Stream IWritableArchiveEntry.Stream => stream;
|
||||
Stream IWritableArchiveEntry.Stream => stream ?? Stream.Null;
|
||||
|
||||
public override Stream OpenEntryStream()
|
||||
{
|
||||
if (stream is null)
|
||||
{
|
||||
return Stream.Null;
|
||||
}
|
||||
//ensure new stream is at the start, this could be reset
|
||||
stream.Seek(0, SeekOrigin.Begin);
|
||||
return NonDisposingStream.Create(stream);
|
||||
return SharpCompressStream.Create(stream, leaveOpen: true);
|
||||
}
|
||||
|
||||
internal override void Close()
|
||||
{
|
||||
if (closeStream && !isDisposed)
|
||||
if (closeStream && !isDisposed && stream is not null)
|
||||
{
|
||||
stream.Dispose();
|
||||
isDisposed = true;
|
||||
|
||||
@@ -1,3 +1,7 @@
using System;
using System.Runtime.CompilerServices;

[assembly: CLSCompliant(true)]
[assembly: InternalsVisibleTo(
    "SharpCompress.Test,PublicKey=0024000004800000940000000602000000240000525341310004000001000100158bebf1433f76dffc356733c138babea7a47536c65ed8009b16372c6f4edbb20554db74a62687f56b97c20a6ce8c4b123280279e33c894e7b3aa93ab3c573656fde4db576cfe07dba09619ead26375b25d2c4a8e43f7be257d712b0dd2eb546f67adb09281338618a58ac834fc038dd7e2740a7ab3591826252e4f4516306dc"
)]

@@ -1,33 +0,0 @@
using System.Buffers;

namespace SharpCompress;

internal static class BufferPool
{
    /// <summary>
    /// gets a buffer from the pool
    /// </summary>
    /// <param name="bufferSize">size of the buffer</param>
    /// <returns>the buffer</returns>
    public static byte[] Rent(int bufferSize)
    {
#if NETCOREAPP || NETSTANDARD2_1_OR_GREATER
        return ArrayPool<byte>.Shared.Rent(bufferSize);
#else
        return new byte[bufferSize];
#endif
    }

    /// <summary>
    /// returns a buffer to the pool
    /// </summary>
    /// <param name="buffer">the buffer to return</param>
    public static void Return(byte[] buffer)
    {
#if NETCOREAPP || NETSTANDARD2_1_OR_GREATER
        ArrayPool<byte>.Shared.Return(buffer);
#else
        // no-op
#endif
    }
}
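With the BufferPool helper deleted above, callers presumably rent from ArrayPool<byte> directly on modern targets; a minimal sketch of the standard BCL pattern ('source' is an assumed Stream, not code from this diff):

using System.Buffers;

var buffer = ArrayPool<byte>.Shared.Rent(81920);
try
{
    var read = source.Read(buffer, 0, buffer.Length);
    // work with buffer[0..read]
}
finally
{
    ArrayPool<byte>.Shared.Return(buffer);
}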
src/SharpCompress/Common/Arc/ArcEntry.cs (new file, +60)
@@ -0,0 +1,60 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common.GZip;
|
||||
using SharpCompress.Common.Tar;
|
||||
|
||||
namespace SharpCompress.Common.Arc
|
||||
{
|
||||
public class ArcEntry : Entry
|
||||
{
|
||||
private readonly ArcFilePart? _filePart;
|
||||
|
||||
internal ArcEntry(ArcFilePart? filePart)
|
||||
{
|
||||
_filePart = filePart;
|
||||
}
|
||||
|
||||
public override long Crc
|
||||
{
|
||||
get
|
||||
{
|
||||
if (_filePart == null)
|
||||
{
|
||||
return 0;
|
||||
}
|
||||
return _filePart.Header.Crc16;
|
||||
}
|
||||
}
|
||||
|
||||
public override string? Key => _filePart?.Header.Name;
|
||||
|
||||
public override string? LinkTarget => null;
|
||||
|
||||
public override long CompressedSize => _filePart?.Header.CompressedSize ?? 0;
|
||||
|
||||
public override CompressionType CompressionType =>
|
||||
_filePart?.Header.CompressionMethod ?? CompressionType.Unknown;
|
||||
|
||||
public override long Size => throw new NotImplementedException();
|
||||
|
||||
public override DateTime? LastModifiedTime => null;
|
||||
|
||||
public override DateTime? CreatedTime => null;
|
||||
|
||||
public override DateTime? LastAccessedTime => null;
|
||||
|
||||
public override DateTime? ArchivedTime => null;
|
||||
|
||||
public override bool IsEncrypted => false;
|
||||
|
||||
public override bool IsDirectory => false;
|
||||
|
||||
public override bool IsSplitAfter => false;
|
||||
|
||||
internal override IEnumerable<FilePart> Parts => _filePart.Empty();
|
||||
}
|
||||
}
|
||||
src/SharpCompress/Common/Arc/ArcEntryHeader.cs (new file, +76)
@@ -0,0 +1,76 @@
|
||||
using System;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
|
||||
namespace SharpCompress.Common.Arc
|
||||
{
|
||||
public class ArcEntryHeader
|
||||
{
|
||||
public ArchiveEncoding ArchiveEncoding { get; }
|
||||
public CompressionType CompressionMethod { get; private set; }
|
||||
public string? Name { get; private set; }
|
||||
public long CompressedSize { get; private set; }
|
||||
public DateTime DateTime { get; private set; }
|
||||
public int Crc16 { get; private set; }
|
||||
public long OriginalSize { get; private set; }
|
||||
public long DataStartPosition { get; private set; }
|
||||
|
||||
public ArcEntryHeader(ArchiveEncoding archiveEncoding)
|
||||
{
|
||||
this.ArchiveEncoding = archiveEncoding;
|
||||
}
|
||||
|
||||
public ArcEntryHeader? ReadHeader(Stream stream)
|
||||
{
|
||||
byte[] headerBytes = new byte[29];
|
||||
if (stream.Read(headerBytes, 0, headerBytes.Length) != headerBytes.Length)
|
||||
{
|
||||
return null;
|
||||
}
|
||||
DataStartPosition = stream.Position;
|
||||
return LoadFrom(headerBytes);
|
||||
}
|
||||
|
||||
public ArcEntryHeader LoadFrom(byte[] headerBytes)
|
||||
{
|
||||
CompressionMethod = GetCompressionType(headerBytes[1]);
|
||||
|
||||
// Read name
|
||||
int nameEnd = Array.IndexOf(headerBytes, (byte)0, 1); // Find null terminator
|
||||
Name = Encoding.UTF8.GetString(headerBytes, 2, nameEnd > 0 ? nameEnd - 2 : 12);
|
||||
|
||||
int offset = 15;
|
||||
CompressedSize = BitConverter.ToUInt32(headerBytes, offset);
|
||||
offset += 4;
|
||||
uint rawDateTime = BitConverter.ToUInt32(headerBytes, offset);
|
||||
DateTime = ConvertToDateTime(rawDateTime);
|
||||
offset += 4;
|
||||
Crc16 = BitConverter.ToUInt16(headerBytes, offset);
|
||||
offset += 2;
|
||||
OriginalSize = BitConverter.ToUInt32(headerBytes, offset);
|
||||
return this;
|
||||
}
|
||||
|
||||
private CompressionType GetCompressionType(byte value)
|
||||
{
|
||||
return value switch
|
||||
{
|
||||
1 or 2 => CompressionType.None,
|
||||
3 => CompressionType.RLE90,
|
||||
4 => CompressionType.Squeezed,
|
||||
5 or 6 or 7 or 8 => CompressionType.Crunched,
|
||||
9 => CompressionType.Squashed,
|
||||
10 => CompressionType.Crushed,
|
||||
11 => CompressionType.Distilled,
|
||||
_ => CompressionType.Unknown,
|
||||
};
|
||||
}
|
||||
|
||||
public static DateTime ConvertToDateTime(long rawDateTime)
|
||||
{
|
||||
// Convert Unix timestamp to DateTime (UTC)
|
||||
return DateTimeOffset.FromUnixTimeSeconds(rawDateTime).UtcDateTime;
|
||||
}
|
||||
}
|
||||
}
|
||||
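A hedged sketch of driving ArcEntryHeader.ReadHeader above; 'arcStream' is an assumed seekable Stream and the skip-to-the-next-entry arithmetic is only illustrative.

var header = new ArcEntryHeader(new ArchiveEncoding()).ReadHeader(arcStream);
if (header is not null)
{
    Console.WriteLine($"{header.Name}: {header.CompressedSize} -> {header.OriginalSize} bytes ({header.CompressionMethod})");
    arcStream.Position = header.DataStartPosition + header.CompressedSize; // start of the following header
}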
src/SharpCompress/Common/Arc/ArcFilePart.cs (new file, +75)
@@ -0,0 +1,75 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common.GZip;
|
||||
using SharpCompress.Common.Tar;
|
||||
using SharpCompress.Common.Tar.Headers;
|
||||
using SharpCompress.Common.Zip.Headers;
|
||||
using SharpCompress.Compressors.Lzw;
|
||||
using SharpCompress.Compressors.RLE90;
|
||||
using SharpCompress.Compressors.Squeezed;
|
||||
using SharpCompress.IO;
|
||||
|
||||
namespace SharpCompress.Common.Arc
|
||||
{
|
||||
public class ArcFilePart : FilePart
|
||||
{
|
||||
private readonly Stream? _stream;
|
||||
|
||||
internal ArcFilePart(ArcEntryHeader localArcHeader, Stream? seekableStream)
|
||||
: base(localArcHeader.ArchiveEncoding)
|
||||
{
|
||||
_stream = seekableStream;
|
||||
Header = localArcHeader;
|
||||
}
|
||||
|
||||
internal ArcEntryHeader Header { get; set; }
|
||||
|
||||
internal override string? FilePartName => Header.Name;
|
||||
|
||||
internal override Stream GetCompressedStream()
|
||||
{
|
||||
if (_stream != null)
|
||||
{
|
||||
Stream compressedStream;
|
||||
switch (Header.CompressionMethod)
|
||||
{
|
||||
case CompressionType.None:
|
||||
compressedStream = new ReadOnlySubStream(
|
||||
_stream,
|
||||
Header.DataStartPosition,
|
||||
Header.CompressedSize
|
||||
);
|
||||
break;
|
||||
case CompressionType.RLE90:
|
||||
compressedStream = new RunLength90Stream(
|
||||
_stream,
|
||||
(int)Header.CompressedSize
|
||||
);
|
||||
break;
|
||||
case CompressionType.Squeezed:
|
||||
compressedStream = new SqueezeStream(_stream, (int)Header.CompressedSize);
|
||||
break;
|
||||
case CompressionType.Crunched:
|
||||
compressedStream = new ArcLzwStream(
|
||||
_stream,
|
||||
(int)Header.CompressedSize,
|
||||
true
|
||||
);
|
||||
break;
|
||||
default:
|
||||
throw new NotSupportedException(
|
||||
"CompressionMethod: " + Header.CompressionMethod
|
||||
);
|
||||
}
|
||||
return compressedStream;
|
||||
}
|
||||
return _stream.NotNull();
|
||||
}
|
||||
|
||||
internal override Stream? GetRawStream() => _stream;
|
||||
}
|
||||
}
|
||||
src/SharpCompress/Common/Arc/ArcVolume.cs (new file, +16)
@@ -0,0 +1,16 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using SharpCompress.Readers;

namespace SharpCompress.Common.Arc
{
    public class ArcVolume : Volume
    {
        public ArcVolume(Stream stream, ReaderOptions readerOptions, int index = 0)
            : base(stream, readerOptions, index) { }
    }
}
@@ -8,12 +8,12 @@ public class ArchiveEncoding
    /// <summary>
    /// Default encoding to use when archive format doesn't specify one.
    /// </summary>
    public Encoding Default { get; set; }
    public Encoding? Default { get; set; }

    /// <summary>
    /// ArchiveEncoding used by encryption schemes which don't comply with RFC 2898.
    /// </summary>
    public Encoding Password { get; set; }
    public Encoding? Password { get; set; }

    /// <summary>
    /// Set this encoding when you want to force it for all encoding operations.
@@ -50,6 +50,8 @@ public class ArchiveEncoding

    public Encoding GetEncoding() => Forced ?? Default ?? Encoding.UTF8;

    public Encoding GetPasswordEncoding() => Password ?? Encoding.UTF8;

    public Func<byte[], int, int, string> GetDecoder() =>
        CustomDecoder ?? ((bytes, index, count) => GetEncoding().GetString(bytes, index, count));
}

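The lookup above resolves Forced ?? Default ?? UTF-8 (and Password ?? UTF-8 for passwords). A hedged example of supplying a legacy code page; registering CodePagesEncodingProvider is an assumption about the consuming application, not something this diff requires.

using System.Text;
using SharpCompress.Common;
using SharpCompress.Readers;

Encoding.RegisterProvider(CodePagesEncodingProvider.Instance); // System.Text.Encoding.CodePages package
var options = new ReaderOptions
{
    ArchiveEncoding = new ArchiveEncoding { Default = Encoding.GetEncoding(437) }, // DOS-era entry names
};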
@@ -1,9 +0,0 @@
using System;

namespace SharpCompress.Common;

public class ArchiveException : Exception
{
    public ArchiveException(string message)
        : base(message) { }
}
@@ -6,5 +6,7 @@ public enum ArchiveType
    Zip,
    Tar,
    SevenZip,
    GZip
    GZip,
    Arc,
    Arj,
}

src/SharpCompress/Common/Arj/ArjEntry.cs (new file, +58)
@@ -0,0 +1,58 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common.Arc;
|
||||
using SharpCompress.Common.Arj.Headers;
|
||||
|
||||
namespace SharpCompress.Common.Arj
|
||||
{
|
||||
public class ArjEntry : Entry
|
||||
{
|
||||
private readonly ArjFilePart _filePart;
|
||||
|
||||
internal ArjEntry(ArjFilePart filePart)
|
||||
{
|
||||
_filePart = filePart;
|
||||
}
|
||||
|
||||
public override long Crc => _filePart.Header.OriginalCrc32;
|
||||
|
||||
public override string? Key => _filePart?.Header.Name;
|
||||
|
||||
public override string? LinkTarget => null;
|
||||
|
||||
public override long CompressedSize => _filePart?.Header.CompressedSize ?? 0;
|
||||
|
||||
public override CompressionType CompressionType
|
||||
{
|
||||
get
|
||||
{
|
||||
if (_filePart.Header.CompressionMethod == CompressionMethod.Stored)
|
||||
{
|
||||
return CompressionType.None;
|
||||
}
|
||||
return CompressionType.ArjLZ77;
|
||||
}
|
||||
}
|
||||
|
||||
public override long Size => _filePart?.Header.OriginalSize ?? 0;
|
||||
|
||||
public override DateTime? LastModifiedTime => _filePart.Header.DateTimeModified.DateTime;
|
||||
|
||||
public override DateTime? CreatedTime => _filePart.Header.DateTimeCreated.DateTime;
|
||||
|
||||
public override DateTime? LastAccessedTime => _filePart.Header.DateTimeAccessed.DateTime;
|
||||
|
||||
public override DateTime? ArchivedTime => null;
|
||||
|
||||
public override bool IsEncrypted => false;
|
||||
|
||||
public override bool IsDirectory => _filePart.Header.FileType == FileType.Directory;
|
||||
|
||||
public override bool IsSplitAfter => false;
|
||||
|
||||
internal override IEnumerable<FilePart> Parts => _filePart.Empty();
|
||||
}
|
||||
}
|
||||
src/SharpCompress/Common/Arj/ArjFilePart.cs (new file, +72)
@@ -0,0 +1,72 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common.Arj.Headers;
|
||||
using SharpCompress.Compressors.Arj;
|
||||
using SharpCompress.IO;
|
||||
|
||||
namespace SharpCompress.Common.Arj
|
||||
{
|
||||
public class ArjFilePart : FilePart
|
||||
{
|
||||
private readonly Stream _stream;
|
||||
internal ArjLocalHeader Header { get; set; }
|
||||
|
||||
internal ArjFilePart(ArjLocalHeader localArjHeader, Stream seekableStream)
|
||||
: base(localArjHeader.ArchiveEncoding)
|
||||
{
|
||||
_stream = seekableStream;
|
||||
Header = localArjHeader;
|
||||
}
|
||||
|
||||
internal override string? FilePartName => Header.Name;
|
||||
|
||||
internal override Stream GetCompressedStream()
|
||||
{
|
||||
if (_stream != null)
|
||||
{
|
||||
Stream compressedStream;
|
||||
switch (Header.CompressionMethod)
|
||||
{
|
||||
case CompressionMethod.Stored:
|
||||
compressedStream = new ReadOnlySubStream(
|
||||
_stream,
|
||||
Header.DataStartPosition,
|
||||
Header.CompressedSize
|
||||
);
|
||||
break;
|
||||
case CompressionMethod.CompressedMost:
|
||||
case CompressionMethod.Compressed:
|
||||
case CompressionMethod.CompressedFaster:
|
||||
if (Header.CompressedSize > 128 * 1024)
|
||||
{
|
||||
throw new NotSupportedException(
|
||||
"CompressionMethod: "
|
||||
+ Header.CompressionMethod
|
||||
+ " with size > 128KB"
|
||||
);
|
||||
}
|
||||
compressedStream = new LhaStream<Lh7DecoderCfg>(
|
||||
_stream,
|
||||
(int)Header.OriginalSize
|
||||
);
|
||||
break;
|
||||
case CompressionMethod.CompressedFastest:
|
||||
compressedStream = new LHDecoderStream(_stream, (int)Header.OriginalSize);
|
||||
break;
|
||||
default:
|
||||
throw new NotSupportedException(
|
||||
"CompressionMethod: " + Header.CompressionMethod
|
||||
);
|
||||
}
|
||||
return compressedStream;
|
||||
}
|
||||
return _stream.NotNull();
|
||||
}
|
||||
|
||||
internal override Stream GetRawStream() => _stream;
|
||||
}
|
||||
}
|
||||
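For quick reference, the decoder selection in ArjFilePart.GetCompressedStream above maps as follows (summarised from the switch, nothing added):

// Stored                                         -> ReadOnlySubStream over DataStartPosition..+CompressedSize
// CompressedMost / Compressed / CompressedFaster -> LhaStream<Lh7DecoderCfg> (rejected above 128 KB compressed for now)
// CompressedFastest                              -> LHDecoderStream
// NoDataNoCrc / NoData / Unknown                 -> NotSupportedException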
src/SharpCompress/Common/Arj/ArjVolume.cs (new file, +36)
@@ -0,0 +1,36 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common.Rar;
|
||||
using SharpCompress.Common.Rar.Headers;
|
||||
using SharpCompress.Readers;
|
||||
|
||||
namespace SharpCompress.Common.Arj
|
||||
{
|
||||
public class ArjVolume : Volume
|
||||
{
|
||||
public ArjVolume(Stream stream, ReaderOptions readerOptions, int index = 0)
|
||||
: base(stream, readerOptions, index) { }
|
||||
|
||||
public override bool IsFirstVolume
|
||||
{
|
||||
get { return true; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// ArjArchive is part of a multi-part archive.
|
||||
/// </summary>
|
||||
public override bool IsMultiVolume
|
||||
{
|
||||
get { return false; }
|
||||
}
|
||||
|
||||
internal IEnumerable<ArjFilePart> GetVolumeFileParts()
|
||||
{
|
||||
return new List<ArjFilePart>();
|
||||
}
|
||||
}
|
||||
}
|
||||
src/SharpCompress/Common/Arj/Headers/ArjHeader.cs (new file, +142)
@@ -0,0 +1,142 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common.Zip.Headers;
|
||||
using SharpCompress.Crypto;
|
||||
|
||||
namespace SharpCompress.Common.Arj.Headers
|
||||
{
|
||||
public enum ArjHeaderType
|
||||
{
|
||||
MainHeader,
|
||||
LocalHeader,
|
||||
}
|
||||
|
||||
public abstract class ArjHeader
|
||||
{
|
||||
private const int FIRST_HDR_SIZE = 34;
|
||||
private const ushort ARJ_MAGIC = 0xEA60;
|
||||
|
||||
public ArjHeader(ArjHeaderType type)
|
||||
{
|
||||
ArjHeaderType = type;
|
||||
}
|
||||
|
||||
public ArjHeaderType ArjHeaderType { get; }
|
||||
public byte Flags { get; set; }
|
||||
public FileType FileType { get; set; }
|
||||
|
||||
public abstract ArjHeader? Read(Stream reader);
|
||||
|
||||
public byte[] ReadHeader(Stream stream)
|
||||
{
|
||||
// check for magic bytes
|
||||
Span<byte> magic = stackalloc byte[2];
|
||||
if (stream.Read(magic) != 2)
|
||||
{
|
||||
return Array.Empty<byte>();
|
||||
}
|
||||
|
||||
var magicValue = (ushort)(magic[0] | magic[1] << 8);
|
||||
if (magicValue != ARJ_MAGIC)
|
||||
{
|
||||
throw new InvalidDataException("Not an ARJ file (wrong magic bytes)");
|
||||
}
|
||||
|
||||
// read header_size
|
||||
byte[] headerBytes = new byte[2];
|
||||
stream.Read(headerBytes, 0, 2);
|
||||
var headerSize = (ushort)(headerBytes[0] | headerBytes[1] << 8);
|
||||
if (headerSize < 1)
|
||||
{
|
||||
return Array.Empty<byte>();
|
||||
}
|
||||
|
||||
var body = new byte[headerSize];
|
||||
var read = stream.Read(body, 0, headerSize);
|
||||
if (read < headerSize)
|
||||
{
|
||||
return Array.Empty<byte>();
|
||||
}
|
||||
|
||||
byte[] crc = new byte[4];
|
||||
read = stream.Read(crc, 0, 4);
|
||||
var checksum = Crc32Stream.Compute(body);
|
||||
// Compute the hash value
|
||||
if (checksum != BitConverter.ToUInt32(crc, 0))
|
||||
{
|
||||
throw new InvalidDataException("Header checksum is invalid");
|
||||
}
|
||||
return body;
|
||||
}
|
||||
|
||||
protected List<byte[]> ReadExtendedHeaders(Stream reader)
|
||||
{
|
||||
List<byte[]> extendedHeader = new List<byte[]>();
|
||||
byte[] buffer = new byte[2];
|
||||
|
||||
while (true)
|
||||
{
|
||||
int bytesRead = reader.Read(buffer, 0, 2);
|
||||
if (bytesRead < 2)
|
||||
{
|
||||
throw new EndOfStreamException(
|
||||
"Unexpected end of stream while reading extended header size."
|
||||
);
|
||||
}
|
||||
|
||||
var extHeaderSize = (ushort)(buffer[0] | (buffer[1] << 8));
|
||||
if (extHeaderSize == 0)
|
||||
{
|
||||
return extendedHeader;
|
||||
}
|
||||
|
||||
byte[] header = new byte[extHeaderSize];
|
||||
bytesRead = reader.Read(header, 0, extHeaderSize);
|
||||
if (bytesRead < extHeaderSize)
|
||||
{
|
||||
throw new EndOfStreamException(
|
||||
"Unexpected end of stream while reading extended header data."
|
||||
);
|
||||
}
|
||||
|
||||
byte[] crc = new byte[4];
|
||||
bytesRead = reader.Read(crc, 0, 4);
|
||||
if (bytesRead < 4)
|
||||
{
|
||||
throw new EndOfStreamException(
|
||||
"Unexpected end of stream while reading extended header CRC."
|
||||
);
|
||||
}
|
||||
|
||||
var checksum = Crc32Stream.Compute(header);
|
||||
if (checksum != BitConverter.ToUInt32(crc, 0))
|
||||
{
|
||||
throw new InvalidDataException("Extended header checksum is invalid");
|
||||
}
|
||||
|
||||
extendedHeader.Add(header);
|
||||
}
|
||||
}
|
||||
|
||||
// Flag helpers
|
||||
public bool IsGabled => (Flags & 0x01) != 0;
|
||||
public bool IsAnsiPage => (Flags & 0x02) != 0;
|
||||
public bool IsVolume => (Flags & 0x04) != 0;
|
||||
public bool IsArjProtected => (Flags & 0x08) != 0;
|
||||
public bool IsPathSym => (Flags & 0x10) != 0;
|
||||
public bool IsBackup => (Flags & 0x20) != 0;
|
||||
public bool IsSecured => (Flags & 0x40) != 0;
|
||||
public bool IsAltName => (Flags & 0x80) != 0;
|
||||
|
||||
public static FileType FileTypeFromByte(byte value)
|
||||
{
|
||||
return Enum.IsDefined(typeof(FileType), value)
|
||||
? (FileType)value
|
||||
: Headers.FileType.Unknown;
|
||||
}
|
||||
}
|
||||
}
|
||||
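For orientation, a hedged summary of the basic-header layout that ArjHeader.ReadHeader above consumes (reconstructed from the code; all multi-byte fields are little-endian):

// offset 0 : 0x60 0xEA          magic, read as ushort 0xEA60
// offset 2 : header_size (u16)  size of the header body that follows
// offset 4 : body[header_size]  fixed fields plus the name/comment strings
// after    : crc32 (u32)        CRC-32 over the body; a mismatch throws InvalidDataException
// then     : zero or more extended headers, each "size (u16), data, crc32", terminated by size == 0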
src/SharpCompress/Common/Arj/Headers/ArjLocalHeader.cs (new file, +161)
@@ -0,0 +1,161 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Runtime.CompilerServices;
|
||||
using System.Text;
|
||||
using System.Threading.Tasks;
|
||||
|
||||
namespace SharpCompress.Common.Arj.Headers
|
||||
{
|
||||
public class ArjLocalHeader : ArjHeader
|
||||
{
|
||||
public ArchiveEncoding ArchiveEncoding { get; }
|
||||
public long DataStartPosition { get; protected set; }
|
||||
|
||||
public byte ArchiverVersionNumber { get; set; }
|
||||
public byte MinVersionToExtract { get; set; }
|
||||
public HostOS HostOS { get; set; }
|
||||
public CompressionMethod CompressionMethod { get; set; }
|
||||
public DosDateTime DateTimeModified { get; set; } = new DosDateTime(0);
|
||||
public long CompressedSize { get; set; }
|
||||
public long OriginalSize { get; set; }
|
||||
public long OriginalCrc32 { get; set; }
|
||||
public int FileSpecPosition { get; set; }
|
||||
public int FileAccessMode { get; set; }
|
||||
public byte FirstChapter { get; set; }
|
||||
public byte LastChapter { get; set; }
|
||||
public long ExtendedFilePosition { get; set; }
|
||||
public DosDateTime DateTimeAccessed { get; set; } = new DosDateTime(0);
|
||||
public DosDateTime DateTimeCreated { get; set; } = new DosDateTime(0);
|
||||
public long OriginalSizeEvenForVolumes { get; set; }
|
||||
public string Name { get; set; } = string.Empty;
|
||||
public string Comment { get; set; } = string.Empty;
|
||||
|
||||
private const byte StdHdrSize = 30;
|
||||
private const byte R9HdrSize = 46;
|
||||
|
||||
public ArjLocalHeader(ArchiveEncoding archiveEncoding)
|
||||
: base(ArjHeaderType.LocalHeader)
|
||||
{
|
||||
ArchiveEncoding =
|
||||
archiveEncoding ?? throw new ArgumentNullException(nameof(archiveEncoding));
|
||||
}
|
||||
|
||||
public override ArjHeader? Read(Stream stream)
|
||||
{
|
||||
var body = ReadHeader(stream);
|
||||
if (body.Length > 0)
|
||||
{
|
||||
ReadExtendedHeaders(stream);
|
||||
var header = LoadFrom(body);
|
||||
header.DataStartPosition = stream.Position;
|
||||
return header;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
public ArjLocalHeader LoadFrom(byte[] headerBytes)
|
||||
{
|
||||
int offset = 0;
|
||||
|
||||
int ReadInt16()
|
||||
{
|
||||
if (offset + 1 >= headerBytes.Length)
|
||||
{
|
||||
throw new EndOfStreamException();
|
||||
}
|
||||
var v = headerBytes[offset] & 0xFF | (headerBytes[offset + 1] & 0xFF) << 8;
|
||||
offset += 2;
|
||||
return v;
|
||||
}
|
||||
long ReadInt32()
|
||||
{
|
||||
if (offset + 3 >= headerBytes.Length)
|
||||
{
|
||||
throw new EndOfStreamException();
|
||||
}
|
||||
long v =
|
||||
headerBytes[offset] & 0xFF
|
||||
| (headerBytes[offset + 1] & 0xFF) << 8
|
||||
| (headerBytes[offset + 2] & 0xFF) << 16
|
||||
| (headerBytes[offset + 3] & 0xFF) << 24;
|
||||
offset += 4;
|
||||
return v;
|
||||
}
|
||||
|
||||
byte headerSize = headerBytes[offset++];
|
||||
ArchiverVersionNumber = headerBytes[offset++];
|
||||
MinVersionToExtract = headerBytes[offset++];
|
||||
HostOS hostOS = (HostOS)headerBytes[offset++];
|
||||
Flags = headerBytes[offset++];
|
||||
CompressionMethod = CompressionMethodFromByte(headerBytes[offset++]);
|
||||
FileType = FileTypeFromByte(headerBytes[offset++]);
|
||||
|
||||
offset++; // Skip 1 byte
|
||||
|
||||
var rawTimestamp = ReadInt32();
|
||||
DateTimeModified =
|
||||
rawTimestamp != 0 ? new DosDateTime(rawTimestamp) : new DosDateTime(0);
|
||||
|
||||
CompressedSize = ReadInt32();
|
||||
OriginalSize = ReadInt32();
|
||||
OriginalCrc32 = ReadInt32();
|
||||
FileSpecPosition = ReadInt16();
|
||||
FileAccessMode = ReadInt16();
|
||||
|
||||
FirstChapter = headerBytes[offset++];
|
||||
LastChapter = headerBytes[offset++];
|
||||
|
||||
ExtendedFilePosition = 0;
|
||||
OriginalSizeEvenForVolumes = 0;
|
||||
|
||||
if (headerSize > StdHdrSize)
|
||||
{
|
||||
ExtendedFilePosition = ReadInt32();
|
||||
|
||||
if (headerSize >= R9HdrSize)
|
||||
{
|
||||
rawTimestamp = ReadInt32();
|
||||
DateTimeAccessed =
|
||||
rawTimestamp != 0 ? new DosDateTime(rawTimestamp) : new DosDateTime(0);
|
||||
rawTimestamp = ReadInt32();
|
||||
DateTimeCreated =
|
||||
rawTimestamp != 0 ? new DosDateTime(rawTimestamp) : new DosDateTime(0);
|
||||
OriginalSizeEvenForVolumes = ReadInt32();
|
||||
}
|
||||
}
|
||||
|
||||
Name = Encoding.ASCII.GetString(
|
||||
headerBytes,
|
||||
offset,
|
||||
Array.IndexOf(headerBytes, (byte)0, offset) - offset
|
||||
);
|
||||
offset += Name.Length + 1;
|
||||
|
||||
Comment = Encoding.ASCII.GetString(
|
||||
headerBytes,
|
||||
offset,
|
||||
Array.IndexOf(headerBytes, (byte)0, offset) - offset
|
||||
);
|
||||
offset += Comment.Length + 1;
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
public static CompressionMethod CompressionMethodFromByte(byte value)
|
||||
{
|
||||
return value switch
|
||||
{
|
||||
0 => CompressionMethod.Stored,
|
||||
1 => CompressionMethod.CompressedMost,
|
||||
2 => CompressionMethod.Compressed,
|
||||
3 => CompressionMethod.CompressedFaster,
|
||||
4 => CompressionMethod.CompressedFastest,
|
||||
8 => CompressionMethod.NoDataNoCrc,
|
||||
9 => CompressionMethod.NoData,
|
||||
_ => CompressionMethod.Unknown,
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
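The local ReadInt16/ReadInt32 helpers above assemble little-endian values by hand; an equivalent sketch with BinaryPrimitives, assuming a Span-capable target framework (the library also ships older TFMs, so this is not a drop-in claim):

using System;
using System.Buffers.Binary;

static ushort ReadUInt16(byte[] bytes, ref int offset)
{
    var value = BinaryPrimitives.ReadUInt16LittleEndian(bytes.AsSpan(offset, 2));
    offset += 2;
    return value;
}

static uint ReadUInt32(byte[] bytes, ref int offset)
{
    var value = BinaryPrimitives.ReadUInt32LittleEndian(bytes.AsSpan(offset, 4));
    offset += 4;
    return value;
}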
src/SharpCompress/Common/Arj/Headers/ArjMainHeader.cs (new file, +138)
@@ -0,0 +1,138 @@
|
||||
using System;
|
||||
using System.IO;
|
||||
using System.Text;
|
||||
using SharpCompress.Compressors.Deflate;
|
||||
using SharpCompress.Crypto;
|
||||
|
||||
namespace SharpCompress.Common.Arj.Headers
|
||||
{
|
||||
public class ArjMainHeader : ArjHeader
|
||||
{
|
||||
private const int FIRST_HDR_SIZE = 34;
|
||||
private const ushort ARJ_MAGIC = 0xEA60;
|
||||
|
||||
public ArchiveEncoding ArchiveEncoding { get; }
|
||||
|
||||
public int ArchiverVersionNumber { get; private set; }
|
||||
public int MinVersionToExtract { get; private set; }
|
||||
public HostOS HostOs { get; private set; }
|
||||
public int SecurityVersion { get; private set; }
|
||||
public DosDateTime CreationDateTime { get; private set; } = new DosDateTime(0);
|
||||
public long CompressedSize { get; private set; }
|
||||
public long ArchiveSize { get; private set; }
|
||||
public long SecurityEnvelope { get; private set; }
|
||||
public int FileSpecPosition { get; private set; }
|
||||
public int SecurityEnvelopeLength { get; private set; }
|
||||
public int EncryptionVersion { get; private set; }
|
||||
public int LastChapter { get; private set; }
|
||||
|
||||
public int ArjProtectionFactor { get; private set; }
|
||||
public int Flags2 { get; private set; }
|
||||
public string Name { get; private set; } = string.Empty;
|
||||
public string Comment { get; private set; } = string.Empty;
|
||||
|
||||
public ArjMainHeader(ArchiveEncoding archiveEncoding)
|
||||
: base(ArjHeaderType.MainHeader)
|
||||
{
|
||||
ArchiveEncoding =
|
||||
archiveEncoding ?? throw new ArgumentNullException(nameof(archiveEncoding));
|
||||
}
|
||||
|
||||
public override ArjHeader? Read(Stream stream)
|
||||
{
|
||||
var body = ReadHeader(stream);
|
||||
ReadExtendedHeaders(stream);
|
||||
return LoadFrom(body);
|
||||
}
|
||||
|
||||
public ArjMainHeader LoadFrom(byte[] headerBytes)
|
||||
{
|
||||
var offset = 1;
|
||||
|
||||
byte ReadByte()
|
||||
{
|
||||
if (offset >= headerBytes.Length)
|
||||
{
|
||||
throw new EndOfStreamException();
|
||||
}
|
||||
return (byte)(headerBytes[offset++] & 0xFF);
|
||||
}
|
||||
|
||||
int ReadInt16()
|
||||
{
|
||||
if (offset + 1 >= headerBytes.Length)
|
||||
{
|
||||
throw new EndOfStreamException();
|
||||
}
|
||||
var v = headerBytes[offset] & 0xFF | (headerBytes[offset + 1] & 0xFF) << 8;
|
||||
offset += 2;
|
||||
return v;
|
||||
}
|
||||
|
||||
long ReadInt32()
|
||||
{
|
||||
if (offset + 3 >= headerBytes.Length)
|
||||
{
|
||||
throw new EndOfStreamException();
|
||||
}
|
||||
long v =
|
||||
headerBytes[offset] & 0xFF
|
||||
| (headerBytes[offset + 1] & 0xFF) << 8
|
||||
| (headerBytes[offset + 2] & 0xFF) << 16
|
||||
| (headerBytes[offset + 3] & 0xFF) << 24;
|
||||
offset += 4;
|
||||
return v;
|
||||
}
|
||||
string ReadNullTerminatedString(byte[] x, int startIndex)
|
||||
{
|
||||
var result = new StringBuilder();
|
||||
int i = startIndex;
|
||||
|
||||
while (i < x.Length && x[i] != 0)
|
||||
{
|
||||
result.Append((char)x[i]);
|
||||
i++;
|
||||
}
|
||||
|
||||
// Skip the null terminator
|
||||
i++;
|
||||
if (i < x.Length)
|
||||
{
|
||||
byte[] remainder = new byte[x.Length - i];
|
||||
Array.Copy(x, i, remainder, 0, remainder.Length);
|
||||
x = remainder;
|
||||
}
|
||||
|
||||
return result.ToString();
|
||||
}
|
||||
|
||||
ArchiverVersionNumber = ReadByte();
|
||||
MinVersionToExtract = ReadByte();
|
||||
|
||||
var hostOsByte = ReadByte();
|
||||
HostOs = hostOsByte <= 11 ? (HostOS)hostOsByte : HostOS.Unknown;
|
||||
|
||||
Flags = ReadByte();
|
||||
SecurityVersion = ReadByte();
|
||||
FileType = FileTypeFromByte(ReadByte());
|
||||
|
||||
offset++; // skip reserved
|
||||
|
||||
CreationDateTime = new DosDateTime((int)ReadInt32());
|
||||
CompressedSize = ReadInt32();
|
||||
ArchiveSize = ReadInt32();
|
||||
|
||||
SecurityEnvelope = ReadInt32();
|
||||
FileSpecPosition = ReadInt16();
|
||||
SecurityEnvelopeLength = ReadInt16();
|
||||
|
||||
EncryptionVersion = ReadByte();
|
||||
LastChapter = ReadByte();
|
||||
|
||||
Name = ReadNullTerminatedString(headerBytes, offset);
|
||||
Comment = ReadNullTerminatedString(headerBytes, offset + 1 + Name.Length);
|
||||
|
||||
return this;
|
||||
}
|
||||
}
|
||||
}
|
||||
src/SharpCompress/Common/Arj/Headers/CompressionMethod.cs (new file, +20)
@@ -0,0 +1,20 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;

namespace SharpCompress.Common.Arj.Headers
{
    public enum CompressionMethod
    {
        Stored = 0,
        CompressedMost = 1,
        Compressed = 2,
        CompressedFaster = 3,
        CompressedFastest = 4,
        NoDataNoCrc = 8,
        NoData = 9,
        Unknown,
    }
}
src/SharpCompress/Common/Arj/Headers/DosDateTime.cs (new file, +37)
@@ -0,0 +1,37 @@
|
||||
using System;
|
||||
|
||||
namespace SharpCompress.Common.Arj.Headers
|
||||
{
|
||||
public class DosDateTime
|
||||
{
|
||||
public DateTime DateTime { get; }
|
||||
|
||||
public DosDateTime(long dosValue)
|
||||
{
|
||||
// Ensure only the lower 32 bits are used
|
||||
int value = unchecked((int)(dosValue & 0xFFFFFFFF));
|
||||
|
||||
var date = (value >> 16) & 0xFFFF;
|
||||
var time = value & 0xFFFF;
|
||||
|
||||
var day = date & 0x1F;
|
||||
var month = (date >> 5) & 0x0F;
|
||||
var year = ((date >> 9) & 0x7F) + 1980;
|
||||
|
||||
var second = (time & 0x1F) * 2;
|
||||
var minute = (time >> 5) & 0x3F;
|
||||
var hour = (time >> 11) & 0x1F;
|
||||
|
||||
try
|
||||
{
|
||||
DateTime = new DateTime(year, month, day, hour, minute, second);
|
||||
}
|
||||
catch
|
||||
{
|
||||
DateTime = DateTime.MinValue;
|
||||
}
|
||||
}
|
||||
|
||||
public override string ToString() => DateTime.ToString("yyyy-MM-dd HH:mm:ss");
|
||||
}
|
||||
}
|
||||
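A worked example of the bit unpacking above, with a value chosen purely for illustration (it does not come from any archive in this diff):

// date word 0x58CF: year = (0x58CF >> 9) + 1980 = 2024, month = (0x58CF >> 5) & 0x0F = 6, day = 0x58CF & 0x1F = 15
// time word 0x6BD5: hour = 0x6BD5 >> 11 = 13, minute = (0x6BD5 >> 5) & 0x3F = 30, second = (0x6BD5 & 0x1F) * 2 = 42
var stamp = new DosDateTime(0x58CF6BD5);
Console.WriteLine(stamp); // 2024-06-15 13:30:42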
src/SharpCompress/Common/Arj/Headers/FileType.cs (new file, +13)
@@ -0,0 +1,13 @@
namespace SharpCompress.Common.Arj.Headers
{
    public enum FileType : byte
    {
        Binary = 0,
        Text7Bit = 1,
        CommentHeader = 2,
        Directory = 3,
        VolumeLabel = 4,
        ChapterLabel = 5,
        Unknown = 255,
    }
}
src/SharpCompress/Common/Arj/Headers/HostOS.cs (new file, +19)
@@ -0,0 +1,19 @@
namespace SharpCompress.Common.Arj.Headers
{
    public enum HostOS
    {
        MsDos = 0,
        PrimOS = 1,
        Unix = 2,
        Amiga = 3,
        MacOs = 4,
        OS2 = 5,
        AppleGS = 6,
        AtariST = 7,
        NeXT = 8,
        VaxVMS = 9,
        Win95 = 10,
        Win32 = 11,
        Unknown = 255,
    }
}
@@ -16,5 +16,18 @@ public enum CompressionType
    Unknown,
    Deflate64,
    Shrink,
    Lzw
    Lzw,
    Reduce1,
    Reduce2,
    Reduce3,
    Reduce4,
    Explode,
    Squeezed,
    RLE90,
    Crunched,
    Squashed,
    Crushed,
    Distilled,
    ZStandard,
    ArjLZ77,
}

@@ -1,9 +0,0 @@
using System;

namespace SharpCompress.Common;

public class CryptographicException : Exception
{
    public CryptographicException(string message)
        : base(message) { }
}
@@ -14,7 +14,7 @@ public abstract class Entry : IEntry
    /// <summary>
    /// The string key of the file internal to the Archive.
    /// </summary>
    public abstract string Key { get; }
    public abstract string? Key { get; }

    /// <summary>
    /// The target of a symlink entry internal to the Archive. Will be null if not a symlink.
@@ -71,11 +71,11 @@ public abstract class Entry : IEntry
    /// </summary>
    public abstract bool IsSplitAfter { get; }

    public int VolumeIndexFirst => Parts?.FirstOrDefault()?.Index ?? 0;
    public int VolumeIndexLast => Parts?.LastOrDefault()?.Index ?? 0;
    public int VolumeIndexFirst => Parts.FirstOrDefault()?.Index ?? 0;
    public int VolumeIndexLast => Parts.LastOrDefault()?.Index ?? 0;

    /// <inheritdoc/>
    public override string ToString() => Key;
    public override string ToString() => Key ?? "Entry";

    internal abstract IEnumerable<FilePart> Parts { get; }

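Because Entry.Key is now nullable, guarding against a missing name becomes the caller's job; a hedged sketch (the 'archive' variable and the fallback name are assumptions):

foreach (var entry in archive.Entries)
{
    var name = entry.Key ?? "<unnamed>";
    Console.WriteLine($"{name} ({entry.Size} bytes, directory: {entry.IsDirectory})");
}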
@@ -1,11 +1,35 @@
|
||||
using System;
|
||||
using System.IO;
|
||||
using System.IO.Compression;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.IO;
|
||||
using SharpCompress.Readers;
|
||||
|
||||
namespace SharpCompress.Common;
|
||||
|
||||
public class EntryStream : Stream
|
||||
public class EntryStream : Stream, IStreamStack
|
||||
{
|
||||
#if DEBUG_STREAMS
|
||||
long IStreamStack.InstanceId { get; set; }
|
||||
#endif
|
||||
int IStreamStack.DefaultBufferSize { get; set; }
|
||||
|
||||
Stream IStreamStack.BaseStream() => _stream;
|
||||
|
||||
int IStreamStack.BufferSize
|
||||
{
|
||||
get => 0;
|
||||
set { }
|
||||
}
|
||||
int IStreamStack.BufferPosition
|
||||
{
|
||||
get => 0;
|
||||
set { }
|
||||
}
|
||||
|
||||
void IStreamStack.SetPosition(long position) { }
|
||||
|
||||
private readonly IReader _reader;
|
||||
private readonly Stream _stream;
|
||||
private bool _completed;
|
||||
@@ -15,6 +39,9 @@ public class EntryStream : Stream
|
||||
{
|
||||
_reader = reader;
|
||||
_stream = stream;
|
||||
#if DEBUG_STREAMS
|
||||
this.DebugConstruct(typeof(EntryStream));
|
||||
#endif
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
@@ -26,21 +53,79 @@ public class EntryStream : Stream
|
||||
_completed = true;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Asynchronously skip the rest of the entry stream.
|
||||
/// </summary>
|
||||
public async Task SkipEntryAsync(CancellationToken cancellationToken = default)
|
||||
{
|
||||
await this.SkipAsync(cancellationToken).ConfigureAwait(false);
|
||||
_completed = true;
|
||||
}
|
||||
|
||||
protected override void Dispose(bool disposing)
|
||||
{
|
||||
if (!(_completed || _reader.Cancelled))
|
||||
{
|
||||
SkipEntry();
|
||||
}
|
||||
if (_isDisposed)
|
||||
{
|
||||
return;
|
||||
}
|
||||
_isDisposed = true;
|
||||
if (!(_completed || _reader.Cancelled))
|
||||
{
|
||||
SkipEntry();
|
||||
}
|
||||
|
||||
//Need a safe, standard approach to this - it's acceptable for compression streams to over-read; handling needs to be standardised
|
||||
if (_stream is IStreamStack ss)
|
||||
{
|
||||
if (ss.BaseStream() is SharpCompress.Compressors.Deflate.DeflateStream deflateStream)
|
||||
{
|
||||
deflateStream.Flush(); //Deflate over reads. Knock it back
|
||||
}
|
||||
else if (ss.BaseStream() is SharpCompress.Compressors.LZMA.LzmaStream lzmaStream)
|
||||
{
|
||||
lzmaStream.Flush(); //Lzma over reads. Knock it back
|
||||
}
|
||||
}
|
||||
#if DEBUG_STREAMS
|
||||
this.DebugDispose(typeof(EntryStream));
|
||||
#endif
|
||||
base.Dispose(disposing);
|
||||
_stream.Dispose();
|
||||
}
|
||||
|
||||
#if !NETFRAMEWORK && !NETSTANDARD2_0
|
||||
public override async ValueTask DisposeAsync()
|
||||
{
|
||||
if (_isDisposed)
|
||||
{
|
||||
return;
|
||||
}
|
||||
_isDisposed = true;
|
||||
if (!(_completed || _reader.Cancelled))
|
||||
{
|
||||
await SkipEntryAsync().ConfigureAwait(false);
|
||||
}
|
||||
|
||||
//Need a safe, standard approach to this - it's acceptable for compression streams to over-read; handling needs to be standardised
|
||||
if (_stream is IStreamStack ss)
|
||||
{
|
||||
if (ss.BaseStream() is SharpCompress.Compressors.Deflate.DeflateStream deflateStream)
|
||||
{
|
||||
await deflateStream.FlushAsync().ConfigureAwait(false);
|
||||
}
|
||||
else if (ss.BaseStream() is SharpCompress.Compressors.LZMA.LzmaStream lzmaStream)
|
||||
{
|
||||
await lzmaStream.FlushAsync().ConfigureAwait(false);
|
||||
}
|
||||
}
|
||||
#if DEBUG_STREAMS
|
||||
this.DebugDispose(typeof(EntryStream));
|
||||
#endif
|
||||
await base.DisposeAsync().ConfigureAwait(false);
|
||||
await _stream.DisposeAsync().ConfigureAwait(false);
|
||||
}
|
||||
#endif
|
||||
|
||||
public override bool CanRead => true;
|
||||
|
||||
public override bool CanSeek => false;
|
||||
@@ -49,11 +134,13 @@ public class EntryStream : Stream
|
||||
|
||||
public override void Flush() { }
|
||||
|
||||
public override Task FlushAsync(CancellationToken cancellationToken) => Task.CompletedTask;
|
||||
|
||||
public override long Length => _stream.Length;
|
||||
|
||||
public override long Position
|
||||
{
|
||||
get => throw new NotSupportedException();
|
||||
get => _stream.Position; //throw new NotSupportedException();
|
||||
set => throw new NotSupportedException();
|
||||
}
|
||||
|
||||
@@ -67,6 +154,38 @@ public class EntryStream : Stream
|
||||
return read;
|
||||
}
|
||||
|
||||
public override async Task<int> ReadAsync(
|
||||
byte[] buffer,
|
||||
int offset,
|
||||
int count,
|
||||
CancellationToken cancellationToken
|
||||
)
|
||||
{
|
||||
var read = await _stream
|
||||
.ReadAsync(buffer, offset, count, cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
if (read <= 0)
|
||||
{
|
||||
_completed = true;
|
||||
}
|
||||
return read;
|
||||
}
|
||||
|
||||
#if !NETFRAMEWORK && !NETSTANDARD2_0
|
||||
public override async ValueTask<int> ReadAsync(
|
||||
Memory<byte> buffer,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
var read = await _stream.ReadAsync(buffer, cancellationToken).ConfigureAwait(false);
|
||||
if (read <= 0)
|
||||
{
|
||||
_completed = true;
|
||||
}
|
||||
return read;
|
||||
}
|
||||
#endif
|
||||
|
||||
public override int ReadByte()
|
||||
{
|
||||
var value = _stream.ReadByte();
|
||||
@@ -83,4 +202,11 @@ public class EntryStream : Stream
|
||||
|
||||
public override void Write(byte[] buffer, int offset, int count) =>
|
||||
throw new NotSupportedException();
|
||||
|
||||
public override Task WriteAsync(
|
||||
byte[] buffer,
|
||||
int offset,
|
||||
int count,
|
||||
CancellationToken cancellationToken
|
||||
) => throw new NotSupportedException();
|
||||
}
|
||||
|
||||
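A hedged sketch of consuming the async surface added to EntryStream above, from inside an async method; ReaderFactory, 'inputStream', 'destination', 'cancellationToken' and the ShouldExtract predicate are assumptions, while SkipEntryAsync and the async reads come from this diff.

using var reader = ReaderFactory.Open(inputStream);
while (reader.MoveToNextEntry())
{
    if (reader.Entry.IsDirectory)
    {
        continue;
    }
    using var entryStream = reader.OpenEntryStream();
    if (ShouldExtract(reader.Entry))
    {
        await entryStream.CopyToAsync(destination, cancellationToken);
    }
    else
    {
        await entryStream.SkipEntryAsync(cancellationToken); // skip explicitly instead of relying on Dispose
    }
}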
@@ -1,12 +0,0 @@
using System;

namespace SharpCompress.Common;

public class ExtractionException : Exception
{
    public ExtractionException(string message)
        : base(message) { }

    public ExtractionException(string message, Exception inner)
        : base(message, inner) { }
}
@@ -1,10 +1,22 @@
|
||||
using System;
|
||||
using System.IO;
|
||||
using System.Runtime.InteropServices;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
|
||||
namespace SharpCompress.Common;
|
||||
|
||||
internal static class ExtractionMethods
|
||||
{
|
||||
/// <summary>
|
||||
/// Gets the appropriate StringComparison for path checks based on the file system.
|
||||
/// Windows uses case-insensitive file systems, while Unix-like systems use case-sensitive file systems.
|
||||
/// </summary>
|
||||
private static StringComparison PathComparison =>
|
||||
RuntimeInformation.IsOSPlatform(OSPlatform.Windows)
|
||||
? StringComparison.OrdinalIgnoreCase
|
||||
: StringComparison.Ordinal;
|
||||
|
||||
/// <summary>
|
||||
/// Extract to specific directory, retaining filename
|
||||
/// </summary>
|
||||
@@ -36,15 +48,17 @@ internal static class ExtractionMethods
|
||||
|
||||
options ??= new ExtractionOptions() { Overwrite = true };
|
||||
|
||||
var file = Path.GetFileName(entry.Key);
|
||||
var file = Path.GetFileName(entry.Key.NotNull("Entry Key is null")).NotNull("File is null");
|
||||
file = Utility.ReplaceInvalidFileNameChars(file);
|
||||
if (options.ExtractFullPath)
|
||||
{
|
||||
var folder = Path.GetDirectoryName(entry.Key)!;
|
||||
var folder = Path.GetDirectoryName(entry.Key.NotNull("Entry Key is null"))
|
||||
.NotNull("Directory is null");
|
||||
var destdir = Path.GetFullPath(Path.Combine(fullDestinationDirectoryPath, folder));
|
||||
|
||||
if (!Directory.Exists(destdir))
|
||||
{
|
||||
if (!destdir.StartsWith(fullDestinationDirectoryPath, StringComparison.Ordinal))
|
||||
if (!destdir.StartsWith(fullDestinationDirectoryPath, PathComparison))
|
||||
{
|
||||
throw new ExtractionException(
|
||||
"Entry is trying to create a directory outside of the destination directory."
|
||||
@@ -64,12 +78,7 @@ internal static class ExtractionMethods
|
||||
{
|
||||
destinationFileName = Path.GetFullPath(destinationFileName);
|
||||
|
||||
if (
|
||||
!destinationFileName.StartsWith(
|
||||
fullDestinationDirectoryPath,
|
||||
StringComparison.Ordinal
|
||||
)
|
||||
)
|
||||
if (!destinationFileName.StartsWith(fullDestinationDirectoryPath, PathComparison))
|
||||
{
|
||||
throw new ExtractionException(
|
||||
"Entry is trying to write a file outside of the destination directory."
|
||||
@@ -114,4 +123,110 @@ internal static class ExtractionMethods
entry.PreserveExtractionOptions(destinationFileName, options);
}
}

public static async Task WriteEntryToDirectoryAsync(
IEntry entry,
string destinationDirectory,
ExtractionOptions? options,
Func<string, ExtractionOptions?, Task> writeAsync,
CancellationToken cancellationToken = default
)
{
string destinationFileName;
var fullDestinationDirectoryPath = Path.GetFullPath(destinationDirectory);

//check for trailing slash.
if (
fullDestinationDirectoryPath[fullDestinationDirectoryPath.Length - 1]
!= Path.DirectorySeparatorChar
)
{
fullDestinationDirectoryPath += Path.DirectorySeparatorChar;
}

if (!Directory.Exists(fullDestinationDirectoryPath))
{
throw new ExtractionException(
$"Directory does not exist to extract to: {fullDestinationDirectoryPath}"
);
}

options ??= new ExtractionOptions() { Overwrite = true };

var file = Path.GetFileName(entry.Key.NotNull("Entry Key is null")).NotNull("File is null");
file = Utility.ReplaceInvalidFileNameChars(file);
if (options.ExtractFullPath)
{
var folder = Path.GetDirectoryName(entry.Key.NotNull("Entry Key is null"))
.NotNull("Directory is null");
var destdir = Path.GetFullPath(Path.Combine(fullDestinationDirectoryPath, folder));

if (!Directory.Exists(destdir))
{
if (!destdir.StartsWith(fullDestinationDirectoryPath, PathComparison))
{
throw new ExtractionException(
"Entry is trying to create a directory outside of the destination directory."
);
}

Directory.CreateDirectory(destdir);
}
destinationFileName = Path.Combine(destdir, file);
}
else
{
destinationFileName = Path.Combine(fullDestinationDirectoryPath, file);
}

if (!entry.IsDirectory)
{
destinationFileName = Path.GetFullPath(destinationFileName);

if (!destinationFileName.StartsWith(fullDestinationDirectoryPath, PathComparison))
{
throw new ExtractionException(
"Entry is trying to write a file outside of the destination directory."
);
}
await writeAsync(destinationFileName, options).ConfigureAwait(false);
}
else if (options.ExtractFullPath && !Directory.Exists(destinationFileName))
{
Directory.CreateDirectory(destinationFileName);
}
}
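
The trailing-slash check at the top of WriteEntryToDirectoryAsync is what makes the later StartsWith guards safe: without the separator, a sibling directory that merely shares a prefix would pass the check. A tiny illustration with hypothetical paths:

// Illustration only (hypothetical paths): a prefix check without a trailing
// separator accepts "C:\out-evil" as if it were inside "C:\out".
var dest = @"C:\out";
var evil = @"C:\out-evil\payload.txt";
Console.WriteLine(evil.StartsWith(dest, StringComparison.Ordinal));                               // True  (too permissive)
Console.WriteLine(evil.StartsWith(dest + Path.DirectorySeparatorChar, StringComparison.Ordinal)); // False (rejected)
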
public static async Task WriteEntryToFileAsync(
IEntry entry,
string destinationFileName,
ExtractionOptions? options,
Func<string, FileMode, Task> openAndWriteAsync,
CancellationToken cancellationToken = default
)
{
if (entry.LinkTarget != null)
{
if (options?.WriteSymbolicLink is null)
{
throw new ExtractionException(
"Entry is a symbolic link but ExtractionOptions.WriteSymbolicLink delegate is null"
);
}
options.WriteSymbolicLink(destinationFileName, entry.LinkTarget);
}
else
{
var fm = FileMode.Create;
options ??= new ExtractionOptions() { Overwrite = true };

if (!options.Overwrite)
{
fm = FileMode.CreateNew;
}

await openAndWriteAsync(destinationFileName, fm).ConfigureAwait(false);
entry.PreserveExtractionOptions(destinationFileName, options);
}
}
}
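
A minimal sketch of how an async caller inside the library might compose the two new helpers (ExtractionMethods is internal, so this would live next to an async reader; entry, entryStream and destinationDirectory are assumed to be an IEntry, its open decompressed stream, and a target path):

// Hypothetical caller, not part of this diff.
await ExtractionMethods.WriteEntryToDirectoryAsync(
    entry,
    destinationDirectory,
    new ExtractionOptions { ExtractFullPath = true, Overwrite = true },
    (filePath, opts) =>
        ExtractionMethods.WriteEntryToFileAsync(
            entry,
            filePath,
            opts,
            async (path, fileMode) =>
            {
                // fileMode is chosen by WriteEntryToFileAsync from opts.Overwrite.
                using var output = File.Open(path, fileMode, FileAccess.Write);
                await entryStream.CopyToAsync(output);
            }
        )
);
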
@@ -8,10 +8,10 @@ public abstract class FilePart

internal ArchiveEncoding ArchiveEncoding { get; }

internal abstract string FilePartName { get; }
internal abstract string? FilePartName { get; }
public int Index { get; set; }

internal abstract Stream GetCompressedStream();
internal abstract Stream? GetCompressedStream();
internal abstract Stream? GetRawStream();
internal bool Skipped { get; set; }
}

@@ -6,23 +6,23 @@ namespace SharpCompress.Common.GZip;

public class GZipEntry : Entry
{
private readonly GZipFilePart _filePart;
private readonly GZipFilePart? _filePart;

internal GZipEntry(GZipFilePart filePart) => _filePart = filePart;
internal GZipEntry(GZipFilePart? filePart) => _filePart = filePart;

public override CompressionType CompressionType => CompressionType.GZip;

public override long Crc => _filePart.Crc ?? 0;
public override long Crc => _filePart?.Crc ?? 0;

public override string Key => _filePart.FilePartName;
public override string? Key => _filePart?.FilePartName;

public override string? LinkTarget => null;

public override long CompressedSize => 0;

public override long Size => _filePart.UncompressedSize ?? 0;
public override long Size => _filePart?.UncompressedSize ?? 0;

public override DateTime? LastModifiedTime => _filePart.DateModified;
public override DateTime? LastModifiedTime => _filePart?.DateModified;

public override DateTime? CreatedTime => null;

@@ -36,7 +36,7 @@ public class GZipEntry : Entry

public override bool IsSplitAfter => false;

internal override IEnumerable<FilePart> Parts => _filePart.AsEnumerable<FilePart>();
internal override IEnumerable<FilePart> Parts => _filePart.Empty();

internal static IEnumerable<GZipEntry> GetEntries(Stream stream, OptionsBase options)
{

@@ -24,8 +24,14 @@ internal sealed class GZipFilePart : FilePart
stream.Position = stream.Length - 8;
ReadTrailer();
stream.Position = position;
EntryStartPosition = position;
}
else
{
// For non-seekable streams, we can't read the trailer or track position.
// Set to 0 since the stream will be read sequentially from its current position.
EntryStartPosition = 0;
}
EntryStartPosition = stream.Position;
}

internal long EntryStartPosition { get; }
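
The seekable branch above jumps to stream.Length - 8 because each gzip member ends with an 8-byte trailer: the CRC-32 of the uncompressed data followed by the uncompressed size modulo 2^32, both little-endian (RFC 1952). A stand-alone sketch of reading it (helper name is hypothetical; Stream.ReadExactly requires .NET 7 or later):

// Hypothetical trailer reader; assumes the stream is already positioned at the
// final 8 bytes of the member. Requires: using System.Buffers.Binary; using System.IO;
static (uint Crc, uint UncompressedSize) ReadGZipTrailer(Stream stream)
{
    Span<byte> trailer = stackalloc byte[8];
    stream.ReadExactly(trailer);
    var crc = BinaryPrimitives.ReadUInt32LittleEndian(trailer);
    var size = BinaryPrimitives.ReadUInt32LittleEndian(trailer.Slice(4)); // ISIZE = size mod 2^32
    return (crc, size);
}
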
@@ -34,7 +40,7 @@ internal sealed class GZipFilePart : FilePart
internal uint? Crc { get; private set; }
internal uint? UncompressedSize { get; private set; }

internal override string FilePartName => _name!;
internal override string? FilePartName => _name;

internal override Stream GetCompressedStream() =>
new DeflateStream(_stream, CompressionMode.Decompress, CompressionLevel.Default);

@@ -5,7 +5,7 @@ namespace SharpCompress.Common.GZip;

public class GZipVolume : Volume
{
public GZipVolume(Stream stream, ReaderOptions options, int index = 0)
public GZipVolume(Stream stream, ReaderOptions? options, int index)
: base(stream, options, index) { }

public GZipVolume(FileInfo fileInfo, ReaderOptions options)

@@ -9,7 +9,7 @@ public interface IEntry
long CompressedSize { get; }
long Crc { get; }
DateTime? CreatedTime { get; }
string Key { get; }
string? Key { get; }
string? LinkTarget { get; }
bool IsDirectory { get; }
bool IsEncrypted { get; }

@@ -6,5 +6,5 @@ public interface IVolume : IDisposable
{
int Index { get; }

string FileName { get; }
string? FileName { get; }
}

@@ -1,7 +0,0 @@
namespace SharpCompress.Common;

public class IncompleteArchiveException : ArchiveException
{
public IncompleteArchiveException(string message)
: base(message) { }
}
@@ -1,12 +0,0 @@
using System;

namespace SharpCompress.Common;

public class InvalidFormatException : ExtractionException
{
public InvalidFormatException(string message)
: base(message) { }

public InvalidFormatException(string message, Exception inner)
: base(message, inner) { }
}
@@ -1,12 +0,0 @@
using System;

namespace SharpCompress.Common;

public class MultiVolumeExtractionException : ExtractionException
{
public MultiVolumeExtractionException(string message)
: base(message) { }

public MultiVolumeExtractionException(string message, Exception inner)
: base(message, inner) { }
}
@@ -1,7 +0,0 @@
namespace SharpCompress.Common;

public class MultipartStreamRequiredException : ExtractionException
{
public MultipartStreamRequiredException(string message)
: base(message) { }
}
@@ -17,7 +17,7 @@ internal class CryptKey5 : ICryptKey
private byte[] _pswCheck = { };
private byte[] _hashKey = { };

public CryptKey5(string password, Rar5CryptoInfo rar5CryptoInfo)
public CryptKey5(string? password, Rar5CryptoInfo rar5CryptoInfo)
{
_password = password ?? "";
_cryptoInfo = rar5CryptoInfo;

@@ -1,8 +1,5 @@
#nullable disable

using System;
using System.Security.Cryptography;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.IO;

namespace SharpCompress.Common.Rar.Headers;

@@ -1,9 +1,6 @@
#nullable disable

using System;
using System.IO;
using System.Linq;
using System.Security.Cryptography;
using System.Text;
using SharpCompress.IO;
#if !Rar2017_64bit
@@ -18,7 +15,7 @@ namespace SharpCompress.Common.Rar.Headers;

internal class FileHeader : RarHeader
{
private byte[] _hash;
private byte[]? _hash;

public FileHeader(RarHeader header, RarCrcBinaryReader reader, HeaderType headerType)
: base(header, reader, headerType) { }
@@ -121,7 +118,6 @@ internal class FileHeader : RarHeader
switch (type)
{
case FHEXTRA_CRYPT: // file encryption

{
Rar5CryptoInfo = new Rar5CryptoInfo(reader, true);

@@ -132,7 +128,6 @@ internal class FileHeader : RarHeader
}
break;
case FHEXTRA_HASH:

{
const uint FHEXTRA_HASH_BLAKE2 = 0x0;
// const uint HASH_BLAKE2 = 0x03;
@@ -146,7 +141,6 @@ internal class FileHeader : RarHeader
}
break;
case FHEXTRA_HTIME: // file time

{
var flags = reader.ReadRarVIntUInt16();
var isWindowsTime = (flags & 1) == 0;
@@ -171,7 +165,6 @@ internal class FileHeader : RarHeader
// }
// break;
case FHEXTRA_REDIR: // file system redirection

{
RedirType = reader.ReadRarVIntByte();
RedirFlags = reader.ReadRarVIntByte();
@@ -284,7 +277,6 @@ internal class FileHeader : RarHeader
switch (HeaderCode)
{
case HeaderCodeV.RAR4_FILE_HEADER:

{
if (HasFlag(FileFlagsV4.UNICODE))
{
@@ -311,7 +303,6 @@ internal class FileHeader : RarHeader
}
break;
case HeaderCodeV.RAR4_NEW_SUB_HEADER:

{
var datasize = HeaderSize - newLhdSize - nameSize;
if (HasFlag(FileFlagsV4.SALT))
@@ -325,6 +316,10 @@ internal class FileHeader : RarHeader

if (NewSubHeaderType.SUBHEAD_TYPE_RR.Equals(fileNameBytes))
{
if (SubData is null)
{
throw new InvalidFormatException();
}
RecoverySectors =
SubData[8]
+ (SubData[9] << 8)
@@ -346,12 +341,16 @@ internal class FileHeader : RarHeader
if (RemainingHeaderBytes(reader) >= 2)
{
var extendedFlags = reader.ReadUInt16();
FileLastModifiedTime = ProcessExtendedTimeV4(
extendedFlags,
FileLastModifiedTime,
reader,
0
);
if (FileLastModifiedTime is not null)
{
FileLastModifiedTime = ProcessExtendedTimeV4(
extendedFlags,
FileLastModifiedTime,
reader,
0
);
}

FileCreatedTime = ProcessExtendedTimeV4(extendedFlags, null, reader, 1);
FileLastAccessedTime = ProcessExtendedTimeV4(extendedFlags, null, reader, 2);
FileArchivedTime = ProcessExtendedTimeV4(extendedFlags, null, reader, 3);
@@ -383,7 +382,7 @@ internal class FileHeader : RarHeader
var dosTime = reader.ReadUInt32();
time = Utility.DosDateToDateTime(dosTime);
}
if ((rmode & 4) == 0)
if ((rmode & 4) == 0 && time is not null)
{
time = time.Value.AddSeconds(1);
}
@@ -396,7 +395,11 @@ internal class FileHeader : RarHeader
}

//10^-7 to 10^-3
return time.Value.AddMilliseconds(nanosecondHundreds * Math.Pow(10, -4));
if (time is not null)
{
return time.Value.AddMilliseconds(nanosecondHundreds * Math.Pow(10, -4));
}
return null;
}

private static string ConvertPathV4(string path)
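
The `//10^-7 to 10^-3` comment above describes converting a count of 100-nanosecond units (10^-7 s, i.e. .NET ticks) into milliseconds (10^-3 s), which is why the factor is 10^-4. A quick sanity check with an illustrative value:

// 10_000 hundred-nanosecond units = 10_000 * 100 ns = 1 ms
var nanosecondHundreds = 10_000;
var viaPow = nanosecondHundreds * Math.Pow(10, -4);                      // 1.0
var viaTicks = TimeSpan.FromTicks(nanosecondHundreds).TotalMilliseconds; // 1.0
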
@@ -412,13 +415,13 @@ internal class FileHeader : RarHeader
return path;
}

public override string ToString() => FileName;
public override string ToString() => FileName ?? "FileHeader";

private ushort Flags { get; set; }

private bool HasFlag(ushort flag) => (Flags & flag) == flag;

internal byte[] FileCrc
internal byte[]? FileCrc
{
get => _hash;
private set => _hash = value;
@@ -447,22 +450,22 @@ internal class FileHeader : RarHeader
public bool IsRedir => RedirType != 0;
public byte RedirFlags { get; private set; }
public bool IsRedirDirectory => (RedirFlags & RedirFlagV5.DIRECTORY) != 0;
public string RedirTargetName { get; private set; }
public string? RedirTargetName { get; private set; }

// unused for UnpackV1 implementation (limitation)
internal size_t WindowSize { get; private set; }

internal byte[] R4Salt { get; private set; }
internal Rar5CryptoInfo Rar5CryptoInfo { get; private set; }
internal byte[]? R4Salt { get; private set; }
internal Rar5CryptoInfo? Rar5CryptoInfo { get; private set; }
private byte HostOs { get; set; }
internal uint FileAttributes { get; private set; }
internal long CompressedSize { get; private set; }
internal long UncompressedSize { get; private set; }
internal string FileName { get; private set; }
internal byte[] SubData { get; private set; }
internal string? FileName { get; private set; }
internal byte[]? SubData { get; private set; }
internal int RecoverySectors { get; private set; }
internal long DataStartPosition { get; set; }
public Stream PackedStream { get; set; }
public Stream? PackedStream { get; set; }

public bool IsSplitBefore =>
IsRar5 ? HasHeaderFlag(HeaderFlagsV5.SPLIT_BEFORE) : HasFlag(FileFlagsV4.SPLIT_BEFORE);

@@ -13,7 +13,7 @@ public enum HeaderType : byte
Sign,
NewSub,
EndArchive,
Crypt
Crypt,
}

internal static class HeaderCodeV

@@ -1,5 +1,4 @@
using System;
using System.IO;
using SharpCompress.IO;

namespace SharpCompress.Common.Rar.Headers;
@@ -21,7 +20,7 @@ internal class RarHeader : IRarHeader
{
return new RarHeader(reader, isRar5, archiveEncoding);
}
catch (EndOfStreamException)
catch (InvalidFormatException)
{
return null;
}

@@ -1,7 +1,5 @@
using System.Collections.Generic;
using System.IO;
using System.Linq;
using SharpCompress.Common.Rar;
using SharpCompress.IO;
using SharpCompress.Readers;

@@ -98,13 +96,11 @@ public class RarHeaderFactory
switch (StreamingMode)
{
case StreamingMode.Seekable:

{
reader.BaseStream.Position += ph.DataSize;
}
break;
case StreamingMode.Streaming:

{
reader.BaseStream.Skip(ph.DataSize);
}
@@ -146,14 +142,12 @@ public class RarHeaderFactory
switch (StreamingMode)
{
case StreamingMode.Seekable:

{
fh.DataStartPosition = reader.BaseStream.Position;
reader.BaseStream.Position += fh.CompressedSize;
}
break;
case StreamingMode.Streaming:

{
var ms = new ReadOnlySubStream(reader.BaseStream, fh.CompressedSize);
if (fh.R4Salt is null && fh.Rar5CryptoInfo is null)
@@ -164,10 +158,15 @@ public class RarHeaderFactory
{
fh.PackedStream = new RarCryptoWrapper(
ms,
fh.R4Salt is null ? fh.Rar5CryptoInfo.Salt : fh.R4Salt,
fh.R4Salt is null
? new CryptKey5(Options.Password!, fh.Rar5CryptoInfo)
: new CryptKey3(Options.Password!)
? fh.Rar5CryptoInfo.NotNull().Salt
: fh.R4Salt,
fh.R4Salt is null
? new CryptKey5(
Options.Password,
fh.Rar5CryptoInfo.NotNull()
)
: new CryptKey3(Options.Password)
);
}
}
@@ -204,14 +203,12 @@ public class RarHeaderFactory
switch (StreamingMode)
{
case StreamingMode.Seekable:

{
fh.DataStartPosition = reader.BaseStream.Position;
reader.BaseStream.Position += fh.CompressedSize;
}
break;
case StreamingMode.Streaming:

{
//skip the data because it's useless?
reader.BaseStream.Skip(fh.CompressedSize);

@@ -70,11 +70,11 @@ internal sealed class RarCryptoWrapper : Stream

protected override void Dispose(bool disposing)
{
if (_rijndael != null)
if (disposing)
{
_rijndael.Dispose();
_rijndael = null!;
}

base.Dispose(disposing);
}
}

@@ -20,12 +20,12 @@ public abstract class RarEntry : Entry
/// <summary>
/// The File's 32 bit CRC Hash
/// </summary>
public override long Crc => BitConverter.ToUInt32(FileHeader.FileCrc, 0);
public override long Crc => BitConverter.ToUInt32(FileHeader.FileCrc.NotNull(), 0);

/// <summary>
/// The path of the file internal to the Rar Archive.
/// </summary>
public override string Key => FileHeader.FileName;
public override string? Key => FileHeader.FileName;

public override string? LinkTarget => null;

@@ -68,7 +68,7 @@ public abstract class RarEntry : Entry

public bool IsRedir => FileHeader.IsRedir;

public string RedirTargetName => FileHeader.RedirTargetName;
public string? RedirTargetName => FileHeader.RedirTargetName;

public override string ToString() =>
string.Format(

@@ -15,17 +15,14 @@ namespace SharpCompress.Common.Rar;
public abstract class RarVolume : Volume
{
private readonly RarHeaderFactory _headerFactory;
internal int _maxCompressionAlgorithm;
private int _maxCompressionAlgorithm;

internal RarVolume(StreamingMode mode, Stream stream, ReaderOptions options, int index = 0)
internal RarVolume(StreamingMode mode, Stream stream, ReaderOptions options, int index)
: base(stream, options, index) => _headerFactory = new RarHeaderFactory(mode, options);

#nullable disable
internal ArchiveHeader ArchiveHeader { get; private set; }
private ArchiveHeader? ArchiveHeader { get; set; }

#nullable enable

internal StreamingMode Mode => _headerFactory.StreamingMode;
private StreamingMode Mode => _headerFactory.StreamingMode;

internal abstract IEnumerable<RarFilePart> ReadFileParts();

@@ -39,19 +36,16 @@ public abstract class RarVolume : Volume
switch (header.HeaderType)
{
case HeaderType.Mark:

{
lastMarkHeader = (MarkHeader)header;
}
break;
case HeaderType.Archive:

{
ArchiveHeader = (ArchiveHeader)header;
}
break;
case HeaderType.File:

{
var fh = (FileHeader)header;
if (_maxCompressionAlgorithm < fh.CompressionAlgorithm)
@@ -63,14 +57,12 @@ public abstract class RarVolume : Volume
}
break;
case HeaderType.Service:

{
var fh = (FileHeader)header;
if (fh.FileName == "CMT")
{
var part = CreateFilePart(lastMarkHeader!, fh);
var buffer = new byte[fh.CompressedSize];
part.GetCompressedStream().Read(buffer, 0, buffer.Length);
fh.PackedStream.NotNull().ReadFully(buffer);
Comment = Encoding.UTF8.GetString(buffer, 0, buffer.Length - 1);
}
}
@@ -105,7 +97,7 @@ public abstract class RarVolume : Volume
get
{
EnsureArchiveHeaderLoaded();
return ArchiveHeader.IsFirstVolume;
return ArchiveHeader?.IsFirstVolume ?? false;
}
}

@@ -117,7 +109,7 @@ public abstract class RarVolume : Volume
get
{
EnsureArchiveHeaderLoaded();
return ArchiveHeader.IsVolume;
return ArchiveHeader?.IsVolume ?? false;
}
}

@@ -130,7 +122,7 @@ public abstract class RarVolume : Volume
get
{
EnsureArchiveHeaderLoaded();
return ArchiveHeader.IsSolid;
return ArchiveHeader?.IsSolid ?? false;
}
}

@@ -1,12 +0,0 @@
using System;

namespace SharpCompress.Common;

public class ReaderCancelledException : Exception
{
public ReaderCancelledException(string message)
: base(message) { }

public ReaderCancelledException(string message, Exception inner)
: base(message, inner) { }
}
@@ -35,7 +35,7 @@ internal class ArchiveDatabase
_packSizes.Clear();
_packCrCs.Clear();
_folders.Clear();
_numUnpackStreamsVector = null!;
_numUnpackStreamsVector = null;
_files.Clear();

_packStreamStartPositions.Clear();

@@ -784,7 +784,7 @@ internal class ArchiveReader
);
break;
default:
throw new InvalidOperationException();
throw new InvalidFormatException();
}
}
}
@@ -843,7 +843,7 @@ internal class ArchiveReader
outStream.ReadExact(data, 0, data.Length);
if (outStream.ReadByte() >= 0)
{
throw new InvalidOperationException("Decoded stream is longer than expected.");
throw new InvalidFormatException("Decoded stream is longer than expected.");
}
dataVector.Add(data);

@@ -854,7 +854,7 @@ internal class ArchiveReader
!= folder._unpackCrc
)
{
throw new InvalidOperationException(
throw new InvalidFormatException(
"Decoded stream does not match expected CRC."
);
}
@@ -996,6 +996,11 @@ internal class ArchiveReader
numFiles,
delegate(int i, uint? attr)
{
// Keep the original attribute value because it could potentially get
// modified in the logic that follows. Some callers of the library may
// find the original value useful.
db._files[i].ExtendedAttrib = attr;

// Some third party implementations established an unofficial extension
// of the 7z archive format by placing posix file attributes in the high
// bits of the windows file attributes. This makes use of the fact that
@@ -1220,23 +1225,46 @@ internal class ArchiveReader

#region Public Methods

public void Open(Stream stream)
public void Open(Stream stream, bool lookForHeader)
{
Close();

_streamOrigin = stream.Position;
_streamEnding = stream.Length;

// TODO: Check Signature!
_header = new byte[0x20];
for (var offset = 0; offset < 0x20; )
var canScan = lookForHeader ? 0x80000 - 20 : 0;
while (true)
{
var delta = stream.Read(_header, offset, 0x20 - offset);
if (delta == 0)
// TODO: Check Signature!
_header = new byte[0x20];
for (var offset = 0; offset < 0x20; )
{
throw new EndOfStreamException();
var delta = stream.Read(_header, offset, 0x20 - offset);
if (delta == 0)
{
throw new EndOfStreamException();
}

offset += delta;
}
offset += delta;

if (
!lookForHeader
|| _header
.AsSpan(0, length: 6)
.SequenceEqual<byte>([0x37, 0x7A, 0xBC, 0xAF, 0x27, 0x1C])
)
{
break;
}

if (canScan == 0)
{
throw new InvalidFormatException("Unable to find 7z signature");
}

canScan--;
stream.Position = ++_streamOrigin;
}

_stream = stream;
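
The new lookForHeader path above rereads the 0x20-byte header one byte further into the stream on each pass, up to roughly 0x80000 bytes, until the 6-byte 7z signature 37 7A BC AF 27 1C appears at the start. A stand-alone sketch of the same idea (helper name and the exact scan budget are illustrative, not part of the library):

// Hypothetical helper mirroring the scan above: return the offset of the first
// 7z signature within maxScan bytes of the starting position, or -1 if absent.
private static long Find7zSignature(Stream stream, long maxScan = 0x80000 - 20)
{
    byte[] signature = { 0x37, 0x7A, 0xBC, 0xAF, 0x27, 0x1C };
    var origin = stream.Position;
    var header = new byte[signature.Length];
    for (long shift = 0; shift <= maxScan; shift++)
    {
        stream.Position = origin + shift;
        if (stream.Read(header, 0, header.Length) < header.Length)
        {
            return -1; // ran out of data before finding the signature
        }
        if (header.AsSpan().SequenceEqual(signature))
        {
            return origin + shift;
        }
    }
    return -1;
}
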
@@ -1435,7 +1463,7 @@ internal class ArchiveReader
#if DEBUG
Log.WriteLine(_db._files[index].Name);
#endif
if (_db._files[index].CrcDefined)
if (_db._files[index].Crc.HasValue)
{
_stream = new CrcCheckStream(_db._files[index].Crc.Value);
}
Some files were not shown because too many files have changed in this diff