Mirror of https://github.com/adamhathcock/sharpcompress.git, synced 2026-02-04 13:34:59 +00:00
Compare commits: 0.32...copilot/fi (800 commits)
.config/dotnet-tools.json
@@ -2,11 +2,12 @@
  "version": 1,
  "isRoot": true,
  "tools": {
    "dotnet-format": {
      "version": "4.1.131201",
    "csharpier": {
      "version": "1.2.1",
      "commands": [
        "dotnet-format"
      ]
        "csharpier"
      ],
      "rollForward": false
    }
  }
}
.copilot-agent.yml (new file, 7 lines)
@@ -0,0 +1,7 @@
enabled: true
agent:
  name: copilot-coding-agent
allow:
  - paths: ["src/**/*", "tests/**/*", "README.md", "AGENTS.md"]
    actions: ["create", "modify"]
require_review_before_merge: true
.editorconfig
@@ -70,7 +70,7 @@ indent_style = tab

[*.{cs,csx,cake,vb,vbx}]
# Default Severity for all .NET Code Style rules below
dotnet_analyzer_diagnostic.severity = warning
dotnet_analyzer_diagnostic.severity = silent

##########################################
# File Header (Uncomment to support file headers)
@@ -195,8 +195,6 @@ csharp_prefer_static_local_function = true:warning
csharp_prefer_simple_using_statement = true:suggestion
dotnet_diagnostic.IDE0063.severity = suggestion

csharp_style_namespace_declarations = file_scoped

##########################################
# .NET Formatting Conventions
# https://docs.microsoft.com/visualstudio/ide/editorconfig-code-style-settings-reference#formatting-conventions
@@ -251,6 +249,8 @@ csharp_space_between_square_brackets = false
csharp_preserve_single_line_statements = false
csharp_preserve_single_line_blocks = true

csharp_style_namespace_declarations = file_scoped

##########################################
# .NET Naming Conventions
# https://docs.microsoft.com/visualstudio/ide/editorconfig-naming-conventions
@@ -260,13 +260,17 @@ csharp_preserve_single_line_blocks = true
dotnet_diagnostic.CA1000.severity = suggestion
dotnet_diagnostic.CA1001.severity = error
dotnet_diagnostic.CA1018.severity = error
dotnet_diagnostic.CA1036.severity = silent
dotnet_diagnostic.CA1051.severity = suggestion
dotnet_diagnostic.CA1068.severity = error
dotnet_diagnostic.CA1069.severity = error
dotnet_diagnostic.CA1304.severity = error
dotnet_diagnostic.CA1305.severity = suggestion
dotnet_diagnostic.CA1307.severity = suggestion
dotnet_diagnostic.CA1309.severity = suggestion
dotnet_diagnostic.CA1310.severity = error
dotnet_diagnostic.CA1507.severity = suggestion
dotnet_diagnostic.CA1513.severity = suggestion
dotnet_diagnostic.CA1707.severity = suggestion
dotnet_diagnostic.CA1708.severity = suggestion
dotnet_diagnostic.CA1711.severity = suggestion
@@ -283,6 +287,8 @@ dotnet_diagnostic.CA1829.severity = suggestion
dotnet_diagnostic.CA1834.severity = error
dotnet_diagnostic.CA1845.severity = suggestion
dotnet_diagnostic.CA1848.severity = suggestion
dotnet_diagnostic.CA1852.severity = suggestion
dotnet_diagnostic.CA1860.severity = silent
dotnet_diagnostic.CA2016.severity = suggestion
dotnet_diagnostic.CA2201.severity = error
dotnet_diagnostic.CA2206.severity = error
@@ -295,11 +301,12 @@ dotnet_diagnostic.CA2254.severity = suggestion

dotnet_diagnostic.CS0169.severity = error
dotnet_diagnostic.CS0219.severity = error
dotnet_diagnostic.CS0649.severity = suggestion
dotnet_diagnostic.CS1998.severity = error
dotnet_diagnostic.CS8602.severity = error
dotnet_diagnostic.CS8604.severity = error
dotnet_diagnostic.CS8618.severity = error
dotnet_diagnostic.CS0618.severity = error
dotnet_diagnostic.CS0618.severity = suggestion
dotnet_diagnostic.CS1998.severity = error
dotnet_diagnostic.CS4014.severity = error
dotnet_diagnostic.CS8600.severity = error
@@ -310,38 +317,56 @@ dotnet_diagnostic.BL0005.severity = suggestion

dotnet_diagnostic.MVC1000.severity = suggestion

dotnet_diagnostic.IDE0055.severity = suggestion # Fix formatting
dotnet_diagnostic.IDE0023.severity = suggestion # use expression body for operators
dotnet_diagnostic.IDE0025.severity = suggestion # use expression body for properties
dotnet_diagnostic.IDE1006.severity = suggestion # Naming rule violation: These words cannot contain lower case characters
dotnet_diagnostic.IDE0072.severity = suggestion # Populate switch - forces population of all cases even when default specified
dotnet_diagnostic.IDE0027.severity = suggestion # Use expression body for accessors
dotnet_diagnostic.IDE0032.severity = suggestion # Use auto property
dotnet_diagnostic.IDE0007.severity = error # Use var
dotnet_diagnostic.IDE0160.severity = suggestion # Use block scoped
dotnet_diagnostic.IDE0011.severity = error # Use braces on if statements
dotnet_diagnostic.IDE0057.severity = suggestion # substring can be simplified
dotnet_diagnostic.RZ10012.severity = error

dotnet_diagnostic.IDE0004.severity = error # redundant cast
dotnet_diagnostic.IDE0005.severity = suggestion
dotnet_diagnostic.IDE0007.severity = error # Use var
dotnet_diagnostic.IDE0011.severity = error # Use braces on if statements
dotnet_diagnostic.IDE0010.severity = silent # populate switch
dotnet_diagnostic.IDE0017.severity = suggestion # initialization can be simplified
dotnet_diagnostic.IDE0021.severity = silent # expression body for constructors
dotnet_diagnostic.IDE0022.severity = silent # expression body for methods
dotnet_diagnostic.IDE0023.severity = suggestion # use expression body for operators
dotnet_diagnostic.IDE0024.severity = silent # expression body for operators
dotnet_diagnostic.IDE0028.severity = silent
dotnet_diagnostic.IDE0025.severity = suggestion # use expression body for properties
dotnet_diagnostic.IDE0027.severity = suggestion # Use expression body for accessors
dotnet_diagnostic.IDE0028.severity = silent # expression body for accessors
dotnet_diagnostic.IDE0032.severity = suggestion # Use auto property
dotnet_diagnostic.IDE0033.severity = error # prefer tuple name
dotnet_diagnostic.IDE0037.severity = suggestion # simplify anonymous type
dotnet_diagnostic.IDE0040.severity = error # modifiers required
dotnet_diagnostic.IDE0041.severity = error # simplify null
dotnet_diagnostic.IDE0042.severity = error # deconstruct variable
dotnet_diagnostic.IDE0044.severity = error # make field only when possible
dotnet_diagnostic.IDE0047.severity = suggestion # paratemeter name
dotnet_diagnostic.IDE0044.severity = suggestion # make field only when possible
dotnet_diagnostic.IDE0047.severity = suggestion # parameter name
dotnet_diagnostic.IDE0051.severity = error # unused field
dotnet_diagnostic.IDE0052.severity = error # unused member
dotnet_diagnostic.IDE0053.severity = suggestion # lambda not needed
dotnet_diagnostic.IDE0055.severity = suggestion # Fix formatting
dotnet_diagnostic.IDE0057.severity = suggestion # substring can be simplified
dotnet_diagnostic.IDE0060.severity = suggestion # unused parameters
dotnet_diagnostic.IDE0061.severity = suggestion # local expression body
dotnet_diagnostic.IDE0062.severity = suggestion # local to static
dotnet_diagnostic.IDE0063.severity = error # simplify using
dotnet_diagnostic.IDE0066.severity = suggestion # switch expression
dotnet_diagnostic.IDE0072.severity = suggestion # Populate switch - forces population of all cases even when default specified
dotnet_diagnostic.IDE0078.severity = suggestion # use pattern matching
dotnet_diagnostic.IDE0090.severity = suggestion # new can be simplified
dotnet_diagnostic.IDE0130.severity = suggestion # namespace folder structure
dotnet_diagnostic.IDE0160.severity = silent # Use block namespaces ARE NOT required
dotnet_diagnostic.IDE0161.severity = suggestion # Please use file namespaces
dotnet_diagnostic.IDE0161.severity = error # Please use file namespaces
dotnet_diagnostic.IDE0200.severity = suggestion # lambda not needed
dotnet_diagnostic.IDE1006.severity = suggestion # Naming rule violation: These words cannot contain lower case characters
dotnet_diagnostic.IDE0260.severity = suggestion # Use pattern matching
dotnet_diagnostic.IDE0270.severity = suggestion # Null check simplifcation
dotnet_diagnostic.IDE0290.severity = error # Primary Constructor
dotnet_diagnostic.IDE0300.severity = suggestion # Collection
dotnet_diagnostic.IDE0305.severity = suggestion # Collection ToList

dotnet_diagnostic.NX0001.severity = error
dotnet_diagnostic.NX0002.severity = silent
dotnet_diagnostic.NX0003.severity = silent

##########################################
# Styles
.github/COPILOT_AGENT_README.md (new file, 15 lines)
@@ -0,0 +1,15 @@
# Copilot Coding Agent Configuration

This repository includes a minimal opt-in configuration and CI workflow to allow the GitHub Copilot coding agent to open and validate PRs.

- .copilot-agent.yml: opt-in config for automated agents
- .github/agents/copilot-agent.yml: detailed agent policy configuration
- .github/workflows/dotnetcore.yml: CI runs on PRs touching the solution, source, or tests to validate changes
- AGENTS.md: general instructions for Copilot coding agent with project-specific guidelines

Maintainers can adjust the allowed paths or disable the agent by editing or removing .copilot-agent.yml.

Notes:
- The agent can create, modify, and delete files within the allowed paths (src, tests, README.md, AGENTS.md)
- All changes require review before merge
- If build/test paths are different, update the workflow accordingly; this workflow targets SharpCompress.sln and the SharpCompress.Test test project.
.github/agents/copilot-agent.yml (new file, 17 lines)
@@ -0,0 +1,17 @@
enabled: true
agent:
  name: copilot-coding-agent
allow:
  - paths: ["src/**/*", "tests/**/*", "README.md", "AGENTS.md"]
    actions: ["create", "modify", "delete"]
require_review_before_merge: true
required_approvals: 1
allowed_merge_strategies:
  - squash
  - merge
auto_merge_on_green: false
run_workflows: true
notes: |
  - This manifest expresses the policy for the Copilot coding agent in this repository.
  - It does NOT install or authorize the agent; a repository admin must install the Copilot coding agent app and grant the repository the necessary permissions (contents: write, pull_requests: write, checks: write, actions: write/read, issues: write) to allow the agent to act.
  - Keep allow paths narrow and prefer require_review_before_merge during initial rollout.
.github/dependabot.yml (13 changed lines)
@@ -1,6 +1,13 @@
version: 2
updates:
  - package-ecosystem: "github-actions" # search for actions - there are other options available
    directory: "/" # search in .github/workflows under root `/`
  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      interval: "weekly" # check for action update every week
      interval: "weekly"

  - package-ecosystem: "nuget"
    directory: "/" # change to "/src/YourProject" if .csproj files are in subfolders
    schedule:
      interval: "weekly"
    open-pull-requests-limit: 5
    # optional: target-branch: "master"
.github/workflows/dotnetcore.yml (17 changed lines)
@@ -1,5 +1,10 @@
name: SharpCompress
on: [push, pull_request]
on:
  push:
    branches:
      - 'master'
  pull_request:
    types: [ opened, synchronize, reopened, ready_for_review ]

jobs:
  build:
@@ -9,12 +14,12 @@ jobs:
        os: [windows-latest, ubuntu-latest]

    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-dotnet@v2
      - uses: actions/checkout@v6
      - uses: actions/setup-dotnet@v5
        with:
          dotnet-version: 6.0.x
      - run: dotnet run -p build/build.csproj
      - uses: actions/upload-artifact@v3
          dotnet-version: 10.0.x
      - run: dotnet run --project build/build.csproj
      - uses: actions/upload-artifact@v5
        with:
          name: ${{ matrix.os }}-sharpcompress.nupkg
          path: artifacts/*
.gitignore (4 changed lines)
@@ -11,11 +11,11 @@ TestResults/
packages/*/
project.lock.json
tests/TestArchives/Scratch
tests/TestArchives/*/Scratch
tests/TestArchives/*/Scratch2
.vs
tools
.vscode
.idea/

.DS_Store
*.snupkg
/tests/TestArchives/6d23a38c-f064-4ef1-ad89-b942396f53b9/Scratch
.vscode/extensions.json (new file, 9 lines)
@@ -0,0 +1,9 @@
{
  "recommendations": [
    "ms-dotnettools.csdevkit",
    "ms-dotnettools.csharp",
    "ms-dotnettools.vscode-dotnet-runtime",
    "csharpier.csharpier-vscode",
    "formulahendry.dotnet-test-explorer"
  ]
}
.vscode/launch.json (new file, 97 lines)
@@ -0,0 +1,97 @@
{
  "version": "0.2.0",
  "configurations": [
    {
      "name": "Debug Tests (net10.0)",
      "type": "coreclr",
      "request": "launch",
      "preLaunchTask": "build",
      "program": "dotnet",
      "args": [
        "test",
        "${workspaceFolder}/tests/SharpCompress.Test/SharpCompress.Test.csproj",
        "-f",
        "net10.0",
        "--no-build",
        "--verbosity=normal"
      ],
      "cwd": "${workspaceFolder}",
      "console": "internalConsole",
      "stopAtEntry": false
    },
    {
      "name": "Debug Specific Test (net10.0)",
      "type": "coreclr",
      "request": "launch",
      "preLaunchTask": "build",
      "program": "dotnet",
      "args": [
        "test",
        "${workspaceFolder}/tests/SharpCompress.Test/SharpCompress.Test.csproj",
        "-f",
        "net10.0",
        "--no-build",
        "--filter",
        "FullyQualifiedName~${input:testName}"
      ],
      "cwd": "${workspaceFolder}",
      "console": "internalConsole",
      "stopAtEntry": false
    },
    {
      "name": "Debug Performance Tests",
      "type": "coreclr",
      "request": "launch",
      "preLaunchTask": "build",
      "program": "dotnet",
      "args": [
        "run",
        "--project",
        "${workspaceFolder}/tests/SharpCompress.Performance/SharpCompress.Performance.csproj",
        "--no-build"
      ],
      "cwd": "${workspaceFolder}",
      "console": "internalConsole",
      "stopAtEntry": false
    },
    {
      "name": "Debug Build Script",
      "type": "coreclr",
      "request": "launch",
      "program": "dotnet",
      "args": [
        "run",
        "--project",
        "${workspaceFolder}/build/build.csproj",
        "--",
        "${input:buildTarget}"
      ],
      "cwd": "${workspaceFolder}",
      "console": "internalConsole",
      "stopAtEntry": false
    }
  ],
  "inputs": [
    {
      "id": "testName",
      "type": "promptString",
      "description": "Enter test name or pattern (e.g., TestMethodName or ClassName)",
      "default": ""
    },
    {
      "id": "buildTarget",
      "type": "pickString",
      "description": "Select build target",
      "options": [
        "clean",
        "restore",
        "build",
        "test",
        "format",
        "publish",
        "default"
      ],
      "default": "build"
    }
  ]
}
.vscode/settings.json (new file, 29 lines)
@@ -0,0 +1,29 @@
{
  "dotnet.defaultSolution": "SharpCompress.sln",
  "files.exclude": {
    "**/bin": true,
    "**/obj": true
  },
  "files.watcherExclude": {
    "**/bin/**": true,
    "**/obj/**": true,
    "**/artifacts/**": true
  },
  "search.exclude": {
    "**/bin": true,
    "**/obj": true,
    "**/artifacts": true
  },
  "editor.formatOnSave": false,
  "[csharp]": {
    "editor.defaultFormatter": "csharpier.csharpier-vscode",
    "editor.formatOnSave": true,
    "editor.codeActionsOnSave": {
      "source.fixAll": "explicit"
    }
  },
  "csharpier.enableDebugLogs": false,
  "omnisharp.enableRoslynAnalyzers": true,
  "omnisharp.enableEditorConfigSupport": true,
  "dotnet-test-explorer.testProjectPath": "tests/**/*.csproj"
}
.vscode/tasks.json (new file, 178 lines)
@@ -0,0 +1,178 @@
{
  "version": "2.0.0",
  "tasks": [
    {
      "label": "build",
      "command": "dotnet",
      "type": "process",
      "args": [
        "build",
        "${workspaceFolder}/SharpCompress.sln",
        "/property:GenerateFullPaths=true",
        "/consoleloggerparameters:NoSummary;ForceNoAlign"
      ],
      "problemMatcher": "$msCompile",
      "group": {
        "kind": "build",
        "isDefault": true
      }
    },
    {
      "label": "build-release",
      "command": "dotnet",
      "type": "process",
      "args": [
        "build",
        "${workspaceFolder}/SharpCompress.sln",
        "-c",
        "Release",
        "/property:GenerateFullPaths=true",
        "/consoleloggerparameters:NoSummary;ForceNoAlign"
      ],
      "problemMatcher": "$msCompile",
      "group": "build"
    },
    {
      "label": "build-library",
      "command": "dotnet",
      "type": "process",
      "args": [
        "build",
        "${workspaceFolder}/src/SharpCompress/SharpCompress.csproj",
        "/property:GenerateFullPaths=true",
        "/consoleloggerparameters:NoSummary;ForceNoAlign"
      ],
      "problemMatcher": "$msCompile",
      "group": "build"
    },
    {
      "label": "restore",
      "command": "dotnet",
      "type": "process",
      "args": [
        "restore",
        "${workspaceFolder}/SharpCompress.sln"
      ],
      "problemMatcher": "$msCompile"
    },
    {
      "label": "clean",
      "command": "dotnet",
      "type": "process",
      "args": [
        "clean",
        "${workspaceFolder}/SharpCompress.sln"
      ],
      "problemMatcher": "$msCompile"
    },
    {
      "label": "test",
      "command": "dotnet",
      "type": "process",
      "args": [
        "test",
        "${workspaceFolder}/tests/SharpCompress.Test/SharpCompress.Test.csproj",
        "--no-build",
        "--verbosity=normal"
      ],
      "problemMatcher": "$msCompile",
      "group": {
        "kind": "test",
        "isDefault": true
      },
      "dependsOn": "build"
    },
    {
      "label": "test-net10",
      "command": "dotnet",
      "type": "process",
      "args": [
        "test",
        "${workspaceFolder}/tests/SharpCompress.Test/SharpCompress.Test.csproj",
        "-f",
        "net10.0",
        "--no-build",
        "--verbosity=normal"
      ],
      "problemMatcher": "$msCompile",
      "group": "test",
      "dependsOn": "build"
    },
    {
      "label": "test-net48",
      "command": "dotnet",
      "type": "process",
      "args": [
        "test",
        "${workspaceFolder}/tests/SharpCompress.Test/SharpCompress.Test.csproj",
        "-f",
        "net48",
        "--no-build",
        "--verbosity=normal"
      ],
      "problemMatcher": "$msCompile",
      "group": "test",
      "dependsOn": "build"
    },
    {
      "label": "format",
      "command": "dotnet",
      "type": "process",
      "args": [
        "csharpier",
        "."
      ],
      "problemMatcher": []
    },
    {
      "label": "format-check",
      "command": "dotnet",
      "type": "process",
      "args": [
        "csharpier",
        "check",
        "."
      ],
      "problemMatcher": []
    },
    {
      "label": "run-build-script",
      "command": "dotnet",
      "type": "process",
      "args": [
        "run",
        "--project",
        "${workspaceFolder}/build/build.csproj"
      ],
      "problemMatcher": "$msCompile"
    },
    {
      "label": "pack",
      "command": "dotnet",
      "type": "process",
      "args": [
        "pack",
        "${workspaceFolder}/src/SharpCompress/SharpCompress.csproj",
        "-c",
        "Release",
        "-o",
        "${workspaceFolder}/artifacts/"
      ],
      "problemMatcher": "$msCompile",
      "dependsOn": "build-release"
    },
    {
      "label": "performance-tests",
      "command": "dotnet",
      "type": "process",
      "args": [
        "run",
        "--project",
        "${workspaceFolder}/tests/SharpCompress.Performance/SharpCompress.Performance.csproj",
        "-c",
        "Release"
      ],
      "problemMatcher": "$msCompile"
    }
  ]
}
AGENTS.md (new file, 157 lines)
@@ -0,0 +1,157 @@
---
description: 'Guidelines for building SharpCompress - A C# compression library'
applyTo: '**/*.cs'
---

# SharpCompress Development

## About SharpCompress
SharpCompress is a pure C# compression library supporting multiple archive formats (Zip, Tar, GZip, BZip2, 7Zip, Rar, LZip, XZ, ZStandard) for .NET Framework 4.62, .NET Standard 2.1, .NET 6.0, and .NET 8.0. The library provides both seekable Archive APIs and forward-only Reader/Writer APIs for streaming scenarios.

## C# Instructions
- Always use the latest version C#, currently C# 13 features.
- Write clear and concise comments for each function.
- Follow the existing code style and patterns in the codebase.

## General Instructions
- Make only high confidence suggestions when reviewing code changes.
- Write code with good maintainability practices, including comments on why certain design decisions were made.
- Handle edge cases and write clear exception handling.
- For libraries or external dependencies, mention their usage and purpose in comments.
- Preserve backward compatibility when making changes to public APIs.

## Naming Conventions

- Follow PascalCase for component names, method names, and public members.
- Use camelCase for private fields and local variables.
- Prefix interface names with "I" (e.g., IUserService).

## Code Formatting

- Use CSharpier for code formatting to ensure consistent style across the project
- CSharpier is configured as a local tool in `.config/dotnet-tools.json`
- Restore tools with: `dotnet tool restore`
- Format files from the project root with: `dotnet csharpier .`
- **Run `dotnet csharpier .` from the project root after making code changes before committing**
- Configure your IDE to format on save using CSharpier for the best experience
- The project also uses `.editorconfig` for editor settings (indentation, encoding, etc.)
- Let CSharpier handle code style while `.editorconfig` handles editor behavior

## Project Setup and Structure

- The project targets multiple frameworks: .NET Framework 4.62, .NET Standard 2.1, .NET 6.0, and .NET 8.0
- Main library is in `src/SharpCompress/`
- Tests are in `tests/SharpCompress.Test/`
- Performance tests are in `tests/SharpCompress.Performance/`
- Test archives are in `tests/TestArchives/`
- Build project is in `build/`
- Use `dotnet build` to build the solution
- Use `dotnet test` to run tests
- Solution file: `SharpCompress.sln`

### Directory Structure
```
src/SharpCompress/
├── Archives/ # IArchive implementations (Zip, Tar, Rar, 7Zip, GZip)
├── Readers/ # IReader implementations (forward-only)
├── Writers/ # IWriter implementations (forward-only)
├── Compressors/ # Low-level compression streams (BZip2, Deflate, LZMA, etc.)
├── Factories/ # Format detection and factory pattern
├── Common/ # Shared types (ArchiveType, Entry, Options)
├── Crypto/ # Encryption implementations
└── IO/ # Stream utilities and wrappers

tests/SharpCompress.Test/
├── Zip/, Tar/, Rar/, SevenZip/, GZip/, BZip2/ # Format-specific tests
├── TestBase.cs # Base test class with helper methods
└── TestArchives/ # Test data (not checked into main test project)
```

### Factory Pattern
All format types implement factory interfaces (`IArchiveFactory`, `IReaderFactory`, `IWriterFactory`) for auto-detection:
- `ReaderFactory.Open()` - Auto-detects format by probing stream
- `WriterFactory.Open()` - Creates writer for specified `ArchiveType`
- Factories located in: `src/SharpCompress/Factories/`

## Nullable Reference Types

- Declare variables non-nullable, and check for `null` at entry points.
- Always use `is null` or `is not null` instead of `== null` or `!= null`.
- Trust the C# null annotations and don't add null checks when the type system says a value cannot be null.

## SharpCompress-Specific Guidelines

### Supported Formats
SharpCompress supports multiple archive and compression formats:
- **Archive Formats**: Zip, Tar, 7Zip, Rar (read-only)
- **Compression**: DEFLATE, BZip2, LZMA/LZMA2, PPMd, ZStandard (decompress only), Deflate64 (decompress only)
- **Combined Formats**: Tar.GZip, Tar.BZip2, Tar.LZip, Tar.XZ, Tar.ZStandard
- See FORMATS.md for complete format support matrix

### Stream Handling Rules
- **Disposal**: As of version 0.21, SharpCompress closes wrapped streams by default
- Use `ReaderOptions` or `WriterOptions` with `LeaveStreamOpen = true` to control stream disposal
- Use `NonDisposingStream` wrapper when working with compression streams directly to prevent disposal
- Always dispose of readers, writers, and archives in `using` blocks
- For forward-only operations, use Reader/Writer APIs; for random access, use Archive APIs

### Async/Await Patterns
- All I/O operations support async/await with `CancellationToken`
- Async methods follow the naming convention: `MethodNameAsync`
- Key async methods:
  - `WriteEntryToAsync` - Extract entry asynchronously
  - `WriteAllToDirectoryAsync` - Extract all entries asynchronously
  - `WriteAsync` - Write entry asynchronously
  - `WriteAllAsync` - Write directory asynchronously
  - `OpenEntryStreamAsync` - Open entry stream asynchronously
- Always provide `CancellationToken` parameter in async methods

### Archive APIs vs Reader/Writer APIs
- **Archive API**: Use for random access with seekable streams (e.g., `ZipArchive`, `TarArchive`)
- **Reader API**: Use for forward-only reading on non-seekable streams (e.g., `ZipReader`, `TarReader`)
- **Writer API**: Use for forward-only writing on streams (e.g., `ZipWriter`, `TarWriter`)
- 7Zip only supports Archive API due to format limitations

### Tar-Specific Considerations
- Tar format requires file size in the header
- If no size is specified to TarWriter and the stream is not seekable, an exception will be thrown
- Tar combined with compression (GZip, BZip2, LZip, XZ) is supported

### Zip-Specific Considerations
- Supports Zip64 for large files (seekable streams only)
- Supports PKWare and WinZip AES encryption
- Multiple compression methods: None, Shrink, Reduce, Implode, DEFLATE, Deflate64, BZip2, LZMA, PPMd
- Encrypted LZMA is not supported

### Performance Considerations
- For large files, use Reader/Writer APIs with non-seekable streams to avoid loading entire file in memory
- Leverage async I/O for better scalability
- Consider compression level trade-offs (speed vs. size)
- Use appropriate buffer sizes for stream operations

## Testing

- Always include test cases for critical paths of the application.
- Test with multiple archive formats when making changes to core functionality.
- Include tests for both Archive and Reader/Writer APIs when applicable.
- Test async operations with cancellation tokens.
- Do not emit "Act", "Arrange" or "Assert" comments.
- Copy existing style in nearby files for test method names and capitalization.
- Use test archives from `tests/TestArchives` directory for consistency.
- Test stream disposal and `LeaveStreamOpen` behavior.
- Test edge cases: empty archives, large files, corrupted archives, encrypted archives.

### Test Organization
- Base class: `TestBase` - Provides `TEST_ARCHIVES_PATH`, `SCRATCH_FILES_PATH`, temp directory management
- Framework: xUnit with AwesomeAssertions
- Test archives: `tests/TestArchives/` - Use existing archives, don't create new ones unnecessarily
- Match naming style of nearby test files

## Common Pitfalls

1. **Don't mix Archive and Reader APIs** - Archive needs seekable stream, Reader doesn't
2. **Solid archives (Rar, 7Zip)** - Use `ExtractAllEntries()` for best performance, not individual entry extraction
3. **Stream disposal** - Always set `LeaveStreamOpen` explicitly when needed (default is to close)
4. **Tar + non-seekable stream** - Must provide file size or it will throw
5. **Multi-framework differences** - Some features differ between .NET Framework and modern .NET (e.g., Mono.Posix)
6. **Format detection** - Use `ReaderFactory.Open()` for auto-detection, test with actual archive files
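To make the Factory Pattern and Stream Handling Rules described in the AGENTS.md addition above concrete, here is a minimal C# sketch of forward-only extraction through `ReaderFactory` with `LeaveStreamOpen`; the archive path and output directory are placeholder values, not taken from the diff.

```csharp
using System.IO;
using SharpCompress.Common;
using SharpCompress.Readers;

class ReaderFactoryExample
{
    static void Main()
    {
        // Placeholder paths for this sketch.
        using Stream stream = File.OpenRead("archive.unknown");

        // ReaderFactory probes the stream header and returns the matching IReader.
        // LeaveStreamOpen = true keeps `stream` usable after the reader is disposed,
        // since SharpCompress closes wrapped streams by default.
        using var reader = ReaderFactory.Open(stream, new ReaderOptions { LeaveStreamOpen = true });

        while (reader.MoveToNextEntry())
        {
            if (!reader.Entry.IsDirectory)
            {
                reader.WriteEntryToDirectory(
                    @"C:\temp\out",
                    new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
                );
            }
        }
    }
}
```

Because the reader only moves forward, this pattern also works on non-seekable streams such as a network download, which is the Reader API's main advantage over the Archive API.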
Directory.Build.props (new file, 16 lines)
@@ -0,0 +1,16 @@
<Project>
  <PropertyGroup>
    <LangVersion>latest</LangVersion>
    <Nullable>enable</Nullable>
    <AnalysisMode>Recommended</AnalysisMode>
    <WarningsAsErrors>true</WarningsAsErrors>
    <TreatWarningsAsErrors>true</TreatWarningsAsErrors>
    <CodeAnalysisTreatWarningsAsErrors>true</CodeAnalysisTreatWarningsAsErrors>
    <EnforceCodeStyleInBuild>true</EnforceCodeStyleInBuild>
    <AllowUnsafeBlocks>true</AllowUnsafeBlocks>
    <RunAnalyzersDuringLiveAnalysis>False</RunAnalyzersDuringLiveAnalysis>
    <RunAnalyzersDuringBuild>False</RunAnalyzersDuringBuild>
    <ManagePackageVersionsCentrally>true</ManagePackageVersionsCentrally>
    <RestorePackagesWithLockFile>true</RestorePackagesWithLockFile>
  </PropertyGroup>
</Project>
Directory.Packages.props (new file, 21 lines)
@@ -0,0 +1,21 @@
<Project>
  <ItemGroup>
    <PackageVersion Include="Bullseye" Version="6.0.0" />
    <PackageVersion Include="AwesomeAssertions" Version="9.3.0" />
    <PackageVersion Include="Glob" Version="1.1.9" />
    <PackageVersion Include="JetBrains.Profiler.SelfApi" Version="2.5.14" />
    <PackageVersion Include="Microsoft.Bcl.AsyncInterfaces" Version="10.0.0" />
    <PackageVersion Include="Microsoft.NET.Test.Sdk" Version="18.0.1" />
    <PackageVersion Include="Mono.Posix.NETStandard" Version="1.0.0" />
    <PackageVersion Include="SimpleExec" Version="12.0.0" />
    <PackageVersion Include="System.Buffers" Version="4.6.1" />
    <PackageVersion Include="System.Memory" Version="4.6.3" />
    <PackageVersion Include="System.Text.Encoding.CodePages" Version="10.0.0" />
    <PackageVersion Include="xunit" Version="2.9.3" />
    <PackageVersion Include="xunit.runner.visualstudio" Version="3.1.5" />
    <PackageVersion Include="ZstdSharp.Port" Version="0.8.6" />
    <PackageVersion Include="Microsoft.NET.ILLink.Tasks" Version="10.0.0" />
    <PackageVersion Include="Microsoft.SourceLink.GitHub" Version="8.0.0" />
    <PackageVersion Include="Microsoft.NETFramework.ReferenceAssemblies" Version="1.0.3" />
  </ItemGroup>
</Project>
FORMATS.md (11 changed lines)
@@ -11,21 +11,27 @@
| Archive Format | Compression Format(s) | Compress/Decompress | Archive API | Reader API | Writer API |
| ---------------------- | ------------------------------------------------- | ------------------- | --------------- | ---------- | ------------- |
| Rar | Rar | Decompress (1) | RarArchive | RarReader | N/A |
| Zip (2) | None, DEFLATE, Deflate64, BZip2, LZMA/LZMA2, PPMd | Both | ZipArchive | ZipReader | ZipWriter |
| Zip (2) | None, Shrink, Reduce, Implode, DEFLATE, Deflate64, BZip2, LZMA/LZMA2, PPMd | Both | ZipArchive | ZipReader | ZipWriter |
| Tar | None | Both | TarArchive | TarReader | TarWriter (3) |
| Tar.GZip | DEFLATE | Both | TarArchive | TarReader | TarWriter (3) |
| Tar.BZip2 | BZip2 | Both | TarArchive | TarReader | TarWriter (3) |
| Tar.Zstandard | ZStandard | Decompress | TarArchive | TarReader | N/A |
| Tar.LZip | LZMA | Both | TarArchive | TarReader | TarWriter (3) |
| Tar.XZ | LZMA2 | Decompress | TarArchive | TarReader | TarWriter (3) |
| GZip (single file) | DEFLATE | Both | GZipArchive | GZipReader | GZipWriter |
| 7Zip (4) | LZMA, LZMA2, BZip2, PPMd, BCJ, BCJ2, Deflate | Decompress | SevenZipArchive | N/A | N/A |

1. SOLID Rars are only supported in the RarReader API.
2. Zip format supports pkware and WinzipAES encryption. However, encrypted LZMA is not supported. Zip64 reading/writing is supported but only with seekable streams as the Zip spec doesn't support Zip64 data in post data descriptors. Deflate64 is only supported for reading.
2. Zip format supports pkware and WinzipAES encryption. However, encrypted LZMA is not supported. Zip64 reading/writing is supported but only with seekable streams as the Zip spec doesn't support Zip64 data in post data descriptors. Deflate64 is only supported for reading. See [Zip Format Notes](#zip-format-notes) for details on multi-volume archives and streaming behavior.
3. The Tar format requires a file size in the header. If no size is specified to the TarWriter and the stream is not seekable, then an exception will be thrown.
4. The 7Zip format doesn't allow for reading as a forward-only stream so 7Zip is only supported through the Archive API
5. LZip has no support for extra data like the file name or timestamp. There is a default filename used when looking at the entry Key on the archive.

### Zip Format Notes

- Multi-volume/split ZIP archives require ZipArchive (seekable streams) as ZipReader cannot seek across volume files.
- ZipReader processes entries from LocalEntry headers (which include directory entries ending with `/`) and intentionally skips DirectoryEntry headers from the central directory, as they are redundant in streaming mode - all entry data comes from LocalEntry headers which ZipReader has already processed.

## Compression Streams

For those who want to directly compress/decompress bits. The single file formats are represented here as well. However, BZip2, LZip and XZ have no metadata (GZip has a little) so using them without something like a Tar file makes little sense.
@@ -41,6 +47,7 @@ For those who want to directly compress/decompress bits. The single file formats
| ADCStream | Decompress |
| LZipStream | Both |
| XZStream | Decompress |
| ZStandardStream | Decompress |

## Archive Formats vs Compression
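As a small companion to the Compression Streams table in the FORMATS.md diff above, here is a minimal C# sketch that round-trips a buffer through SharpCompress's `GZipStream` directly; the two-argument `(Stream, CompressionMode)` constructor is assumed here, and the other stream types in the table follow the same wrap-a-stream pattern.

```csharp
using System.IO;
using System.Text;
using SharpCompress.Compressors;
using SharpCompress.Compressors.Deflate;

class CompressionStreamExample
{
    static void Main()
    {
        byte[] original = Encoding.UTF8.GetBytes("hello sharpcompress");

        // Compress: wrap the destination stream and write the raw bytes through it.
        var compressed = new MemoryStream();
        using (var gzip = new GZipStream(compressed, CompressionMode.Compress))
        {
            gzip.Write(original, 0, original.Length);
        } // Disposing the GZipStream flushes the trailer; it also closes `compressed` by default.

        // Decompress: wrap the compressed bytes and copy the raw bytes back out.
        using var roundTripped = new MemoryStream();
        using (var source = new MemoryStream(compressed.ToArray()))
        using (var gzip = new GZipStream(source, CompressionMode.Decompress))
        {
            gzip.CopyTo(roundTripped);
        }
        // roundTripped now holds the same bytes as `original`.
    }
}
```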
NuGet.config (new file, 9 lines)
@@ -0,0 +1,9 @@
<?xml version="1.0" encoding="utf-8"?>
<configuration>
  <packageSourceMapping>
    <!-- key value for <packageSource> should match key values from <packageSources> element -->
    <packageSource key="nuget.org">
      <package pattern="*" />
    </packageSource>
  </packageSourceMapping>
</configuration>
92
README.md
92
README.md
@@ -1,11 +1,14 @@
|
||||
# SharpCompress
|
||||
|
||||
SharpCompress is a compression library in pure C# for .NET Standard 2.0, 2.1, .NET Core 3.1 and .NET 5.0 that can unrar, un7zip, unzip, untar unbzip2, ungzip, unlzip with forward-only reading and file random access APIs. Write support for zip/tar/bzip2/gzip/lzip are implemented.
|
||||
SharpCompress is a compression library in pure C# for .NET Framework 4.8, .NET 8.0 and .NET 10.0 that can unrar, un7zip, unzip, untar unbzip2, ungzip, unlzip, unzstd, unarc and unarj with forward-only reading and file random access APIs. Write support for zip/tar/bzip2/gzip/lzip are implemented.
|
||||
|
||||
The major feature is support for non-seekable streams so large files can be processed on the fly (i.e. download stream).
|
||||
|
||||
**NEW:** All I/O operations now support async/await for improved performance and scalability. See the [Async Usage](#async-usage) section below.
|
||||
|
||||
GitHub Actions Build -
|
||||
[](https://circleci.com/gh/adamhathcock/sharpcompress)
|
||||
[](https://github.com/adamhathcock/sharpcompress/actions/workflows/dotnetcore.yml)
|
||||
[](https://dndocs.com/d/sharpcompress/api/index.html)
|
||||
|
||||
## Need Help?
|
||||
|
||||
@@ -19,29 +22,110 @@ In general, I recommend GZip (Deflate)/BZip2 (BZip)/LZip (LZMA) as the simplicit
|
||||
|
||||
Zip is okay, but it's a very hap-hazard format and the variation in headers and implementations makes it hard to get correct. Uses Deflate by default but supports a lot of compression methods.
|
||||
|
||||
RAR is not recommended as it's a propriatory format and the compression is closed source. Use Tar/LZip for LZMA
|
||||
RAR is not recommended as it's a proprietary format and the compression is closed source. Use Tar/LZip for LZMA
|
||||
|
||||
7Zip and XZ both are overly complicated. 7Zip does not support streamable formats. XZ has known holes explained here: (http://www.nongnu.org/lzip/xz_inadequate.html) Use Tar/LZip for LZMA compression instead.
|
||||
|
||||
ZStandard is an efficient format that works well for streaming with a flexible compression level to tweak the speed/performance trade off you are looking for. We currently only implement decompression for ZStandard but as we leverage the [ZstdSharp](https://github.com/oleg-st/ZstdSharp) library one could likely add compression support without much trouble (PRs are welcome!).
|
||||
|
||||
## A Simple Request
|
||||
|
||||
Hi everyone. I hope you're using SharpCompress and finding it useful. Please give me feedback on what you'd like to see changed especially as far as usability goes. New feature suggestions are always welcome as well. I would also like to know what projects SharpCompress is being used in. I like seeing how it is used to give me ideas for future versions. Thanks!
|
||||
|
||||
Please do not email me directly to ask for help. If you think there is a real issue, please report it here.
|
||||
|
||||
## Async Usage

SharpCompress now provides full async/await support for all I/O operations, allowing for better performance and scalability in modern applications.

### Async Reading Examples

Extract entries asynchronously:

```csharp
using (Stream stream = File.OpenRead("archive.zip"))
using (var reader = ReaderFactory.Open(stream))
{
    while (reader.MoveToNextEntry())
    {
        if (!reader.Entry.IsDirectory)
        {
            // Async extraction
            await reader.WriteEntryToDirectoryAsync(
                @"C:\temp",
                new ExtractionOptions() { ExtractFullPath = true, Overwrite = true },
                cancellationToken
            );
        }
    }
}
```

Extract all entries to directory asynchronously:

```csharp
using (Stream stream = File.OpenRead("archive.tar.gz"))
using (var reader = ReaderFactory.Open(stream))
{
    await reader.WriteAllToDirectoryAsync(
        @"C:\temp",
        new ExtractionOptions() { ExtractFullPath = true, Overwrite = true },
        cancellationToken
    );
}
```

Open entry stream asynchronously:

```csharp
using (var archive = ZipArchive.Open("archive.zip"))
{
    foreach (var entry in archive.Entries.Where(e => !e.IsDirectory))
    {
        using (var entryStream = await entry.OpenEntryStreamAsync(cancellationToken))
        {
            // Process stream asynchronously
            await entryStream.CopyToAsync(outputStream, cancellationToken);
        }
    }
}
```

### Async Writing Examples

Write files asynchronously:

```csharp
using (Stream stream = File.OpenWrite("output.zip"))
using (var writer = WriterFactory.Open(stream, ArchiveType.Zip, CompressionType.Deflate))
{
    await writer.WriteAsync("file1.txt", fileStream, DateTime.Now, cancellationToken);
}
```

Write all files from directory asynchronously:

```csharp
using (Stream stream = File.OpenWrite("output.tar.gz"))
using (var writer = WriterFactory.Open(stream, ArchiveType.Tar, new WriterOptions(CompressionType.GZip)))
{
    await writer.WriteAllAsync(@"D:\files", "*", SearchOption.AllDirectories, cancellationToken);
}
```
All async methods support `CancellationToken` for graceful cancellation of long-running operations.
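For example, a sketch of bounding an extraction with a timeout (the paths and timeout value are placeholders):

```csharp
// Cancel automatically if extraction takes longer than two minutes.
using var cts = new CancellationTokenSource(TimeSpan.FromMinutes(2));

using (Stream stream = File.OpenRead("archive.zip"))
using (var reader = ReaderFactory.Open(stream))
{
    try
    {
        await reader.WriteAllToDirectoryAsync(
            @"C:\temp",
            new ExtractionOptions() { ExtractFullPath = true, Overwrite = true },
            cts.Token
        );
    }
    catch (OperationCanceledException)
    {
        // The timeout elapsed and the extraction was cancelled.
    }
}
```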
## Want to contribute?

I'm always looking for help or ideas. Please submit code or email me with ideas. Unfortunately, just letting me know you'd like to help is not enough, because I really have no overall plan of what needs to be done. I'll definitely accept code submissions and add you as a member of the project!

## TODOs (always lots)

* RAR 5 decryption support
* RAR 5 decryption CRC check support
* 7Zip writing
* Zip64 (needs write support and extended read support)
* Multi-volume Zip support
* ZStandard writing

## Version Log

* [Releases](https://github.com/adamhathcock/sharpcompress/releases)

### Version 0.18

* [Now on Github releases](https://github.com/adamhathcock/sharpcompress/releases/tag/0.18)
@@ -3,8 +3,6 @@ Microsoft Visual Studio Solution File, Format Version 12.00
|
||||
# Visual Studio 15
|
||||
VisualStudioVersion = 15.0.26430.6
|
||||
MinimumVisualStudioVersion = 10.0.40219.1
|
||||
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Solution Items", "Solution Items", "{F18F1765-4A02-42FD-9BEF-F0E2FCBD9D17}"
|
||||
EndProject
|
||||
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "src", "src", "{3C5BE746-03E5-4895-9988-0B57F162F86C}"
|
||||
EndProject
|
||||
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "tests", "tests", "{0F0901FF-E8D9-426A-B5A2-17C7F47C1529}"
|
||||
@@ -15,6 +13,22 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "SharpCompress.Test", "tests
|
||||
EndProject
|
||||
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "build", "build\build.csproj", "{D4D613CB-5E94-47FB-85BE-B8423D20C545}"
|
||||
EndProject
|
||||
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Config", "Config", "{CDB42573-7D22-4490-BA12-1B7FB99CE7FB}"
|
||||
ProjectSection(SolutionItems) = preProject
|
||||
Directory.Build.props = Directory.Build.props
|
||||
global.json = global.json
|
||||
.editorconfig = .editorconfig
|
||||
Directory.Packages.props = Directory.Packages.props
|
||||
NuGet.config = NuGet.config
|
||||
.github\workflows\dotnetcore.yml = .github\workflows\dotnetcore.yml
|
||||
USAGE.md = USAGE.md
|
||||
README.md = README.md
|
||||
FORMATS.md = FORMATS.md
|
||||
AGENTS.md = AGENTS.md
|
||||
EndProjectSection
|
||||
EndProject
|
||||
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "SharpCompress.Performance", "tests\SharpCompress.Performance\SharpCompress.Performance.csproj", "{5BDE6DBC-9E5F-4E21-AB71-F138A3E72B17}"
|
||||
EndProject
|
||||
Global
|
||||
GlobalSection(SolutionConfigurationPlatforms) = preSolution
|
||||
Debug|Any CPU = Debug|Any CPU
|
||||
@@ -33,6 +47,10 @@ Global
|
||||
{D4D613CB-5E94-47FB-85BE-B8423D20C545}.Debug|Any CPU.Build.0 = Debug|Any CPU
|
||||
{D4D613CB-5E94-47FB-85BE-B8423D20C545}.Release|Any CPU.ActiveCfg = Release|Any CPU
|
||||
{D4D613CB-5E94-47FB-85BE-B8423D20C545}.Release|Any CPU.Build.0 = Release|Any CPU
|
||||
{5BDE6DBC-9E5F-4E21-AB71-F138A3E72B17}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
|
||||
{5BDE6DBC-9E5F-4E21-AB71-F138A3E72B17}.Debug|Any CPU.Build.0 = Debug|Any CPU
|
||||
{5BDE6DBC-9E5F-4E21-AB71-F138A3E72B17}.Release|Any CPU.ActiveCfg = Release|Any CPU
|
||||
{5BDE6DBC-9E5F-4E21-AB71-F138A3E72B17}.Release|Any CPU.Build.0 = Release|Any CPU
|
||||
EndGlobalSection
|
||||
GlobalSection(SolutionProperties) = preSolution
|
||||
HideSolutionNode = FALSE
|
||||
@@ -40,5 +58,6 @@ Global
|
||||
GlobalSection(NestedProjects) = preSolution
|
||||
{FD19DDD8-72B2-4024-8665-0D1F7A2AA998} = {3C5BE746-03E5-4895-9988-0B57F162F86C}
|
||||
{F2B1A1EB-0FA6-40D0-8908-E13247C7226F} = {0F0901FF-E8D9-426A-B5A2-17C7F47C1529}
|
||||
{5BDE6DBC-9E5F-4E21-AB71-F138A3E72B17} = {0F0901FF-E8D9-426A-B5A2-17C7F47C1529}
|
||||
EndGlobalSection
|
||||
EndGlobal
|
||||
|
||||
@@ -15,17 +15,17 @@
|
||||
|
||||
<s:String x:Key="/Default/CodeStyle/CodeCleanup/SilentCleanupProfile/@EntryValue">Basic Clean</s:String>
|
||||
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpCodeStyle/APPLY_ON_COMPLETION/@EntryValue">True</s:Boolean>
|
||||
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpCodeStyle/ARGUMENTS_NAMED/@EntryValue">Named</s:String>
|
||||
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpCodeStyle/ARGUMENTS_NAMED/@EntryValue">Positional</s:String>
|
||||
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpCodeStyle/BRACES_FOR_FOR/@EntryValue">Required</s:String>
|
||||
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpCodeStyle/BRACES_FOR_FOREACH/@EntryValue">Required</s:String>
|
||||
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpCodeStyle/BRACES_FOR_IFELSE/@EntryValue">Required</s:String>
|
||||
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpCodeStyle/BRACES_FOR_WHILE/@EntryValue">Required</s:String>
|
||||
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_FIRST_ARG_BY_PAREN/@EntryValue">True</s:Boolean>
|
||||
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_FIRST_ARG_BY_PAREN/@EntryValue">False</s:Boolean>
|
||||
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_LINQ_QUERY/@EntryValue">True</s:Boolean>
|
||||
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_MULTILINE_ARGUMENT/@EntryValue">True</s:Boolean>
|
||||
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_MULTILINE_ARRAY_AND_OBJECT_INITIALIZER/@EntryValue">True</s:Boolean>
|
||||
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_MULTILINE_ARRAY_AND_OBJECT_INITIALIZER/@EntryValue">False</s:Boolean>
|
||||
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_MULTILINE_CALLS_CHAIN/@EntryValue">True</s:Boolean>
|
||||
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_MULTILINE_EXPRESSION/@EntryValue">True</s:Boolean>
|
||||
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_MULTILINE_EXPRESSION/@EntryValue">False</s:Boolean>
|
||||
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_MULTILINE_EXTENDS_LIST/@EntryValue">True</s:Boolean>
|
||||
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_MULTILINE_FOR_STMT/@EntryValue">True</s:Boolean>
|
||||
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_MULTILINE_PARAMETER/@EntryValue">True</s:Boolean>
|
||||
@@ -42,7 +42,7 @@
|
||||
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/FORCE_IFELSE_BRACES_STYLE/@EntryValue">ALWAYS_ADD</s:String>
|
||||
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/FORCE_USING_BRACES_STYLE/@EntryValue">ALWAYS_ADD</s:String>
|
||||
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/FORCE_WHILE_BRACES_STYLE/@EntryValue">ALWAYS_ADD</s:String>
|
||||
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/INDENT_ANONYMOUS_METHOD_BLOCK/@EntryValue">True</s:Boolean>
|
||||
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/INDENT_ANONYMOUS_METHOD_BLOCK/@EntryValue">False</s:Boolean>
|
||||
<s:Int64 x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/KEEP_BLANK_LINES_IN_CODE/@EntryValue">1</s:Int64>
|
||||
<s:Int64 x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/KEEP_BLANK_LINES_IN_DECLARATIONS/@EntryValue">1</s:Int64>
|
||||
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/PLACE_ACCESSOR_ATTRIBUTE_ON_SAME_LINE_EX/@EntryValue">NEVER</s:String>
|
||||
@@ -50,12 +50,12 @@
|
||||
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/PLACE_CONSTRUCTOR_INITIALIZER_ON_SAME_LINE/@EntryValue">False</s:Boolean>
|
||||
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/PLACE_FIELD_ATTRIBUTE_ON_SAME_LINE/@EntryValue">False</s:Boolean>
|
||||
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/PLACE_FIELD_ATTRIBUTE_ON_SAME_LINE_EX/@EntryValue">NEVER</s:String>
|
||||
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/PLACE_SIMPLE_ACCESSORHOLDER_ON_SINGLE_LINE/@EntryValue">True</s:Boolean>
|
||||
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/PLACE_SIMPLE_ACCESSORHOLDER_ON_SINGLE_LINE/@EntryValue">False</s:Boolean>
|
||||
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/PLACE_SIMPLE_ACCESSOR_ATTRIBUTE_ON_SAME_LINE/@EntryValue">False</s:Boolean>
|
||||
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/PLACE_SIMPLE_EMBEDDED_STATEMENT_ON_SAME_LINE/@EntryValue">NEVER</s:String>
|
||||
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/PLACE_SIMPLE_INITIALIZER_ON_SINGLE_LINE/@EntryValue">True</s:Boolean>
|
||||
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/PLACE_SIMPLE_INITIALIZER_ON_SINGLE_LINE/@EntryValue">False</s:Boolean>
|
||||
|
||||
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/PLACE_WHILE_ON_NEW_LINE/@EntryValue">True</s:Boolean>
|
||||
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/PLACE_WHILE_ON_NEW_LINE/@EntryValue">False</s:Boolean>
|
||||
|
||||
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/SIMPLE_EMBEDDED_STATEMENT_STYLE/@EntryValue">LINE_BREAK</s:String>
|
||||
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/SPACE_AFTER_TYPECAST_PARENTHESES/@EntryValue">False</s:Boolean>
|
||||
@@ -67,18 +67,22 @@
|
||||
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/SPACE_BEFORE_TYPEOF_PARENTHESES/@EntryValue">False</s:Boolean>
|
||||
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/STICK_COMMENT/@EntryValue">False</s:Boolean>
|
||||
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/WRAP_ARGUMENTS_STYLE/@EntryValue">CHOP_IF_LONG</s:String>
|
||||
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/WRAP_ARRAY_INITIALIZER_STYLE/@EntryValue">CHOP_IF_LONG</s:String>
|
||||
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/WRAP_ARRAY_INITIALIZER_STYLE/@EntryValue">CHOP_ALWAYS</s:String>
|
||||
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/WRAP_EXTENDS_LIST_STYLE/@EntryValue">CHOP_IF_LONG</s:String>
|
||||
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/WRAP_LINES/@EntryValue">False</s:Boolean>
|
||||
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/WRAP_PARAMETERS_STYLE/@EntryValue">CHOP_IF_LONG</s:String>
|
||||
<s:String x:Key="/Default/CodeStyle/CSharpVarKeywordUsage/ForBuiltInTypes/@EntryValue">UseVarWhenEvident</s:String>
|
||||
<s:String x:Key="/Default/CodeStyle/CSharpVarKeywordUsage/ForOtherTypes/@EntryValue">UseVarWhenEvident</s:String>
|
||||
<s:String x:Key="/Default/CodeStyle/CSharpVarKeywordUsage/ForSimpleTypes/@EntryValue">UseVarWhenEvident</s:String>
|
||||
<s:String x:Key="/Default/CodeStyle/CSharpVarKeywordUsage/ForBuiltInTypes/@EntryValue">UseVar</s:String>
|
||||
<s:String x:Key="/Default/CodeStyle/CSharpVarKeywordUsage/ForOtherTypes/@EntryValue">UseVar</s:String>
|
||||
<s:String x:Key="/Default/CodeStyle/CSharpVarKeywordUsage/ForSimpleTypes/@EntryValue">UseVar</s:String>
|
||||
|
||||
<s:String x:Key="/Default/CodeStyle/Naming/CSharpNaming/PredefinedNamingRules/=PrivateInstanceFields/@EntryIndexedValue"><Policy Inspect="True" Prefix="_" Suffix="" Style="aaBb" /></s:String>
|
||||
<s:String x:Key="/Default/CodeStyle/Naming/CSharpNaming/PredefinedNamingRules/=PrivateStaticFields/@EntryIndexedValue"><Policy Inspect="True" Prefix="" Suffix="" Style="AA_BB" /></s:String>
|
||||
<s:String x:Key="/Default/CodeStyle/Naming/CSharpNaming/PredefinedNamingRules/=PrivateStaticReadonly/@EntryIndexedValue"><Policy Inspect="True" Prefix="" Suffix="" Style="AA_BB" /></s:String>
|
||||
<s:String x:Key="/Default/CodeStyle/Naming/CSharpNaming/PredefinedNamingRules/=StaticReadonly/@EntryIndexedValue"><Policy Inspect="True" Prefix="" Suffix="" Style="AA_BB" /></s:String>
|
||||
<s:String x:Key="/Default/CodeStyle/Naming/CSharpNaming/UserRules/=15b5b1f1_002D457c_002D4ca6_002Db278_002D5615aedc07d3/@EntryIndexedValue"><Policy><Descriptor Staticness="Static" AccessRightKinds="Private" Description="Static readonly fields (private)"><ElementKinds><Kind Name="READONLY_FIELD" /></ElementKinds></Descriptor><Policy Inspect="True" Prefix="" Suffix="" Style="AA_BB" /></Policy></s:String>
|
||||
<s:String x:Key="/Default/CodeStyle/Naming/CSharpNaming/UserRules/=4a98fdf6_002D7d98_002D4f5a_002Dafeb_002Dea44ad98c70c/@EntryIndexedValue"><Policy><Descriptor Staticness="Instance" AccessRightKinds="Private" Description="Instance fields (private)"><ElementKinds><Kind Name="FIELD" /><Kind Name="READONLY_FIELD" /></ElementKinds></Descriptor><Policy Inspect="True" Prefix="_" Suffix="" Style="aaBb" /></Policy></s:String>
|
||||
<s:String x:Key="/Default/CodeStyle/Naming/CSharpNaming/UserRules/=c873eafb_002Dd57f_002D481d_002D8c93_002D77f6863c2f88/@EntryIndexedValue"><Policy><Descriptor Staticness="Static" AccessRightKinds="Protected, ProtectedInternal, Internal, Public, PrivateProtected" Description="Static readonly fields (not private)"><ElementKinds><Kind Name="READONLY_FIELD" /></ElementKinds></Descriptor><Policy Inspect="True" Prefix="" Suffix="" Style="AA_BB" /></Policy></s:String>
|
||||
<s:String x:Key="/Default/CodeStyle/Naming/CSharpNaming/UserRules/=f9fce829_002De6f4_002D4cb2_002D80f1_002D5497c44f51df/@EntryIndexedValue"><Policy><Descriptor Staticness="Static" AccessRightKinds="Private" Description="Static fields (private)"><ElementKinds><Kind Name="FIELD" /></ElementKinds></Descriptor><Policy Inspect="True" Prefix="" Suffix="" Style="AA_BB" /></Policy></s:String>
|
||||
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=JS_005FBLOCK_005FSCOPE_005FCONSTANT/@EntryIndexedValue"><Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /></s:String>
|
||||
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=JS_005FBLOCK_005FSCOPE_005FFUNCTION/@EntryIndexedValue"><Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /></s:String>
|
||||
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=JS_005FBLOCK_005FSCOPE_005FVARIABLE/@EntryIndexedValue"><Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /></s:String>
|
||||
@@ -118,6 +122,7 @@
|
||||
<s:String x:Key="/Default/CodeStyle/Naming/XamlNaming/UserRules/=NAMESPACE_005FALIAS/@EntryIndexedValue"><Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /></s:String>
|
||||
<s:String x:Key="/Default/CodeStyle/Naming/XamlNaming/UserRules/=XAML_005FFIELD/@EntryIndexedValue"><Policy Inspect="True" Prefix="" Suffix="" Style="AaBb" /></s:String>
|
||||
<s:String x:Key="/Default/CodeStyle/Naming/XamlNaming/UserRules/=XAML_005FRESOURCE/@EntryIndexedValue"><Policy Inspect="True" Prefix="" Suffix="" Style="AaBb" /></s:String>
|
||||
<s:String x:Key="/Default/CustomTools/CustomToolsData/@EntryValue"></s:String>
|
||||
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ECSharpAttributeForSingleLineMethodUpgrade/@EntryIndexedValue">True</s:Boolean>
|
||||
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ECSharpKeepExistingMigration/@EntryIndexedValue">True</s:Boolean>
|
||||
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ECSharpPlaceEmbeddedOnSameLineMigration/@EntryIndexedValue">True</s:Boolean>
|
||||
@@ -127,6 +132,7 @@
|
||||
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ESettingsUpgrade_002ECSharpPlaceAttributeOnSameLineMigration/@EntryIndexedValue">True</s:Boolean>
|
||||
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ESettingsUpgrade_002EMigrateBlankLinesAroundFieldToBlankLinesAroundProperty/@EntryIndexedValue">True</s:Boolean>
|
||||
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ESettingsUpgrade_002EMigrateThisQualifierSettings/@EntryIndexedValue">True</s:Boolean>
|
||||
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ESettingsUpgrade_002EPredefinedNamingRulesToUserRulesUpgrade/@EntryIndexedValue">True</s:Boolean>
|
||||
<s:String x:Key="/Default/Environment/UnitTesting/UnitTestSessionStore/Sessions/=6af8f80e_002D9fdd_002D4223_002D8e02_002D473db916f9b2/@EntryIndexedValue"><SessionState ContinuousTestingIsOn="False" ContinuousTestingMode="0" FrameworkVersion="{x:Null}" IsLocked="False" Name="All tests from Solution" PlatformMonoPreference="{x:Null}" PlatformType="{x:Null}" xmlns="urn:schemas-jetbrains-com:jetbrains-ut-session" xmlns:x="http://schemas.microsoft.com/winfx/2006/xaml">
|
||||
<Solution />
|
||||
</SessionState></s:String></wpf:ResourceDictionary>
|
||||
|
||||
187 USAGE.md
@@ -1,5 +1,18 @@

# SharpCompress Usage

## Async/Await Support

SharpCompress now provides full async/await support for all I/O operations. All `Read`, `Write`, and extraction operations have async equivalents ending in `Async` that accept an optional `CancellationToken`. This enables better performance and scalability for I/O-bound operations.

**Key Async Methods:**
- `reader.WriteEntryToAsync(stream, cancellationToken)` - Extract entry asynchronously
- `reader.WriteAllToDirectoryAsync(path, options, cancellationToken)` - Extract all asynchronously
- `writer.WriteAsync(filename, stream, modTime, cancellationToken)` - Write entry asynchronously
- `writer.WriteAllAsync(directory, pattern, searchOption, cancellationToken)` - Write directory asynchronously
- `entry.OpenEntryStreamAsync(cancellationToken)` - Open entry stream asynchronously

See [Async Examples](#async-examples) section below for usage patterns.
## Stream Rules (changed with 0.21)

When dealing with Streams, the rule should be that you don't close a stream you didn't create. In practice, this means you should always put a Stream you create in a `using` block so it gets disposed.

@@ -27,12 +40,24 @@ To deal with the "correct" rules as well as the expectations of users, I've deci

To be explicit, though, consider always using the overloads that take `ReaderOptions` or `WriterOptions` and explicitly set `LeaveStreamOpen` the way you want.
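A minimal sketch of being explicit about stream ownership when reading (the file name is a placeholder):

```C#
using System.IO;
using SharpCompress.Readers;

using (Stream stream = File.OpenRead("archive.zip"))
{
    // Keep the underlying stream open; this scope remains responsible for disposing it.
    using (var reader = ReaderFactory.Open(stream, new ReaderOptions() { LeaveStreamOpen = true }))
    {
        while (reader.MoveToNextEntry())
        {
            // process reader.Entry ...
        }
    }

    // 'stream' is still open and usable here.
}
```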
If you are using the compression Stream classes directly and don't want the wrapped stream to be closed, use the `NonDisposingStream` as a wrapped to prevent the stream being disposed. The change in 0.21 simplified a lot even though the usage is a bit more convoluted.
If you are using the compression Stream classes directly and don't want the wrapped stream to be closed, use the `NonDisposingStream` as a wrapper to prevent the stream being disposed. The change in 0.21 simplified a lot even though the usage is a bit more convoluted.
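A sketch of that pattern with SharpCompress's `GZipStream` (the exact `NonDisposingStream` construction API may differ slightly between versions, and the file names are placeholders):

```C#
using System.IO;
using SharpCompress.Compressors;
using SharpCompress.Compressors.Deflate;
using SharpCompress.IO;

using (var output = File.Create("file.txt.gz"))
{
    // Wrap 'output' so that disposing the GZipStream does not dispose it.
    // (Constructor shape assumed; check the NonDisposingStream API in your version.)
    using (var gzip = new GZipStream(new NonDisposingStream(output), CompressionMode.Compress))
    using (var input = File.OpenRead("file.txt"))
    {
        input.CopyTo(gzip);
    }

    // 'output' is still open here; flush or inspect it, then dispose it yourself.
}
```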
## Samples
|
||||
|
||||
Also, look over the tests for more thorough [examples](https://github.com/adamhathcock/sharpcompress/tree/master/tests/SharpCompress.Test)
|
||||
|
||||
### Create Zip Archive from multiple files
|
||||
```C#
|
||||
using(var archive = ZipArchive.Create())
|
||||
{
|
||||
archive.AddEntry("file01.txt", "C:\\file01.txt");
|
||||
archive.AddEntry("file02.txt", "C:\\file02.txt");
|
||||
...
|
||||
|
||||
archive.SaveTo("C:\\temp.zip", CompressionType.Deflate);
|
||||
}
|
||||
```
|
||||
|
||||
### Create Zip Archive from all files in a directory to a file
|
||||
|
||||
```C#
|
||||
@@ -59,18 +84,34 @@ using (var archive = ZipArchive.Create())
|
||||
memoryStream.Position = 0;
|
||||
```
|
||||
|
||||
### Extract all files from a Rar file to a directory using RarArchive
|
||||
### Extract all files from a rar file to a directory using RarArchive
|
||||
|
||||
Note: Extracting a solid rar or 7z file needs to be done in sequential order to get acceptable decompression speed.
|
||||
It is explicitly recommended to use `ExtractAllEntries` when extracting an entire `IArchive` instead of iterating over all its `Entries`.
|
||||
Alternatively, use `IArchive.WriteToDirectory`.
|
||||
|
||||
```C#
|
||||
using (var archive = RarArchive.Open("Test.rar"))
|
||||
{
|
||||
using (var reader = archive.ExtractAllEntries())
|
||||
{
|
||||
reader.WriteAllToDirectory(@"D:\temp", new ExtractionOptions()
|
||||
{
|
||||
ExtractFullPath = true,
|
||||
Overwrite = true
|
||||
});
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Iterate over all files from a Rar file using RarArchive
|
||||
|
||||
```C#
|
||||
using (var archive = RarArchive.Open("Test.rar"))
|
||||
{
|
||||
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
|
||||
{
|
||||
entry.WriteToDirectory("D:\\temp", new ExtractionOptions()
|
||||
{
|
||||
ExtractFullPath = true,
|
||||
Overwrite = true
|
||||
});
|
||||
Console.WriteLine($"{entry.Key}: {entry.Size} bytes");
|
||||
}
|
||||
}
|
||||
```
|
||||
@@ -143,4 +184,134 @@ foreach(var entry in tr.Entries)
|
||||
{
|
||||
Console.WriteLine($"{entry.Key}");
|
||||
}
|
||||
```
|
||||
```
|
||||
|
||||
## Async Examples
|
||||
|
||||
### Async Reader Examples
|
||||
|
||||
**Extract single entry asynchronously:**
|
||||
```C#
|
||||
using (Stream stream = File.OpenRead("archive.zip"))
|
||||
using (var reader = ReaderFactory.Open(stream))
|
||||
{
|
||||
while (reader.MoveToNextEntry())
|
||||
{
|
||||
if (!reader.Entry.IsDirectory)
|
||||
{
|
||||
using (var entryStream = reader.OpenEntryStream())
|
||||
{
|
||||
using (var outputStream = File.Create("output.bin"))
|
||||
{
|
||||
await reader.WriteEntryToAsync(outputStream, cancellationToken);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
**Extract all entries asynchronously:**
|
||||
```C#
|
||||
using (Stream stream = File.OpenRead("archive.tar.gz"))
|
||||
using (var reader = ReaderFactory.Open(stream))
|
||||
{
|
||||
await reader.WriteAllToDirectoryAsync(
|
||||
@"D:\temp",
|
||||
new ExtractionOptions()
|
||||
{
|
||||
ExtractFullPath = true,
|
||||
Overwrite = true
|
||||
},
|
||||
cancellationToken
|
||||
);
|
||||
}
|
||||
```
|
||||
|
||||
**Open and process entry stream asynchronously:**
|
||||
```C#
|
||||
using (var archive = ZipArchive.Open("archive.zip"))
|
||||
{
|
||||
foreach (var entry in archive.Entries.Where(e => !e.IsDirectory))
|
||||
{
|
||||
using (var entryStream = await entry.OpenEntryStreamAsync(cancellationToken))
|
||||
{
|
||||
// Process the decompressed stream asynchronously
|
||||
await ProcessStreamAsync(entryStream, cancellationToken);
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Async Writer Examples
|
||||
|
||||
**Write single file asynchronously:**
|
||||
```C#
|
||||
using (Stream archiveStream = File.OpenWrite("output.zip"))
|
||||
using (var writer = WriterFactory.Open(archiveStream, ArchiveType.Zip, CompressionType.Deflate))
|
||||
{
|
||||
using (Stream fileStream = File.OpenRead("input.txt"))
|
||||
{
|
||||
await writer.WriteAsync("entry.txt", fileStream, DateTime.Now, cancellationToken);
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
**Write entire directory asynchronously:**
|
||||
```C#
|
||||
using (Stream stream = File.OpenWrite("backup.tar.gz"))
|
||||
using (var writer = WriterFactory.Open(stream, ArchiveType.Tar, new WriterOptions(CompressionType.GZip)))
|
||||
{
|
||||
await writer.WriteAllAsync(
|
||||
@"D:\files",
|
||||
"*",
|
||||
SearchOption.AllDirectories,
|
||||
cancellationToken
|
||||
);
|
||||
}
|
||||
```
|
||||
|
||||
**Write with progress tracking and cancellation:**
|
||||
```C#
|
||||
var cts = new CancellationTokenSource();
|
||||
|
||||
// Set timeout or cancel from UI
|
||||
cts.CancelAfter(TimeSpan.FromMinutes(5));
|
||||
|
||||
using (Stream stream = File.OpenWrite("archive.zip"))
|
||||
using (var writer = WriterFactory.Open(stream, ArchiveType.Zip, CompressionType.Deflate))
|
||||
{
|
||||
try
|
||||
{
|
||||
await writer.WriteAllAsync(@"D:\data", "*", SearchOption.AllDirectories, cts.Token);
|
||||
}
|
||||
catch (OperationCanceledException)
|
||||
{
|
||||
Console.WriteLine("Operation was cancelled");
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Archive Async Examples
|
||||
|
||||
**Extract from archive asynchronously:**
|
||||
```C#
|
||||
using (var archive = ZipArchive.Open("archive.zip"))
|
||||
{
|
||||
using (var reader = archive.ExtractAllEntries())
|
||||
{
|
||||
await reader.WriteAllToDirectoryAsync(
|
||||
@"C:\output",
|
||||
new ExtractionOptions() { ExtractFullPath = true, Overwrite = true },
|
||||
cancellationToken
|
||||
);
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
**Benefits of Async Operations:**
|
||||
- Non-blocking I/O for better application responsiveness
|
||||
- Improved scalability for server applications
|
||||
- Support for cancellation via CancellationToken
|
||||
- Better resource utilization in async/await contexts
|
||||
- Compatible with modern .NET async patterns
|
||||
|
||||
132 build/Program.cs
@@ -1,4 +1,4 @@
|
||||
using System;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Runtime.InteropServices;
|
||||
@@ -7,77 +7,89 @@ using static Bullseye.Targets;
|
||||
using static SimpleExec.Command;
|
||||
|
||||
const string Clean = "clean";
|
||||
const string Format = "format";
|
||||
const string Restore = "restore";
|
||||
const string Build = "build";
|
||||
const string Test = "test";
|
||||
const string Format = "format";
|
||||
const string Publish = "publish";
|
||||
|
||||
Target(Clean,
|
||||
ForEach("**/bin", "**/obj"),
|
||||
dir =>
|
||||
{
|
||||
IEnumerable<string> GetDirectories(string d)
|
||||
{
|
||||
return Glob.Directories(".", d);
|
||||
}
|
||||
Target(
|
||||
Clean,
|
||||
["**/bin", "**/obj"],
|
||||
dir =>
|
||||
{
|
||||
IEnumerable<string> GetDirectories(string d)
|
||||
{
|
||||
return Glob.Directories(".", d);
|
||||
}
|
||||
|
||||
void RemoveDirectory(string d)
|
||||
{
|
||||
if (Directory.Exists(d))
|
||||
{
|
||||
Console.WriteLine(d);
|
||||
Directory.Delete(d, true);
|
||||
}
|
||||
}
|
||||
void RemoveDirectory(string d)
|
||||
{
|
||||
if (Directory.Exists(d))
|
||||
{
|
||||
Console.WriteLine(d);
|
||||
Directory.Delete(d, true);
|
||||
}
|
||||
}
|
||||
|
||||
foreach (var d in GetDirectories(dir))
|
||||
{
|
||||
RemoveDirectory(d);
|
||||
}
|
||||
});
|
||||
foreach (var d in GetDirectories(dir))
|
||||
{
|
||||
RemoveDirectory(d);
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
Target(Format,
|
||||
() =>
|
||||
{
|
||||
Run("dotnet", "tool restore");
|
||||
Run("dotnet", "format --check");
|
||||
});
|
||||
Target(
|
||||
Format,
|
||||
() =>
|
||||
{
|
||||
Run("dotnet", "tool restore");
|
||||
Run("dotnet", "csharpier check .");
|
||||
}
|
||||
);
|
||||
Target(Restore, [Format], () => Run("dotnet", "restore"));
|
||||
|
||||
Target(Build,
|
||||
DependsOn(Format),
|
||||
framework =>
|
||||
{
|
||||
Run("dotnet", "build src/SharpCompress/SharpCompress.csproj -c Release");
|
||||
});
|
||||
Target(
|
||||
Build,
|
||||
[Restore],
|
||||
() =>
|
||||
{
|
||||
Run("dotnet", "build src/SharpCompress/SharpCompress.csproj -c Release --no-restore");
|
||||
}
|
||||
);
|
||||
|
||||
Target(Test,
|
||||
DependsOn(Build),
|
||||
ForEach("net6.0", "net461"),
|
||||
framework =>
|
||||
{
|
||||
IEnumerable<string> GetFiles(string d)
|
||||
{
|
||||
return Glob.Files(".", d);
|
||||
}
|
||||
Target(
|
||||
Test,
|
||||
[Build],
|
||||
["net8.0", "net48"],
|
||||
framework =>
|
||||
{
|
||||
IEnumerable<string> GetFiles(string d)
|
||||
{
|
||||
return Glob.Files(".", d);
|
||||
}
|
||||
|
||||
if (!RuntimeInformation.IsOSPlatform(OSPlatform.Windows) && framework == "net461")
|
||||
{
|
||||
return;
|
||||
}
|
||||
if (!RuntimeInformation.IsOSPlatform(OSPlatform.Windows) && framework == "net48")
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
foreach (var file in GetFiles("**/*.Test.csproj"))
|
||||
{
|
||||
Run("dotnet", $"test {file} -c Release -f {framework}");
|
||||
}
|
||||
});
|
||||
foreach (var file in GetFiles("**/*.Test.csproj"))
|
||||
{
|
||||
Run("dotnet", $"test {file} -c Release -f {framework} --no-restore --verbosity=normal");
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
Target(Publish,
|
||||
DependsOn(Test),
|
||||
() =>
|
||||
{
|
||||
Run("dotnet", "pack src/SharpCompress/SharpCompress.csproj -c Release -o artifacts/");
|
||||
});
|
||||
Target(
|
||||
Publish,
|
||||
[Test],
|
||||
() =>
|
||||
{
|
||||
Run("dotnet", "pack src/SharpCompress/SharpCompress.csproj -c Release -o artifacts/");
|
||||
}
|
||||
);
|
||||
|
||||
Target("default", DependsOn(Publish), () => Console.WriteLine("Done!"));
|
||||
Target("default", [Publish], () => Console.WriteLine("Done!"));
|
||||
|
||||
await RunTargetsAndExitAsync(args);
|
||||
|
||||
@@ -1,14 +1,11 @@
|
||||
<Project Sdk="Microsoft.NET.Sdk">
|
||||
|
||||
<PropertyGroup>
|
||||
<OutputType>Exe</OutputType>
|
||||
<TargetFramework>net6.0</TargetFramework>
|
||||
<TargetFramework>net10.0</TargetFramework>
|
||||
</PropertyGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<PackageReference Include="Bullseye" Version="4.0.0" />
|
||||
<PackageReference Include="Glob" Version="1.1.9" />
|
||||
<PackageReference Include="SimpleExec" Version="10.0.0" />
|
||||
<PackageReference Include="Bullseye" />
|
||||
<PackageReference Include="Glob" />
|
||||
<PackageReference Include="SimpleExec" />
|
||||
</ItemGroup>
|
||||
|
||||
</Project>
|
||||
|
||||
25 build/packages.lock.json (new file)
@@ -0,0 +1,25 @@
|
||||
{
|
||||
"version": 2,
|
||||
"dependencies": {
|
||||
"net10.0": {
|
||||
"Bullseye": {
|
||||
"type": "Direct",
|
||||
"requested": "[6.0.0, )",
|
||||
"resolved": "6.0.0",
|
||||
"contentHash": "vgwwXfzs7jJrskWH7saHRMgPzziq/e86QZNWY1MnMxd7e+De7E7EX4K3C7yrvaK9y02SJoLxNxcLG/q5qUAghw=="
|
||||
},
|
||||
"Glob": {
|
||||
"type": "Direct",
|
||||
"requested": "[1.1.9, )",
|
||||
"resolved": "1.1.9",
|
||||
"contentHash": "AfK5+ECWYTP7G3AAdnU8IfVj+QpGjrh9GC2mpdcJzCvtQ4pnerAGwHsxJ9D4/RnhDUz2DSzd951O/lQjQby2Sw=="
|
||||
},
|
||||
"SimpleExec": {
|
||||
"type": "Direct",
|
||||
"requested": "[12.0.0, )",
|
||||
"resolved": "12.0.0",
|
||||
"contentHash": "ptxlWtxC8vM6Y6e3h9ZTxBBkOWnWrm/Sa1HT+2i1xcXY3Hx2hmKDZP5RShPf8Xr9D+ivlrXNy57ktzyH8kyt+Q=="
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"sdk": {
|
||||
"version": "6.0.200",
|
||||
"version": "10.0.100",
|
||||
"rollForward": "latestFeature"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -15,305 +15,256 @@ using System.Runtime.Intrinsics.X86;
|
||||
#endif
|
||||
|
||||
#pragma warning disable IDE0007 // Use implicit type
|
||||
namespace SharpCompress.Algorithms
|
||||
namespace SharpCompress.Algorithms;
|
||||
|
||||
/// <summary>
|
||||
/// Calculates the 32 bit Adler checksum of a given buffer according to
|
||||
/// RFC 1950. ZLIB Compressed Data Format Specification version 3.3)
|
||||
/// </summary>
|
||||
internal static class Adler32 // From https://github.com/SixLabors/ImageSharp/blob/main/src/ImageSharp/Compression/Zlib/Adler32.cs
|
||||
{
|
||||
/// <summary>
|
||||
/// Calculates the 32 bit Adler checksum of a given buffer according to
|
||||
/// RFC 1950. ZLIB Compressed Data Format Specification version 3.3)
|
||||
/// Global inlining options. Helps temporarily disable inlining for better profiler output.
|
||||
/// </summary>
|
||||
internal static class Adler32 // From https://github.com/SixLabors/ImageSharp/blob/main/src/ImageSharp/Compression/Zlib/Adler32.cs
|
||||
private static class InliningOptions // From https://github.com/SixLabors/ImageSharp/blob/main/src/ImageSharp/Common/Helpers/InliningOptions.cs
|
||||
{
|
||||
/// <summary>
|
||||
/// Global inlining options. Helps temporarily disable inlining for better profiler output.
|
||||
/// <see cref="MethodImplOptions.AggressiveInlining"/> regardless of the build conditions.
|
||||
/// </summary>
|
||||
private static class InliningOptions // From https://github.com/SixLabors/ImageSharp/blob/main/src/ImageSharp/Common/Helpers/InliningOptions.cs
|
||||
{
|
||||
/// <summary>
|
||||
/// <see cref="MethodImplOptions.AggressiveInlining"/> regardless of the build conditions.
|
||||
/// </summary>
|
||||
public const MethodImplOptions AlwaysInline = MethodImplOptions.AggressiveInlining;
|
||||
public const MethodImplOptions AlwaysInline = MethodImplOptions.AggressiveInlining;
|
||||
#if PROFILING
|
||||
public const MethodImplOptions HotPath = MethodImplOptions.NoInlining;
|
||||
public const MethodImplOptions ShortMethod = MethodImplOptions.NoInlining;
|
||||
public const MethodImplOptions HotPath = MethodImplOptions.NoInlining;
|
||||
public const MethodImplOptions ShortMethod = MethodImplOptions.NoInlining;
|
||||
#else
|
||||
#if SUPPORTS_HOTPATH
|
||||
public const MethodImplOptions HotPath = MethodImplOptions.AggressiveOptimization;
|
||||
public const MethodImplOptions HotPath = MethodImplOptions.AggressiveOptimization;
|
||||
#else
|
||||
public const MethodImplOptions HotPath = MethodImplOptions.AggressiveInlining;
|
||||
public const MethodImplOptions HotPath = MethodImplOptions.AggressiveInlining;
|
||||
#endif
|
||||
public const MethodImplOptions ShortMethod = MethodImplOptions.AggressiveInlining;
|
||||
public const MethodImplOptions ShortMethod = MethodImplOptions.AggressiveInlining;
|
||||
#endif
|
||||
public const MethodImplOptions ColdPath = MethodImplOptions.NoInlining;
|
||||
}
|
||||
public const MethodImplOptions ColdPath = MethodImplOptions.NoInlining;
|
||||
}
|
||||
|
||||
#if SUPPORTS_RUNTIME_INTRINSICS
|
||||
/// <summary>
|
||||
/// Provides optimized static methods for trigonometric, logarithmic,
|
||||
/// and other common mathematical functions.
|
||||
/// </summary>
|
||||
private static class Numerics // From https://github.com/SixLabors/ImageSharp/blob/main/src/ImageSharp/Common/Helpers/Numerics.cs
|
||||
{
|
||||
/// <summary>
|
||||
/// Provides optimized static methods for trigonometric, logarithmic,
|
||||
/// and other common mathematical functions.
|
||||
/// Reduces elements of the vector into one sum.
|
||||
/// </summary>
|
||||
private static class Numerics // From https://github.com/SixLabors/ImageSharp/blob/main/src/ImageSharp/Common/Helpers/Numerics.cs
|
||||
/// <param name="accumulator">The accumulator to reduce.</param>
|
||||
/// <returns>The sum of all elements.</returns>
|
||||
[MethodImpl(MethodImplOptions.AggressiveInlining)]
|
||||
public static int ReduceSum(Vector256<int> accumulator)
|
||||
{
|
||||
/// <summary>
|
||||
/// Reduces elements of the vector into one sum.
|
||||
/// </summary>
|
||||
/// <param name="accumulator">The accumulator to reduce.</param>
|
||||
/// <returns>The sum of all elements.</returns>
|
||||
[MethodImpl(MethodImplOptions.AggressiveInlining)]
|
||||
public static int ReduceSum(Vector256<int> accumulator)
|
||||
{
|
||||
// Add upper lane to lower lane.
|
||||
Vector128<int> vsum = Sse2.Add(accumulator.GetLower(), accumulator.GetUpper());
|
||||
// Add upper lane to lower lane.
|
||||
var vsum = Sse2.Add(accumulator.GetLower(), accumulator.GetUpper());
|
||||
|
||||
// Add odd to even.
|
||||
vsum = Sse2.Add(vsum, Sse2.Shuffle(vsum, 0b_11_11_01_01));
|
||||
// Add odd to even.
|
||||
vsum = Sse2.Add(vsum, Sse2.Shuffle(vsum, 0b_11_11_01_01));
|
||||
|
||||
// Add high to low.
|
||||
vsum = Sse2.Add(vsum, Sse2.Shuffle(vsum, 0b_11_10_11_10));
|
||||
// Add high to low.
|
||||
vsum = Sse2.Add(vsum, Sse2.Shuffle(vsum, 0b_11_10_11_10));
|
||||
|
||||
return Sse2.ConvertToInt32(vsum);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Reduces even elements of the vector into one sum.
|
||||
/// </summary>
|
||||
/// <param name="accumulator">The accumulator to reduce.</param>
|
||||
/// <returns>The sum of even elements.</returns>
|
||||
[MethodImpl(MethodImplOptions.AggressiveInlining)]
|
||||
public static int EvenReduceSum(Vector256<int> accumulator)
|
||||
{
|
||||
Vector128<int> vsum = Sse2.Add(accumulator.GetLower(), accumulator.GetUpper()); // add upper lane to lower lane
|
||||
vsum = Sse2.Add(vsum, Sse2.Shuffle(vsum, 0b_11_10_11_10)); // add high to low
|
||||
|
||||
// Vector128<int>.ToScalar() isn't optimized pre-net5.0 https://github.com/dotnet/runtime/pull/37882
|
||||
return Sse2.ConvertToInt32(vsum);
|
||||
}
|
||||
return Sse2.ConvertToInt32(vsum);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Reduces even elements of the vector into one sum.
|
||||
/// </summary>
|
||||
/// <param name="accumulator">The accumulator to reduce.</param>
|
||||
/// <returns>The sum of even elements.</returns>
|
||||
[MethodImpl(MethodImplOptions.AggressiveInlining)]
|
||||
public static int EvenReduceSum(Vector256<int> accumulator)
|
||||
{
|
||||
var vsum = Sse2.Add(accumulator.GetLower(), accumulator.GetUpper()); // add upper lane to lower lane
|
||||
vsum = Sse2.Add(vsum, Sse2.Shuffle(vsum, 0b_11_10_11_10)); // add high to low
|
||||
|
||||
// Vector128<int>.ToScalar() isn't optimized pre-net5.0 https://github.com/dotnet/runtime/pull/37882
|
||||
return Sse2.ConvertToInt32(vsum);
|
||||
}
|
||||
}
|
||||
#endif
|
||||
|
||||
/// <summary>
|
||||
/// The default initial seed value of a Adler32 checksum calculation.
|
||||
/// </summary>
|
||||
public const uint SeedValue = 1U;
|
||||
/// <summary>
|
||||
/// The default initial seed value of a Adler32 checksum calculation.
|
||||
/// </summary>
|
||||
public const uint SeedValue = 1U;
|
||||
|
||||
// Largest prime smaller than 65536
|
||||
private const uint BASE = 65521;
|
||||
// Largest prime smaller than 65536
|
||||
private const uint BASE = 65521;
|
||||
|
||||
// NMAX is the largest n such that 255n(n+1)/2 + (n+1)(BASE-1) <= 2^32-1
|
||||
private const uint NMAX = 5552;
|
||||
// NMAX is the largest n such that 255n(n+1)/2 + (n+1)(BASE-1) <= 2^32-1
|
||||
private const uint NMAX = 5552;
|
||||
|
||||
#if SUPPORTS_RUNTIME_INTRINSICS
|
||||
private const int MinBufferSize = 64;
|
||||
private const int MinBufferSize = 64;
|
||||
|
||||
private const int BlockSize = 1 << 5;
|
||||
private const int BlockSize = 1 << 5;
|
||||
|
||||
// The C# compiler emits this as a compile-time constant embedded in the PE file.
|
||||
private static ReadOnlySpan<byte> Tap1Tap2 => new byte[]
|
||||
// The C# compiler emits this as a compile-time constant embedded in the PE file.
|
||||
private static ReadOnlySpan<byte> Tap1Tap2 =>
|
||||
new byte[]
|
||||
{
|
||||
32, 31, 30, 29, 28, 27, 26, 25, 24, 23, 22, 21, 20, 19, 18, 17, // tap1
|
||||
16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1 // tap2
|
||||
32,
|
||||
31,
|
||||
30,
|
||||
29,
|
||||
28,
|
||||
27,
|
||||
26,
|
||||
25,
|
||||
24,
|
||||
23,
|
||||
22,
|
||||
21,
|
||||
20,
|
||||
19,
|
||||
18,
|
||||
17, // tap1
|
||||
16,
|
||||
15,
|
||||
14,
|
||||
13,
|
||||
12,
|
||||
11,
|
||||
10,
|
||||
9,
|
||||
8,
|
||||
7,
|
||||
6,
|
||||
5,
|
||||
4,
|
||||
3,
|
||||
2,
|
||||
1, // tap2
|
||||
};
|
||||
#endif
|
||||
|
||||
/// <summary>
|
||||
/// Calculates the Adler32 checksum with the bytes taken from the span.
|
||||
/// </summary>
|
||||
/// <param name="buffer">The readonly span of bytes.</param>
|
||||
/// <returns>The <see cref="uint"/>.</returns>
|
||||
[MethodImpl(InliningOptions.ShortMethod)]
|
||||
public static uint Calculate(ReadOnlySpan<byte> buffer)
|
||||
=> Calculate(SeedValue, buffer);
|
||||
/// <summary>
|
||||
/// Calculates the Adler32 checksum with the bytes taken from the span.
|
||||
/// </summary>
|
||||
/// <param name="buffer">The readonly span of bytes.</param>
|
||||
/// <returns>The <see cref="uint"/>.</returns>
|
||||
[MethodImpl(InliningOptions.ShortMethod)]
|
||||
public static uint Calculate(ReadOnlySpan<byte> buffer) => Calculate(SeedValue, buffer);
|
||||
|
||||
/// <summary>
|
||||
/// Calculates the Adler32 checksum with the bytes taken from the span and seed.
|
||||
/// </summary>
|
||||
/// <param name="adler">The input Adler32 value.</param>
|
||||
/// <param name="buffer">The readonly span of bytes.</param>
|
||||
/// <returns>The <see cref="uint"/>.</returns>
|
||||
[MethodImpl(InliningOptions.HotPath | InliningOptions.ShortMethod)]
|
||||
public static uint Calculate(uint adler, ReadOnlySpan<byte> buffer)
|
||||
/// <summary>
|
||||
/// Calculates the Adler32 checksum with the bytes taken from the span and seed.
|
||||
/// </summary>
|
||||
/// <param name="adler">The input Adler32 value.</param>
|
||||
/// <param name="buffer">The readonly span of bytes.</param>
|
||||
/// <returns>The <see cref="uint"/>.</returns>
|
||||
[MethodImpl(InliningOptions.HotPath | InliningOptions.ShortMethod)]
|
||||
public static uint Calculate(uint adler, ReadOnlySpan<byte> buffer)
|
||||
{
|
||||
if (buffer.IsEmpty)
|
||||
{
|
||||
if (buffer.IsEmpty)
|
||||
{
|
||||
return adler;
|
||||
}
|
||||
return adler;
|
||||
}
|
||||
|
||||
#if SUPPORTS_RUNTIME_INTRINSICS
|
||||
if (Avx2.IsSupported && buffer.Length >= MinBufferSize)
|
||||
{
|
||||
return CalculateAvx2(adler, buffer);
|
||||
}
|
||||
if (Avx2.IsSupported && buffer.Length >= MinBufferSize)
|
||||
{
|
||||
return CalculateAvx2(adler, buffer);
|
||||
}
|
||||
|
||||
if (Ssse3.IsSupported && buffer.Length >= MinBufferSize)
|
||||
{
|
||||
return CalculateSse(adler, buffer);
|
||||
}
|
||||
if (Ssse3.IsSupported && buffer.Length >= MinBufferSize)
|
||||
{
|
||||
return CalculateSse(adler, buffer);
|
||||
}
|
||||
|
||||
return CalculateScalar(adler, buffer);
|
||||
return CalculateScalar(adler, buffer);
|
||||
#else
|
||||
return CalculateScalar(adler, buffer);
|
||||
return CalculateScalar(adler, buffer);
|
||||
#endif
|
||||
}
|
||||
}
|
||||
|
||||
// Based on https://github.com/chromium/chromium/blob/master/third_party/zlib/adler32_simd.c
|
||||
// Based on https://github.com/chromium/chromium/blob/master/third_party/zlib/adler32_simd.c
|
||||
#if SUPPORTS_RUNTIME_INTRINSICS
|
||||
[MethodImpl(InliningOptions.HotPath | InliningOptions.ShortMethod)]
|
||||
private static unsafe uint CalculateSse(uint adler, ReadOnlySpan<byte> buffer)
|
||||
[MethodImpl(InliningOptions.HotPath | InliningOptions.ShortMethod)]
|
||||
private static unsafe uint CalculateSse(uint adler, ReadOnlySpan<byte> buffer)
|
||||
{
|
||||
var s1 = adler & 0xFFFF;
|
||||
var s2 = (adler >> 16) & 0xFFFF;
|
||||
|
||||
// Process the data in blocks.
|
||||
var length = (uint)buffer.Length;
|
||||
var blocks = length / BlockSize;
|
||||
length -= blocks * BlockSize;
|
||||
|
||||
fixed (byte* bufferPtr = &MemoryMarshal.GetReference(buffer))
|
||||
{
|
||||
uint s1 = adler & 0xFFFF;
|
||||
uint s2 = (adler >> 16) & 0xFFFF;
|
||||
|
||||
// Process the data in blocks.
|
||||
uint length = (uint)buffer.Length;
|
||||
uint blocks = length / BlockSize;
|
||||
length -= blocks * BlockSize;
|
||||
|
||||
fixed (byte* bufferPtr = &MemoryMarshal.GetReference(buffer))
|
||||
fixed (byte* tapPtr = &MemoryMarshal.GetReference(Tap1Tap2))
|
||||
{
|
||||
fixed (byte* tapPtr = &MemoryMarshal.GetReference(Tap1Tap2))
|
||||
var localBufferPtr = bufferPtr;
|
||||
|
||||
// _mm_setr_epi8 on x86
|
||||
var tap1 = Sse2.LoadVector128((sbyte*)tapPtr);
|
||||
var tap2 = Sse2.LoadVector128((sbyte*)(tapPtr + 0x10));
|
||||
var zero = Vector128<byte>.Zero;
|
||||
var ones = Vector128.Create((short)1);
|
||||
|
||||
while (blocks > 0)
|
||||
{
|
||||
byte* localBufferPtr = bufferPtr;
|
||||
|
||||
// _mm_setr_epi8 on x86
|
||||
Vector128<sbyte> tap1 = Sse2.LoadVector128((sbyte*)tapPtr);
|
||||
Vector128<sbyte> tap2 = Sse2.LoadVector128((sbyte*)(tapPtr + 0x10));
|
||||
Vector128<byte> zero = Vector128<byte>.Zero;
|
||||
var ones = Vector128.Create((short)1);
|
||||
|
||||
while (blocks > 0)
|
||||
var n = NMAX / BlockSize; /* The NMAX constraint. */
|
||||
if (n > blocks)
|
||||
{
|
||||
uint n = NMAX / BlockSize; /* The NMAX constraint. */
|
||||
if (n > blocks)
|
||||
{
|
||||
n = blocks;
|
||||
}
|
||||
|
||||
blocks -= n;
|
||||
|
||||
// Process n blocks of data. At most NMAX data bytes can be
|
||||
// processed before s2 must be reduced modulo BASE.
|
||||
Vector128<uint> v_ps = Vector128.CreateScalar(s1 * n);
|
||||
Vector128<uint> v_s2 = Vector128.CreateScalar(s2);
|
||||
Vector128<uint> v_s1 = Vector128<uint>.Zero;
|
||||
|
||||
do
|
||||
{
|
||||
// Load 32 input bytes.
|
||||
Vector128<byte> bytes1 = Sse3.LoadDquVector128(localBufferPtr);
|
||||
Vector128<byte> bytes2 = Sse3.LoadDquVector128(localBufferPtr + 0x10);
|
||||
|
||||
// Add previous block byte sum to v_ps.
|
||||
v_ps = Sse2.Add(v_ps, v_s1);
|
||||
|
||||
// Horizontally add the bytes for s1, multiply-adds the
|
||||
// bytes by [ 32, 31, 30, ... ] for s2.
|
||||
v_s1 = Sse2.Add(v_s1, Sse2.SumAbsoluteDifferences(bytes1, zero).AsUInt32());
|
||||
Vector128<short> mad1 = Ssse3.MultiplyAddAdjacent(bytes1, tap1);
|
||||
v_s2 = Sse2.Add(v_s2, Sse2.MultiplyAddAdjacent(mad1, ones).AsUInt32());
|
||||
|
||||
v_s1 = Sse2.Add(v_s1, Sse2.SumAbsoluteDifferences(bytes2, zero).AsUInt32());
|
||||
Vector128<short> mad2 = Ssse3.MultiplyAddAdjacent(bytes2, tap2);
|
||||
v_s2 = Sse2.Add(v_s2, Sse2.MultiplyAddAdjacent(mad2, ones).AsUInt32());
|
||||
|
||||
localBufferPtr += BlockSize;
|
||||
}
|
||||
while (--n > 0);
|
||||
|
||||
v_s2 = Sse2.Add(v_s2, Sse2.ShiftLeftLogical(v_ps, 5));
|
||||
|
||||
// Sum epi32 ints v_s1(s2) and accumulate in s1(s2).
|
||||
const byte S2301 = 0b1011_0001; // A B C D -> B A D C
|
||||
const byte S1032 = 0b0100_1110; // A B C D -> C D A B
|
||||
|
||||
v_s1 = Sse2.Add(v_s1, Sse2.Shuffle(v_s1, S1032));
|
||||
|
||||
s1 += v_s1.ToScalar();
|
||||
|
||||
v_s2 = Sse2.Add(v_s2, Sse2.Shuffle(v_s2, S2301));
|
||||
v_s2 = Sse2.Add(v_s2, Sse2.Shuffle(v_s2, S1032));
|
||||
|
||||
s2 = v_s2.ToScalar();
|
||||
|
||||
// Reduce.
|
||||
s1 %= BASE;
|
||||
s2 %= BASE;
|
||||
n = blocks;
|
||||
}
|
||||
|
||||
if (length > 0)
|
||||
{
|
||||
HandleLeftOver(localBufferPtr, length, ref s1, ref s2);
|
||||
}
|
||||
blocks -= n;
|
||||
|
||||
return s1 | (s2 << 16);
|
||||
}
|
||||
}
|
||||
}
|
||||
// Process n blocks of data. At most NMAX data bytes can be
|
||||
// processed before s2 must be reduced modulo BASE.
|
||||
var v_ps = Vector128.CreateScalar(s1 * n);
|
||||
var v_s2 = Vector128.CreateScalar(s2);
|
||||
var v_s1 = Vector128<uint>.Zero;
|
||||
|
||||
// Based on: https://github.com/zlib-ng/zlib-ng/blob/develop/arch/x86/adler32_avx2.c
|
||||
[MethodImpl(InliningOptions.HotPath | InliningOptions.ShortMethod)]
|
||||
public static unsafe uint CalculateAvx2(uint adler, ReadOnlySpan<byte> buffer)
|
||||
{
|
||||
uint s1 = adler & 0xFFFF;
|
||||
uint s2 = (adler >> 16) & 0xFFFF;
|
||||
uint length = (uint)buffer.Length;
|
||||
|
||||
fixed (byte* bufferPtr = &MemoryMarshal.GetReference(buffer))
|
||||
{
|
||||
byte* localBufferPtr = bufferPtr;
|
||||
|
||||
Vector256<byte> zero = Vector256<byte>.Zero;
|
||||
var dot3v = Vector256.Create((short)1);
|
||||
var dot2v = Vector256.Create(32, 31, 30, 29, 28, 27, 26, 25, 24, 23, 22, 21, 20, 19, 18, 17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1);
|
||||
|
||||
// Process n blocks of data. At most NMAX data bytes can be
|
||||
// processed before s2 must be reduced modulo BASE.
|
||||
var vs1 = Vector256.CreateScalar(s1);
|
||||
var vs2 = Vector256.CreateScalar(s2);
|
||||
|
||||
while (length >= 32)
|
||||
{
|
||||
int k = length < NMAX ? (int)length : (int)NMAX;
|
||||
k -= k % 32;
|
||||
length -= (uint)k;
|
||||
|
||||
Vector256<uint> vs10 = vs1;
|
||||
Vector256<uint> vs3 = Vector256<uint>.Zero;
|
||||
|
||||
while (k >= 32)
|
||||
do
|
||||
{
|
||||
// Load 32 input bytes.
|
||||
Vector256<byte> block = Avx.LoadVector256(localBufferPtr);
|
||||
var bytes1 = Sse3.LoadDquVector128(localBufferPtr);
|
||||
var bytes2 = Sse3.LoadDquVector128(localBufferPtr + 0x10);
|
||||
|
||||
// Sum of abs diff, resulting in 2 x int32's
|
||||
Vector256<ushort> vs1sad = Avx2.SumAbsoluteDifferences(block, zero);
|
||||
// Add previous block byte sum to v_ps.
|
||||
v_ps = Sse2.Add(v_ps, v_s1);
|
||||
|
||||
vs1 = Avx2.Add(vs1, vs1sad.AsUInt32());
|
||||
vs3 = Avx2.Add(vs3, vs10);
|
||||
// Horizontally add the bytes for s1, multiply-adds the
|
||||
// bytes by [ 32, 31, 30, ... ] for s2.
|
||||
v_s1 = Sse2.Add(v_s1, Sse2.SumAbsoluteDifferences(bytes1, zero).AsUInt32());
|
||||
var mad1 = Ssse3.MultiplyAddAdjacent(bytes1, tap1);
|
||||
v_s2 = Sse2.Add(v_s2, Sse2.MultiplyAddAdjacent(mad1, ones).AsUInt32());
|
||||
|
||||
// sum 32 uint8s to 16 shorts.
|
||||
Vector256<short> vshortsum2 = Avx2.MultiplyAddAdjacent(block, dot2v);
|
||||
|
||||
// sum 16 shorts to 8 uint32s.
|
||||
Vector256<int> vsum2 = Avx2.MultiplyAddAdjacent(vshortsum2, dot3v);
|
||||
|
||||
vs2 = Avx2.Add(vsum2.AsUInt32(), vs2);
|
||||
vs10 = vs1;
|
||||
v_s1 = Sse2.Add(v_s1, Sse2.SumAbsoluteDifferences(bytes2, zero).AsUInt32());
|
||||
var mad2 = Ssse3.MultiplyAddAdjacent(bytes2, tap2);
|
||||
v_s2 = Sse2.Add(v_s2, Sse2.MultiplyAddAdjacent(mad2, ones).AsUInt32());
|
||||
|
||||
localBufferPtr += BlockSize;
|
||||
k -= 32;
|
||||
}
|
||||
} while (--n > 0);
|
||||
|
||||
// Defer the multiplication with 32 to outside of the loop.
|
||||
vs3 = Avx2.ShiftLeftLogical(vs3, 5);
|
||||
vs2 = Avx2.Add(vs2, vs3);
|
||||
v_s2 = Sse2.Add(v_s2, Sse2.ShiftLeftLogical(v_ps, 5));
|
||||
|
||||
s1 = (uint)Numerics.EvenReduceSum(vs1.AsInt32());
|
||||
s2 = (uint)Numerics.ReduceSum(vs2.AsInt32());
|
||||
// Sum epi32 ints v_s1(s2) and accumulate in s1(s2).
|
||||
const byte S2301 = 0b1011_0001; // A B C D -> B A D C
|
||||
const byte S1032 = 0b0100_1110; // A B C D -> C D A B
|
||||
|
||||
v_s1 = Sse2.Add(v_s1, Sse2.Shuffle(v_s1, S1032));
|
||||
|
||||
s1 += v_s1.ToScalar();
|
||||
|
||||
v_s2 = Sse2.Add(v_s2, Sse2.Shuffle(v_s2, S2301));
|
||||
v_s2 = Sse2.Add(v_s2, Sse2.Shuffle(v_s2, S1032));
|
||||
|
||||
s2 = v_s2.ToScalar();
|
||||
|
||||
// Reduce.
|
||||
s1 %= BASE;
|
||||
s2 %= BASE;
|
||||
|
||||
vs1 = Vector256.CreateScalar(s1);
|
||||
vs2 = Vector256.CreateScalar(s2);
|
||||
}
|
||||
|
||||
if (length > 0)
|
||||
@@ -324,97 +275,212 @@ namespace SharpCompress.Algorithms
|
||||
return s1 | (s2 << 16);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private static unsafe void HandleLeftOver(byte* localBufferPtr, uint length, ref uint s1, ref uint s2)
|
||||
// Based on: https://github.com/zlib-ng/zlib-ng/blob/develop/arch/x86/adler32_avx2.c
|
||||
[MethodImpl(InliningOptions.HotPath | InliningOptions.ShortMethod)]
|
||||
public static unsafe uint CalculateAvx2(uint adler, ReadOnlySpan<byte> buffer)
|
||||
{
|
||||
var s1 = adler & 0xFFFF;
|
||||
var s2 = (adler >> 16) & 0xFFFF;
|
||||
var length = (uint)buffer.Length;
|
||||
|
||||
fixed (byte* bufferPtr = &MemoryMarshal.GetReference(buffer))
|
||||
{
|
||||
if (length >= 16)
|
||||
var localBufferPtr = bufferPtr;
|
||||
|
||||
var zero = Vector256<byte>.Zero;
|
||||
var dot3v = Vector256.Create((short)1);
|
||||
var dot2v = Vector256.Create(
|
||||
32,
|
||||
31,
|
||||
30,
|
||||
29,
|
||||
28,
|
||||
27,
|
||||
26,
|
||||
25,
|
||||
24,
|
||||
23,
|
||||
22,
|
||||
21,
|
||||
20,
|
||||
19,
|
||||
18,
|
||||
17,
|
||||
16,
|
||||
15,
|
||||
14,
|
||||
13,
|
||||
12,
|
||||
11,
|
||||
10,
|
||||
9,
|
||||
8,
|
||||
7,
|
||||
6,
|
||||
5,
|
||||
4,
|
||||
3,
|
||||
2,
|
||||
1
|
||||
);
|
||||
|
            // Process n blocks of data. At most NMAX data bytes can be
            // processed before s2 must be reduced modulo BASE.
            var vs1 = Vector256.CreateScalar(s1);
            var vs2 = Vector256.CreateScalar(s2);

            while (length >= 32)
            {
                var k = length < NMAX ? (int)length : (int)NMAX;
                k -= k % 32;
                length -= (uint)k;

                var vs10 = vs1;
                var vs3 = Vector256<uint>.Zero;

                while (k >= 32)
                {
                    // Load 32 input bytes.
                    var block = Avx.LoadVector256(localBufferPtr);

                    // Sum of abs diff, resulting in 2 x int32's
                    var vs1sad = Avx2.SumAbsoluteDifferences(block, zero);

                    vs1 = Avx2.Add(vs1, vs1sad.AsUInt32());
                    vs3 = Avx2.Add(vs3, vs10);

                    // sum 32 uint8s to 16 shorts.
                    var vshortsum2 = Avx2.MultiplyAddAdjacent(block, dot2v);

                    // sum 16 shorts to 8 uint32s.
                    var vsum2 = Avx2.MultiplyAddAdjacent(vshortsum2, dot3v);

                    vs2 = Avx2.Add(vsum2.AsUInt32(), vs2);
                    vs10 = vs1;

                    localBufferPtr += BlockSize;
                    k -= 32;
                }

                // Defer the multiplication with 32 to outside of the loop.
                vs3 = Avx2.ShiftLeftLogical(vs3, 5);
                vs2 = Avx2.Add(vs2, vs3);

                s1 = (uint)Numerics.EvenReduceSum(vs1.AsInt32());
                s2 = (uint)Numerics.ReduceSum(vs2.AsInt32());

                s1 %= BASE;
                s2 %= BASE;

                vs1 = Vector256.CreateScalar(s1);
                vs2 = Vector256.CreateScalar(s2);
            }

            if (length > 0)
            {
                HandleLeftOver(localBufferPtr, length, ref s1, ref s2);
            }

            return s1 | (s2 << 16);
        }
    }

    private static unsafe void HandleLeftOver(
        byte* localBufferPtr,
        uint length,
        ref uint s1,
        ref uint s2
    )
    {
        if (length >= 16)
        {
            s2 += s1 += localBufferPtr[0];
            s2 += s1 += localBufferPtr[1];
            s2 += s1 += localBufferPtr[2];
            s2 += s1 += localBufferPtr[3];
            s2 += s1 += localBufferPtr[4];
            s2 += s1 += localBufferPtr[5];
            s2 += s1 += localBufferPtr[6];
            s2 += s1 += localBufferPtr[7];
            s2 += s1 += localBufferPtr[8];
            s2 += s1 += localBufferPtr[9];
            s2 += s1 += localBufferPtr[10];
            s2 += s1 += localBufferPtr[11];
            s2 += s1 += localBufferPtr[12];
            s2 += s1 += localBufferPtr[13];
            s2 += s1 += localBufferPtr[14];
            s2 += s1 += localBufferPtr[15];

            localBufferPtr += 16;
            length -= 16;
        }

        while (length-- > 0)
        {
            s2 += s1 += *localBufferPtr++;
        }

        if (s1 >= BASE)
        {
            s1 -= BASE;
        }

        s2 %= BASE;
    }
#endif

    [MethodImpl(InliningOptions.HotPath | InliningOptions.ShortMethod)]
    private static unsafe uint CalculateScalar(uint adler, ReadOnlySpan<byte> buffer)
    {
        var s1 = adler & 0xFFFF;
        var s2 = (adler >> 16) & 0xFFFF;
        uint k;

        fixed (byte* bufferPtr = buffer)
        {
            var localBufferPtr = bufferPtr;
            var length = (uint)buffer.Length;

            while (length > 0)
            {
                k = length < NMAX ? length : NMAX;
                length -= k;

                while (k >= 16)
                {
                    s2 += s1 += localBufferPtr[0];
                    s2 += s1 += localBufferPtr[1];
                    s2 += s1 += localBufferPtr[2];
                    s2 += s1 += localBufferPtr[3];
                    s2 += s1 += localBufferPtr[4];
                    s2 += s1 += localBufferPtr[5];
                    s2 += s1 += localBufferPtr[6];
                    s2 += s1 += localBufferPtr[7];
                    s2 += s1 += localBufferPtr[8];
                    s2 += s1 += localBufferPtr[9];
                    s2 += s1 += localBufferPtr[10];
                    s2 += s1 += localBufferPtr[11];
                    s2 += s1 += localBufferPtr[12];
                    s2 += s1 += localBufferPtr[13];
                    s2 += s1 += localBufferPtr[14];
                    s2 += s1 += localBufferPtr[15];

                    localBufferPtr += 16;
                    k -= 16;
                }

                while (k-- > 0)
                {
                    s2 += s1 += *localBufferPtr++;
                }

                s1 %= BASE;
                s2 %= BASE;
            }

            return (s2 << 16) | s1;
        }
    }
}
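For readers unfamiliar with the checksum, the unrolled and AVX2 paths above all compute the same Adler-32 recurrence: s1 is a running byte sum and s2 is a running sum of s1, both reduced modulo a prime. A minimal scalar sketch follows; it is not part of this diff, and the constant values shown are zlib's standard ones (which BASE and NMAX in the file mirror).

using System;

static class Adler32Sketch
{
    // Reference recurrence only; the production code above batches NMAX bytes
    // between modulo reductions and vectorises the inner sums.
    public static uint Compute(uint adler, ReadOnlySpan<byte> buffer)
    {
        const uint Base = 65521; // largest prime below 2^16, as in zlib
        var s1 = adler & 0xFFFF;
        var s2 = (adler >> 16) & 0xFFFF;
        foreach (var b in buffer)
        {
            s1 = (s1 + b) % Base;  // running byte sum
            s2 = (s2 + s1) % Base; // running sum of sums
        }
        return (s2 << 16) | s1;
    }
}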
@@ -6,161 +6,175 @@ using SharpCompress.Common;
using SharpCompress.IO;
using SharpCompress.Readers;

namespace SharpCompress.Archives;

public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IArchiveExtractionListener
    where TEntry : IArchiveEntry
    where TVolume : IVolume
{
    private readonly LazyReadOnlyCollection<TVolume> _lazyVolumes;
    private readonly LazyReadOnlyCollection<TEntry> _lazyEntries;
    private bool _disposed;
    private readonly SourceStream? _sourceStream;

    public event EventHandler<ArchiveExtractionEventArgs<IArchiveEntry>>? EntryExtractionBegin;
    public event EventHandler<ArchiveExtractionEventArgs<IArchiveEntry>>? EntryExtractionEnd;

    public event EventHandler<CompressedBytesReadEventArgs>? CompressedBytesRead;
    public event EventHandler<FilePartExtractionBeginEventArgs>? FilePartExtractionBegin;

    protected ReaderOptions ReaderOptions { get; }

    internal AbstractArchive(ArchiveType type, SourceStream sourceStream)
    {
        Type = type;
        ReaderOptions = sourceStream.ReaderOptions;
        _sourceStream = sourceStream;
        _lazyVolumes = new LazyReadOnlyCollection<TVolume>(LoadVolumes(_sourceStream));
        _lazyEntries = new LazyReadOnlyCollection<TEntry>(LoadEntries(Volumes));
    }

    internal AbstractArchive(ArchiveType type)
    {
        Type = type;
        ReaderOptions = new();
        _lazyVolumes = new LazyReadOnlyCollection<TVolume>(Enumerable.Empty<TVolume>());
        _lazyEntries = new LazyReadOnlyCollection<TEntry>(Enumerable.Empty<TEntry>());
    }

    public ArchiveType Type { get; }

    void IArchiveExtractionListener.FireEntryExtractionBegin(IArchiveEntry entry) =>
        EntryExtractionBegin?.Invoke(this, new ArchiveExtractionEventArgs<IArchiveEntry>(entry));

    void IArchiveExtractionListener.FireEntryExtractionEnd(IArchiveEntry entry) =>
        EntryExtractionEnd?.Invoke(this, new ArchiveExtractionEventArgs<IArchiveEntry>(entry));

    private static Stream CheckStreams(Stream stream)
    {
        if (!stream.CanSeek || !stream.CanRead)
        {
            throw new ArchiveException("Archive streams must be Readable and Seekable");
        }
        return stream;
    }

    /// <summary>
    /// Returns an ReadOnlyCollection of all the RarArchiveEntries across the one or many parts of the RarArchive.
    /// </summary>
    public virtual ICollection<TEntry> Entries => _lazyEntries;

    /// <summary>
    /// Returns an ReadOnlyCollection of all the RarArchiveVolumes across the one or many parts of the RarArchive.
    /// </summary>
    public ICollection<TVolume> Volumes => _lazyVolumes;

    /// <summary>
    /// The total size of the files compressed in the archive.
    /// </summary>
    public virtual long TotalSize =>
        Entries.Aggregate(0L, (total, cf) => total + cf.CompressedSize);

    /// <summary>
    /// The total size of the files as uncompressed in the archive.
    /// </summary>
    public virtual long TotalUncompressSize =>
        Entries.Aggregate(0L, (total, cf) => total + cf.Size);

    protected abstract IEnumerable<TVolume> LoadVolumes(SourceStream sourceStream);
    protected abstract IEnumerable<TEntry> LoadEntries(IEnumerable<TVolume> volumes);

    IEnumerable<IArchiveEntry> IArchive.Entries => Entries.Cast<IArchiveEntry>();

    IEnumerable<IVolume> IArchive.Volumes => _lazyVolumes.Cast<IVolume>();

    public virtual void Dispose()
    {
        if (!_disposed)
        {
            _lazyVolumes.ForEach(v => v.Dispose());
            _lazyEntries.GetLoaded().Cast<Entry>().ForEach(x => x.Close());
            _sourceStream?.Dispose();

            _disposed = true;
        }
    }

    void IArchiveExtractionListener.EnsureEntriesLoaded()
    {
        _lazyEntries.EnsureFullyLoaded();
        _lazyVolumes.EnsureFullyLoaded();
    }

    void IExtractionListener.FireCompressedBytesRead(
        long currentPartCompressedBytes,
        long compressedReadBytes
    ) =>
        CompressedBytesRead?.Invoke(
            this,
            new CompressedBytesReadEventArgs(
                currentFilePartCompressedBytesRead: currentPartCompressedBytes,
                compressedBytesRead: compressedReadBytes
            )
        );

    void IExtractionListener.FireFilePartExtractionBegin(
        string name,
        long size,
        long compressedSize
    ) =>
        FilePartExtractionBegin?.Invoke(
            this,
            new FilePartExtractionBeginEventArgs(
                compressedSize: compressedSize,
                size: size,
                name: name
            )
        );

    /// <summary>
    /// Use this method to extract all entries in an archive in order.
    /// This is primarily for SOLID Rar Archives or 7Zip Archives as they need to be
    /// extracted sequentially for the best performance.
    ///
    /// This method will load all entry information from the archive.
    ///
    /// WARNING: this will reuse the underlying stream for the archive. Errors may
    /// occur if this is used at the same time as other extraction methods on this instance.
    /// </summary>
    /// <returns></returns>
    public IReader ExtractAllEntries()
    {
        if (!IsSolid && Type != ArchiveType.SevenZip)
        {
            throw new InvalidOperationException(
                "ExtractAllEntries can only be used on solid archives or 7Zip archives (which require random access)."
            );
        }
        ((IArchiveExtractionListener)this).EnsureEntriesLoaded();
        return CreateReaderForSolidExtraction();
    }

    protected abstract IReader CreateReaderForSolidExtraction();

    /// <summary>
    /// Archive is SOLID (this means the Archive saved bytes by reusing information which helps for archives containing many small files).
    /// </summary>
    public virtual bool IsSolid => false;

    /// <summary>
    /// Archive is ENCRYPTED (this means the Archive has password-protected files).
    /// </summary>
    public virtual bool IsEncrypted => false;

    /// <summary>
    /// The archive can find all the parts of the archive needed to fully extract the archive. This forces the parsing of the entire archive.
    /// </summary>
    public bool IsComplete
    {
        get
        {
            ((IArchiveExtractionListener)this).EnsureEntriesLoaded();
            return Entries.All(x => x.IsComplete);
        }
    }
}
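The listener plumbing and ExtractAllEntries above are easiest to see from the caller's side. A hypothetical usage sketch follows; the file and directory names are illustrative, and it assumes the event args expose the entry via an Item property.

using SharpCompress.Archives.Rar;
using SharpCompress.Common;
using SharpCompress.Readers;

using var archive = RarArchive.Open("example-solid.rar"); // illustrative path to a solid RAR
archive.EntryExtractionBegin += (_, e) => Console.WriteLine($"extracting {e.Item.Key}");

// Sequential extraction path used for solid/7z archives.
using var reader = archive.ExtractAllEntries();
while (reader.MoveToNextEntry())
{
    if (!reader.Entry.IsDirectory)
    {
        reader.WriteEntryToDirectory("output", new ExtractionOptions { ExtractFullPath = true });
    }
}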
@@ -2,165 +2,225 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.IO;
using SharpCompress.Readers;
using SharpCompress.Writers;

namespace SharpCompress.Archives;

public abstract class AbstractWritableArchive<TEntry, TVolume>
    : AbstractArchive<TEntry, TVolume>,
        IWritableArchive
    where TEntry : IArchiveEntry
    where TVolume : IVolume
{
    private class RebuildPauseDisposable : IDisposable
    {
        private readonly AbstractWritableArchive<TEntry, TVolume> archive;

        public RebuildPauseDisposable(AbstractWritableArchive<TEntry, TVolume> archive)
        {
            this.archive = archive;
            archive.pauseRebuilding = true;
        }

        public void Dispose()
        {
            archive.pauseRebuilding = false;
            archive.RebuildModifiedCollection();
        }
    }

    private readonly List<TEntry> newEntries = new();
    private readonly List<TEntry> removedEntries = new();

    private readonly List<TEntry> modifiedEntries = new();
    private bool hasModifications;
    private bool pauseRebuilding;

    internal AbstractWritableArchive(ArchiveType type)
        : base(type) { }

    internal AbstractWritableArchive(ArchiveType type, SourceStream sourceStream)
        : base(type, sourceStream) { }

    public override ICollection<TEntry> Entries
    {
        get
        {
            if (hasModifications)
            {
                return modifiedEntries;
            }
            return base.Entries;
        }
    }

    public IDisposable PauseEntryRebuilding() => new RebuildPauseDisposable(this);

    private void RebuildModifiedCollection()
    {
        if (pauseRebuilding)
        {
            return;
        }
        hasModifications = true;
        newEntries.RemoveAll(v => removedEntries.Contains(v));
        modifiedEntries.Clear();
        modifiedEntries.AddRange(OldEntries.Concat(newEntries));
    }

    private IEnumerable<TEntry> OldEntries => base.Entries.Where(x => !removedEntries.Contains(x));

    public void RemoveEntry(TEntry entry)
    {
        if (!removedEntries.Contains(entry))
        {
            removedEntries.Add(entry);
            RebuildModifiedCollection();
        }
    }

    void IWritableArchive.RemoveEntry(IArchiveEntry entry) => RemoveEntry((TEntry)entry);

    public TEntry AddEntry(string key, Stream source, long size = 0, DateTime? modified = null) =>
        AddEntry(key, source, false, size, modified);

    IArchiveEntry IWritableArchive.AddEntry(
        string key,
        Stream source,
        bool closeStream,
        long size,
        DateTime? modified
    ) => AddEntry(key, source, closeStream, size, modified);

    IArchiveEntry IWritableArchive.AddDirectoryEntry(string key, DateTime? modified) =>
        AddDirectoryEntry(key, modified);

    public TEntry AddEntry(
        string key,
        Stream source,
        bool closeStream,
        long size = 0,
        DateTime? modified = null
    )
    {
        if (key.Length > 0 && key[0] is '/' or '\\')
        {
            key = key.Substring(1);
        }
        if (DoesKeyMatchExisting(key))
        {
            throw new ArchiveException("Cannot add entry with duplicate key: " + key);
        }
        var entry = CreateEntry(key, source, size, modified, closeStream);
        newEntries.Add(entry);
        RebuildModifiedCollection();
        return entry;
    }

    private bool DoesKeyMatchExisting(string key)
    {
        foreach (var path in Entries.Select(x => x.Key))
        {
            if (path is null)
            {
                continue;
            }
            var p = path.Replace('/', '\\');
            if (p.Length > 0 && p[0] == '\\')
            {
                p = p.Substring(1);
            }
            return string.Equals(p, key, StringComparison.OrdinalIgnoreCase);
        }
        return false;
    }

    public TEntry AddDirectoryEntry(string key, DateTime? modified = null)
    {
        if (key.Length > 0 && key[0] is '/' or '\\')
        {
            key = key.Substring(1);
        }
        if (DoesKeyMatchExisting(key))
        {
            throw new ArchiveException("Cannot add entry with duplicate key: " + key);
        }
        var entry = CreateDirectoryEntry(key, modified);
        newEntries.Add(entry);
        RebuildModifiedCollection();
        return entry;
    }

    public void SaveTo(Stream stream, WriterOptions options)
    {
        //reset streams of new entries
        newEntries.Cast<IWritableArchiveEntry>().ForEach(x => x.Stream.Seek(0, SeekOrigin.Begin));
        SaveTo(stream, options, OldEntries, newEntries);
    }

    public async Task SaveToAsync(
        Stream stream,
        WriterOptions options,
        CancellationToken cancellationToken = default
    )
    {
        //reset streams of new entries
        newEntries.Cast<IWritableArchiveEntry>().ForEach(x => x.Stream.Seek(0, SeekOrigin.Begin));
        await SaveToAsync(stream, options, OldEntries, newEntries, cancellationToken)
            .ConfigureAwait(false);
    }

    protected TEntry CreateEntry(
        string key,
        Stream source,
        long size,
        DateTime? modified,
        bool closeStream
    )
    {
        if (!source.CanRead || !source.CanSeek)
        {
            throw new ArchiveException(
                "Streams must be readable and seekable to use the Writing Archive API"
            );
        }
        return CreateEntryInternal(key, source, size, modified, closeStream);
    }

    protected abstract TEntry CreateEntryInternal(
        string key,
        Stream source,
        long size,
        DateTime? modified,
        bool closeStream
    );

    protected abstract TEntry CreateDirectoryEntry(string key, DateTime? modified);

    protected abstract void SaveTo(
        Stream stream,
        WriterOptions options,
        IEnumerable<TEntry> oldEntries,
        IEnumerable<TEntry> newEntries
    );

    protected abstract Task SaveToAsync(
        Stream stream,
        WriterOptions options,
        IEnumerable<TEntry> oldEntries,
        IEnumerable<TEntry> newEntries,
        CancellationToken cancellationToken = default
    );

    public override void Dispose()
    {
        base.Dispose();
        newEntries.Cast<Entry>().ForEach(x => x.Close());
        removedEntries.Cast<Entry>().ForEach(x => x.Close());
        modifiedEntries.Cast<Entry>().ForEach(x => x.Close());
    }
}
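A hypothetical round-trip through the writable API above; the entry keys, file names and the choice of ZipArchive are illustrative, not taken from this diff.

using System.IO;
using System.Text;
using SharpCompress.Archives.Zip;
using SharpCompress.Common;
using SharpCompress.Writers;

using var archive = ZipArchive.Create();
using var content = new MemoryStream(Encoding.UTF8.GetBytes("hello"));
archive.AddEntry("docs/readme.txt", content, closeStream: false, size: content.Length);

// Batch several mutations without rebuilding the modified collection per call.
using (archive.PauseEntryRebuilding())
{
    archive.AddDirectoryEntry("docs/empty-folder");
}

using var output = File.Create("out.zip");
archive.SaveTo(output, new WriterOptions(CompressionType.Deflate));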
@@ -2,295 +2,371 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using SharpCompress.Archives.GZip;
using SharpCompress.Archives.Rar;
using SharpCompress.Archives.SevenZip;
using SharpCompress.Archives.Tar;
using SharpCompress.Archives.Zip;
using SharpCompress.Common;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.Compressors;
using SharpCompress.Compressors.BZip2;
using SharpCompress.Compressors.LZMA;
using SharpCompress.Factories;
using SharpCompress.IO;
using SharpCompress.Readers;

namespace SharpCompress.Archives;

public static class ArchiveFactory
{
    /// <summary>
    /// Opens an Archive for random access
    /// </summary>
    /// <param name="stream"></param>
    /// <param name="readerOptions"></param>
    /// <returns></returns>
    public static IArchive Open(Stream stream, ReaderOptions? readerOptions = null)
    {
        readerOptions ??= new ReaderOptions();
        stream = SharpCompressStream.Create(stream, bufferSize: readerOptions.BufferSize);
        return FindFactory<IArchiveFactory>(stream).Open(stream, readerOptions);
    }

    public static IWritableArchive Create(ArchiveType type)
    {
        var factory = Factory
            .Factories.OfType<IWriteableArchiveFactory>()
            .FirstOrDefault(item => item.KnownArchiveType == type);

        if (factory != null)
        {
            return factory.CreateWriteableArchive();
        }

        throw new NotSupportedException("Cannot create Archives of type: " + type);
    }

    /// <summary>
    /// Constructor expects a filepath to an existing file.
    /// </summary>
    /// <param name="filePath"></param>
    /// <param name="options"></param>
    public static IArchive Open(string filePath, ReaderOptions? options = null)
    {
        filePath.NotNullOrEmpty(nameof(filePath));
        return Open(new FileInfo(filePath), options);
    }

    /// <summary>
    /// Constructor with a FileInfo object to an existing file.
    /// </summary>
    /// <param name="fileInfo"></param>
    /// <param name="options"></param>
    public static IArchive Open(FileInfo fileInfo, ReaderOptions? options = null)
    {
        options ??= new ReaderOptions { LeaveStreamOpen = false };

        return FindFactory<IArchiveFactory>(fileInfo).Open(fileInfo, options);
    }

    /// <summary>
    /// Constructor with IEnumerable FileInfo objects, multi and split support.
    /// </summary>
    /// <param name="fileInfos"></param>
    /// <param name="options"></param>
    public static IArchive Open(IEnumerable<FileInfo> fileInfos, ReaderOptions? options = null)
    {
        fileInfos.NotNull(nameof(fileInfos));
        var filesArray = fileInfos.ToArray();
        if (filesArray.Length == 0)
        {
            throw new InvalidOperationException("No files to open");
        }

        var fileInfo = filesArray[0];
        if (filesArray.Length == 1)
        {
            return Open(fileInfo, options);
        }

        fileInfo.NotNull(nameof(fileInfo));
        options ??= new ReaderOptions { LeaveStreamOpen = false };

        return FindFactory<IMultiArchiveFactory>(fileInfo).Open(filesArray, options);
    }

    /// <summary>
    /// Constructor with IEnumerable Stream objects, multi and split support.
    /// </summary>
    /// <param name="streams"></param>
    /// <param name="options"></param>
    public static IArchive Open(IEnumerable<Stream> streams, ReaderOptions? options = null)
    {
        streams.NotNull(nameof(streams));
        var streamsArray = streams.ToArray();
        if (streamsArray.Length == 0)
        {
            throw new InvalidOperationException("No streams");
        }

        var firstStream = streamsArray[0];
        if (streamsArray.Length == 1)
        {
            return Open(firstStream, options);
        }

        firstStream.NotNull(nameof(firstStream));
        options ??= new ReaderOptions();

        return FindFactory<IMultiArchiveFactory>(firstStream).Open(streamsArray, options);
    }

    /// <summary>
    /// Extract to specific directory, retaining filename
    /// </summary>
    public static void WriteToDirectory(
        string sourceArchive,
        string destinationDirectory,
        ExtractionOptions? options = null
    )
    {
        using var archive = Open(sourceArchive);
        archive.WriteToDirectory(destinationDirectory, options);
    }

    private static T FindFactory<T>(FileInfo finfo)
        where T : IFactory
    {
        finfo.NotNull(nameof(finfo));
        using Stream stream = finfo.OpenRead();
        return FindFactory<T>(stream, finfo.Name);
    }

    private static T FindFactory<T>(Stream stream, string? fileName = null)
        where T : IFactory
    {
        stream.NotNull(nameof(stream));
        if (!stream.CanRead || !stream.CanSeek)
        {
            throw new ArgumentException("Stream should be readable and seekable");
        }

        var factories = Factory.Factories.OfType<T>();

        var startPosition = stream.Position;

        foreach (var factory in factories)
        {
            stream.Seek(startPosition, SeekOrigin.Begin);

            if (factory.IsArchive(stream))
            {
                stream.Seek(startPosition, SeekOrigin.Begin);

                return factory;
            }
        }

        stream.Seek(startPosition, SeekOrigin.Begin);

        // Check if this is a compressed tar file (tar.bz2, tar.lz, etc.)
        // These formats are supported by ReaderFactory but not by ArchiveFactory
        var compressedTarMessage = TryGetCompressedTarMessage(stream, fileName);
        if (compressedTarMessage != null)
        {
            throw new InvalidOperationException(compressedTarMessage);
        }

        var extensions = string.Join(", ", factories.Select(item => item.Name));

        throw new InvalidOperationException(
            $"Cannot determine compressed stream type. Supported Archive Formats: {extensions}"
        );
    }

    public static bool IsArchive(
        string filePath,
        out ArchiveType? type,
        int bufferSize = ReaderOptions.DefaultBufferSize
    )
    {
        filePath.NotNullOrEmpty(nameof(filePath));
        using Stream s = File.OpenRead(filePath);
        return IsArchive(s, out type, bufferSize);
    }

    public static bool IsArchive(
        Stream stream,
        out ArchiveType? type,
        int bufferSize = ReaderOptions.DefaultBufferSize
    )
    {
        type = null;
        stream.NotNull(nameof(stream));

        if (!stream.CanRead || !stream.CanSeek)
        {
            throw new ArgumentException("Stream should be readable and seekable");
        }

        var startPosition = stream.Position;

        foreach (var factory in Factory.Factories)
        {
            var isArchive = factory.IsArchive(stream);
            stream.Position = startPosition;

            if (isArchive)
            {
                type = factory.KnownArchiveType;
                return true;
            }
        }

        return false;
    }

    /// <summary>
    /// From a passed in archive (zip, rar, 7z, 001), return all parts.
    /// </summary>
    /// <param name="part1"></param>
    /// <returns></returns>
    public static IEnumerable<string> GetFileParts(string part1)
    {
        part1.NotNullOrEmpty(nameof(part1));
        return GetFileParts(new FileInfo(part1)).Select(a => a.FullName);
    }

    /// <summary>
    /// From a passed in archive (zip, rar, 7z, 001), return all parts.
    /// </summary>
    /// <param name="part1"></param>
    /// <returns></returns>
    public static IEnumerable<FileInfo> GetFileParts(FileInfo part1)
    {
        part1.NotNull(nameof(part1));
        yield return part1;

        foreach (var factory in Factory.Factories.OfType<IFactory>())
        {
            var i = 1;
            var part = factory.GetFilePart(i++, part1);

            if (part != null)
            {
                yield return part;
                while ((part = factory.GetFilePart(i++, part1)) != null) //tests split too
                {
                    yield return part;
                }

                yield break;
            }
        }
    }

    public static IArchiveFactory AutoFactory { get; } = new AutoArchiveFactory();

    /// <summary>
    /// Checks if the stream is a compressed tar file (tar.bz2, tar.lz, etc.) that should use ReaderFactory instead.
    /// Returns an error message if detected, null otherwise.
    /// </summary>
    private static string? TryGetCompressedTarMessage(Stream stream, string? fileName)
    {
        var startPosition = stream.Position;
        try
        {
            // Check if it's a BZip2 file
            if (BZip2Stream.IsBZip2(stream))
            {
                stream.Seek(startPosition, SeekOrigin.Begin);

                // Try to decompress and check if it contains a tar archive
                using var decompressed = new BZip2Stream(stream, CompressionMode.Decompress, true);
                if (IsTarStream(decompressed))
                {
                    return "This appears to be a tar.bz2 archive. The Archive API requires seekable streams, but decompression streams are not seekable. "
                        + "Please use ReaderFactory.Open() instead for forward-only extraction, "
                        + "or decompress the file first and then open the resulting tar file with ArchiveFactory.Open().";
                }
                return null;
            }

            stream.Seek(startPosition, SeekOrigin.Begin);

            // Check if it's an LZip file
            if (LZipStream.IsLZipFile(stream))
            {
                stream.Seek(startPosition, SeekOrigin.Begin);

                // Try to decompress and check if it contains a tar archive
                using var decompressed = new LZipStream(stream, CompressionMode.Decompress);
                if (IsTarStream(decompressed))
                {
                    return "This appears to be a tar.lz archive. The Archive API requires seekable streams, but decompression streams are not seekable. "
                        + "Please use ReaderFactory.Open() instead for forward-only extraction, "
                        + "or decompress the file first and then open the resulting tar file with ArchiveFactory.Open().";
                }
                return null;
            }

            // Check file extension as a fallback for other compressed tar formats
            if (fileName != null)
            {
                var lowerFileName = fileName.ToLowerInvariant();
                if (
                    lowerFileName.EndsWith(".tar.bz2")
                    || lowerFileName.EndsWith(".tbz")
                    || lowerFileName.EndsWith(".tbz2")
                    || lowerFileName.EndsWith(".tb2")
                    || lowerFileName.EndsWith(".tz2")
                    || lowerFileName.EndsWith(".tar.lz")
                    || lowerFileName.EndsWith(".tar.xz")
                    || lowerFileName.EndsWith(".txz")
                    || lowerFileName.EndsWith(".tar.zst")
                    || lowerFileName.EndsWith(".tar.zstd")
                    || lowerFileName.EndsWith(".tzst")
                    || lowerFileName.EndsWith(".tzstd")
                    || lowerFileName.EndsWith(".tar.z")
                    || lowerFileName.EndsWith(".tz")
                    || lowerFileName.EndsWith(".taz")
                )
                {
                    return $"The file '{fileName}' appears to be a compressed tar archive. The Archive API requires seekable streams, but decompression streams are not seekable. "
                        + "Please use ReaderFactory.Open() instead for forward-only extraction, "
                        + "or decompress the file first and then open the resulting tar file with ArchiveFactory.Open().";
                }
            }

            return null;
        }
        catch
        {
            // If we can't determine, just return null and let the normal error handling proceed
            return null;
        }
        finally
        {
            try
            {
                stream.Seek(startPosition, SeekOrigin.Begin);
            }
            catch
            {
                // Ignore seek failures
            }
        }
    }

    /// <summary>
    /// Checks if a stream contains a tar archive by trying to read a tar header.
    /// </summary>
    private static bool IsTarStream(Stream stream)
    {
        try
        {
            var tarHeader = new TarHeader(new ArchiveEncoding());
            return tarHeader.Read(new BinaryReader(stream));
        }
        catch
        {
            return false;
        }
    }
}
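A hypothetical caller-side view of the factory logic above; the paths are illustrative, and the second half shows the forward-only Reader API that the new compressed-tar error message points users toward.

using System;
using System.IO;
using SharpCompress.Archives;
using SharpCompress.Common;
using SharpCompress.Readers;

if (ArchiveFactory.IsArchive("backup.zip", out var type))
{
    Console.WriteLine($"Detected {type}");
    using var archive = ArchiveFactory.Open("backup.zip");
    archive.WriteToDirectory("output", new ExtractionOptions { ExtractFullPath = true });
}

// Compressed tar files (tar.bz2, tar.lz, ...) are rejected by ArchiveFactory with the
// message built above; the forward-only Reader API handles them instead.
using var stream = File.OpenRead("backup.tar.bz2");
using var reader = ReaderFactory.Open(stream);
while (reader.MoveToNextEntry())
{
    // process reader.Entry here
}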
@@ -1,29 +1,30 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Text.RegularExpressions;

namespace SharpCompress.Archives;

internal abstract class ArchiveVolumeFactory
{
    internal static FileInfo? GetFilePart(int index, FileInfo part1) //base the name on the first part
    {
        FileInfo? item = null;

        //split 001, 002 ...
        var m = Regex.Match(part1.Name, @"^(.*\.)([0-9]+)$", RegexOptions.IgnoreCase);
        if (m.Success)
            item = new FileInfo(
                Path.Combine(
                    part1.DirectoryName!,
                    String.Concat(
                        m.Groups[1].Value,
                        (index + 1).ToString().PadLeft(m.Groups[2].Value.Length, '0')
                    )
                )
            );

        if (item != null && item.Exists)
            return item;
        return null;
    }
}
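Illustrative behaviour of the naming pattern matched above (the helper is internal to the library, so this sketch only documents the convention, with a made-up file name): given data.zip.001 as the first part, index 1 resolves to the sibling data.zip.002 when that file exists, and null otherwise.

// Inside the library (ArchiveVolumeFactory is internal):
var next = ArchiveVolumeFactory.GetFilePart(1, new FileInfo("data.zip.001"));
// next is a FileInfo for "data.zip.002" if it exists on disk, otherwise null.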
src/SharpCompress/Archives/AutoArchiveFactory.cs (new file, 30 lines)
@@ -0,0 +1,30 @@
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common;
using SharpCompress.Readers;

namespace SharpCompress.Archives;

class AutoArchiveFactory : IArchiveFactory
{
    public string Name => nameof(AutoArchiveFactory);

    public ArchiveType? KnownArchiveType => null;

    public IEnumerable<string> GetSupportedExtensions() => throw new NotSupportedException();

    public bool IsArchive(
        Stream stream,
        string? password = null,
        int bufferSize = ReaderOptions.DefaultBufferSize
    ) => throw new NotSupportedException();

    public FileInfo? GetFilePart(int index, FileInfo part1) => throw new NotSupportedException();

    public IArchive Open(Stream stream, ReaderOptions? readerOptions = null) =>
        ArchiveFactory.Open(stream, readerOptions);

    public IArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null) =>
        ArchiveFactory.Open(fileInfo, readerOptions);
}
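A sketch of what the stub above enables, with a hypothetical caller and an illustrative file name: code that accepts an IArchiveFactory can be handed ArchiveFactory.AutoFactory and still get format sniffing, because the Open overloads simply delegate back to ArchiveFactory.

using System.IO;
using SharpCompress.Archives;

IArchiveFactory auto = ArchiveFactory.AutoFactory;
using var archive = auto.Open(new FileInfo("mixed.zip")); // delegates to ArchiveFactory.Open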
@@ -2,6 +2,8 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.GZip;
using SharpCompress.IO;
@@ -10,182 +12,242 @@ using SharpCompress.Readers.GZip;
using SharpCompress.Writers;
using SharpCompress.Writers.GZip;

namespace SharpCompress.Archives.GZip;

public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
{
    /// <summary>
    /// Constructor expects a filepath to an existing file.
    /// </summary>
    /// <param name="filePath"></param>
    /// <param name="readerOptions"></param>
    public static GZipArchive Open(string filePath, ReaderOptions? readerOptions = null)
    {
        filePath.NotNullOrEmpty(nameof(filePath));
        return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
    }

    /// <summary>
    /// Constructor with a FileInfo object to an existing file.
    /// </summary>
    /// <param name="fileInfo"></param>
    /// <param name="readerOptions"></param>
    public static GZipArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
    {
        fileInfo.NotNull(nameof(fileInfo));
        return new GZipArchive(
            new SourceStream(
                fileInfo,
                i => ArchiveVolumeFactory.GetFilePart(i, fileInfo),
                readerOptions ?? new ReaderOptions()
            )
        );
    }

    /// <summary>
    /// Constructor with all file parts passed in
    /// </summary>
    /// <param name="fileInfos"></param>
    /// <param name="readerOptions"></param>
    public static GZipArchive Open(
        IEnumerable<FileInfo> fileInfos,
        ReaderOptions? readerOptions = null
    )
    {
        fileInfos.NotNull(nameof(fileInfos));
        var files = fileInfos.ToArray();
        return new GZipArchive(
            new SourceStream(
                files[0],
                i => i < files.Length ? files[i] : null,
                readerOptions ?? new ReaderOptions()
            )
        );
    }

    /// <summary>
    /// Constructor with all stream parts passed in
    /// </summary>
    /// <param name="streams"></param>
    /// <param name="readerOptions"></param>
    public static GZipArchive Open(IEnumerable<Stream> streams, ReaderOptions? readerOptions = null)
    {
        streams.NotNull(nameof(streams));
        var strms = streams.ToArray();
        return new GZipArchive(
            new SourceStream(
                strms[0],
                i => i < strms.Length ? strms[i] : null,
                readerOptions ?? new ReaderOptions()
            )
        );
    }

    /// <summary>
    /// Takes a seekable Stream as a source
    /// </summary>
    /// <param name="stream"></param>
    /// <param name="readerOptions"></param>
    public static GZipArchive Open(Stream stream, ReaderOptions? readerOptions = null)
    {
        stream.NotNull(nameof(stream));

        if (stream is not { CanSeek: true })
        {
            throw new ArgumentException("Stream must be seekable", nameof(stream));
        }

        return new GZipArchive(
            new SourceStream(stream, _ => null, readerOptions ?? new ReaderOptions())
        );
    }

    public static GZipArchive Create() => new();

    /// <summary>
    /// Constructor with a SourceStream able to handle FileInfo and Streams.
    /// </summary>
    /// <param name="sourceStream"></param>
    private GZipArchive(SourceStream sourceStream)
        : base(ArchiveType.GZip, sourceStream) { }

    protected override IEnumerable<GZipVolume> LoadVolumes(SourceStream sourceStream)
    {
        sourceStream.LoadAllParts();
        return sourceStream.Streams.Select(a => new GZipVolume(a, ReaderOptions, 0));
    }

    public static bool IsGZipFile(string filePath) => IsGZipFile(new FileInfo(filePath));

    public static bool IsGZipFile(FileInfo fileInfo)
    {
        if (!fileInfo.Exists)
        {
            return false;
        }

        using Stream stream = fileInfo.OpenRead();
        return IsGZipFile(stream);
    }

    public void SaveTo(string filePath) => SaveTo(new FileInfo(filePath));

    public void SaveTo(FileInfo fileInfo)
    {
        using var stream = fileInfo.Open(FileMode.Create, FileAccess.Write);
        SaveTo(stream, new WriterOptions(CompressionType.GZip));
    }

    public Task SaveToAsync(string filePath, CancellationToken cancellationToken = default) =>
        SaveToAsync(new FileInfo(filePath), cancellationToken);

    public async Task SaveToAsync(FileInfo fileInfo, CancellationToken cancellationToken = default)
    {
        using var stream = fileInfo.Open(FileMode.Create, FileAccess.Write);
        await SaveToAsync(stream, new WriterOptions(CompressionType.GZip), cancellationToken)
            .ConfigureAwait(false);
    }

    public static bool IsGZipFile(Stream stream)
    {
        // read the header on the first read
        Span<byte> header = stackalloc byte[10];

        // workitem 8501: handle edge case (decompress empty stream)
        if (!stream.ReadFully(header))
        {
            return false;
        }

        if (header[0] != 0x1F || header[1] != 0x8B || header[2] != 8)
        {
            return false;
        }

        return true;
    }

    internal GZipArchive()
        : base(ArchiveType.GZip) { }

    protected override GZipArchiveEntry CreateEntryInternal(
        string filePath,
        Stream source,
        long size,
        DateTime? modified,
        bool closeStream
    )
    {
        if (Entries.Any())
        {
            throw new InvalidFormatException("Only one entry is allowed in a GZip Archive");
        }
        return new GZipWritableArchiveEntry(this, source, filePath, size, modified, closeStream);
    }

    protected override GZipArchiveEntry CreateDirectoryEntry(
        string directoryPath,
        DateTime? modified
    ) => throw new NotSupportedException("GZip archives do not support directory entries.");
|
||||
|
||||
protected override void SaveTo(
|
||||
Stream stream,
|
||||
WriterOptions options,
|
||||
IEnumerable<GZipArchiveEntry> oldEntries,
|
||||
IEnumerable<GZipArchiveEntry> newEntries
|
||||
)
|
||||
{
|
||||
if (Entries.Count > 1)
|
||||
{
|
||||
return new GZipArchive();
|
||||
throw new InvalidFormatException("Only one entry is allowed in a GZip Archive");
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Constructor with a SourceStream able to handle FileInfo and Streams.
|
||||
/// </summary>
|
||||
/// <param name="srcStream"></param>
|
||||
/// <param name="options"></param>
|
||||
internal GZipArchive(SourceStream srcStream)
|
||||
: base(ArchiveType.Tar, srcStream)
|
||||
using var writer = new GZipWriter(stream, new GZipWriterOptions(options));
|
||||
foreach (var entry in oldEntries.Concat(newEntries).Where(x => !x.IsDirectory))
|
||||
{
|
||||
}
|
||||
|
||||
protected override IEnumerable<GZipVolume> LoadVolumes(SourceStream srcStream)
|
||||
{
|
||||
srcStream.LoadAllParts();
|
||||
return srcStream.Streams.Select(a => new GZipVolume(a, ReaderOptions));
|
||||
}
|
||||
public static bool IsGZipFile(string filePath)
|
||||
{
|
||||
return IsGZipFile(new FileInfo(filePath));
|
||||
}
|
||||
|
||||
public static bool IsGZipFile(FileInfo fileInfo)
|
||||
{
|
||||
if (!fileInfo.Exists)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
using Stream stream = fileInfo.OpenRead();
|
||||
return IsGZipFile(stream);
|
||||
}
|
||||
|
||||
public void SaveTo(string filePath)
|
||||
{
|
||||
SaveTo(new FileInfo(filePath));
|
||||
}
|
||||
|
||||
public void SaveTo(FileInfo fileInfo)
|
||||
{
|
||||
using (var stream = fileInfo.Open(FileMode.Create, FileAccess.Write))
|
||||
{
|
||||
SaveTo(stream, new WriterOptions(CompressionType.GZip));
|
||||
}
|
||||
}
|
||||
|
||||
public static bool IsGZipFile(Stream stream)
|
||||
{
|
||||
// read the header on the first read
|
||||
Span<byte> header = stackalloc byte[10];
|
||||
|
||||
// workitem 8501: handle edge case (decompress empty stream)
|
||||
if (!stream.ReadFully(header))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
if (header[0] != 0x1F || header[1] != 0x8B || header[2] != 8)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
internal GZipArchive()
|
||||
: base(ArchiveType.GZip)
|
||||
{
|
||||
}
|
||||
|
||||
protected override GZipArchiveEntry CreateEntryInternal(string filePath, Stream source, long size, DateTime? modified,
|
||||
bool closeStream)
|
||||
{
|
||||
if (Entries.Any())
|
||||
{
|
||||
throw new InvalidOperationException("Only one entry is allowed in a GZip Archive");
|
||||
}
|
||||
return new GZipWritableArchiveEntry(this, source, filePath, size, modified, closeStream);
|
||||
}
|
||||
|
||||
protected override void SaveTo(Stream stream, WriterOptions options,
|
||||
IEnumerable<GZipArchiveEntry> oldEntries,
|
||||
IEnumerable<GZipArchiveEntry> newEntries)
|
||||
{
|
||||
if (Entries.Count > 1)
|
||||
{
|
||||
throw new InvalidOperationException("Only one entry is allowed in a GZip Archive");
|
||||
}
|
||||
using (var writer = new GZipWriter(stream, new GZipWriterOptions(options)))
|
||||
{
|
||||
foreach (var entry in oldEntries.Concat(newEntries)
|
||||
.Where(x => !x.IsDirectory))
|
||||
{
|
||||
using (var entryStream = entry.OpenEntryStream())
|
||||
{
|
||||
writer.Write(entry.Key, entryStream, entry.LastModifiedTime);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
protected override IEnumerable<GZipArchiveEntry> LoadEntries(IEnumerable<GZipVolume> volumes)
|
||||
{
|
||||
Stream stream = volumes.Single().Stream;
|
||||
yield return new GZipArchiveEntry(this, new GZipFilePart(stream, ReaderOptions.ArchiveEncoding));
|
||||
}
|
||||
|
||||
protected override IReader CreateReaderForSolidExtraction()
|
||||
{
|
||||
var stream = Volumes.Single().Stream;
|
||||
stream.Position = 0;
|
||||
return GZipReader.Open(stream);
|
||||
using var entryStream = entry.OpenEntryStream();
|
||||
writer.Write(
|
||||
entry.Key.NotNull("Entry Key is null"),
|
||||
entryStream,
|
||||
entry.LastModifiedTime
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
protected override async Task SaveToAsync(
|
||||
Stream stream,
|
||||
WriterOptions options,
|
||||
IEnumerable<GZipArchiveEntry> oldEntries,
|
||||
IEnumerable<GZipArchiveEntry> newEntries,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
if (Entries.Count > 1)
|
||||
{
|
||||
throw new InvalidFormatException("Only one entry is allowed in a GZip Archive");
|
||||
}
|
||||
using var writer = new GZipWriter(stream, new GZipWriterOptions(options));
|
||||
foreach (var entry in oldEntries.Concat(newEntries).Where(x => !x.IsDirectory))
|
||||
{
|
||||
using var entryStream = entry.OpenEntryStream();
|
||||
await writer
|
||||
.WriteAsync(entry.Key.NotNull("Entry Key is null"), entryStream, cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
}
|
||||
}
|
||||
|
||||
protected override IEnumerable<GZipArchiveEntry> LoadEntries(IEnumerable<GZipVolume> volumes)
|
||||
{
|
||||
var stream = volumes.Single().Stream;
|
||||
yield return new GZipArchiveEntry(
|
||||
this,
|
||||
new GZipFilePart(stream, ReaderOptions.ArchiveEncoding)
|
||||
);
|
||||
}
|
||||
|
||||
protected override IReader CreateReaderForSolidExtraction()
|
||||
{
|
||||
var stream = Volumes.Single().Stream;
|
||||
stream.Position = 0;
|
||||
return GZipReader.Open(stream);
|
||||
}
|
||||
}
|
||||
|
||||
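A minimal usage sketch of the GZipArchive API above (file and entry names are illustrative, and the using directives for System.IO, System.Linq, SharpCompress.Writers and SharpCompress.Common are assumed):

// Sketch: create a single-entry GZip archive, then reopen it and extract the entry.
using (var archive = GZipArchive.Create())
{
    archive.AddEntry("data.txt", File.OpenRead("data.txt"), closeStream: true);
    archive.SaveTo("data.txt.gz");
}

using (var archive = GZipArchive.Open("data.txt.gz"))
{
    var entry = archive.Entries.Single(); // GZip allows exactly one entry
    entry.WriteToFile("data-copy.txt");
}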
@@ -1,34 +1,39 @@
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.GZip;

namespace SharpCompress.Archives.GZip;

public class GZipArchiveEntry : GZipEntry, IArchiveEntry
{
    internal GZipArchiveEntry(GZipArchive archive, GZipFilePart? part)
        : base(part) => Archive = archive;

    public virtual Stream OpenEntryStream()
    {
        //this is to reset the stream to be read multiple times
        var part = (GZipFilePart)Parts.Single();
        var rawStream = part.GetRawStream();
        if (rawStream.CanSeek && rawStream.Position != part.EntryStartPosition)
        {
            rawStream.Position = part.EntryStartPosition;
        }
        return Parts.Single().GetCompressedStream().NotNull();
    }

    public virtual Task<Stream> OpenEntryStreamAsync(CancellationToken cancellationToken = default)
    {
        // GZip synchronous implementation is fast enough, just wrap it
        return Task.FromResult(OpenEntryStream());
    }

    #region IArchiveEntry Members

    public IArchive Archive { get; }

    public bool IsComplete => true;

    #endregion
}
@@ -1,68 +1,71 @@
#nullable disable

using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common;
using SharpCompress.IO;

namespace SharpCompress.Archives.GZip;

internal sealed class GZipWritableArchiveEntry : GZipArchiveEntry, IWritableArchiveEntry
{
    private readonly bool closeStream;
    private readonly Stream stream;

    internal GZipWritableArchiveEntry(
        GZipArchive archive,
        Stream stream,
        string path,
        long size,
        DateTime? lastModified,
        bool closeStream
    )
        : base(archive, null)
    {
        this.stream = stream;
        Key = path;
        Size = size;
        LastModifiedTime = lastModified;
        this.closeStream = closeStream;
    }

    public override long Crc => 0;

    public override string? Key { get; }

    public override long CompressedSize => 0;

    public override long Size { get; }

    public override DateTime? LastModifiedTime { get; }

    public override DateTime? CreatedTime => null;

    public override DateTime? LastAccessedTime => null;

    public override DateTime? ArchivedTime => null;

    public override bool IsEncrypted => false;

    public override bool IsDirectory => false;

    public override bool IsSplitAfter => false;

    internal override IEnumerable<FilePart> Parts => throw new NotImplementedException();

    Stream IWritableArchiveEntry.Stream => stream;

    public override Stream OpenEntryStream()
    {
        //ensure new stream is at the start, this could be reset
        stream.Seek(0, SeekOrigin.Begin);
        return SharpCompressStream.Create(stream, leaveOpen: true);
    }

    internal override void Close()
    {
        if (closeStream)
        {
            stream.Dispose();
        }
    }
}
@@ -1,49 +1,48 @@
using System;
using System.Collections.Generic;
using SharpCompress.Common;
using SharpCompress.Readers;

namespace SharpCompress.Archives;

public interface IArchive : IDisposable
{
    event EventHandler<ArchiveExtractionEventArgs<IArchiveEntry>> EntryExtractionBegin;
    event EventHandler<ArchiveExtractionEventArgs<IArchiveEntry>> EntryExtractionEnd;

    event EventHandler<CompressedBytesReadEventArgs> CompressedBytesRead;
    event EventHandler<FilePartExtractionBeginEventArgs> FilePartExtractionBegin;

    IEnumerable<IArchiveEntry> Entries { get; }
    IEnumerable<IVolume> Volumes { get; }

    ArchiveType Type { get; }

    /// <summary>
    /// Use this method to extract all entries in an archive in order.
    /// This is primarily for SOLID Rar Archives or 7Zip Archives as they need to be
    /// extracted sequentially for the best performance.
    /// </summary>
    IReader ExtractAllEntries();

    /// <summary>
    /// Archive is SOLID (this means the Archive saved bytes by reusing information which helps for archives containing many small files).
    /// Rar Archives can be SOLID while all 7Zip archives are considered SOLID.
    /// </summary>
    bool IsSolid { get; }

    /// <summary>
    /// This checks to see if all the known entries have IsComplete = true
    /// </summary>
    bool IsComplete { get; }

    /// <summary>
    /// The total size of the files compressed in the archive.
    /// </summary>
    long TotalSize { get; }

    /// <summary>
    /// The total size of the files as uncompressed in the archive.
    /// </summary>
    long TotalUncompressSize { get; }
}
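As the summary for ExtractAllEntries() notes, sequential extraction is the efficient path for solid archives. A minimal sketch of the usual reader loop, assuming SevenZipArchive from elsewhere in the library and illustrative paths:

// Sketch: sequential extraction of a solid archive via ExtractAllEntries().
using var archive = SevenZipArchive.Open("archive.7z");
using var reader = archive.ExtractAllEntries();
while (reader.MoveToNextEntry())
{
    if (!reader.Entry.IsDirectory)
    {
        reader.WriteEntryToDirectory(
            "output",
            new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
        );
    }
}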
@@ -1,24 +1,31 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;

namespace SharpCompress.Archives;

public interface IArchiveEntry : IEntry
{
    /// <summary>
    /// Opens the current entry as a stream that will decompress as it is read.
    /// Read the entire stream or use SkipEntry on EntryStream.
    /// </summary>
    Stream OpenEntryStream();

    /// <summary>
    /// Opens the current entry as a stream that will decompress as it is read asynchronously.
    /// Read the entire stream or use SkipEntry on EntryStream.
    /// </summary>
    Task<Stream> OpenEntryStreamAsync(CancellationToken cancellationToken = default);

    /// <summary>
    /// The archive can find all the parts of the archive needed to extract this entry.
    /// </summary>
    bool IsComplete { get; }

    /// <summary>
    /// The archive instance this entry belongs to
    /// </summary>
    IArchive Archive { get; }
}
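A short sketch of consuming the new async entry stream inside an async method (ZipArchive and the file names are assumptions for illustration; usings for System.IO and System.Linq omitted):

// Sketch: stream one entry's decompressed bytes without extracting the whole archive.
using var archive = ZipArchive.Open("input.zip");
var entry = archive.Entries.First(e => !e.IsDirectory);
using var entryStream = await entry.OpenEntryStreamAsync();
using var destination = File.Create("first-entry.bin");
await entryStream.CopyToAsync(destination);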
@@ -1,63 +1,134 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.IO;

namespace SharpCompress.Archives;

public static class IArchiveEntryExtensions
{
    public static void WriteTo(this IArchiveEntry archiveEntry, Stream streamToWriteTo)
    {
        if (archiveEntry.IsDirectory)
        {
            throw new ExtractionException("Entry is a file directory and cannot be extracted.");
        }

        var streamListener = (IArchiveExtractionListener)archiveEntry.Archive;
        streamListener.EnsureEntriesLoaded();
        streamListener.FireEntryExtractionBegin(archiveEntry);
        streamListener.FireFilePartExtractionBegin(
            archiveEntry.Key ?? "Key",
            archiveEntry.Size,
            archiveEntry.CompressedSize
        );
        var entryStream = archiveEntry.OpenEntryStream();
        using (entryStream)
        {
            using Stream s = new ListeningStream(streamListener, entryStream);
            s.CopyTo(streamToWriteTo);
        }
        streamListener.FireEntryExtractionEnd(archiveEntry);
    }

    public static async Task WriteToAsync(
        this IArchiveEntry archiveEntry,
        Stream streamToWriteTo,
        CancellationToken cancellationToken = default
    )
    {
        if (archiveEntry.IsDirectory)
        {
            throw new ExtractionException("Entry is a file directory and cannot be extracted.");
        }

        var streamListener = (IArchiveExtractionListener)archiveEntry.Archive;
        streamListener.EnsureEntriesLoaded();
        streamListener.FireEntryExtractionBegin(archiveEntry);
        streamListener.FireFilePartExtractionBegin(
            archiveEntry.Key ?? "Key",
            archiveEntry.Size,
            archiveEntry.CompressedSize
        );
        var entryStream = archiveEntry.OpenEntryStream();
        using (entryStream)
        {
            using Stream s = new ListeningStream(streamListener, entryStream);
            await s.CopyToAsync(streamToWriteTo, 81920, cancellationToken).ConfigureAwait(false);
        }
        streamListener.FireEntryExtractionEnd(archiveEntry);
    }

    /// <summary>
    /// Extract to specific directory, retaining filename
    /// </summary>
    public static void WriteToDirectory(
        this IArchiveEntry entry,
        string destinationDirectory,
        ExtractionOptions? options = null
    ) =>
        ExtractionMethods.WriteEntryToDirectory(
            entry,
            destinationDirectory,
            options,
            entry.WriteToFile
        );

    /// <summary>
    /// Extract to specific directory asynchronously, retaining filename
    /// </summary>
    public static Task WriteToDirectoryAsync(
        this IArchiveEntry entry,
        string destinationDirectory,
        ExtractionOptions? options = null,
        CancellationToken cancellationToken = default
    ) =>
        ExtractionMethods.WriteEntryToDirectoryAsync(
            entry,
            destinationDirectory,
            options,
            (x, opt) => entry.WriteToFileAsync(x, opt, cancellationToken),
            cancellationToken
        );

    /// <summary>
    /// Extract to specific file
    /// </summary>
    public static void WriteToFile(
        this IArchiveEntry entry,
        string destinationFileName,
        ExtractionOptions? options = null
    ) =>
        ExtractionMethods.WriteEntryToFile(
            entry,
            destinationFileName,
            options,
            (x, fm) =>
            {
                using var fs = File.Open(destinationFileName, fm);
                entry.WriteTo(fs);
            }
        );

    /// <summary>
    /// Extract to specific file asynchronously
    /// </summary>
    public static Task WriteToFileAsync(
        this IArchiveEntry entry,
        string destinationFileName,
        ExtractionOptions? options = null,
        CancellationToken cancellationToken = default
    ) =>
        ExtractionMethods.WriteEntryToFileAsync(
            entry,
            destinationFileName,
            options,
            async (x, fm) =>
            {
                using var fs = File.Open(destinationFileName, fm);
                await entry.WriteToAsync(fs, cancellationToken).ConfigureAwait(false);
            },
            cancellationToken
        );
}
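A brief sketch of the per-entry extension methods above, used from an async method (ZipArchive and paths are assumptions for illustration):

// Sketch: asynchronous per-entry extraction, keeping relative paths.
using var archive = ZipArchive.Open("input.zip");
foreach (var entry in archive.Entries.Where(e => !e.IsDirectory))
{
    await entry.WriteToDirectoryAsync(
        "output",
        new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
    );
}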
@@ -1,20 +1,85 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using SharpCompress.Common;
using SharpCompress.Readers;

namespace SharpCompress.Archives;

public static class IArchiveExtensions
{
    /// <summary>
    /// Extract to specific directory, retaining filename
    /// </summary>
    public static void WriteToDirectory(
        this IArchive archive,
        string destinationDirectory,
        ExtractionOptions? options = null
    )
    {
        using var reader = archive.ExtractAllEntries();
        reader.WriteAllToDirectory(destinationDirectory, options);
    }

    /// <summary>
    /// Extracts the archive to the destination directory. Directories will be created as needed.
    /// </summary>
    /// <param name="archive">The archive to extract.</param>
    /// <param name="destination">The folder to extract into.</param>
    /// <param name="progressReport">Optional progress report callback.</param>
    /// <param name="cancellationToken">Optional cancellation token.</param>
    public static void ExtractToDirectory(
        this IArchive archive,
        string destination,
        Action<double>? progressReport = null,
        CancellationToken cancellationToken = default
    )
    {
        // Prepare for progress reporting
        var totalBytes = archive.TotalUncompressSize;
        var bytesRead = 0L;

        // Tracking for created directories.
        var seenDirectories = new HashSet<string>();

        // Extract
        foreach (var entry in archive.Entries)
        {
            cancellationToken.ThrowIfCancellationRequested();

            if (entry.IsDirectory)
            {
                var dirPath = Path.Combine(destination, entry.Key.NotNull("Entry Key is null"));
                if (
                    Path.GetDirectoryName(dirPath + "/") is { } emptyDirectory
                    && seenDirectories.Add(dirPath)
                )
                {
                    Directory.CreateDirectory(emptyDirectory);
                }
                continue;
            }

            // Create each directory if not already created
            var path = Path.Combine(destination, entry.Key.NotNull("Entry Key is null"));
            if (Path.GetDirectoryName(path) is { } directory)
            {
                if (!Directory.Exists(directory) && !seenDirectories.Contains(directory))
                {
                    Directory.CreateDirectory(directory);
                    seenDirectories.Add(directory);
                }
            }

            // Write file
            using var fs = File.OpenWrite(path);
            entry.WriteTo(fs);

            // Update progress
            bytesRead += entry.Size;
            progressReport?.Invoke(bytesRead / (double)totalBytes);
        }
    }
}
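A minimal sketch of the new ExtractToDirectory overload with a progress callback; ArchiveFactory and the paths are assumptions used only for illustration:

// Sketch: whole-archive extraction with progress reported as a fraction (0.0 to 1.0).
using var archive = ArchiveFactory.Open("backup.rar");
archive.ExtractToDirectory(
    "restore",
    progressReport: p => Console.WriteLine($"{p:P0} extracted")
);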
@@ -1,11 +1,10 @@
using SharpCompress.Common;

namespace SharpCompress.Archives;

internal interface IArchiveExtractionListener : IExtractionListener
{
    void EnsureEntriesLoaded();
    void FireEntryExtractionBegin(IArchiveEntry entry);
    void FireEntryExtractionEnd(IArchiveEntry entry);
}
35
src/SharpCompress/Archives/IArchiveFactory.cs
Normal file
@@ -0,0 +1,35 @@
using System.IO;
using SharpCompress.Factories;
using SharpCompress.Readers;

namespace SharpCompress.Archives;

/// <summary>
/// Represents a factory used to identify and open archives.
/// </summary>
/// <remarks>
/// Currently implemented by:<br/>
/// <list type="table">
/// <item><see cref="TarFactory"/></item>
/// <item><see cref="RarFactory"/></item>
/// <item><see cref="ZipFactory"/></item>
/// <item><see cref="GZipFactory"/></item>
/// <item><see cref="SevenZipFactory"/></item>
/// </list>
/// </remarks>
public interface IArchiveFactory : IFactory
{
    /// <summary>
    /// Opens an Archive for random access.
    /// </summary>
    /// <param name="stream">An open, readable and seekable stream.</param>
    /// <param name="readerOptions">reading options.</param>
    IArchive Open(Stream stream, ReaderOptions? readerOptions = null);

    /// <summary>
    /// Constructor with a FileInfo object to an existing file.
    /// </summary>
    /// <param name="fileInfo">the file to open.</param>
    /// <param name="readerOptions">reading options.</param>
    IArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null);
}
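For context, a hedged sketch of how these factory interfaces are normally reached: consumers usually call the format-agnostic ArchiveFactory entry point (part of the same library, not shown in this diff), which selects a concrete factory by sniffing the input; the file name below is illustrative.

// Sketch: format-agnostic opening; the matching factory is picked automatically.
using var archive = ArchiveFactory.Open("unknown-format.bin");
Console.WriteLine($"Detected {archive.Type} with {archive.Entries.Count()} entries");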
36
src/SharpCompress/Archives/IMultiArchiveFactory.cs
Normal file
@@ -0,0 +1,36 @@
using System.Collections.Generic;
using System.IO;
using SharpCompress.Factories;
using SharpCompress.Readers;

namespace SharpCompress.Archives;

/// <summary>
/// Represents a factory used to identify and open archives.
/// </summary>
/// <remarks>
/// Implemented by:<br/>
/// <list type="table">
/// <item><see cref="TarFactory"/></item>
/// <item><see cref="RarFactory"/></item>
/// <item><see cref="ZipFactory"/></item>
/// <item><see cref="GZipFactory"/></item>
/// <item><see cref="SevenZipFactory"/></item>
/// </list>
/// </remarks>
public interface IMultiArchiveFactory : IFactory
{
    /// <summary>
    /// Constructor with IEnumerable Stream objects, multi and split support.
    /// </summary>
    /// <param name="streams"></param>
    /// <param name="readerOptions">reading options.</param>
    IArchive Open(IReadOnlyList<Stream> streams, ReaderOptions? readerOptions = null);

    /// <summary>
    /// Constructor with IEnumerable FileInfo objects, multi and split support.
    /// </summary>
    /// <param name="fileInfos"></param>
    /// <param name="readerOptions">reading options.</param>
    IArchive Open(IReadOnlyList<FileInfo> fileInfos, ReaderOptions? readerOptions = null);
}
@@ -1,21 +1,36 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Writers;

namespace SharpCompress.Archives;

public interface IWritableArchive : IArchive
{
    void RemoveEntry(IArchiveEntry entry);

    IArchiveEntry AddEntry(
        string key,
        Stream source,
        bool closeStream,
        long size = 0,
        DateTime? modified = null
    );

    IArchiveEntry AddDirectoryEntry(string key, DateTime? modified = null);

    void SaveTo(Stream stream, WriterOptions options);

    Task SaveToAsync(
        Stream stream,
        WriterOptions options,
        CancellationToken cancellationToken = default
    );

    /// <summary>
    /// Use this to pause entry rebuilding when adding large collections of entries. Dispose when complete. A using statement is recommended.
    /// </summary>
    /// <returns>IDisposable to resume entry rebuilding</returns>
    IDisposable PauseEntryRebuilding();
}
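A sketch of the PauseEntryRebuilding pattern the summary recommends; ZipArchive, the input folder and the Deflate writer options are assumptions for illustration:

// Sketch: batch-add files with entry rebuilding paused, then save the archive once.
using var archive = ZipArchive.Create();
using (archive.PauseEntryRebuilding())
{
    foreach (var file in Directory.EnumerateFiles(@"C:\input"))
    {
        var info = new FileInfo(file);
        archive.AddEntry(info.Name, info.OpenRead(), true, info.Length, info.LastWriteTime);
    }
}
archive.SaveTo("backup.zip", new WriterOptions(CompressionType.Deflate));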
@@ -1,9 +1,8 @@
using System.IO;

namespace SharpCompress.Archives;

internal interface IWritableArchiveEntry
{
    Stream Stream { get; }
}
@@ -1,57 +1,106 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Writers;

namespace SharpCompress.Archives;

public static class IWritableArchiveExtensions
{
    public static void AddEntry(
        this IWritableArchive writableArchive,
        string entryPath,
        string filePath
    )
    {
        var fileInfo = new FileInfo(filePath);
        if (!fileInfo.Exists)
        {
            throw new FileNotFoundException("Could not AddEntry: " + filePath);
        }
        writableArchive.AddEntry(
            entryPath,
            new FileInfo(filePath).OpenRead(),
            true,
            fileInfo.Length,
            fileInfo.LastWriteTime
        );
    }

    public static void SaveTo(
        this IWritableArchive writableArchive,
        string filePath,
        WriterOptions options
    ) => writableArchive.SaveTo(new FileInfo(filePath), options);

    public static void SaveTo(
        this IWritableArchive writableArchive,
        FileInfo fileInfo,
        WriterOptions options
    )
    {
        using var stream = fileInfo.Open(FileMode.Create, FileAccess.Write);
        writableArchive.SaveTo(stream, options);
    }

    public static Task SaveToAsync(
        this IWritableArchive writableArchive,
        string filePath,
        WriterOptions options,
        CancellationToken cancellationToken = default
    ) => writableArchive.SaveToAsync(new FileInfo(filePath), options, cancellationToken);

    public static async Task SaveToAsync(
        this IWritableArchive writableArchive,
        FileInfo fileInfo,
        WriterOptions options,
        CancellationToken cancellationToken = default
    )
    {
        using var stream = fileInfo.Open(FileMode.Create, FileAccess.Write);
        await writableArchive.SaveToAsync(stream, options, cancellationToken).ConfigureAwait(false);
    }

    public static void AddAllFromDirectory(
        this IWritableArchive writableArchive,
        string filePath,
        string searchPattern = "*.*",
        SearchOption searchOption = SearchOption.AllDirectories
    )
    {
        using (writableArchive.PauseEntryRebuilding())
        {
            foreach (var path in Directory.EnumerateFiles(filePath, searchPattern, searchOption))
            {
                var fileInfo = new FileInfo(path);
                writableArchive.AddEntry(
                    path.Substring(filePath.Length),
                    fileInfo.OpenRead(),
                    true,
                    fileInfo.Length,
                    fileInfo.LastWriteTime
                );
            }
        }
    }

    public static IArchiveEntry AddEntry(
        this IWritableArchive writableArchive,
        string key,
        FileInfo fileInfo
    )
    {
        if (!fileInfo.Exists)
        {
            throw new ArgumentException("FileInfo does not exist.");
        }
        return writableArchive.AddEntry(
            key,
            fileInfo.OpenRead(),
            true,
            fileInfo.Length,
            fileInfo.LastWriteTime
        );
    }
}
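A minimal sketch of AddAllFromDirectory combined with the SaveTo extension; TarArchive and the paths are assumptions for illustration:

// Sketch: archive a whole directory tree into an uncompressed tar file.
using var archive = TarArchive.Create();
archive.AddAllFromDirectory(@"C:\input");
archive.SaveTo("input.tar", new WriterOptions(CompressionType.None));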
20
src/SharpCompress/Archives/IWriteableArchiveFactory.cs
Normal file
@@ -0,0 +1,20 @@
namespace SharpCompress.Archives;

/// <summary>
/// Decorator for <see cref="Factories.Factory"/> used to declare an archive format as able to create writeable archives
/// </summary>
/// <remarks>
/// Implemented by:<br/>
/// <list type="table">
/// <item><see cref="Factories.TarFactory"/></item>
/// <item><see cref="Factories.ZipFactory"/></item>
/// <item><see cref="Factories.GZipFactory"/></item>
/// </list>
/// </remarks>
public interface IWriteableArchiveFactory : Factories.IFactory
{
    /// <summary>
    /// Creates a new, empty archive, ready to be written.
    /// </summary>
    /// <returns></returns>
    IWritableArchive CreateWriteableArchive();
}
@@ -1,4 +1,5 @@
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.IO;
using System.Linq;
using SharpCompress.Common.Rar;
@@ -6,39 +7,33 @@ using SharpCompress.Common.Rar.Headers;
using SharpCompress.IO;
using SharpCompress.Readers;

namespace SharpCompress.Archives.Rar;

/// <summary>
/// A rar part based on a FileInfo object
/// </summary>
internal class FileInfoRarArchiveVolume : RarVolume
{
    internal FileInfoRarArchiveVolume(FileInfo fileInfo, ReaderOptions options, int index)
        : base(StreamingMode.Seekable, fileInfo.OpenRead(), FixOptions(options), index)
    {
        FileInfo = fileInfo;
        FileParts = GetVolumeFileParts().ToArray().ToReadOnly();
    }

    private static ReaderOptions FixOptions(ReaderOptions options)
    {
        //make sure we're closing streams with fileinfo
        options.LeaveStreamOpen = false;
        return options;
    }

    internal ReadOnlyCollection<RarFilePart> FileParts { get; }

    internal FileInfo FileInfo { get; }

    internal override RarFilePart CreateFilePart(MarkHeader markHeader, FileHeader fileHeader) =>
        new FileInfoRarFilePart(this, ReaderOptions.Password, markHeader, fileHeader, FileInfo);

    internal override IEnumerable<RarFilePart> ReadFileParts() => FileParts;
}
@@ -1,25 +1,21 @@
using System.IO;
using SharpCompress.Common.Rar.Headers;

namespace SharpCompress.Archives.Rar;

internal sealed class FileInfoRarFilePart : SeekableFilePart
{
    internal FileInfoRarFilePart(
        FileInfoRarArchiveVolume volume,
        string? password,
        MarkHeader mh,
        FileHeader fh,
        FileInfo fi
    )
        : base(mh, fh, volume.Index, volume.Stream, password) => FileInfo = fi;

    internal FileInfo FileInfo { get; }

    internal override string FilePartName =>
        "Rar File: " + FileInfo.FullName + " File Entry: " + FileHeader.FileName;
}
@@ -1,23 +1,18 @@
using System.Linq;

namespace SharpCompress.Archives.Rar;

public static class RarArchiveExtensions
{
    /// <summary>
    /// RarArchive is the first volume of a multi-part archive. If MultipartVolume is true and IsFirstVolume is false then the first volume file must be missing.
    /// </summary>
    public static bool IsFirstVolume(this RarArchive archive) =>
        archive.Volumes.First().IsFirstVolume;

    /// <summary>
    /// RarArchive is part of a multi-part archive.
    /// </summary>
    public static bool IsMultipartVolume(this RarArchive archive) =>
        archive.Volumes.First().IsMultiVolume;
}
@@ -1,3 +1,4 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
@@ -9,145 +10,201 @@ using SharpCompress.IO;
|
||||
using SharpCompress.Readers;
|
||||
using SharpCompress.Readers.Rar;
|
||||
|
||||
namespace SharpCompress.Archives.Rar
|
||||
namespace SharpCompress.Archives.Rar;
|
||||
|
||||
public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
|
||||
{
|
||||
public class
|
||||
RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
|
||||
private bool _disposed;
|
||||
internal Lazy<IRarUnpack> UnpackV2017 { get; } =
|
||||
new(() => new Compressors.Rar.UnpackV2017.Unpack());
|
||||
internal Lazy<IRarUnpack> UnpackV1 { get; } = new(() => new Compressors.Rar.UnpackV1.Unpack());
|
||||
|
||||
/// <summary>
|
||||
/// Constructor with a SourceStream able to handle FileInfo and Streams.
|
||||
/// </summary>
|
||||
/// <param name="sourceStream"></param>
|
||||
private RarArchive(SourceStream sourceStream)
|
||||
: base(ArchiveType.Rar, sourceStream) { }
|
||||
|
||||
public override void Dispose()
|
||||
{
|
||||
internal Lazy<IRarUnpack> UnpackV2017 { get; } = new Lazy<IRarUnpack>(() => new SharpCompress.Compressors.Rar.UnpackV2017.Unpack());
|
||||
internal Lazy<IRarUnpack> UnpackV1 { get; } = new Lazy<IRarUnpack>(() => new SharpCompress.Compressors.Rar.UnpackV1.Unpack());
|
||||
|
||||
|
||||
/// <summary>
|
||||
/// Constructor with a SourceStream able to handle FileInfo and Streams.
|
||||
/// </summary>
|
||||
/// <param name="srcStream"></param>
|
||||
/// <param name="options"></param>
|
||||
internal RarArchive(SourceStream srcStream)
|
||||
: base(ArchiveType.Rar, srcStream)
|
||||
if (!_disposed)
|
||||
{
|
||||
}
|
||||
|
||||
protected override IEnumerable<RarArchiveEntry> LoadEntries(IEnumerable<RarVolume> volumes)
|
||||
{
|
||||
return RarArchiveEntryFactory.GetEntries(this, volumes, ReaderOptions);
|
||||
}
|
||||
|
||||
protected override IEnumerable<RarVolume> LoadVolumes(SourceStream srcStream)
|
||||
{
|
||||
base.SrcStream.LoadAllParts(); //request all streams
|
||||
Stream[] streams = base.SrcStream.Streams.ToArray();
|
||||
if (streams.Length > 1 && IsRarFile(streams[1], ReaderOptions)) //test part 2 - true = multipart not split
|
||||
if (UnpackV1.IsValueCreated && UnpackV1.Value is IDisposable unpackV1)
|
||||
{
|
||||
base.SrcStream.IsVolumes = true;
|
||||
streams[1].Position = 0;
|
||||
base.SrcStream.Position = 0;
|
||||
|
||||
return srcStream.Streams.Select(a => new StreamRarArchiveVolume(a, ReaderOptions));
|
||||
unpackV1.Dispose();
|
||||
}
|
||||
else //split mode or single file
|
||||
return new StreamRarArchiveVolume(base.SrcStream, ReaderOptions).AsEnumerable();
|
||||
|
||||
_disposed = true;
|
||||
base.Dispose();
|
||||
}
|
||||
|
||||
protected override IReader CreateReaderForSolidExtraction()
|
||||
{
|
||||
var stream = Volumes.First().Stream;
|
||||
stream.Position = 0;
|
||||
return RarReader.Open(stream, ReaderOptions);
|
||||
}
|
||||
|
||||
public override bool IsSolid => Volumes.First().IsSolidArchive;
|
||||
|
||||
#region Creation
|
||||
/// <summary>
|
||||
/// Constructor with a FileInfo object to an existing file.
|
||||
/// </summary>
|
||||
/// <param name="filePath"></param>
|
||||
/// <param name="options"></param>
|
||||
public static RarArchive Open(string filePath, ReaderOptions? options = null)
|
||||
{
|
||||
filePath.CheckNotNullOrEmpty(nameof(filePath));
|
||||
FileInfo fileInfo = new FileInfo(filePath);
|
||||
return new RarArchive(new SourceStream(fileInfo, i => RarArchiveVolumeFactory.GetFilePart(i, fileInfo), options ?? new ReaderOptions()));
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Constructor with a FileInfo object to an existing file.
|
||||
/// </summary>
|
||||
/// <param name="fileInfo"></param>
|
||||
/// <param name="options"></param>
|
||||
public static RarArchive Open(FileInfo fileInfo, ReaderOptions? options = null)
|
||||
{
|
||||
fileInfo.CheckNotNull(nameof(fileInfo));
|
||||
return new RarArchive(new SourceStream(fileInfo, i => RarArchiveVolumeFactory.GetFilePart(i, fileInfo), options ?? new ReaderOptions()));
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Takes a seekable Stream as a source
|
||||
/// </summary>
|
||||
/// <param name="stream"></param>
|
||||
/// <param name="options"></param>
|
||||
public static RarArchive Open(Stream stream, ReaderOptions? options = null)
|
||||
{
|
||||
stream.CheckNotNull(nameof(stream));
|
||||
return new RarArchive(new SourceStream(stream, i => null, options ?? new ReaderOptions()));
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Constructor with all file parts passed in
|
||||
/// </summary>
|
||||
/// <param name="fileInfos"></param>
|
||||
/// <param name="readerOptions"></param>
|
||||
public static RarArchive Open(IEnumerable<FileInfo> fileInfos, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
fileInfos.CheckNotNull(nameof(fileInfos));
|
||||
FileInfo[] files = fileInfos.ToArray();
|
||||
return new RarArchive(new SourceStream(files[0], i => i < files.Length ? files[i] : null, readerOptions ?? new ReaderOptions()));
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Constructor with all stream parts passed in
|
||||
/// </summary>
|
||||
/// <param name="streams"></param>
|
||||
/// <param name="readerOptions"></param>
|
||||
public static RarArchive Open(IEnumerable<Stream> streams, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
streams.CheckNotNull(nameof(streams));
|
||||
Stream[] strms = streams.ToArray();
|
||||
return new RarArchive(new SourceStream(strms[0], i => i < strms.Length ? strms[i] : null, readerOptions ?? new ReaderOptions()));
|
||||
}
|
||||
|
||||
|
||||
public static bool IsRarFile(string filePath)
|
||||
{
|
||||
return IsRarFile(new FileInfo(filePath));
|
||||
}
|
||||
|
||||
public static bool IsRarFile(FileInfo fileInfo)
|
||||
{
|
||||
if (!fileInfo.Exists)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
using (Stream stream = fileInfo.OpenRead())
|
||||
{
|
||||
return IsRarFile(stream);
|
||||
}
|
||||
}
|
||||
|
||||
public static bool IsRarFile(Stream stream, ReaderOptions? options = null)
|
||||
{
|
||||
try
|
||||
{
|
||||
MarkHeader.Read(stream, true, false);
|
||||
return true;
|
||||
}
|
||||
catch
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
|
||||
    protected override IEnumerable<RarArchiveEntry> LoadEntries(IEnumerable<RarVolume> volumes) =>
        RarArchiveEntryFactory.GetEntries(this, volumes, ReaderOptions);

    protected override IEnumerable<RarVolume> LoadVolumes(SourceStream sourceStream)
    {
        sourceStream.LoadAllParts(); //request all streams
        var streams = sourceStream.Streams.ToArray();
        var i = 0;
        if (streams.Length > 1 && IsRarFile(streams[1], ReaderOptions)) //test part 2 - true = multipart not split
        {
            sourceStream.IsVolumes = true;
            streams[1].Position = 0;
            sourceStream.Position = 0;

            return sourceStream.Streams.Select(a => new StreamRarArchiveVolume(
                a,
                ReaderOptions,
                i++
            ));
        }

        //split mode or single file
        return new StreamRarArchiveVolume(sourceStream, ReaderOptions, i++).AsEnumerable();
    }

    protected override IReader CreateReaderForSolidExtraction()
    {
        if (this.IsMultipartVolume())
        {
            var streams = Volumes.Select(volume =>
            {
                volume.Stream.Position = 0;
                return volume.Stream;
            });
            return RarReader.Open(streams, ReaderOptions);
        }

        var stream = Volumes.First().Stream;
        stream.Position = 0;
        return RarReader.Open(stream, ReaderOptions);
    }

    public override bool IsSolid => Volumes.First().IsSolidArchive;

    public override bool IsEncrypted => Entries.First(x => !x.IsDirectory).IsEncrypted;

    public virtual int MinVersion => Volumes.First().MinVersion;
    public virtual int MaxVersion => Volumes.First().MaxVersion;

    #region Creation
    /// <summary>
    /// Constructor expects a filepath to an existing file.
    /// </summary>
    /// <param name="filePath"></param>
    /// <param name="options"></param>
    public static RarArchive Open(string filePath, ReaderOptions? options = null)
    {
        filePath.NotNullOrEmpty(nameof(filePath));
        var fileInfo = new FileInfo(filePath);
        return new RarArchive(
            new SourceStream(
                fileInfo,
                i => RarArchiveVolumeFactory.GetFilePart(i, fileInfo),
                options ?? new ReaderOptions()
            )
        );
    }

    /// <summary>
    /// Constructor with a FileInfo object to an existing file.
    /// </summary>
    /// <param name="fileInfo"></param>
    /// <param name="options"></param>
    public static RarArchive Open(FileInfo fileInfo, ReaderOptions? options = null)
    {
        fileInfo.NotNull(nameof(fileInfo));
        return new RarArchive(
            new SourceStream(
                fileInfo,
                i => RarArchiveVolumeFactory.GetFilePart(i, fileInfo),
                options ?? new ReaderOptions()
            )
        );
    }

    /// <summary>
    /// Takes a seekable Stream as a source
    /// </summary>
    /// <param name="stream"></param>
    /// <param name="options"></param>
    public static RarArchive Open(Stream stream, ReaderOptions? options = null)
    {
        stream.NotNull(nameof(stream));

        if (stream is not { CanSeek: true })
        {
            throw new ArgumentException("Stream must be seekable", nameof(stream));
        }

        return new RarArchive(new SourceStream(stream, _ => null, options ?? new ReaderOptions()));
    }

    /// <summary>
    /// Constructor with all file parts passed in
    /// </summary>
    /// <param name="fileInfos"></param>
    /// <param name="readerOptions"></param>
    public static RarArchive Open(
        IEnumerable<FileInfo> fileInfos,
        ReaderOptions? readerOptions = null
    )
    {
        fileInfos.NotNull(nameof(fileInfos));
        var files = fileInfos.ToArray();
        return new RarArchive(
            new SourceStream(
                files[0],
                i => i < files.Length ? files[i] : null,
                readerOptions ?? new ReaderOptions()
            )
        );
    }

    /// <summary>
    /// Constructor with all stream parts passed in
    /// </summary>
    /// <param name="streams"></param>
    /// <param name="readerOptions"></param>
    public static RarArchive Open(IEnumerable<Stream> streams, ReaderOptions? readerOptions = null)
    {
        streams.NotNull(nameof(streams));
        var strms = streams.ToArray();
        return new RarArchive(
            new SourceStream(
                strms[0],
                i => i < strms.Length ? strms[i] : null,
                readerOptions ?? new ReaderOptions()
            )
        );
    }

    public static bool IsRarFile(string filePath) => IsRarFile(new FileInfo(filePath));

    public static bool IsRarFile(FileInfo fileInfo)
    {
        if (!fileInfo.Exists)
        {
            return false;
        }
        using Stream stream = fileInfo.OpenRead();
        return IsRarFile(stream);
    }

    public static bool IsRarFile(Stream stream, ReaderOptions? options = null)
    {
        try
        {
            MarkHeader.Read(stream, true, false);
            return true;
        }
        catch
        {
            return false;
        }
    }

    #endregion
}
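The RarArchive surface above resolves additional volumes through RarArchiveVolumeFactory and exposes the usual probe/open pair. A minimal usage sketch follows, assuming a local file named example.part01.rar; the input and output file names are hypothetical and only exercise the calls shown above.

// Sketch only: opens a (possibly multi-part) RAR archive and copies each file entry out.
// "example.part01.rar" is a hypothetical path; remaining parts are located automatically.
using System;
using System.IO;
using SharpCompress.Archives.Rar;

internal static class RarOpenExample
{
    private static void Main()
    {
        const string path = "example.part01.rar"; // hypothetical input

        if (!RarArchive.IsRarFile(path))
        {
            Console.WriteLine("Not a RAR archive.");
            return;
        }

        using var archive = RarArchive.Open(path);
        foreach (var entry in archive.Entries)
        {
            if (entry.IsDirectory)
            {
                continue;
            }
            using var input = entry.OpenEntryStream();
            using var output = File.Create(Path.GetFileName(entry.Key ?? "unnamed"));
            input.CopyTo(output);
        }
    }
}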
@@ -2,88 +2,136 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Rar;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.Compressors.Rar;
using SharpCompress.Readers;

namespace SharpCompress.Archives.Rar;

public class RarArchiveEntry : RarEntry, IArchiveEntry
{
    private readonly ICollection<RarFilePart> parts;
    private readonly RarArchive archive;
    private readonly ReaderOptions readerOptions;

    internal RarArchiveEntry(
        RarArchive archive,
        IEnumerable<RarFilePart> parts,
        ReaderOptions readerOptions
    )
    {
        this.parts = parts.ToList();
        this.archive = archive;
        this.readerOptions = readerOptions;
        IsSolid = FileHeader.IsSolid;
    }

    public override CompressionType CompressionType => CompressionType.Rar;

    public IArchive Archive => archive;

    internal override IEnumerable<FilePart> Parts => parts.Cast<FilePart>();

    internal override FileHeader FileHeader => parts.First().FileHeader;

    public override long Crc
    {
        get
        {
            CheckIncomplete();
            return BitConverter.ToUInt32(
                parts.Select(fp => fp.FileHeader).Single(fh => !fh.IsSplitAfter).FileCrc.NotNull(),
                0
            );
        }
    }

    public override long Size
    {
        get
        {
            CheckIncomplete();
            return parts.First().FileHeader.UncompressedSize;
        }
    }

    public override long CompressedSize
    {
        get
        {
            CheckIncomplete();
            return parts.Aggregate(0L, (total, fp) => total + fp.FileHeader.CompressedSize);
        }
    }

    public Stream OpenEntryStream()
    {
        RarStream stream;
        if (IsRarV3)
        {
            stream = new RarStream(
                archive.UnpackV1.Value,
                FileHeader,
                new MultiVolumeReadOnlyStream(Parts.Cast<RarFilePart>(), archive)
            );
        }
        else
        {
            stream = new RarStream(
                archive.UnpackV2017.Value,
                FileHeader,
                new MultiVolumeReadOnlyStream(Parts.Cast<RarFilePart>(), archive)
            );
        }

        stream.Initialize();
        return stream;
    }

    public async Task<Stream> OpenEntryStreamAsync(CancellationToken cancellationToken = default)
    {
        RarStream stream;
        if (IsRarV3)
        {
            stream = new RarStream(
                archive.UnpackV1.Value,
                FileHeader,
                new MultiVolumeReadOnlyStream(Parts.Cast<RarFilePart>(), archive)
            );
        }
        else
        {
            stream = new RarStream(
                archive.UnpackV2017.Value,
                FileHeader,
                new MultiVolumeReadOnlyStream(Parts.Cast<RarFilePart>(), archive)
            );
        }

        await stream.InitializeAsync(cancellationToken);
        return stream;
    }

    public bool IsComplete
    {
        get
        {
            var headers = parts.Select(x => x.FileHeader);
            return !headers.First().IsSplitBefore && !headers.Last().IsSplitAfter;
        }
    }

    private void CheckIncomplete()
    {
        if (!readerOptions.DisableCheckIncomplete && !IsComplete)
        {
            throw new IncompleteArchiveException(
                "ArchiveEntry is incomplete and cannot perform this operation."
            );
        }
    }
}
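Because Crc, Size and CompressedSize all call CheckIncomplete, callers usually test IsComplete first when volumes may be missing. A short sketch, assuming an already opened RarArchive:

// Sketch: skip entries that span a missing volume before reading Size/Crc.
using System;
using SharpCompress.Archives.Rar;

internal static class RarEntryInspection
{
    private static void Inspect(RarArchive archive)
    {
        foreach (var entry in archive.Entries)
        {
            if (!entry.IsComplete)
            {
                // Size/Crc would throw IncompleteArchiveException here unless
                // ReaderOptions.DisableCheckIncomplete is set.
                Console.WriteLine($"{entry.Key}: spans a missing volume, skipping");
                continue;
            }
            Console.WriteLine($"{entry.Key}: {entry.Size} bytes, CRC {entry.Crc:X8}");
        }
    }
}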
@@ -1,49 +1,52 @@
using System.Collections.Generic;
using SharpCompress.Common.Rar;
using SharpCompress.Readers;

namespace SharpCompress.Archives.Rar;

internal static class RarArchiveEntryFactory
{
    private static IEnumerable<RarFilePart> GetFileParts(IEnumerable<RarVolume> parts)
    {
        foreach (var rarPart in parts)
        {
            foreach (var fp in rarPart.ReadFileParts())
            {
                yield return fp;
            }
        }
    }

    private static IEnumerable<IEnumerable<RarFilePart>> GetMatchedFileParts(
        IEnumerable<RarVolume> parts
    )
    {
        var groupedParts = new List<RarFilePart>();
        foreach (var fp in GetFileParts(parts))
        {
            groupedParts.Add(fp);

            if (!fp.FileHeader.IsSplitAfter)
            {
                yield return groupedParts;
                groupedParts = new List<RarFilePart>();
            }
        }
        if (groupedParts.Count > 0)
        {
            yield return groupedParts;
        }
    }

    internal static IEnumerable<RarArchiveEntry> GetEntries(
        RarArchive archive,
        IEnumerable<RarVolume> rarParts,
        ReaderOptions readerOptions
    )
    {
        foreach (var groupedParts in GetMatchedFileParts(rarParts))
        {
            yield return new RarArchiveEntry(archive, groupedParts, readerOptions);
        }
    }
}
@@ -1,40 +1,52 @@
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common.Rar;
using SharpCompress.Readers;
using System.Linq;
using System.Text;
using SharpCompress.Common.Rar.Headers;
using System.Text.RegularExpressions;

namespace SharpCompress.Archives.Rar;

internal static class RarArchiveVolumeFactory
{
    internal static FileInfo? GetFilePart(int index, FileInfo part1) //base the name on the first part
    {
        FileInfo? item = null;

        //new style rar - ..part1 | /part01 | part001 ....
        var m = Regex.Match(part1.Name, @"^(.*\.part)([0-9]+)(\.rar)$", RegexOptions.IgnoreCase);
        if (m.Success)
            item = new FileInfo(
                Path.Combine(
                    part1.DirectoryName!,
                    String.Concat(
                        m.Groups[1].Value,
                        (index + 1).ToString().PadLeft(m.Groups[2].Value.Length, '0'),
                        m.Groups[3].Value
                    )
                )
            );
        else
        {
            //old style - ...rar, .r00, .r01 ...
            m = Regex.Match(part1.Name, @"^(.*\.)([r-z{])(ar|[0-9]+)$", RegexOptions.IgnoreCase);
            if (m.Success)
                item = new FileInfo(
                    Path.Combine(
                        part1.DirectoryName!,
                        String.Concat(
                            m.Groups[1].Value,
                            index == 0
                                ? m.Groups[2].Value + m.Groups[3].Value
                                : (char)(m.Groups[2].Value[0] + ((index - 1) / 100))
                                    + (index - 1).ToString("D4").Substring(2)
                        )
                    )
                );
            else //split .001, .002 ....
                return ArchiveVolumeFactory.GetFilePart(index, part1);
        }

        if (item != null && item.Exists)
            return item;

        return null; //no more items
    }
}
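The factory above only derives candidate file names; whether a given part actually exists is checked afterwards. The helper below is an illustration only, not part of the library, and mirrors just the first, new-style regex branch to show the index-to-name mapping it encodes:

// Illustrative helper (hypothetical): next "new style" volume name for a given index.
using System.IO;
using System.Text.RegularExpressions;

internal static class RarVolumeNaming
{
    public static string? NextNewStylePart(string firstPart, int index)
    {
        var m = Regex.Match(
            Path.GetFileName(firstPart),
            @"^(.*\.part)([0-9]+)(\.rar)$",
            RegexOptions.IgnoreCase
        );
        if (!m.Success)
        {
            return null; // old style (.rar/.r00/...) and split (.001/.002) are handled by other branches
        }
        var number = (index + 1).ToString().PadLeft(m.Groups[2].Value.Length, '0');
        return m.Groups[1].Value + number + m.Groups[3].Value;
    }
}

// Example: NextNewStylePart("archive.part01.rar", 1) returns "archive.part02.rar".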
@@ -1,31 +1,45 @@
using System.IO;
using SharpCompress.Common.Rar;
using SharpCompress.Common.Rar.Headers;

namespace SharpCompress.Archives.Rar;

internal class SeekableFilePart : RarFilePart
{
    private readonly Stream _stream;
    private readonly string? _password;

    internal SeekableFilePart(
        MarkHeader mh,
        FileHeader fh,
        int index,
        Stream stream,
        string? password
    )
        : base(mh, fh, index)
    {
        _stream = stream;
        _password = password;
    }

    internal override Stream GetCompressedStream()
    {
        _stream.Position = FileHeader.DataStartPosition;

        if (FileHeader.R4Salt != null)
        {
            var cryptKey = new CryptKey3(_password!);
            return new RarCryptoWrapper(_stream, FileHeader.R4Salt, cryptKey);
        }

        if (FileHeader.Rar5CryptoInfo != null)
        {
            var cryptKey = new CryptKey5(_password!, FileHeader.Rar5CryptoInfo);
            return new RarCryptoWrapper(_stream, FileHeader.Rar5CryptoInfo.Salt, cryptKey);
        }

        return _stream;
    }

    internal override string FilePartName => "Unknown Stream - File Entry: " + FileHeader.FileName;
}
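The password that reaches the CryptKey3/CryptKey5 selection above is the one supplied through ReaderOptions.Password when the archive is opened. A sketch, with a hypothetical file name and password:

// Sketch: RAR4 entries take the R4Salt/CryptKey3 branch, RAR5 entries the
// Rar5CryptoInfo/CryptKey5 branch, once the entry stream is opened.
using SharpCompress.Archives.Rar;
using SharpCompress.Readers;

internal static class EncryptedRarExample
{
    private static void ReadAll()
    {
        var options = new ReaderOptions { Password = "secret" }; // hypothetical password
        using var archive = RarArchive.Open("encrypted.rar", options); // hypothetical file
        foreach (var entry in archive.Entries)
        {
            if (entry.IsDirectory)
            {
                continue;
            }
            using var stream = entry.OpenEntryStream();
            // consume the decrypted, decompressed stream here
        }
    }
}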
@@ -1,27 +1,19 @@
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common.Rar;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.IO;
using SharpCompress.Readers;

namespace SharpCompress.Archives.Rar;

internal class StreamRarArchiveVolume : RarVolume
{
    internal StreamRarArchiveVolume(Stream stream, ReaderOptions options, int index)
        : base(StreamingMode.Seekable, stream, options, index) { }

    internal override IEnumerable<RarFilePart> ReadFileParts() => GetVolumeFileParts();

    internal override RarFilePart CreateFilePart(MarkHeader markHeader, FileHeader fileHeader) =>
        new SeekableFilePart(markHeader, fileHeader, Index, Stream, ReaderOptions.Password);
}
@@ -1,5 +1,3 @@
using System;
using System.Collections.Generic;
using System.IO;
@@ -10,238 +8,268 @@ using SharpCompress.Compressors.LZMA.Utilites;
using SharpCompress.IO;
using SharpCompress.Readers;

namespace SharpCompress.Archives.SevenZip;

public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVolume>
{
    private ArchiveDatabase? _database;

    /// <summary>
    /// Constructor expects a filepath to an existing file.
    /// </summary>
    /// <param name="filePath"></param>
    /// <param name="readerOptions"></param>
    public static SevenZipArchive Open(string filePath, ReaderOptions? readerOptions = null)
    {
        filePath.NotNullOrEmpty("filePath");
        return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
    }

    /// <summary>
    /// Constructor with a FileInfo object to an existing file.
    /// </summary>
    /// <param name="fileInfo"></param>
    /// <param name="readerOptions"></param>
    public static SevenZipArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
    {
        fileInfo.NotNull("fileInfo");
        return new SevenZipArchive(
            new SourceStream(
                fileInfo,
                i => ArchiveVolumeFactory.GetFilePart(i, fileInfo),
                readerOptions ?? new ReaderOptions()
            )
        );
    }

    /// <summary>
    /// Constructor with all file parts passed in
    /// </summary>
    /// <param name="fileInfos"></param>
    /// <param name="readerOptions"></param>
    public static SevenZipArchive Open(
        IEnumerable<FileInfo> fileInfos,
        ReaderOptions? readerOptions = null
    )
    {
        fileInfos.NotNull(nameof(fileInfos));
        var files = fileInfos.ToArray();
        return new SevenZipArchive(
            new SourceStream(
                files[0],
                i => i < files.Length ? files[i] : null,
                readerOptions ?? new ReaderOptions()
            )
        );
    }

    /// <summary>
    /// Constructor with all stream parts passed in
    /// </summary>
    /// <param name="streams"></param>
    /// <param name="readerOptions"></param>
    public static SevenZipArchive Open(
        IEnumerable<Stream> streams,
        ReaderOptions? readerOptions = null
    )
    {
        streams.NotNull(nameof(streams));
        var strms = streams.ToArray();
        return new SevenZipArchive(
            new SourceStream(
                strms[0],
                i => i < strms.Length ? strms[i] : null,
                readerOptions ?? new ReaderOptions()
            )
        );
    }

    /// <summary>
    /// Takes a seekable Stream as a source
    /// </summary>
    /// <param name="stream"></param>
    /// <param name="readerOptions"></param>
    public static SevenZipArchive Open(Stream stream, ReaderOptions? readerOptions = null)
    {
        stream.NotNull("stream");

        if (stream is not { CanSeek: true })
        {
            throw new ArgumentException("Stream must be seekable", nameof(stream));
        }

        return new SevenZipArchive(
            new SourceStream(stream, _ => null, readerOptions ?? new ReaderOptions())
        );
    }

    /// <summary>
    /// Constructor with a SourceStream able to handle FileInfo and Streams.
    /// </summary>
    /// <param name="sourceStream"></param>
    private SevenZipArchive(SourceStream sourceStream)
        : base(ArchiveType.SevenZip, sourceStream) { }

    protected override IEnumerable<SevenZipVolume> LoadVolumes(SourceStream sourceStream)
    {
        sourceStream.NotNull("SourceStream is null").LoadAllParts(); //request all streams
        return new SevenZipVolume(sourceStream, ReaderOptions, 0).AsEnumerable(); //simple single volume or split, multivolume not supported
    }

    public static bool IsSevenZipFile(string filePath) => IsSevenZipFile(new FileInfo(filePath));

    public static bool IsSevenZipFile(FileInfo fileInfo)
    {
        if (!fileInfo.Exists)
        {
            return false;
        }
        using Stream stream = fileInfo.OpenRead();
        return IsSevenZipFile(stream);
    }

    internal SevenZipArchive()
        : base(ArchiveType.SevenZip) { }

    protected override IEnumerable<SevenZipArchiveEntry> LoadEntries(
        IEnumerable<SevenZipVolume> volumes
    )
    {
        var stream = volumes.Single().Stream;
        LoadFactory(stream);
        if (_database is null)
        {
            return Enumerable.Empty<SevenZipArchiveEntry>();
        }
        var entries = new SevenZipArchiveEntry[_database._files.Count];
        for (var i = 0; i < _database._files.Count; i++)
        {
            var file = _database._files[i];
            entries[i] = new SevenZipArchiveEntry(
                this,
                new SevenZipFilePart(stream, _database, i, file, ReaderOptions.ArchiveEncoding)
            );
        }
        foreach (var group in entries.Where(x => !x.IsDirectory).GroupBy(x => x.FilePart.Folder))
        {
            var isSolid = false;
            foreach (var entry in group)
            {
                entry.IsSolid = isSolid;
                isSolid = true; //mark others in this group as solid - same as rar behaviour.
            }
        }

        return entries;
    }

    private void LoadFactory(Stream stream)
    {
        if (_database is null)
        {
            stream.Position = 0;
            var reader = new ArchiveReader();
            reader.Open(stream, lookForHeader: ReaderOptions.LookForHeader);
            _database = reader.ReadDatabase(new PasswordProvider(ReaderOptions.Password));
        }
    }

    public static bool IsSevenZipFile(Stream stream)
    {
        try
        {
            return SignatureMatch(stream);
        }
        catch
        {
            return false;
        }
    }

    private static ReadOnlySpan<byte> Signature =>
        new byte[] { (byte)'7', (byte)'z', 0xBC, 0xAF, 0x27, 0x1C };

    private static bool SignatureMatch(Stream stream)
    {
        var reader = new BinaryReader(stream);
        ReadOnlySpan<byte> signatureBytes = reader.ReadBytes(6);
        return signatureBytes.SequenceEqual(Signature);
    }

    protected override IReader CreateReaderForSolidExtraction() =>
        new SevenZipReader(ReaderOptions, this);

    public override bool IsSolid =>
        Entries
            .Where(x => !x.IsDirectory)
            .GroupBy(x => x.FilePart.Folder)
            .Any(folder => folder.Count() > 1);

    public override bool IsEncrypted => Entries.First(x => !x.IsDirectory).IsEncrypted;

    public override long TotalSize =>
        _database?._packSizes.Aggregate(0L, (total, packSize) => total + packSize) ?? 0;

    private sealed class SevenZipReader : AbstractReader<SevenZipEntry, SevenZipVolume>
    {
        private readonly SevenZipArchive _archive;
        private CFolder? _currentFolder;
        private Stream? _currentStream;
        private CFileItem? _currentItem;

        internal SevenZipReader(ReaderOptions readerOptions, SevenZipArchive archive)
            : base(readerOptions, ArchiveType.SevenZip) => this._archive = archive;

        public override SevenZipVolume Volume => _archive.Volumes.Single();

        protected override IEnumerable<SevenZipEntry> GetEntries(Stream stream)
        {
            var entries = _archive.Entries.ToList();
            stream.Position = 0;
            foreach (var dir in entries.Where(x => x.IsDirectory))
            {
                yield return dir;
            }
            foreach (
                var group in entries.Where(x => !x.IsDirectory).GroupBy(x => x.FilePart.Folder)
            )
            {
                _currentFolder = group.Key;
                if (group.Key is null)
                {
                    _currentStream = Stream.Null;
                }
                else
                {
                    _currentStream = _archive._database?.GetFolderStream(
                        stream,
                        _currentFolder,
                        new PasswordProvider(Options.Password)
                    );
                }
                foreach (var entry in group)
                {
                    _currentItem = entry.FilePart.Header;
                    yield return entry;
                }
            }
        }

        protected override EntryStream GetEntryStream() =>
            CreateEntryStream(
                new ReadOnlySubStream(
                    _currentStream.NotNull("currentStream is not null"),
                    _currentItem?.Size ?? 0
                )
            );
    }

    private class PasswordProvider : IPasswordProvider
    {
        private readonly string? _password;

        public PasswordProvider(string? password) => _password = password;

        public string? CryptoGetTextPassword() => _password;
    }
}
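A usage sketch for the 7z surface above, assuming a local example.7z (hypothetical). It probes the 6-byte signature first, then enumerates entries and reports the IsSolid and TotalSize values computed above.

// Sketch only: probe, open and extract a 7z archive.
using System;
using System.IO;
using SharpCompress.Archives.SevenZip;

internal static class SevenZipExample
{
    private static void Main()
    {
        const string path = "example.7z"; // hypothetical input

        if (!SevenZipArchive.IsSevenZipFile(path))
        {
            Console.WriteLine("Not a 7z archive (signature check failed).");
            return;
        }

        using var archive = SevenZipArchive.Open(path);
        Console.WriteLine($"Solid: {archive.IsSolid}, packed size: {archive.TotalSize}");
        foreach (var entry in archive.Entries)
        {
            if (entry.IsDirectory)
            {
                continue;
            }
            using var stream = entry.OpenEntryStream();
            using var output = File.Create(Path.GetFileName(entry.Key ?? "unnamed"));
            stream.CopyTo(output);
        }
    }
}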
@@ -1,28 +1,26 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.SevenZip;

namespace SharpCompress.Archives.SevenZip;

public class SevenZipArchiveEntry : SevenZipEntry, IArchiveEntry
{
    internal SevenZipArchiveEntry(SevenZipArchive archive, SevenZipFilePart part)
        : base(part) => Archive = archive;

    public Stream OpenEntryStream() => FilePart.GetCompressedStream();

    public Task<Stream> OpenEntryStreamAsync(CancellationToken cancellationToken = default) =>
        Task.FromResult(OpenEntryStream());

    public IArchive Archive { get; }

    public bool IsComplete => true;

    /// <summary>
    /// This is a 7Zip Anti item
    /// </summary>
    public bool IsAnti => FilePart.Header.IsAnti;
}
@@ -2,6 +2,8 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Tar;
using SharpCompress.Common.Tar.Headers;
@@ -11,197 +13,293 @@ using SharpCompress.Readers.Tar;
using SharpCompress.Writers;
using SharpCompress.Writers.Tar;

namespace SharpCompress.Archives.Tar;

public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
{
    /// <summary>
    /// Constructor expects a filepath to an existing file.
    /// </summary>
    /// <param name="filePath"></param>
    /// <param name="readerOptions"></param>
    public static TarArchive Open(string filePath, ReaderOptions? readerOptions = null)
    {
        filePath.NotNullOrEmpty(nameof(filePath));
        return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
    }

    /// <summary>
    /// Constructor with a FileInfo object to an existing file.
    /// </summary>
    /// <param name="fileInfo"></param>
    /// <param name="readerOptions"></param>
    public static TarArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
    {
        fileInfo.NotNull(nameof(fileInfo));
        return new TarArchive(
            new SourceStream(
                fileInfo,
                i => ArchiveVolumeFactory.GetFilePart(i, fileInfo),
                readerOptions ?? new ReaderOptions()
            )
        );
    }

    /// <summary>
    /// Constructor with all file parts passed in
    /// </summary>
    /// <param name="fileInfos"></param>
    /// <param name="readerOptions"></param>
    public static TarArchive Open(
        IEnumerable<FileInfo> fileInfos,
        ReaderOptions? readerOptions = null
    )
    {
        fileInfos.NotNull(nameof(fileInfos));
        var files = fileInfos.ToArray();
        return new TarArchive(
            new SourceStream(
                files[0],
                i => i < files.Length ? files[i] : null,
                readerOptions ?? new ReaderOptions()
            )
        );
    }

    /// <summary>
    /// Constructor with all stream parts passed in
    /// </summary>
    /// <param name="streams"></param>
    /// <param name="readerOptions"></param>
    public static TarArchive Open(IEnumerable<Stream> streams, ReaderOptions? readerOptions = null)
    {
        streams.NotNull(nameof(streams));
        var strms = streams.ToArray();
        return new TarArchive(
            new SourceStream(
                strms[0],
                i => i < strms.Length ? strms[i] : null,
                readerOptions ?? new ReaderOptions()
            )
        );
    }

    /// <summary>
    /// Takes a seekable Stream as a source
    /// </summary>
    /// <param name="stream"></param>
    /// <param name="readerOptions"></param>
    public static TarArchive Open(Stream stream, ReaderOptions? readerOptions = null)
    {
        stream.NotNull(nameof(stream));

        if (stream is not { CanSeek: true })
        {
            throw new ArgumentException("Stream must be seekable", nameof(stream));
        }

        return new TarArchive(
            new SourceStream(stream, i => null, readerOptions ?? new ReaderOptions())
        );
    }

    public static bool IsTarFile(string filePath) => IsTarFile(new FileInfo(filePath));

    public static bool IsTarFile(FileInfo fileInfo)
    {
        if (!fileInfo.Exists)
        {
            return false;
        }
        using Stream stream = fileInfo.OpenRead();
        return IsTarFile(stream);
    }

    public static bool IsTarFile(Stream stream)
    {
        try
        {
            var tarHeader = new TarHeader(new ArchiveEncoding());
            var readSucceeded = tarHeader.Read(new BinaryReader(stream));
            var isEmptyArchive =
                tarHeader.Name?.Length == 0
                && tarHeader.Size == 0
                && Enum.IsDefined(typeof(EntryType), tarHeader.EntryType);
            return readSucceeded || isEmptyArchive;
        }
        catch { }
        return false;
    }

    protected override IEnumerable<TarVolume> LoadVolumes(SourceStream sourceStream)
    {
        sourceStream.NotNull("SourceStream is null").LoadAllParts(); //request all streams
        return new TarVolume(sourceStream, ReaderOptions, 1).AsEnumerable(); //simple single volume or split, multivolume not supported
    }

    /// <summary>
    /// Constructor with a SourceStream able to handle FileInfo and Streams.
    /// </summary>
    /// <param name="sourceStream"></param>
    private TarArchive(SourceStream sourceStream)
        : base(ArchiveType.Tar, sourceStream) { }

    private TarArchive()
        : base(ArchiveType.Tar) { }

    protected override IEnumerable<TarArchiveEntry> LoadEntries(IEnumerable<TarVolume> volumes)
    {
        var stream = volumes.Single().Stream;
        TarHeader? previousHeader = null;
        foreach (
            var header in TarHeaderFactory.ReadHeader(
                StreamingMode.Seekable,
                stream,
                ReaderOptions.ArchiveEncoding
            )
        )
        {
            if (header != null)
            {
                if (header.EntryType == EntryType.LongName)
                {
                    previousHeader = header;
                }
                else
                {
                    if (previousHeader != null)
                    {
                        var entry = new TarArchiveEntry(
                            this,
                            new TarFilePart(previousHeader, stream),
                            CompressionType.None
                        );

                        var oldStreamPos = stream.Position;

                        using (var entryStream = entry.OpenEntryStream())
                        {
                            using var memoryStream = new MemoryStream();
                            entryStream.CopyTo(memoryStream);
                            memoryStream.Position = 0;
                            var bytes = memoryStream.ToArray();

                            header.Name = ReaderOptions.ArchiveEncoding.Decode(bytes).TrimNulls();
                        }

                        stream.Position = oldStreamPos;

                        previousHeader = null;
                    }
                    yield return new TarArchiveEntry(
                        this,
                        new TarFilePart(header, stream),
                        CompressionType.None
                    );
                }
            }
            else
            {
                throw new IncompleteArchiveException("Failed to read TAR header");
            }
        }
    }

    public static TarArchive Create() => new();

    protected override TarArchiveEntry CreateEntryInternal(
        string filePath,
        Stream source,
        long size,
        DateTime? modified,
        bool closeStream
    ) =>
        new TarWritableArchiveEntry(
            this,
            source,
            CompressionType.Unknown,
            filePath,
            size,
            modified,
            closeStream
        );

    protected override TarArchiveEntry CreateDirectoryEntry(
        string directoryPath,
        DateTime? modified
    ) => new TarWritableArchiveEntry(this, directoryPath, modified);

    protected override void SaveTo(
        Stream stream,
        WriterOptions options,
        IEnumerable<TarArchiveEntry> oldEntries,
        IEnumerable<TarArchiveEntry> newEntries
    )
    {
        using var writer = new TarWriter(stream, new TarWriterOptions(options));
        foreach (var entry in oldEntries.Concat(newEntries))
        {
            if (entry.IsDirectory)
            {
                writer.WriteDirectory(
                    entry.Key.NotNull("Entry Key is null"),
                    entry.LastModifiedTime
                );
            }
            else
            {
                using var entryStream = entry.OpenEntryStream();
                writer.Write(
                    entry.Key.NotNull("Entry Key is null"),
                    entryStream,
                    entry.LastModifiedTime,
                    entry.Size
                );
            }
        }
    }

    protected override async Task SaveToAsync(
        Stream stream,
        WriterOptions options,
        IEnumerable<TarArchiveEntry> oldEntries,
        IEnumerable<TarArchiveEntry> newEntries,
        CancellationToken cancellationToken = default
    )
    {
        using var writer = new TarWriter(stream, new TarWriterOptions(options));
        foreach (var entry in oldEntries.Concat(newEntries))
        {
            if (entry.IsDirectory)
            {
                await writer
                    .WriteDirectoryAsync(
                        entry.Key.NotNull("Entry Key is null"),
                        entry.LastModifiedTime,
                        cancellationToken
                    )
                    .ConfigureAwait(false);
            }
            else
            {
                using var entryStream = entry.OpenEntryStream();
                await writer
                    .WriteAsync(
                        entry.Key.NotNull("Entry Key is null"),
                        entryStream,
                        entry.LastModifiedTime,
                        entry.Size,
                        cancellationToken
                    )
                    .ConfigureAwait(false);
            }
        }
    }

    protected override IReader CreateReaderForSolidExtraction()
    {
        var stream = Volumes.Single().Stream;
        stream.Position = 0;
        return TarReader.Open(stream);
    }
}
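A read-side sketch for the tar surface above, assuming a local example.tar (hypothetical): IsTarFile probes a single header, also accepting the empty-archive case, and Open then enumerates entries.

// Sketch only: probe and list a tar file.
using System;
using SharpCompress.Archives.Tar;

internal static class TarExample
{
    private static void Main()
    {
        const string path = "example.tar"; // hypothetical input

        if (!TarArchive.IsTarFile(path))
        {
            Console.WriteLine("Not a tar file.");
            return;
        }

        using var archive = TarArchive.Open(path);
        foreach (var entry in archive.Entries)
        {
            Console.WriteLine($"{(entry.IsDirectory ? "dir " : "file")} {entry.Key} ({entry.Size} bytes)");
        }
    }
}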
@@ -1,29 +1,28 @@
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Tar;

namespace SharpCompress.Archives.Tar;

public class TarArchiveEntry : TarEntry, IArchiveEntry
{
    internal TarArchiveEntry(TarArchive archive, TarFilePart? part, CompressionType compressionType)
        : base(part, compressionType) => Archive = archive;

    public virtual Stream OpenEntryStream() => Parts.Single().GetCompressedStream().NotNull();

    public virtual Task<Stream> OpenEntryStreamAsync(
        CancellationToken cancellationToken = default
    ) => Task.FromResult(OpenEntryStream());

    #region IArchiveEntry Members

    public IArchive Archive { get; }

    public bool IsComplete => true;

    #endregion
}
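OpenEntryStreamAsync above wraps the synchronous call in Task.FromResult, so tar entries can be consumed with the same async pattern as other entry types. A brief sketch, assuming an already opened TarArchive and a caller-supplied destination stream:

// Sketch: copy the first file entry asynchronously.
using System.IO;
using System.Threading.Tasks;
using SharpCompress.Archives.Tar;

internal static class TarAsyncRead
{
    private static async Task CopyFirstEntryAsync(TarArchive archive, Stream destination)
    {
        foreach (var entry in archive.Entries)
        {
            if (entry.IsDirectory)
            {
                continue;
            }
            using var source = await entry.OpenEntryStreamAsync();
            await source.CopyToAsync(destination);
            break;
        }
    }
}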
@@ -1,67 +1,92 @@
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common;
using SharpCompress.IO;

namespace SharpCompress.Archives.Tar;

internal sealed class TarWritableArchiveEntry : TarArchiveEntry, IWritableArchiveEntry
{
    private readonly bool closeStream;
    private readonly Stream? stream;
    private readonly bool isDirectory;

    internal TarWritableArchiveEntry(
        TarArchive archive,
        Stream stream,
        CompressionType compressionType,
        string path,
        long size,
        DateTime? lastModified,
        bool closeStream
    )
        : base(archive, null, compressionType)
    {
        this.stream = stream;
        Key = path;
        Size = size;
        LastModifiedTime = lastModified;
        this.closeStream = closeStream;
        isDirectory = false;
    }

    internal TarWritableArchiveEntry(
        TarArchive archive,
        string directoryPath,
        DateTime? lastModified
    )
        : base(archive, null, CompressionType.None)
    {
        stream = null;
        Key = directoryPath;
        Size = 0;
        LastModifiedTime = lastModified;
        closeStream = false;
        isDirectory = true;
    }

    public override long Crc => 0;

    public override string Key { get; }

    public override long CompressedSize => 0;

    public override long Size { get; }

    public override DateTime? LastModifiedTime { get; }

    public override DateTime? CreatedTime => null;

    public override DateTime? LastAccessedTime => null;

    public override DateTime? ArchivedTime => null;

    public override bool IsEncrypted => false;

    public override bool IsDirectory => isDirectory;

    public override bool IsSplitAfter => false;

    internal override IEnumerable<FilePart> Parts => throw new NotImplementedException();

    Stream IWritableArchiveEntry.Stream => stream ?? Stream.Null;

    public override Stream OpenEntryStream()
    {
        if (stream is null)
        {
            return Stream.Null;
        }
        //ensure new stream is at the start, this could be reset
        stream.Seek(0, SeekOrigin.Begin);
        return SharpCompressStream.Create(stream, leaveOpen: true);
    }

    internal override void Close()
    {
        if (closeStream && stream is not null)
        {
            stream.Dispose();
        }
    }
}
@@ -2,6 +2,8 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Zip;
using SharpCompress.Common.Zip.Headers;
@@ -12,261 +14,375 @@ using SharpCompress.Readers.Zip;
using SharpCompress.Writers;
using SharpCompress.Writers.Zip;

namespace SharpCompress.Archives.Zip;

public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
{
    private readonly SeekableZipHeaderFactory? headerFactory;

    /// <summary>
    /// Gets or sets the compression level applied to files added to the archive,
    /// if the compression method is set to deflate
    /// </summary>
    public CompressionLevel DeflateCompressionLevel { get; set; }

    /// <summary>
    /// Constructor with a SourceStream able to handle FileInfo and Streams.
    /// </summary>
    /// <param name="sourceStream"></param>
    internal ZipArchive(SourceStream sourceStream)
        : base(ArchiveType.Zip, sourceStream) =>
        headerFactory = new SeekableZipHeaderFactory(
            sourceStream.ReaderOptions.Password,
            sourceStream.ReaderOptions.ArchiveEncoding
        );

    /// <summary>
    /// Constructor expects a filepath to an existing file.
    /// </summary>
    /// <param name="filePath"></param>
    /// <param name="readerOptions"></param>
    public static ZipArchive Open(string filePath, ReaderOptions? readerOptions = null)
    {
        filePath.NotNullOrEmpty(nameof(filePath));
        return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
    }

    /// <summary>
    /// Constructor with a FileInfo object to an existing file.
    /// </summary>
    /// <param name="fileInfo"></param>
    /// <param name="readerOptions"></param>
    public static ZipArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
    {
        fileInfo.NotNull(nameof(fileInfo));
        return new ZipArchive(
            new SourceStream(
                fileInfo,
                i => ZipArchiveVolumeFactory.GetFilePart(i, fileInfo),
                readerOptions ?? new ReaderOptions()
            )
        );
    }

    /// <summary>
    /// Constructor with all file parts passed in
    /// </summary>
    /// <param name="fileInfos"></param>
    /// <param name="readerOptions"></param>
    public static ZipArchive Open(
        IEnumerable<FileInfo> fileInfos,
        ReaderOptions? readerOptions = null
    )
    {
        fileInfos.NotNull(nameof(fileInfos));
        var files = fileInfos.ToArray();
        return new ZipArchive(
            new SourceStream(
                files[0],
                i => i < files.Length ? files[i] : null,
                readerOptions ?? new ReaderOptions()
            )
        );
    }

    /// <summary>
    /// Constructor with all stream parts passed in
    /// </summary>
    /// <param name="streams"></param>
    /// <param name="readerOptions"></param>
    public static ZipArchive Open(IEnumerable<Stream> streams, ReaderOptions? readerOptions = null)
    {
        streams.NotNull(nameof(streams));
        var strms = streams.ToArray();
        return new ZipArchive(
            new SourceStream(
                strms[0],
                i => i < strms.Length ? strms[i] : null,
);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Takes a seekable Stream as a source
|
||||
/// </summary>
|
||||
/// <param name="stream"></param>
|
||||
/// <param name="readerOptions"></param>
|
||||
public static ZipArchive Open(Stream stream, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
stream.NotNull(nameof(stream));
|
||||
|
||||
if (stream is not { CanSeek: true })
|
||||
{
|
||||
headerFactory = new SeekableZipHeaderFactory(srcStream.ReaderOptions.Password, srcStream.ReaderOptions.ArchiveEncoding);
|
||||
throw new ArgumentException("Stream must be seekable", nameof(stream));
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Constructor expects a filepath to an existing file.
|
||||
/// </summary>
|
||||
/// <param name="filePath"></param>
|
||||
/// <param name="readerOptions"></param>
|
||||
public static ZipArchive Open(string filePath, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
filePath.CheckNotNullOrEmpty(nameof(filePath));
|
||||
return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
|
||||
}
|
||||
return new ZipArchive(
|
||||
new SourceStream(stream, i => null, readerOptions ?? new ReaderOptions())
|
||||
);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Constructor with a FileInfo object to an existing file.
|
||||
/// </summary>
|
||||
/// <param name="fileInfo"></param>
|
||||
/// <param name="readerOptions"></param>
|
||||
public static ZipArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
fileInfo.CheckNotNull(nameof(fileInfo));
|
||||
return new ZipArchive(new SourceStream(fileInfo, i => ZipArchiveVolumeFactory.GetFilePart(i, fileInfo), readerOptions ?? new ReaderOptions()));
|
||||
}
|
||||
public static bool IsZipFile(
|
||||
string filePath,
|
||||
string? password = null,
|
||||
int bufferSize = ReaderOptions.DefaultBufferSize
|
||||
) => IsZipFile(new FileInfo(filePath), password, bufferSize);
|
||||
|
||||
/// <summary>
|
||||
/// Constructor with all file parts passed in
|
||||
/// </summary>
|
||||
/// <param name="fileInfos"></param>
|
||||
/// <param name="readerOptions"></param>
|
||||
public static ZipArchive Open(IEnumerable<FileInfo> fileInfos, ReaderOptions? readerOptions = null)
|
||||
public static bool IsZipFile(
|
||||
FileInfo fileInfo,
|
||||
string? password = null,
|
||||
int bufferSize = ReaderOptions.DefaultBufferSize
|
||||
)
|
||||
{
|
||||
if (!fileInfo.Exists)
|
||||
{
|
||||
fileInfos.CheckNotNull(nameof(fileInfos));
|
||||
FileInfo[] files = fileInfos.ToArray();
|
||||
return new ZipArchive(new SourceStream(files[0], i => i < files.Length ? files[i] : null, readerOptions ?? new ReaderOptions()));
|
||||
return false;
|
||||
}
|
||||
using Stream stream = fileInfo.OpenRead();
|
||||
return IsZipFile(stream, password, bufferSize);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Constructor with all stream parts passed in
|
||||
/// </summary>
|
||||
/// <param name="streams"></param>
|
||||
/// <param name="readerOptions"></param>
|
||||
public static ZipArchive Open(IEnumerable<Stream> streams, ReaderOptions? readerOptions = null)
|
||||
public static bool IsZipFile(
|
||||
Stream stream,
|
||||
string? password = null,
|
||||
int bufferSize = ReaderOptions.DefaultBufferSize
|
||||
)
|
||||
{
|
||||
var headerFactory = new StreamingZipHeaderFactory(password, new ArchiveEncoding(), null);
|
||||
try
|
||||
{
|
||||
streams.CheckNotNull(nameof(streams));
|
||||
Stream[] strms = streams.ToArray();
|
||||
return new ZipArchive(new SourceStream(strms[0], i => i < strms.Length ? strms[i] : null, readerOptions ?? new ReaderOptions()));
|
||||
}
|
||||
if (stream is not SharpCompressStream)
|
||||
{
|
||||
stream = new SharpCompressStream(stream, bufferSize: bufferSize);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Takes a seekable Stream as a source
|
||||
/// </summary>
|
||||
/// <param name="stream"></param>
|
||||
/// <param name="readerOptions"></param>
|
||||
public static ZipArchive Open(Stream stream, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
stream.CheckNotNull(nameof(stream));
|
||||
return new ZipArchive(new SourceStream(stream, i => null, readerOptions ?? new ReaderOptions()));
|
||||
}
|
||||
|
||||
public static bool IsZipFile(string filePath, string? password = null)
|
||||
{
|
||||
return IsZipFile(new FileInfo(filePath), password);
|
||||
}
|
||||
|
||||
public static bool IsZipFile(FileInfo fileInfo, string? password = null)
|
||||
{
|
||||
if (!fileInfo.Exists)
|
||||
var header = headerFactory
|
||||
.ReadStreamHeader(stream)
|
||||
.FirstOrDefault(x => x.ZipHeaderType != ZipHeaderType.Split);
|
||||
if (header is null)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
using (Stream stream = fileInfo.OpenRead())
|
||||
{
|
||||
return IsZipFile(stream, password);
|
||||
}
|
||||
return Enum.IsDefined(typeof(ZipHeaderType), header.ZipHeaderType);
|
||||
}
|
||||
|
||||
public static bool IsZipFile(Stream stream, string? password = null)
|
||||
catch (CryptographicException)
|
||||
{
|
||||
StreamingZipHeaderFactory headerFactory = new StreamingZipHeaderFactory(password, new ArchiveEncoding());
|
||||
try
|
||||
return true;
|
||||
}
|
||||
catch
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
public static bool IsZipMulti(
|
||||
Stream stream,
|
||||
string? password = null,
|
||||
int bufferSize = ReaderOptions.DefaultBufferSize
|
||||
)
|
||||
{
|
||||
var headerFactory = new StreamingZipHeaderFactory(password, new ArchiveEncoding(), null);
|
||||
try
|
||||
{
|
||||
if (stream is not SharpCompressStream)
|
||||
{
|
||||
ZipHeader? header = headerFactory.ReadStreamHeader(stream).FirstOrDefault(x => x.ZipHeaderType != ZipHeaderType.Split);
|
||||
if (header is null)
|
||||
stream = new SharpCompressStream(stream, bufferSize: bufferSize);
|
||||
}
|
||||
|
||||
var header = headerFactory
|
||||
.ReadStreamHeader(stream)
|
||||
.FirstOrDefault(x => x.ZipHeaderType != ZipHeaderType.Split);
|
||||
if (header is null)
|
||||
{
|
||||
if (stream.CanSeek) //could be multipart. Test for central directory - might not be z64 safe
|
||||
{
|
||||
var z = new SeekableZipHeaderFactory(password, new ArchiveEncoding());
|
||||
var x = z.ReadSeekableHeader(stream).FirstOrDefault();
|
||||
return x?.ZipHeaderType == ZipHeaderType.DirectoryEntry;
|
||||
}
|
||||
else
|
||||
{
|
||||
return false;
|
||||
}
|
||||
return Enum.IsDefined(typeof(ZipHeaderType), header.ZipHeaderType);
|
||||
}
|
||||
catch (CryptographicException)
|
||||
{
|
||||
return true;
|
||||
}
|
||||
catch
|
||||
{
|
||||
return false;
|
||||
}
|
||||
return Enum.IsDefined(typeof(ZipHeaderType), header.ZipHeaderType);
|
||||
}
|
||||
|
||||
public static bool IsZipMulti(Stream stream, string? password = null)
|
||||
catch (CryptographicException)
|
||||
{
|
||||
StreamingZipHeaderFactory headerFactory = new StreamingZipHeaderFactory(password, new ArchiveEncoding());
|
||||
try
|
||||
{
|
||||
ZipHeader? header = headerFactory.ReadStreamHeader(stream).FirstOrDefault(x => x.ZipHeaderType != ZipHeaderType.Split);
|
||||
if (header is null)
|
||||
{
|
||||
if (stream.CanSeek) //could be multipart. Test for central directory - might not be z64 safe
|
||||
{
|
||||
SeekableZipHeaderFactory z = new SeekableZipHeaderFactory(password, new ArchiveEncoding());
|
||||
var x = z.ReadSeekableHeader(stream).FirstOrDefault();
|
||||
return x?.ZipHeaderType == ZipHeaderType.DirectoryEntry;
|
||||
}
|
||||
else
|
||||
return false;
|
||||
}
|
||||
return Enum.IsDefined(typeof(ZipHeaderType), header.ZipHeaderType);
|
||||
}
|
||||
catch (CryptographicException)
|
||||
{
|
||||
return true;
|
||||
}
|
||||
catch
|
||||
{
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
protected override IEnumerable<ZipVolume> LoadVolumes(SourceStream srcStream)
|
||||
catch
|
||||
{
|
||||
base.SrcStream.LoadAllParts(); //request all streams
|
||||
base.SrcStream.Position = 0;
|
||||
|
||||
List<Stream> streams = base.SrcStream.Streams.ToList();
|
||||
if (streams.Count > 1) //test part 2 - true = multipart not split
|
||||
{
|
||||
streams[1].Position += 4; //skip the POST_DATA_DESCRIPTOR to prevent an exception
|
||||
bool isZip = IsZipFile(streams[1], ReaderOptions.Password);
|
||||
streams[1].Position -= 4;
|
||||
if (isZip)
|
||||
{
|
||||
base.SrcStream.IsVolumes = true;
|
||||
|
||||
var tmp = streams[0]; //arcs as zip, z01 ... swap the zip the end
|
||||
streams.RemoveAt(0);
|
||||
streams.Add(tmp);
|
||||
|
||||
//streams[0].Position = 4; //skip the POST_DATA_DESCRIPTOR to prevent an exception
|
||||
return streams.Select(a => new ZipVolume(a, ReaderOptions));
|
||||
}
|
||||
}
|
||||
|
||||
//split mode or single file
|
||||
return new ZipVolume(base.SrcStream, ReaderOptions).AsEnumerable();
|
||||
}
|
||||
|
||||
internal ZipArchive()
|
||||
: base(ArchiveType.Zip)
|
||||
{
|
||||
}
|
||||
|
||||
protected override IEnumerable<ZipArchiveEntry> LoadEntries(IEnumerable<ZipVolume> volumes)
|
||||
{
|
||||
var vols = volumes.ToArray();
|
||||
foreach (ZipHeader h in headerFactory.ReadSeekableHeader(vols.Last().Stream))
|
||||
{
|
||||
if (h != null)
|
||||
{
|
||||
switch (h.ZipHeaderType)
|
||||
{
|
||||
case ZipHeaderType.DirectoryEntry:
|
||||
{
|
||||
DirectoryEntryHeader deh = (DirectoryEntryHeader)h;
|
||||
Stream s;
|
||||
if (deh.RelativeOffsetOfEntryHeader + deh.CompressedSize > vols[deh.DiskNumberStart].Stream.Length)
|
||||
{
|
||||
var v = vols.Skip(deh.DiskNumberStart).ToArray();
|
||||
s = new SourceStream(v[0].Stream, i => i < v.Length ? v[i].Stream : null, new ReaderOptions() { LeaveStreamOpen = true });
|
||||
}
|
||||
else
|
||||
s = vols[deh.DiskNumberStart].Stream;
|
||||
yield return new ZipArchiveEntry(this, new SeekableZipFilePart(headerFactory, deh, s));
|
||||
}
|
||||
break;
|
||||
case ZipHeaderType.DirectoryEnd:
|
||||
{
|
||||
byte[] bytes = ((DirectoryEndHeader)h).Comment ?? Array.Empty<byte>();
|
||||
volumes.Last().Comment = ReaderOptions.ArchiveEncoding.Decode(bytes);
|
||||
yield break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public void SaveTo(Stream stream)
|
||||
{
|
||||
SaveTo(stream, new WriterOptions(CompressionType.Deflate));
|
||||
}
|
||||
|
||||
protected override void SaveTo(Stream stream, WriterOptions options,
|
||||
IEnumerable<ZipArchiveEntry> oldEntries,
|
||||
IEnumerable<ZipArchiveEntry> newEntries)
|
||||
{
|
||||
using (var writer = new ZipWriter(stream, new ZipWriterOptions(options)))
|
||||
{
|
||||
foreach (var entry in oldEntries.Concat(newEntries)
|
||||
.Where(x => !x.IsDirectory))
|
||||
{
|
||||
using (var entryStream = entry.OpenEntryStream())
|
||||
{
|
||||
writer.Write(entry.Key, entryStream, entry.LastModifiedTime);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
protected override ZipArchiveEntry CreateEntryInternal(string filePath, Stream source, long size, DateTime? modified,
|
||||
bool closeStream)
|
||||
{
|
||||
return new ZipWritableArchiveEntry(this, source, filePath, size, modified, closeStream);
|
||||
}
|
||||
|
||||
public static ZipArchive Create()
|
||||
{
|
||||
return new ZipArchive();
|
||||
}
|
||||
|
||||
protected override IReader CreateReaderForSolidExtraction()
|
||||
{
|
||||
var stream = Volumes.Single().Stream;
|
||||
stream.Position = 0;
|
||||
return ZipReader.Open(stream, ReaderOptions);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
protected override IEnumerable<ZipVolume> LoadVolumes(SourceStream stream)
|
||||
{
|
||||
stream.LoadAllParts(); //request all streams
|
||||
stream.Position = 0;
|
||||
|
||||
var streams = stream.Streams.ToList();
|
||||
var idx = 0;
|
||||
if (streams.Count() > 1) //test part 2 - true = multipart not split
|
||||
{
|
||||
streams[1].Position += 4; //skip the POST_DATA_DESCRIPTOR to prevent an exception
|
||||
var isZip = IsZipFile(streams[1], ReaderOptions.Password, ReaderOptions.BufferSize);
|
||||
streams[1].Position -= 4;
|
||||
if (isZip)
|
||||
{
|
||||
stream.IsVolumes = true;
|
||||
|
||||
var tmp = streams[0]; //arcs as zip, z01 ... swap the zip the end
|
||||
streams.RemoveAt(0);
|
||||
streams.Add(tmp);
|
||||
|
||||
//streams[0].Position = 4; //skip the POST_DATA_DESCRIPTOR to prevent an exception
|
||||
return streams.Select(a => new ZipVolume(a, ReaderOptions, idx++));
|
||||
}
|
||||
}
|
||||
|
||||
//split mode or single file
|
||||
return new ZipVolume(stream, ReaderOptions, idx++).AsEnumerable();
|
||||
}
|
||||
|
||||
internal ZipArchive()
|
||||
: base(ArchiveType.Zip) { }
|
||||
|
||||
protected override IEnumerable<ZipArchiveEntry> LoadEntries(IEnumerable<ZipVolume> volumes)
|
||||
{
|
||||
var vols = volumes.ToArray();
|
||||
foreach (var h in headerFactory.NotNull().ReadSeekableHeader(vols.Last().Stream))
|
||||
{
|
||||
if (h != null)
|
||||
{
|
||||
switch (h.ZipHeaderType)
|
||||
{
|
||||
case ZipHeaderType.DirectoryEntry:
|
||||
{
|
||||
var deh = (DirectoryEntryHeader)h;
|
||||
Stream s;
|
||||
if (
|
||||
deh.RelativeOffsetOfEntryHeader + deh.CompressedSize
|
||||
> vols[deh.DiskNumberStart].Stream.Length
|
||||
)
|
||||
{
|
||||
var v = vols.Skip(deh.DiskNumberStart).ToArray();
|
||||
s = new SourceStream(
|
||||
v[0].Stream,
|
||||
i => i < v.Length ? v[i].Stream : null,
|
||||
new ReaderOptions() { LeaveStreamOpen = true }
|
||||
);
|
||||
}
|
||||
else
|
||||
{
|
||||
s = vols[deh.DiskNumberStart].Stream;
|
||||
}
|
||||
|
||||
yield return new ZipArchiveEntry(
|
||||
this,
|
||||
new SeekableZipFilePart(headerFactory.NotNull(), deh, s)
|
||||
);
|
||||
}
|
||||
break;
|
||||
case ZipHeaderType.DirectoryEnd:
|
||||
{
|
||||
var bytes = ((DirectoryEndHeader)h).Comment ?? Array.Empty<byte>();
|
||||
vols.Last().Comment = ReaderOptions.ArchiveEncoding.Decode(bytes);
|
||||
yield break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public void SaveTo(Stream stream) => SaveTo(stream, new WriterOptions(CompressionType.Deflate));
|
||||
|
||||
protected override void SaveTo(
|
||||
Stream stream,
|
||||
WriterOptions options,
|
||||
IEnumerable<ZipArchiveEntry> oldEntries,
|
||||
IEnumerable<ZipArchiveEntry> newEntries
|
||||
)
|
||||
{
|
||||
using var writer = new ZipWriter(stream, new ZipWriterOptions(options));
|
||||
foreach (var entry in oldEntries.Concat(newEntries))
|
||||
{
|
||||
if (entry.IsDirectory)
|
||||
{
|
||||
writer.WriteDirectory(
|
||||
entry.Key.NotNull("Entry Key is null"),
|
||||
entry.LastModifiedTime
|
||||
);
|
||||
}
|
||||
else
|
||||
{
|
||||
using var entryStream = entry.OpenEntryStream();
|
||||
writer.Write(
|
||||
entry.Key.NotNull("Entry Key is null"),
|
||||
entryStream,
|
||||
entry.LastModifiedTime
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
protected override async Task SaveToAsync(
|
||||
Stream stream,
|
||||
WriterOptions options,
|
||||
IEnumerable<ZipArchiveEntry> oldEntries,
|
||||
IEnumerable<ZipArchiveEntry> newEntries,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
using var writer = new ZipWriter(stream, new ZipWriterOptions(options));
|
||||
foreach (var entry in oldEntries.Concat(newEntries))
|
||||
{
|
||||
if (entry.IsDirectory)
|
||||
{
|
||||
await writer
|
||||
.WriteDirectoryAsync(
|
||||
entry.Key.NotNull("Entry Key is null"),
|
||||
entry.LastModifiedTime,
|
||||
cancellationToken
|
||||
)
|
||||
.ConfigureAwait(false);
|
||||
}
|
||||
else
|
||||
{
|
||||
using var entryStream = entry.OpenEntryStream();
|
||||
await writer
|
||||
.WriteAsync(
|
||||
entry.Key.NotNull("Entry Key is null"),
|
||||
entryStream,
|
||||
cancellationToken
|
||||
)
|
||||
.ConfigureAwait(false);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
protected override ZipArchiveEntry CreateEntryInternal(
|
||||
string filePath,
|
||||
Stream source,
|
||||
long size,
|
||||
DateTime? modified,
|
||||
bool closeStream
|
||||
) => new ZipWritableArchiveEntry(this, source, filePath, size, modified, closeStream);
|
||||
|
||||
protected override ZipArchiveEntry CreateDirectoryEntry(
|
||||
string directoryPath,
|
||||
DateTime? modified
|
||||
) => new ZipWritableArchiveEntry(this, directoryPath, modified);
|
||||
|
||||
public static ZipArchive Create() => new();
|
||||
|
||||
protected override IReader CreateReaderForSolidExtraction()
|
||||
{
|
||||
var stream = Volumes.Single().Stream;
|
||||
((IStreamStack)stream).StackSeek(0);
|
||||
return ZipReader.Open(stream, ReaderOptions, Entries);
|
||||
}
|
||||
}
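
A minimal usage sketch of the ZipArchive surface shown above (Open, IsZipFile, Entries); the archive path is a placeholder, and ReaderOptions is only needed when a password or custom encoding is required.

using System;
using SharpCompress.Archives.Zip;
using SharpCompress.Readers;

// Probe the file first, then open it and walk the central-directory entries.
if (ZipArchive.IsZipFile("example.zip"))
{
    using var archive = ZipArchive.Open("example.zip", new ReaderOptions());
    foreach (var entry in archive.Entries)
    {
        if (!entry.IsDirectory)
        {
            Console.WriteLine($"{entry.Key}: {entry.Size} bytes");
        }
    }
}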

@@ -1,30 +1,27 @@
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Zip;

namespace SharpCompress.Archives.Zip;

public class ZipArchiveEntry : ZipEntry, IArchiveEntry
{
    internal ZipArchiveEntry(ZipArchive archive, SeekableZipFilePart? part)
        : base(part) => Archive = archive;

    public virtual Stream OpenEntryStream() => Parts.Single().GetCompressedStream().NotNull();

    public virtual Task<Stream> OpenEntryStreamAsync(
        CancellationToken cancellationToken = default
    ) => Task.FromResult(OpenEntryStream());

    #region IArchiveEntry Members

    public IArchive Archive { get; }

    public bool IsComplete => true;

    #endregion

    public string? Comment => ((SeekableZipFilePart)Parts.Single()).Comment;
}

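
A short sketch of the Task-returning wrapper introduced above; OpenEntryStreamAsync simply wraps the synchronous call, and the target path here is a placeholder.

using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Archives.Zip;

public static class EntryCopyExample
{
    // Copy a single entry's decompressed contents to disk.
    public static async Task CopyEntryAsync(
        ZipArchiveEntry entry,
        string targetPath,
        CancellationToken ct = default
    )
    {
        using var source = await entry.OpenEntryStreamAsync(ct);
        using var target = File.Create(targetPath);
        await source.CopyToAsync(target, 81920, ct);
    }
}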
@@ -1,34 +1,35 @@
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common.Rar;
using SharpCompress.Readers;
using System.Linq;
using System.Text;
using SharpCompress.Common.Rar.Headers;
using System.Text.RegularExpressions;

namespace SharpCompress.Archives.Zip;

internal static class ZipArchiveVolumeFactory
{
    internal static FileInfo? GetFilePart(int index, FileInfo part1) //base the name on the first part
    {
        FileInfo? item = null;

        //load files with zip/zipx first. Swapped to end once loaded in ZipArchive
        //new style .zip, z01.. | .zipx, zx01 - if the numbers go beyond 99 then they use 100 ...1000 etc
        var m = Regex.Match(part1.Name, @"^(.*\.)(zipx?|zx?[0-9]+)$", RegexOptions.IgnoreCase);
        if (m.Success)
            item = new FileInfo(
                Path.Combine(
                    part1.DirectoryName!,
                    String.Concat(
                        m.Groups[1].Value,
                        Regex.Replace(m.Groups[2].Value, @"[^xz]", ""),
                        index.ToString().PadLeft(2, '0')
                    )
                )
            );
        else //split - 001, 002 ...
            return ArchiveVolumeFactory.GetFilePart(index, part1);

        if (item != null && item.Exists)
            return item;

        return null; //no more items
    }
}

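
The naming rule above can be illustrated in isolation; this is a sketch with made-up file names that mirrors the regex used by GetFilePart, not part of the library itself.

using System;
using System.Text.RegularExpressions;

// Map a first-part name plus a volume index to the expected next part name:
// "archive.zip" -> "archive.z01", "archive.zipx" -> "archive.zx01".
static string? NextPartName(string firstPart, int index)
{
    var m = Regex.Match(firstPart, @"^(.*\.)(zipx?|zx?[0-9]+)$", RegexOptions.IgnoreCase);
    if (!m.Success)
    {
        return null; // old split style (.001, .002 ...) is handled by ArchiveVolumeFactory
    }
    return string.Concat(
        m.Groups[1].Value,
        Regex.Replace(m.Groups[2].Value, @"[^xz]", ""),
        index.ToString().PadLeft(2, '0')
    );
}

Console.WriteLine(NextPartName("archive.zip", 1));  // archive.z01
Console.WriteLine(NextPartName("archive.zipx", 1)); // archive.zx01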
@@ -1,68 +1,94 @@
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common;
using SharpCompress.IO;

namespace SharpCompress.Archives.Zip;

internal class ZipWritableArchiveEntry : ZipArchiveEntry, IWritableArchiveEntry
{
    private readonly bool closeStream;
    private readonly Stream? stream;
    private readonly bool isDirectory;
    private bool isDisposed;

    internal ZipWritableArchiveEntry(
        ZipArchive archive,
        Stream stream,
        string path,
        long size,
        DateTime? lastModified,
        bool closeStream
    )
        : base(archive, null)
    {
        this.stream = stream;
        Key = path;
        Size = size;
        LastModifiedTime = lastModified;
        this.closeStream = closeStream;
        isDirectory = false;
    }

    internal ZipWritableArchiveEntry(
        ZipArchive archive,
        string directoryPath,
        DateTime? lastModified
    )
        : base(archive, null)
    {
        stream = null;
        Key = directoryPath;
        Size = 0;
        LastModifiedTime = lastModified;
        closeStream = false;
        isDirectory = true;
    }

    public override long Crc => 0;

    public override string Key { get; }

    public override long CompressedSize => 0;

    public override long Size { get; }

    public override DateTime? LastModifiedTime { get; }

    public override DateTime? CreatedTime => null;

    public override DateTime? LastAccessedTime => null;

    public override DateTime? ArchivedTime => null;

    public override bool IsEncrypted => false;

    public override bool IsDirectory => isDirectory;

    public override bool IsSplitAfter => false;

    internal override IEnumerable<FilePart> Parts => throw new NotImplementedException();

    Stream IWritableArchiveEntry.Stream => stream ?? Stream.Null;

    public override Stream OpenEntryStream()
    {
        if (stream is null)
        {
            return Stream.Null;
        }
        //ensure new stream is at the start, this could be reset
        stream.Seek(0, SeekOrigin.Begin);
        return SharpCompressStream.Create(stream, leaveOpen: true);
    }

    internal override void Close()
    {
        if (closeStream && !isDisposed && stream is not null)
        {
            stream.Dispose();
            isDisposed = true;
        }
    }
}

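
A sketch of the write path the entry type above supports: create an empty archive, add an entry, and save with the SaveTo(Stream) overload shown earlier. AddEntry comes from the writable-archive base class; its exact parameter order is assumed here, and the output path is a placeholder.

using System.IO;
using System.Text;
using SharpCompress.Archives.Zip;

// Build a new zip in memory and write it out (SaveTo(Stream) defaults to deflate).
using var archive = ZipArchive.Create();
var payload = new MemoryStream(Encoding.UTF8.GetBytes("hello"));
archive.AddEntry("docs/hello.txt", payload, true, payload.Length); // key, source, closeStream, size
using var output = File.Create("out.zip");
archive.SaveTo(output);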
@@ -1,24 +1,7 @@
using System;
using System.Reflection;
using System.Runtime.CompilerServices;

[assembly: AssemblyTitle("SharpCompress")]
[assembly: AssemblyProduct("SharpCompress")]
[assembly: InternalsVisibleTo("SharpCompress.Test" + SharpCompress.AssemblyInfo.PublicKeySuffix)]
[assembly: CLSCompliant(true)]

namespace SharpCompress
{
    /// <summary>
    /// Just a static class to house the public key, to avoid repetition.
    /// </summary>
    internal static class AssemblyInfo
    {
        internal const string PublicKeySuffix =
            ",PublicKey=002400000480000094000000060200000024000052534131000400000100010059acfa17d26c44" +
            "7a4d03f16eaa72c9187c04f16e6569dd168b080e39a6f5c9fd00f28c768cd8e9a089d5a0e1b34c" +
            "cd971488e7afe030ce5ce8df2053cf12ec89f6d38065c434c09ee6af3ee284c5dc08f44774b679" +
            "bf39298e57efe30d4b00aecf9e4f6f8448b2cb0146d8956dfcab606cc64a0ac38c60a7d78b0d65" +
            "d3b98dc0";
    }
}
[assembly: InternalsVisibleTo(
    "SharpCompress.Test,PublicKey=0024000004800000940000000602000000240000525341310004000001000100158bebf1433f76dffc356733c138babea7a47536c65ed8009b16372c6f4edbb20554db74a62687f56b97c20a6ce8c4b123280279e33c894e7b3aa93ab3c573656fde4db576cfe07dba09619ead26375b25d2c4a8e43f7be257d712b0dd2eb546f67adb09281338618a58ac834fc038dd7e2740a7ab3591826252e4f4516306dc"
)]

src/SharpCompress/Common/Arc/ArcEntry.cs (new file, 60 lines)
@@ -0,0 +1,60 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common.GZip;
|
||||
using SharpCompress.Common.Tar;
|
||||
|
||||
namespace SharpCompress.Common.Arc
|
||||
{
|
||||
public class ArcEntry : Entry
|
||||
{
|
||||
private readonly ArcFilePart? _filePart;
|
||||
|
||||
internal ArcEntry(ArcFilePart? filePart)
|
||||
{
|
||||
_filePart = filePart;
|
||||
}
|
||||
|
||||
public override long Crc
|
||||
{
|
||||
get
|
||||
{
|
||||
if (_filePart == null)
|
||||
{
|
||||
return 0;
|
||||
}
|
||||
return _filePart.Header.Crc16;
|
||||
}
|
||||
}
|
||||
|
||||
public override string? Key => _filePart?.Header.Name;
|
||||
|
||||
public override string? LinkTarget => null;
|
||||
|
||||
public override long CompressedSize => _filePart?.Header.CompressedSize ?? 0;
|
||||
|
||||
public override CompressionType CompressionType =>
|
||||
_filePart?.Header.CompressionMethod ?? CompressionType.Unknown;
|
||||
|
||||
public override long Size => throw new NotImplementedException();
|
||||
|
||||
public override DateTime? LastModifiedTime => null;
|
||||
|
||||
public override DateTime? CreatedTime => null;
|
||||
|
||||
public override DateTime? LastAccessedTime => null;
|
||||
|
||||
public override DateTime? ArchivedTime => null;
|
||||
|
||||
public override bool IsEncrypted => false;
|
||||
|
||||
public override bool IsDirectory => false;
|
||||
|
||||
public override bool IsSplitAfter => false;
|
||||
|
||||
internal override IEnumerable<FilePart> Parts => _filePart.Empty();
|
||||
}
|
||||
}
|
||||
src/SharpCompress/Common/Arc/ArcEntryHeader.cs (new file, 76 lines)
@@ -0,0 +1,76 @@
|
||||
using System;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
|
||||
namespace SharpCompress.Common.Arc
|
||||
{
|
||||
public class ArcEntryHeader
|
||||
{
|
||||
public ArchiveEncoding ArchiveEncoding { get; }
|
||||
public CompressionType CompressionMethod { get; private set; }
|
||||
public string? Name { get; private set; }
|
||||
public long CompressedSize { get; private set; }
|
||||
public DateTime DateTime { get; private set; }
|
||||
public int Crc16 { get; private set; }
|
||||
public long OriginalSize { get; private set; }
|
||||
public long DataStartPosition { get; private set; }
|
||||
|
||||
public ArcEntryHeader(ArchiveEncoding archiveEncoding)
|
||||
{
|
||||
this.ArchiveEncoding = archiveEncoding;
|
||||
}
|
||||
|
||||
public ArcEntryHeader? ReadHeader(Stream stream)
|
||||
{
|
||||
byte[] headerBytes = new byte[29];
|
||||
if (stream.Read(headerBytes, 0, headerBytes.Length) != headerBytes.Length)
|
||||
{
|
||||
return null;
|
||||
}
|
||||
DataStartPosition = stream.Position;
|
||||
return LoadFrom(headerBytes);
|
||||
}
|
||||
|
||||
public ArcEntryHeader LoadFrom(byte[] headerBytes)
|
||||
{
|
||||
CompressionMethod = GetCompressionType(headerBytes[1]);
|
||||
|
||||
// Read name
|
||||
int nameEnd = Array.IndexOf(headerBytes, (byte)0, 1); // Find null terminator
|
||||
Name = Encoding.UTF8.GetString(headerBytes, 2, nameEnd > 0 ? nameEnd - 2 : 12);
|
||||
|
||||
int offset = 15;
|
||||
CompressedSize = BitConverter.ToUInt32(headerBytes, offset);
|
||||
offset += 4;
|
||||
uint rawDateTime = BitConverter.ToUInt32(headerBytes, offset);
|
||||
DateTime = ConvertToDateTime(rawDateTime);
|
||||
offset += 4;
|
||||
Crc16 = BitConverter.ToUInt16(headerBytes, offset);
|
||||
offset += 2;
|
||||
OriginalSize = BitConverter.ToUInt32(headerBytes, offset);
|
||||
return this;
|
||||
}
|
||||
|
||||
private CompressionType GetCompressionType(byte value)
|
||||
{
|
||||
return value switch
|
||||
{
|
||||
1 or 2 => CompressionType.None,
|
||||
3 => CompressionType.Packed,
|
||||
4 => CompressionType.Squeezed,
|
||||
5 or 6 or 7 or 8 => CompressionType.Crunched,
|
||||
9 => CompressionType.Squashed,
|
||||
10 => CompressionType.Crushed,
|
||||
11 => CompressionType.Distilled,
|
||||
_ => CompressionType.Unknown,
|
||||
};
|
||||
}
|
||||
|
||||
public static DateTime ConvertToDateTime(long rawDateTime)
|
||||
{
|
||||
// Convert Unix timestamp to DateTime (UTC)
|
||||
return DateTimeOffset.FromUnixTimeSeconds(rawDateTime).UtcDateTime;
|
||||
}
|
||||
}
|
||||
}
|
||||
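
A brief sketch of driving the new header type directly; the stream is assumed to be positioned at the start of an ARC entry header, and the file name is a placeholder.

using System;
using System.IO;
using SharpCompress.Common;
using SharpCompress.Common.Arc;

using Stream stream = File.OpenRead("example.arc");
var header = new ArcEntryHeader(new ArchiveEncoding()).ReadHeader(stream);
if (header != null)
{
    // DataStartPosition now points just past the 29 header bytes;
    // CompressedSize says how far the entry's data runs from there.
    Console.WriteLine($"{header.Name}: {header.CompressedSize} bytes ({header.CompressionMethod})");
}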
src/SharpCompress/Common/Arc/ArcFilePart.cs (new file, 83 lines)
@@ -0,0 +1,83 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common.GZip;
|
||||
using SharpCompress.Common.Tar;
|
||||
using SharpCompress.Common.Tar.Headers;
|
||||
using SharpCompress.Common.Zip.Headers;
|
||||
using SharpCompress.Compressors.Lzw;
|
||||
using SharpCompress.Compressors.RLE90;
|
||||
using SharpCompress.Compressors.Squeezed;
|
||||
using SharpCompress.IO;
|
||||
|
||||
namespace SharpCompress.Common.Arc
|
||||
{
|
||||
public class ArcFilePart : FilePart
|
||||
{
|
||||
private readonly Stream? _stream;
|
||||
|
||||
internal ArcFilePart(ArcEntryHeader localArcHeader, Stream? seekableStream)
|
||||
: base(localArcHeader.ArchiveEncoding)
|
||||
{
|
||||
_stream = seekableStream;
|
||||
Header = localArcHeader;
|
||||
}
|
||||
|
||||
internal ArcEntryHeader Header { get; set; }
|
||||
|
||||
internal override string? FilePartName => Header.Name;
|
||||
|
||||
internal override Stream GetCompressedStream()
|
||||
{
|
||||
if (_stream != null)
|
||||
{
|
||||
Stream compressedStream;
|
||||
switch (Header.CompressionMethod)
|
||||
{
|
||||
case CompressionType.None:
|
||||
compressedStream = new ReadOnlySubStream(
|
||||
_stream,
|
||||
Header.DataStartPosition,
|
||||
Header.CompressedSize
|
||||
);
|
||||
break;
|
||||
case CompressionType.Packed:
|
||||
compressedStream = new RunLength90Stream(
|
||||
_stream,
|
||||
(int)Header.CompressedSize
|
||||
);
|
||||
break;
|
||||
case CompressionType.Squeezed:
|
||||
compressedStream = new SqueezeStream(_stream, (int)Header.CompressedSize);
|
||||
break;
|
||||
case CompressionType.Crunched:
|
||||
if (Header.OriginalSize > 128 * 1024)
|
||||
{
|
||||
throw new NotSupportedException(
|
||||
"CompressionMethod: "
|
||||
+ Header.CompressionMethod
|
||||
+ " with size > 128KB"
|
||||
);
|
||||
}
|
||||
compressedStream = new ArcLzwStream(
|
||||
_stream,
|
||||
(int)Header.CompressedSize,
|
||||
true
|
||||
);
|
||||
break;
|
||||
default:
|
||||
throw new NotSupportedException(
|
||||
"CompressionMethod: " + Header.CompressionMethod
|
||||
);
|
||||
}
|
||||
return compressedStream;
|
||||
}
|
||||
return _stream.NotNull();
|
||||
}
|
||||
|
||||
internal override Stream? GetRawStream() => _stream;
|
||||
}
|
||||
}
|
||||
src/SharpCompress/Common/Arc/ArcVolume.cs (new file, 16 lines)
@@ -0,0 +1,16 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Readers;
|
||||
|
||||
namespace SharpCompress.Common.Arc
|
||||
{
|
||||
public class ArcVolume : Volume
|
||||
{
|
||||
public ArcVolume(Stream stream, ReaderOptions readerOptions, int index = 0)
|
||||
: base(stream, readerOptions, index) { }
|
||||
}
|
||||
}
|
||||
@@ -1,76 +1,57 @@
using System;
using System.Text;

namespace SharpCompress.Common;

public class ArchiveEncoding
{
    /// <summary>
    /// Default encoding to use when archive format doesn't specify one.
    /// </summary>
    public Encoding? Default { get; set; }

    /// <summary>
    /// ArchiveEncoding used by encryption schemes which don't comply with RFC 2898.
    /// </summary>
    public Encoding? Password { get; set; }

    /// <summary>
    /// Set this encoding when you want to force it for all encoding operations.
    /// </summary>
    public Encoding? Forced { get; set; }

    /// <summary>
    /// Set this when you want to use a custom method for all decoding operations.
    /// </summary>
    /// <returns>string Func(bytes, index, length)</returns>
    public Func<byte[], int, int, string>? CustomDecoder { get; set; }

    public ArchiveEncoding()
        : this(Encoding.Default, Encoding.Default) { }

    public ArchiveEncoding(Encoding def, Encoding password)
    {
        Default = def;
        Password = password;
    }

#if !NETFRAMEWORK
    static ArchiveEncoding() => Encoding.RegisterProvider(CodePagesEncodingProvider.Instance);
#endif

    public string Decode(byte[] bytes) => Decode(bytes, 0, bytes.Length);

    public string Decode(byte[] bytes, int start, int length) =>
        GetDecoder().Invoke(bytes, start, length);

    public string DecodeUTF8(byte[] bytes) => Encoding.UTF8.GetString(bytes, 0, bytes.Length);

    public byte[] Encode(string str) => GetEncoding().GetBytes(str);

    public Encoding GetEncoding() => Forced ?? Default ?? Encoding.UTF8;

    public Encoding GetPasswordEncoding() => Password ?? Encoding.UTF8;

    public Func<byte[], int, int, string> GetDecoder() =>
        CustomDecoder ?? ((bytes, index, count) => GetEncoding().GetString(bytes, index, count));
}

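
A small usage sketch for the encoding options above, assuming they are passed in through ReaderOptions.ArchiveEncoding as elsewhere in the library; code page 865 is only an example.

using System.Text;
using SharpCompress.Common;
using SharpCompress.Readers;

// Force a single-byte code page for entry names in archives that don't declare an encoding.
// The static constructor above already registers CodePagesEncodingProvider on modern .NET.
var options = new ReaderOptions
{
    ArchiveEncoding = new ArchiveEncoding { Forced = Encoding.GetEncoding(865) },
};
// options can then be passed to e.g. ZipArchive.Open(stream, options).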
@@ -1,12 +0,0 @@
using System;

namespace SharpCompress.Common
{
    public class ArchiveException : Exception
    {
        public ArchiveException(string message)
            : base(message)
        {
        }
    }
}

@@ -1,14 +1,10 @@
using System;

namespace SharpCompress.Common;

public class ArchiveExtractionEventArgs<T> : EventArgs
{
    internal ArchiveExtractionEventArgs(T entry) => Item = entry;

    public T Item { get; }
}

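
A sketch of where the event-args type above surfaces; the event names (EntryExtractionBegin/End) are assumed from the archive interface rather than shown in this diff, and the path is a placeholder.

using System;
using SharpCompress.Archives;
using SharpCompress.Archives.Zip;
using SharpCompress.Common;

using var archive = ZipArchive.Open("example.zip");
archive.EntryExtractionBegin += (sender, e) => Console.WriteLine($"extracting {e.Item.Key}");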
@@ -1,11 +1,12 @@
namespace SharpCompress.Common;

public enum ArchiveType
{
    Rar,
    Zip,
    Tar,
    SevenZip,
    GZip,
    Arc,
    Arj,
}

src/SharpCompress/Common/Arj/ArjEntry.cs (new file, 58 lines)
@@ -0,0 +1,58 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common.Arc;
|
||||
using SharpCompress.Common.Arj.Headers;
|
||||
|
||||
namespace SharpCompress.Common.Arj
|
||||
{
|
||||
public class ArjEntry : Entry
|
||||
{
|
||||
private readonly ArjFilePart _filePart;
|
||||
|
||||
internal ArjEntry(ArjFilePart filePart)
|
||||
{
|
||||
_filePart = filePart;
|
||||
}
|
||||
|
||||
public override long Crc => _filePart.Header.OriginalCrc32;
|
||||
|
||||
public override string? Key => _filePart?.Header.Name;
|
||||
|
||||
public override string? LinkTarget => null;
|
||||
|
||||
public override long CompressedSize => _filePart?.Header.CompressedSize ?? 0;
|
||||
|
||||
public override CompressionType CompressionType
|
||||
{
|
||||
get
|
||||
{
|
||||
if (_filePart.Header.CompressionMethod == CompressionMethod.Stored)
|
||||
{
|
||||
return CompressionType.None;
|
||||
}
|
||||
return CompressionType.ArjLZ77;
|
||||
}
|
||||
}
|
||||
|
||||
public override long Size => _filePart?.Header.OriginalSize ?? 0;
|
||||
|
||||
public override DateTime? LastModifiedTime => _filePart.Header.DateTimeModified.DateTime;
|
||||
|
||||
public override DateTime? CreatedTime => _filePart.Header.DateTimeCreated.DateTime;
|
||||
|
||||
public override DateTime? LastAccessedTime => _filePart.Header.DateTimeAccessed.DateTime;
|
||||
|
||||
public override DateTime? ArchivedTime => null;
|
||||
|
||||
public override bool IsEncrypted => false;
|
||||
|
||||
public override bool IsDirectory => _filePart.Header.FileType == FileType.Directory;
|
||||
|
||||
public override bool IsSplitAfter => false;
|
||||
|
||||
internal override IEnumerable<FilePart> Parts => _filePart.Empty();
|
||||
}
|
||||
}
|
||||
src/SharpCompress/Common/Arj/ArjFilePart.cs (new file, 72 lines)
@@ -0,0 +1,72 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common.Arj.Headers;
|
||||
using SharpCompress.Compressors.Arj;
|
||||
using SharpCompress.IO;
|
||||
|
||||
namespace SharpCompress.Common.Arj
|
||||
{
|
||||
public class ArjFilePart : FilePart
|
||||
{
|
||||
private readonly Stream _stream;
|
||||
internal ArjLocalHeader Header { get; set; }
|
||||
|
||||
internal ArjFilePart(ArjLocalHeader localArjHeader, Stream seekableStream)
|
||||
: base(localArjHeader.ArchiveEncoding)
|
||||
{
|
||||
_stream = seekableStream;
|
||||
Header = localArjHeader;
|
||||
}
|
||||
|
||||
internal override string? FilePartName => Header.Name;
|
||||
|
||||
internal override Stream GetCompressedStream()
|
||||
{
|
||||
if (_stream != null)
|
||||
{
|
||||
Stream compressedStream;
|
||||
switch (Header.CompressionMethod)
|
||||
{
|
||||
case CompressionMethod.Stored:
|
||||
compressedStream = new ReadOnlySubStream(
|
||||
_stream,
|
||||
Header.DataStartPosition,
|
||||
Header.CompressedSize
|
||||
);
|
||||
break;
|
||||
case CompressionMethod.CompressedMost:
|
||||
case CompressionMethod.Compressed:
|
||||
case CompressionMethod.CompressedFaster:
|
||||
if (Header.OriginalSize > 128 * 1024)
|
||||
{
|
||||
throw new NotSupportedException(
|
||||
"CompressionMethod: "
|
||||
+ Header.CompressionMethod
|
||||
+ " with size > 128KB"
|
||||
);
|
||||
}
|
||||
compressedStream = new LhaStream<Lh7DecoderCfg>(
|
||||
_stream,
|
||||
(int)Header.OriginalSize
|
||||
);
|
||||
break;
|
||||
case CompressionMethod.CompressedFastest:
|
||||
compressedStream = new LHDecoderStream(_stream, (int)Header.OriginalSize);
|
||||
break;
|
||||
default:
|
||||
throw new NotSupportedException(
|
||||
"CompressionMethod: " + Header.CompressionMethod
|
||||
);
|
||||
}
|
||||
return compressedStream;
|
||||
}
|
||||
return _stream.NotNull();
|
||||
}
|
||||
|
||||
internal override Stream GetRawStream() => _stream;
|
||||
}
|
||||
}
|
||||
src/SharpCompress/Common/Arj/ArjVolume.cs (new file, 36 lines)
@@ -0,0 +1,36 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common.Rar;
|
||||
using SharpCompress.Common.Rar.Headers;
|
||||
using SharpCompress.Readers;
|
||||
|
||||
namespace SharpCompress.Common.Arj
|
||||
{
|
||||
public class ArjVolume : Volume
|
||||
{
|
||||
public ArjVolume(Stream stream, ReaderOptions readerOptions, int index = 0)
|
||||
: base(stream, readerOptions, index) { }
|
||||
|
||||
public override bool IsFirstVolume
|
||||
{
|
||||
get { return true; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// ArjArchive is part of a multi-part archive.
|
||||
/// </summary>
|
||||
public override bool IsMultiVolume
|
||||
{
|
||||
get { return false; }
|
||||
}
|
||||
|
||||
internal IEnumerable<ArjFilePart> GetVolumeFileParts()
|
||||
{
|
||||
return new List<ArjFilePart>();
|
||||
}
|
||||
}
|
||||
}
|
||||
src/SharpCompress/Common/Arj/Headers/ArjHeader.cs (new file, 142 lines)
@@ -0,0 +1,142 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common.Zip.Headers;
|
||||
using SharpCompress.Crypto;
|
||||
|
||||
namespace SharpCompress.Common.Arj.Headers
|
||||
{
|
||||
public enum ArjHeaderType
|
||||
{
|
||||
MainHeader,
|
||||
LocalHeader,
|
||||
}
|
||||
|
||||
public abstract class ArjHeader
|
||||
{
|
||||
private const int FIRST_HDR_SIZE = 34;
|
||||
private const ushort ARJ_MAGIC = 0xEA60;
|
||||
|
||||
public ArjHeader(ArjHeaderType type)
|
||||
{
|
||||
ArjHeaderType = type;
|
||||
}
|
||||
|
||||
public ArjHeaderType ArjHeaderType { get; }
|
||||
public byte Flags { get; set; }
|
||||
public FileType FileType { get; set; }
|
||||
|
||||
public abstract ArjHeader? Read(Stream reader);
|
||||
|
||||
public byte[] ReadHeader(Stream stream)
|
||||
{
|
||||
// check for magic bytes
|
||||
Span<byte> magic = stackalloc byte[2];
|
||||
if (stream.Read(magic) != 2)
|
||||
{
|
||||
return Array.Empty<byte>();
|
||||
}
|
||||
|
||||
var magicValue = (ushort)(magic[0] | magic[1] << 8);
|
||||
if (magicValue != ARJ_MAGIC)
|
||||
{
|
||||
throw new InvalidDataException("Not an ARJ file (wrong magic bytes)");
|
||||
}
|
||||
|
||||
// read header_size
|
||||
byte[] headerBytes = new byte[2];
|
||||
stream.Read(headerBytes, 0, 2);
|
||||
var headerSize = (ushort)(headerBytes[0] | headerBytes[1] << 8);
|
||||
if (headerSize < 1)
|
||||
{
|
||||
return Array.Empty<byte>();
|
||||
}
|
||||
|
||||
var body = new byte[headerSize];
|
||||
var read = stream.Read(body, 0, headerSize);
|
||||
if (read < headerSize)
|
||||
{
|
||||
return Array.Empty<byte>();
|
||||
}
|
||||
|
||||
byte[] crc = new byte[4];
|
||||
read = stream.Read(crc, 0, 4);
|
||||
var checksum = Crc32Stream.Compute(body);
|
||||
// Compute the hash value
|
||||
if (checksum != BitConverter.ToUInt32(crc, 0))
|
||||
{
|
||||
throw new InvalidDataException("Header checksum is invalid");
|
||||
}
|
||||
return body;
|
||||
}
|
||||
|
||||
protected List<byte[]> ReadExtendedHeaders(Stream reader)
|
||||
{
|
||||
List<byte[]> extendedHeader = new List<byte[]>();
|
||||
byte[] buffer = new byte[2];
|
||||
|
||||
while (true)
|
||||
{
|
||||
int bytesRead = reader.Read(buffer, 0, 2);
|
||||
if (bytesRead < 2)
|
||||
{
|
||||
throw new EndOfStreamException(
|
||||
"Unexpected end of stream while reading extended header size."
|
||||
);
|
||||
}
|
||||
|
||||
var extHeaderSize = (ushort)(buffer[0] | (buffer[1] << 8));
|
||||
if (extHeaderSize == 0)
|
||||
{
|
||||
return extendedHeader;
|
||||
}
|
||||
|
||||
byte[] header = new byte[extHeaderSize];
|
||||
bytesRead = reader.Read(header, 0, extHeaderSize);
|
||||
if (bytesRead < extHeaderSize)
|
||||
{
|
||||
throw new EndOfStreamException(
|
||||
"Unexpected end of stream while reading extended header data."
|
||||
);
|
||||
}
|
||||
|
||||
byte[] crc = new byte[4];
|
||||
bytesRead = reader.Read(crc, 0, 4);
|
||||
if (bytesRead < 4)
|
||||
{
|
||||
throw new EndOfStreamException(
|
||||
"Unexpected end of stream while reading extended header CRC."
|
||||
);
|
||||
}
|
||||
|
||||
var checksum = Crc32Stream.Compute(header);
|
||||
if (checksum != BitConverter.ToUInt32(crc, 0))
|
||||
{
|
||||
throw new InvalidDataException("Extended header checksum is invalid");
|
||||
}
|
||||
|
||||
extendedHeader.Add(header);
|
||||
}
|
||||
}
|
||||
|
||||
// Flag helpers
|
||||
public bool IsGabled => (Flags & 0x01) != 0;
|
||||
public bool IsAnsiPage => (Flags & 0x02) != 0;
|
||||
public bool IsVolume => (Flags & 0x04) != 0;
|
||||
public bool IsArjProtected => (Flags & 0x08) != 0;
|
||||
public bool IsPathSym => (Flags & 0x10) != 0;
|
||||
public bool IsBackup => (Flags & 0x20) != 0;
|
||||
public bool IsSecured => (Flags & 0x40) != 0;
|
||||
public bool IsAltName => (Flags & 0x80) != 0;
|
||||
|
||||
public static FileType FileTypeFromByte(byte value)
|
||||
{
|
||||
return Enum.IsDefined(typeof(FileType), value)
|
||||
? (FileType)value
|
||||
: Headers.FileType.Unknown;
|
||||
}
|
||||
}
|
||||
}
|
||||
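
The block framing that ReadHeader and ReadExtendedHeaders above rely on can be sketched standalone: a 2-byte magic 0xEA60, a 2-byte header size, the header bytes, then a CRC32 of those bytes. This condensed version skips the CRC comparison (the library does it via Crc32Stream.Compute) and treats a zero size as the end marker.

using System;
using System.IO;

public static class ArjBlockReader
{
    // Read one raw ARJ header block; returns null on truncation, an empty array at end of archive.
    public static byte[]? ReadBlock(Stream stream)
    {
        Span<byte> prefix = stackalloc byte[4];
        if (stream.Read(prefix) != 4)
        {
            return null;
        }
        var magic = (ushort)(prefix[0] | prefix[1] << 8);
        if (magic != 0xEA60)
        {
            throw new InvalidDataException("Not an ARJ block (wrong magic bytes)");
        }
        var size = (ushort)(prefix[2] | prefix[3] << 8);
        if (size == 0)
        {
            return Array.Empty<byte>(); // end-of-archive marker
        }
        var body = new byte[size];
        if (stream.Read(body, 0, size) != size)
        {
            return null;
        }
        var crc = new byte[4];
        stream.Read(crc, 0, 4); // little-endian CRC32 over body; validation omitted in this sketch
        return body;
    }
}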
src/SharpCompress/Common/Arj/Headers/ArjLocalHeader.cs (new file, 161 lines)
@@ -0,0 +1,161 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Runtime.CompilerServices;
|
||||
using System.Text;
|
||||
using System.Threading.Tasks;
|
||||
|
||||
namespace SharpCompress.Common.Arj.Headers
|
||||
{
|
||||
public class ArjLocalHeader : ArjHeader
|
||||
{
|
||||
public ArchiveEncoding ArchiveEncoding { get; }
|
||||
public long DataStartPosition { get; protected set; }
|
||||
|
||||
public byte ArchiverVersionNumber { get; set; }
|
||||
public byte MinVersionToExtract { get; set; }
|
||||
public HostOS HostOS { get; set; }
|
||||
public CompressionMethod CompressionMethod { get; set; }
|
||||
public DosDateTime DateTimeModified { get; set; } = new DosDateTime(0);
|
||||
public long CompressedSize { get; set; }
|
||||
public long OriginalSize { get; set; }
|
||||
public long OriginalCrc32 { get; set; }
|
||||
public int FileSpecPosition { get; set; }
|
||||
public int FileAccessMode { get; set; }
|
||||
public byte FirstChapter { get; set; }
|
||||
public byte LastChapter { get; set; }
|
||||
public long ExtendedFilePosition { get; set; }
public DosDateTime DateTimeAccessed { get; set; } = new DosDateTime(0);
public DosDateTime DateTimeCreated { get; set; } = new DosDateTime(0);
public long OriginalSizeEvenForVolumes { get; set; }
public string Name { get; set; } = string.Empty;
public string Comment { get; set; } = string.Empty;

private const byte StdHdrSize = 30;
private const byte R9HdrSize = 46;

public ArjLocalHeader(ArchiveEncoding archiveEncoding)
: base(ArjHeaderType.LocalHeader)
{
ArchiveEncoding =
archiveEncoding ?? throw new ArgumentNullException(nameof(archiveEncoding));
}

public override ArjHeader? Read(Stream stream)
{
var body = ReadHeader(stream);
if (body.Length > 0)
{
ReadExtendedHeaders(stream);
var header = LoadFrom(body);
header.DataStartPosition = stream.Position;
return header;
}
return null;
}

public ArjLocalHeader LoadFrom(byte[] headerBytes)
{
int offset = 0;

int ReadInt16()
{
if (offset + 1 >= headerBytes.Length)
{
throw new EndOfStreamException();
}
var v = headerBytes[offset] & 0xFF | (headerBytes[offset + 1] & 0xFF) << 8;
offset += 2;
return v;
}
long ReadInt32()
{
if (offset + 3 >= headerBytes.Length)
{
throw new EndOfStreamException();
}
long v =
headerBytes[offset] & 0xFF
| (headerBytes[offset + 1] & 0xFF) << 8
| (headerBytes[offset + 2] & 0xFF) << 16
| (headerBytes[offset + 3] & 0xFF) << 24;
offset += 4;
return v;
}

byte headerSize = headerBytes[offset++];
ArchiverVersionNumber = headerBytes[offset++];
MinVersionToExtract = headerBytes[offset++];
HostOS hostOS = (HostOS)headerBytes[offset++];
Flags = headerBytes[offset++];
CompressionMethod = CompressionMethodFromByte(headerBytes[offset++]);
FileType = FileTypeFromByte(headerBytes[offset++]);

offset++; // Skip 1 byte

var rawTimestamp = ReadInt32();
DateTimeModified =
rawTimestamp != 0 ? new DosDateTime(rawTimestamp) : new DosDateTime(0);

CompressedSize = ReadInt32();
OriginalSize = ReadInt32();
OriginalCrc32 = ReadInt32();
FileSpecPosition = ReadInt16();
FileAccessMode = ReadInt16();

FirstChapter = headerBytes[offset++];
LastChapter = headerBytes[offset++];

ExtendedFilePosition = 0;
OriginalSizeEvenForVolumes = 0;

if (headerSize > StdHdrSize)
{
ExtendedFilePosition = ReadInt32();

if (headerSize >= R9HdrSize)
{
rawTimestamp = ReadInt32();
DateTimeAccessed =
rawTimestamp != 0 ? new DosDateTime(rawTimestamp) : new DosDateTime(0);
rawTimestamp = ReadInt32();
DateTimeCreated =
rawTimestamp != 0 ? new DosDateTime(rawTimestamp) : new DosDateTime(0);
OriginalSizeEvenForVolumes = ReadInt32();
}
}

Name = Encoding.ASCII.GetString(
headerBytes,
offset,
Array.IndexOf(headerBytes, (byte)0, offset) - offset
);
offset += Name.Length + 1;

Comment = Encoding.ASCII.GetString(
headerBytes,
offset,
Array.IndexOf(headerBytes, (byte)0, offset) - offset
);
offset += Comment.Length + 1;

return this;
}

public static CompressionMethod CompressionMethodFromByte(byte value)
{
return value switch
{
0 => CompressionMethod.Stored,
1 => CompressionMethod.CompressedMost,
2 => CompressionMethod.Compressed,
3 => CompressionMethod.CompressedFaster,
4 => CompressionMethod.CompressedFastest,
8 => CompressionMethod.NoDataNoCrc,
9 => CompressionMethod.NoData,
_ => CompressionMethod.Unknown,
};
}
}
}
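The ReadInt16 and ReadInt32 local functions above assemble little-endian integers by OR-ing shifted bytes. A minimal standalone sketch of the same decoding (the class name and byte values are made up for illustration and are not part of this change):

using System;

class LittleEndianDemo
{
    static void Main()
    {
        // 0x34 0x12 is 0x1234 little-endian; 0x78 0x56 0x34 0x12 is 0x12345678.
        byte[] data = { 0x34, 0x12, 0x78, 0x56, 0x34, 0x12 };
        int u16 = data[0] & 0xFF | (data[1] & 0xFF) << 8;
        long u32 =
            data[2] & 0xFF
            | (data[3] & 0xFF) << 8
            | (data[4] & 0xFF) << 16
            | (data[5] & 0xFF) << 24;
        Console.WriteLine(u16.ToString("X4")); // 1234
        Console.WriteLine(u32.ToString("X8")); // 12345678
    }
}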
138
src/SharpCompress/Common/Arj/Headers/ArjMainHeader.cs
Normal file
@@ -0,0 +1,138 @@
using System;
using System.IO;
using System.Text;
using SharpCompress.Compressors.Deflate;
using SharpCompress.Crypto;

namespace SharpCompress.Common.Arj.Headers
{
public class ArjMainHeader : ArjHeader
{
private const int FIRST_HDR_SIZE = 34;
private const ushort ARJ_MAGIC = 0xEA60;

public ArchiveEncoding ArchiveEncoding { get; }

public int ArchiverVersionNumber { get; private set; }
public int MinVersionToExtract { get; private set; }
public HostOS HostOs { get; private set; }
public int SecurityVersion { get; private set; }
public DosDateTime CreationDateTime { get; private set; } = new DosDateTime(0);
public long CompressedSize { get; private set; }
public long ArchiveSize { get; private set; }
public long SecurityEnvelope { get; private set; }
public int FileSpecPosition { get; private set; }
public int SecurityEnvelopeLength { get; private set; }
public int EncryptionVersion { get; private set; }
public int LastChapter { get; private set; }

public int ArjProtectionFactor { get; private set; }
public int Flags2 { get; private set; }
public string Name { get; private set; } = string.Empty;
public string Comment { get; private set; } = string.Empty;

public ArjMainHeader(ArchiveEncoding archiveEncoding)
: base(ArjHeaderType.MainHeader)
{
ArchiveEncoding =
archiveEncoding ?? throw new ArgumentNullException(nameof(archiveEncoding));
}

public override ArjHeader? Read(Stream stream)
{
var body = ReadHeader(stream);
ReadExtendedHeaders(stream);
return LoadFrom(body);
}

public ArjMainHeader LoadFrom(byte[] headerBytes)
{
var offset = 1;

byte ReadByte()
{
if (offset >= headerBytes.Length)
{
throw new EndOfStreamException();
}
return (byte)(headerBytes[offset++] & 0xFF);
}

int ReadInt16()
{
if (offset + 1 >= headerBytes.Length)
{
throw new EndOfStreamException();
}
var v = headerBytes[offset] & 0xFF | (headerBytes[offset + 1] & 0xFF) << 8;
offset += 2;
return v;
}

long ReadInt32()
{
if (offset + 3 >= headerBytes.Length)
{
throw new EndOfStreamException();
}
long v =
headerBytes[offset] & 0xFF
| (headerBytes[offset + 1] & 0xFF) << 8
| (headerBytes[offset + 2] & 0xFF) << 16
| (headerBytes[offset + 3] & 0xFF) << 24;
offset += 4;
return v;
}
string ReadNullTerminatedString(byte[] x, int startIndex)
{
var result = new StringBuilder();
int i = startIndex;

while (i < x.Length && x[i] != 0)
{
result.Append((char)x[i]);
i++;
}

// Skip the null terminator
i++;
if (i < x.Length)
{
byte[] remainder = new byte[x.Length - i];
Array.Copy(x, i, remainder, 0, remainder.Length);
x = remainder;
}

return result.ToString();
}

ArchiverVersionNumber = ReadByte();
MinVersionToExtract = ReadByte();

var hostOsByte = ReadByte();
HostOs = hostOsByte <= 11 ? (HostOS)hostOsByte : HostOS.Unknown;

Flags = ReadByte();
SecurityVersion = ReadByte();
FileType = FileTypeFromByte(ReadByte());

offset++; // skip reserved

CreationDateTime = new DosDateTime((int)ReadInt32());
CompressedSize = ReadInt32();
ArchiveSize = ReadInt32();

SecurityEnvelope = ReadInt32();
FileSpecPosition = ReadInt16();
SecurityEnvelopeLength = ReadInt16();

EncryptionVersion = ReadByte();
LastChapter = ReadByte();

Name = ReadNullTerminatedString(headerBytes, offset);
Comment = ReadNullTerminatedString(headerBytes, offset + 1 + Name.Length);

return this;
}
}
}
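The main header stores the archive name and comment back to back as zero-terminated byte strings, which is why the comment is read at offset + 1 + Name.Length. A hedged sketch of that layout using the same scanning idea (hypothetical buffer and helper, not the class above):

using System;
using System.Text;

class NullTerminatedDemo
{
    // Scan a zero-terminated ASCII string starting at startIndex.
    static string ReadCString(byte[] buffer, int startIndex)
    {
        var end = Array.IndexOf(buffer, (byte)0, startIndex);
        return Encoding.ASCII.GetString(buffer, startIndex, end - startIndex);
    }

    static void Main()
    {
        // "ARCHIVE" and "a comment", each followed by a NUL, laid out back to back.
        var bytes = Encoding.ASCII.GetBytes("ARCHIVE\0a comment\0");
        var name = ReadCString(bytes, 0);
        var comment = ReadCString(bytes, name.Length + 1);
        Console.WriteLine(name);    // ARCHIVE
        Console.WriteLine(comment); // a comment
    }
}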
20
src/SharpCompress/Common/Arj/Headers/CompressionMethod.cs
Normal file
@@ -0,0 +1,20 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;

namespace SharpCompress.Common.Arj.Headers
{
public enum CompressionMethod
{
Stored = 0,
CompressedMost = 1,
Compressed = 2,
CompressedFaster = 3,
CompressedFastest = 4,
NoDataNoCrc = 8,
NoData = 9,
Unknown,
}
}
37
src/SharpCompress/Common/Arj/Headers/DosDateTime.cs
Normal file
@@ -0,0 +1,37 @@
using System;

namespace SharpCompress.Common.Arj.Headers
{
public class DosDateTime
{
public DateTime DateTime { get; }

public DosDateTime(long dosValue)
{
// Ensure only the lower 32 bits are used
int value = unchecked((int)(dosValue & 0xFFFFFFFF));

var date = (value >> 16) & 0xFFFF;
var time = value & 0xFFFF;

var day = date & 0x1F;
var month = (date >> 5) & 0x0F;
var year = ((date >> 9) & 0x7F) + 1980;

var second = (time & 0x1F) * 2;
var minute = (time >> 5) & 0x3F;
var hour = (time >> 11) & 0x1F;

try
{
DateTime = new DateTime(year, month, day, hour, minute, second);
}
catch
{
DateTime = DateTime.MinValue;
}
}

public override string ToString() => DateTime.ToString("yyyy-MM-dd HH:mm:ss");
}
}
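A worked example of the bit layout DosDateTime unpacks: the high 16 bits hold the date (7-bit year since 1980, 4-bit month, 5-bit day) and the low 16 bits hold the time (5-bit hour, 6-bit minute, 5-bit two-second count). The value below is made up for illustration and assumes the DosDateTime class added above is available:

using System;
using SharpCompress.Common.Arj.Headers;

class DosDateTimeDemo
{
    static void Main()
    {
        // 0x58CF = (44 << 9) | (6 << 5) | 15   -> 2024-06-15
        // 0x63CA = (12 << 11) | (30 << 5) | 10 -> 12:30:20 (seconds stored / 2)
        var stamp = new DosDateTime(0x58CF63CA);
        Console.WriteLine(stamp); // 2024-06-15 12:30:20
    }
}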
13
src/SharpCompress/Common/Arj/Headers/FileType.cs
Normal file
@@ -0,0 +1,13 @@
namespace SharpCompress.Common.Arj.Headers
{
public enum FileType : byte
{
Binary = 0,
Text7Bit = 1,
CommentHeader = 2,
Directory = 3,
VolumeLabel = 4,
ChapterLabel = 5,
Unknown = 255,
}
}
19
src/SharpCompress/Common/Arj/Headers/HostOS.cs
Normal file
@@ -0,0 +1,19 @@
namespace SharpCompress.Common.Arj.Headers
{
public enum HostOS
{
MsDos = 0,
PrimOS = 1,
Unix = 2,
Amiga = 3,
MacOs = 4,
OS2 = 5,
AppleGS = 6,
AtariST = 7,
NeXT = 8,
VaxVMS = 9,
Win95 = 10,
Win32 = 11,
Unknown = 255,
}
}
@@ -1,23 +1,25 @@
|
||||
using System;
|
||||
using System;
|
||||
|
||||
namespace SharpCompress.Common
|
||||
namespace SharpCompress.Common;
|
||||
|
||||
public sealed class CompressedBytesReadEventArgs : EventArgs
|
||||
{
|
||||
public sealed class CompressedBytesReadEventArgs : EventArgs
|
||||
public CompressedBytesReadEventArgs(
|
||||
long compressedBytesRead,
|
||||
long currentFilePartCompressedBytesRead
|
||||
)
|
||||
{
|
||||
public CompressedBytesReadEventArgs(long compressedBytesRead, long currentFilePartCompressedBytesRead)
|
||||
{
|
||||
CompressedBytesRead = compressedBytesRead;
|
||||
CurrentFilePartCompressedBytesRead = currentFilePartCompressedBytesRead;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Compressed bytes read for the current entry
|
||||
/// </summary>
|
||||
public long CompressedBytesRead { get; }
|
||||
|
||||
/// <summary>
|
||||
/// Current file part read for Multipart files (e.g. Rar)
|
||||
/// </summary>
|
||||
public long CurrentFilePartCompressedBytesRead { get; }
|
||||
CompressedBytesRead = compressedBytesRead;
|
||||
CurrentFilePartCompressedBytesRead = currentFilePartCompressedBytesRead;
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Compressed bytes read for the current entry
|
||||
/// </summary>
|
||||
public long CompressedBytesRead { get; }
|
||||
|
||||
/// <summary>
|
||||
/// Current file part read for Multipart files (e.g. Rar)
|
||||
/// </summary>
|
||||
public long CurrentFilePartCompressedBytesRead { get; }
|
||||
}
|
||||
|
||||
@@ -1,19 +1,33 @@
|
||||
namespace SharpCompress.Common
|
||||
namespace SharpCompress.Common;
|
||||
|
||||
public enum CompressionType
|
||||
{
|
||||
public enum CompressionType
|
||||
{
|
||||
None,
|
||||
GZip,
|
||||
BZip2,
|
||||
PPMd,
|
||||
Deflate,
|
||||
Rar,
|
||||
LZMA,
|
||||
BCJ,
|
||||
BCJ2,
|
||||
LZip,
|
||||
Xz,
|
||||
Unknown,
|
||||
Deflate64
|
||||
}
|
||||
}
|
||||
None,
|
||||
GZip,
|
||||
BZip2,
|
||||
PPMd,
|
||||
Deflate,
|
||||
Rar,
|
||||
LZMA,
|
||||
BCJ,
|
||||
BCJ2,
|
||||
LZip,
|
||||
Xz,
|
||||
Unknown,
|
||||
Deflate64,
|
||||
Shrink,
|
||||
Lzw,
|
||||
Reduce1,
|
||||
Reduce2,
|
||||
Reduce3,
|
||||
Reduce4,
|
||||
Explode,
|
||||
Squeezed,
|
||||
Packed,
|
||||
Crunched,
|
||||
Squashed,
|
||||
Crushed,
|
||||
Distilled,
|
||||
ZStandard,
|
||||
ArjLZ77,
|
||||
}
|
||||
|
||||
@@ -1,12 +0,0 @@
|
||||
using System;
|
||||
|
||||
namespace SharpCompress.Common
|
||||
{
|
||||
public class CryptographicException : Exception
|
||||
{
|
||||
public CryptographicException(string message)
|
||||
: base(message)
|
||||
{
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,89 +1,90 @@
|
||||
using System;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
|
||||
namespace SharpCompress.Common
|
||||
namespace SharpCompress.Common;
|
||||
|
||||
public abstract class Entry : IEntry
|
||||
{
|
||||
public abstract class Entry : IEntry
|
||||
{
|
||||
/// <summary>
|
||||
/// The File's 32 bit CRC Hash
|
||||
/// </summary>
|
||||
public abstract long Crc { get; }
|
||||
/// <summary>
|
||||
/// The File's 32 bit CRC Hash
|
||||
/// </summary>
|
||||
public abstract long Crc { get; }
|
||||
|
||||
/// <summary>
|
||||
/// The string key of the file internal to the Archive.
|
||||
/// </summary>
|
||||
public abstract string Key { get; }
|
||||
/// <summary>
|
||||
/// The string key of the file internal to the Archive.
|
||||
/// </summary>
|
||||
public abstract string? Key { get; }
|
||||
|
||||
/// <summary>
|
||||
/// The target of a symlink entry internal to the Archive. Will be null if not a symlink.
|
||||
/// </summary>
|
||||
public abstract string? LinkTarget { get; }
|
||||
/// <summary>
|
||||
/// The target of a symlink entry internal to the Archive. Will be null if not a symlink.
|
||||
/// </summary>
|
||||
public abstract string? LinkTarget { get; }
|
||||
|
||||
/// <summary>
|
||||
/// The compressed file size
|
||||
/// </summary>
|
||||
public abstract long CompressedSize { get; }
|
||||
/// <summary>
|
||||
/// The compressed file size
|
||||
/// </summary>
|
||||
public abstract long CompressedSize { get; }
|
||||
|
||||
/// <summary>
|
||||
/// The compression type
|
||||
/// </summary>
|
||||
public abstract CompressionType CompressionType { get; }
|
||||
/// <summary>
|
||||
/// The compression type
|
||||
/// </summary>
|
||||
public abstract CompressionType CompressionType { get; }
|
||||
|
||||
/// <summary>
|
||||
/// The uncompressed file size
|
||||
/// </summary>
|
||||
public abstract long Size { get; }
|
||||
/// <summary>
|
||||
/// The uncompressed file size
|
||||
/// </summary>
|
||||
public abstract long Size { get; }
|
||||
|
||||
/// <summary>
|
||||
/// The entry last modified time in the archive, if recorded
|
||||
/// </summary>
|
||||
public abstract DateTime? LastModifiedTime { get; }
|
||||
/// <summary>
|
||||
/// The entry last modified time in the archive, if recorded
|
||||
/// </summary>
|
||||
public abstract DateTime? LastModifiedTime { get; }
|
||||
|
||||
/// <summary>
|
||||
/// The entry create time in the archive, if recorded
|
||||
/// </summary>
|
||||
public abstract DateTime? CreatedTime { get; }
|
||||
/// <summary>
|
||||
/// The entry create time in the archive, if recorded
|
||||
/// </summary>
|
||||
public abstract DateTime? CreatedTime { get; }
|
||||
|
||||
/// <summary>
|
||||
/// The entry last accessed time in the archive, if recorded
|
||||
/// </summary>
|
||||
public abstract DateTime? LastAccessedTime { get; }
|
||||
/// <summary>
|
||||
/// The entry last accessed time in the archive, if recorded
|
||||
/// </summary>
|
||||
public abstract DateTime? LastAccessedTime { get; }
|
||||
|
||||
/// <summary>
|
||||
/// The entry time when archived, if recorded
|
||||
/// </summary>
|
||||
public abstract DateTime? ArchivedTime { get; }
|
||||
/// <summary>
|
||||
/// The entry time when archived, if recorded
|
||||
/// </summary>
|
||||
public abstract DateTime? ArchivedTime { get; }
|
||||
|
||||
/// <summary>
|
||||
/// Entry is password protected and encrypted and cannot be extracted.
|
||||
/// </summary>
|
||||
public abstract bool IsEncrypted { get; }
|
||||
/// <summary>
|
||||
/// Entry is password protected and encrypted and cannot be extracted.
|
||||
/// </summary>
|
||||
public abstract bool IsEncrypted { get; }
|
||||
|
||||
/// <summary>
|
||||
/// Entry is directory.
|
||||
/// </summary>
|
||||
public abstract bool IsDirectory { get; }
|
||||
/// <summary>
|
||||
/// Entry is directory.
|
||||
/// </summary>
|
||||
public abstract bool IsDirectory { get; }
|
||||
|
||||
/// <summary>
|
||||
/// Entry is split among multiple volumes
|
||||
/// </summary>
|
||||
public abstract bool IsSplitAfter { get; }
|
||||
/// <summary>
|
||||
/// Entry is split among multiple volumes
|
||||
/// </summary>
|
||||
public abstract bool IsSplitAfter { get; }
|
||||
|
||||
/// <inheritdoc/>
|
||||
public override string ToString() => Key;
|
||||
public int VolumeIndexFirst => Parts.FirstOrDefault()?.Index ?? 0;
|
||||
public int VolumeIndexLast => Parts.LastOrDefault()?.Index ?? 0;
|
||||
|
||||
internal abstract IEnumerable<FilePart> Parts { get; }
|
||||
/// <inheritdoc/>
|
||||
public override string ToString() => Key ?? "Entry";
|
||||
|
||||
public bool IsSolid { get; set; }
|
||||
internal abstract IEnumerable<FilePart> Parts { get; }
|
||||
|
||||
internal virtual void Close()
|
||||
{
|
||||
}
|
||||
public bool IsSolid { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Entry file attribute.
|
||||
/// </summary>
|
||||
public virtual int? Attrib => throw new NotImplementedException();
|
||||
}
|
||||
internal virtual void Close() { }
|
||||
|
||||
/// <summary>
|
||||
/// Entry file attribute.
|
||||
/// </summary>
|
||||
public virtual int? Attrib => throw new NotImplementedException();
|
||||
}
|
||||
|
||||
@@ -1,93 +1,212 @@
|
||||
using System;
|
||||
using System;
|
||||
using System.IO;
|
||||
using System.IO.Compression;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.IO;
|
||||
using SharpCompress.Readers;
|
||||
|
||||
namespace SharpCompress.Common
|
||||
{
|
||||
public class EntryStream : Stream
|
||||
{
|
||||
private readonly IReader _reader;
|
||||
private readonly Stream _stream;
|
||||
private bool _completed;
|
||||
private bool _isDisposed;
|
||||
namespace SharpCompress.Common;
|
||||
|
||||
internal EntryStream(IReader reader, Stream stream)
|
||||
public class EntryStream : Stream, IStreamStack
|
||||
{
|
||||
#if DEBUG_STREAMS
|
||||
long IStreamStack.InstanceId { get; set; }
|
||||
#endif
|
||||
int IStreamStack.DefaultBufferSize { get; set; }
|
||||
|
||||
Stream IStreamStack.BaseStream() => _stream;
|
||||
|
||||
int IStreamStack.BufferSize
|
||||
{
|
||||
get => 0;
|
||||
set { }
|
||||
}
|
||||
int IStreamStack.BufferPosition
|
||||
{
|
||||
get => 0;
|
||||
set { }
|
||||
}
|
||||
|
||||
void IStreamStack.SetPosition(long position) { }
|
||||
|
||||
private readonly IReader _reader;
|
||||
private readonly Stream _stream;
|
||||
private bool _completed;
|
||||
private bool _isDisposed;
|
||||
|
||||
internal EntryStream(IReader reader, Stream stream)
|
||||
{
|
||||
_reader = reader;
|
||||
_stream = stream;
|
||||
#if DEBUG_STREAMS
|
||||
this.DebugConstruct(typeof(EntryStream));
|
||||
#endif
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// When reading a stream from OpenEntryStream, the stream must be completed so use this to finish reading the entire entry.
|
||||
/// </summary>
|
||||
public void SkipEntry()
|
||||
{
|
||||
this.Skip();
|
||||
_completed = true;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Asynchronously skip the rest of the entry stream.
|
||||
/// </summary>
|
||||
public async Task SkipEntryAsync(CancellationToken cancellationToken = default)
|
||||
{
|
||||
await this.SkipAsync(cancellationToken).ConfigureAwait(false);
|
||||
_completed = true;
|
||||
}
|
||||
|
||||
protected override void Dispose(bool disposing)
|
||||
{
|
||||
if (_isDisposed)
|
||||
{
|
||||
_reader = reader;
|
||||
_stream = stream;
|
||||
return;
|
||||
}
|
||||
_isDisposed = true;
|
||||
if (!(_completed || _reader.Cancelled))
|
||||
{
|
||||
SkipEntry();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// When reading a stream from OpenEntryStream, the stream must be completed so use this to finish reading the entire entry.
|
||||
/// </summary>
|
||||
public void SkipEntry()
|
||||
//Need a safe standard approach to this - it's okay for compression to overreads. Handling needs to be standardised
|
||||
if (_stream is IStreamStack ss)
|
||||
{
|
||||
if (ss.BaseStream() is SharpCompress.Compressors.Deflate.DeflateStream deflateStream)
|
||||
{
|
||||
deflateStream.Flush(); //Deflate over reads. Knock it back
|
||||
}
|
||||
else if (ss.BaseStream() is SharpCompress.Compressors.LZMA.LzmaStream lzmaStream)
|
||||
{
|
||||
lzmaStream.Flush(); //Lzma over reads. Knock it back
|
||||
}
|
||||
}
|
||||
#if DEBUG_STREAMS
|
||||
this.DebugDispose(typeof(EntryStream));
|
||||
#endif
|
||||
base.Dispose(disposing);
|
||||
_stream.Dispose();
|
||||
}
|
||||
|
||||
#if !NETFRAMEWORK && !NETSTANDARD2_0
|
||||
public override async ValueTask DisposeAsync()
|
||||
{
|
||||
if (_isDisposed)
|
||||
{
|
||||
return;
|
||||
}
|
||||
_isDisposed = true;
|
||||
if (!(_completed || _reader.Cancelled))
|
||||
{
|
||||
await SkipEntryAsync().ConfigureAwait(false);
|
||||
}
|
||||
|
||||
//Need a safe standard approach to this - it's okay for compression to overreads. Handling needs to be standardised
|
||||
if (_stream is IStreamStack ss)
|
||||
{
|
||||
if (ss.BaseStream() is SharpCompress.Compressors.Deflate.DeflateStream deflateStream)
|
||||
{
|
||||
await deflateStream.FlushAsync().ConfigureAwait(false);
|
||||
}
|
||||
else if (ss.BaseStream() is SharpCompress.Compressors.LZMA.LzmaStream lzmaStream)
|
||||
{
|
||||
await lzmaStream.FlushAsync().ConfigureAwait(false);
|
||||
}
|
||||
}
|
||||
#if DEBUG_STREAMS
|
||||
this.DebugDispose(typeof(EntryStream));
|
||||
#endif
|
||||
await base.DisposeAsync().ConfigureAwait(false);
|
||||
await _stream.DisposeAsync().ConfigureAwait(false);
|
||||
}
|
||||
#endif
|
||||
|
||||
public override bool CanRead => true;
|
||||
|
||||
public override bool CanSeek => false;
|
||||
|
||||
public override bool CanWrite => false;
|
||||
|
||||
public override void Flush() { }
|
||||
|
||||
public override Task FlushAsync(CancellationToken cancellationToken) => Task.CompletedTask;
|
||||
|
||||
public override long Length => _stream.Length;
|
||||
|
||||
public override long Position
|
||||
{
|
||||
get => _stream.Position; //throw new NotSupportedException();
|
||||
set => throw new NotSupportedException();
|
||||
}
|
||||
|
||||
public override int Read(byte[] buffer, int offset, int count)
|
||||
{
|
||||
var read = _stream.Read(buffer, offset, count);
|
||||
if (read <= 0)
|
||||
{
|
||||
this.Skip();
|
||||
_completed = true;
|
||||
}
|
||||
|
||||
protected override void Dispose(bool disposing)
|
||||
{
|
||||
if (!(_completed || _reader.Cancelled))
|
||||
{
|
||||
SkipEntry();
|
||||
}
|
||||
if (_isDisposed)
|
||||
{
|
||||
return;
|
||||
}
|
||||
_isDisposed = true;
|
||||
base.Dispose(disposing);
|
||||
_stream.Dispose();
|
||||
}
|
||||
|
||||
public override bool CanRead => true;
|
||||
|
||||
public override bool CanSeek => false;
|
||||
|
||||
public override bool CanWrite => false;
|
||||
|
||||
public override void Flush()
|
||||
{
|
||||
}
|
||||
|
||||
public override long Length => _stream.Length;
|
||||
|
||||
public override long Position { get => throw new NotSupportedException(); set => throw new NotSupportedException(); }
|
||||
|
||||
public override int Read(byte[] buffer, int offset, int count)
|
||||
{
|
||||
int read = _stream.Read(buffer, offset, count);
|
||||
if (read <= 0)
|
||||
{
|
||||
_completed = true;
|
||||
}
|
||||
return read;
|
||||
}
|
||||
|
||||
public override int ReadByte()
|
||||
{
|
||||
int value = _stream.ReadByte();
|
||||
if (value == -1)
|
||||
{
|
||||
_completed = true;
|
||||
}
|
||||
return value;
|
||||
}
|
||||
|
||||
public override long Seek(long offset, SeekOrigin origin)
|
||||
{
|
||||
throw new NotSupportedException();
|
||||
}
|
||||
|
||||
public override void SetLength(long value)
|
||||
{
|
||||
throw new NotSupportedException();
|
||||
}
|
||||
|
||||
public override void Write(byte[] buffer, int offset, int count)
|
||||
{
|
||||
throw new NotSupportedException();
|
||||
}
|
||||
return read;
|
||||
}
|
||||
}
|
||||
|
||||
public override async Task<int> ReadAsync(
|
||||
byte[] buffer,
|
||||
int offset,
|
||||
int count,
|
||||
CancellationToken cancellationToken
|
||||
)
|
||||
{
|
||||
var read = await _stream
|
||||
.ReadAsync(buffer, offset, count, cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
if (read <= 0)
|
||||
{
|
||||
_completed = true;
|
||||
}
|
||||
return read;
|
||||
}
|
||||
|
||||
#if !NETFRAMEWORK && !NETSTANDARD2_0
|
||||
public override async ValueTask<int> ReadAsync(
|
||||
Memory<byte> buffer,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
var read = await _stream.ReadAsync(buffer, cancellationToken).ConfigureAwait(false);
|
||||
if (read <= 0)
|
||||
{
|
||||
_completed = true;
|
||||
}
|
||||
return read;
|
||||
}
|
||||
#endif
|
||||
|
||||
public override int ReadByte()
|
||||
{
|
||||
var value = _stream.ReadByte();
|
||||
if (value == -1)
|
||||
{
|
||||
_completed = true;
|
||||
}
|
||||
return value;
|
||||
}
|
||||
|
||||
public override long Seek(long offset, SeekOrigin origin) => throw new NotSupportedException();
|
||||
|
||||
public override void SetLength(long value) => throw new NotSupportedException();
|
||||
|
||||
public override void Write(byte[] buffer, int offset, int count) =>
|
||||
throw new NotSupportedException();
|
||||
|
||||
public override Task WriteAsync(
|
||||
byte[] buffer,
|
||||
int offset,
|
||||
int count,
|
||||
CancellationToken cancellationToken
|
||||
) => throw new NotSupportedException();
|
||||
}
|
||||
|
||||
@@ -1,17 +0,0 @@
|
||||
using System;
|
||||
|
||||
namespace SharpCompress.Common
|
||||
{
|
||||
public class ExtractionException : Exception
|
||||
{
|
||||
public ExtractionException(string message)
|
||||
: base(message)
|
||||
{
|
||||
}
|
||||
|
||||
public ExtractionException(string message, Exception inner)
|
||||
: base(message, inner)
|
||||
{
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,104 +1,232 @@
|
||||
using System;
|
||||
using System;
|
||||
using System.IO;
|
||||
using System.Runtime.InteropServices;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
|
||||
namespace SharpCompress.Common
|
||||
namespace SharpCompress.Common;
|
||||
|
||||
internal static class ExtractionMethods
|
||||
{
|
||||
internal static class ExtractionMethods
|
||||
/// <summary>
|
||||
/// Gets the appropriate StringComparison for path checks based on the file system.
|
||||
/// Windows uses case-insensitive file systems, while Unix-like systems use case-sensitive file systems.
|
||||
/// </summary>
|
||||
private static StringComparison PathComparison =>
|
||||
RuntimeInformation.IsOSPlatform(OSPlatform.Windows)
|
||||
? StringComparison.OrdinalIgnoreCase
|
||||
: StringComparison.Ordinal;
|
||||
|
||||
/// <summary>
|
||||
/// Extract to specific directory, retaining filename
|
||||
/// </summary>
|
||||
public static void WriteEntryToDirectory(
|
||||
IEntry entry,
|
||||
string destinationDirectory,
|
||||
ExtractionOptions? options,
|
||||
Action<string, ExtractionOptions?> write
|
||||
)
|
||||
{
|
||||
/// <summary>
|
||||
/// Extract to specific directory, retaining filename
|
||||
/// </summary>
|
||||
public static void WriteEntryToDirectory(IEntry entry,
|
||||
string destinationDirectory,
|
||||
ExtractionOptions? options,
|
||||
Action<string, ExtractionOptions?> write)
|
||||
string destinationFileName;
|
||||
var fullDestinationDirectoryPath = Path.GetFullPath(destinationDirectory);
|
||||
|
||||
//check for trailing slash.
|
||||
if (
|
||||
fullDestinationDirectoryPath[fullDestinationDirectoryPath.Length - 1]
|
||||
!= Path.DirectorySeparatorChar
|
||||
)
|
||||
{
|
||||
string destinationFileName;
|
||||
string fullDestinationDirectoryPath = Path.GetFullPath(destinationDirectory);
|
||||
|
||||
//check for trailing slash.
|
||||
if (fullDestinationDirectoryPath[fullDestinationDirectoryPath.Length - 1] != Path.DirectorySeparatorChar)
|
||||
{
|
||||
fullDestinationDirectoryPath += Path.DirectorySeparatorChar;
|
||||
}
|
||||
|
||||
if (!Directory.Exists(fullDestinationDirectoryPath))
|
||||
{
|
||||
throw new ExtractionException($"Directory does not exist to extract to: {fullDestinationDirectoryPath}");
|
||||
}
|
||||
|
||||
options ??= new ExtractionOptions()
|
||||
{
|
||||
Overwrite = true
|
||||
};
|
||||
|
||||
string file = Path.GetFileName(entry.Key);
|
||||
if (options.ExtractFullPath)
|
||||
{
|
||||
string folder = Path.GetDirectoryName(entry.Key)!;
|
||||
string destdir = Path.GetFullPath(Path.Combine(fullDestinationDirectoryPath, folder));
|
||||
|
||||
if (!Directory.Exists(destdir))
|
||||
{
|
||||
if (!destdir.StartsWith(fullDestinationDirectoryPath, StringComparison.Ordinal))
|
||||
{
|
||||
throw new ExtractionException("Entry is trying to create a directory outside of the destination directory.");
|
||||
}
|
||||
|
||||
Directory.CreateDirectory(destdir);
|
||||
}
|
||||
destinationFileName = Path.Combine(destdir, file);
|
||||
}
|
||||
else
|
||||
{
|
||||
destinationFileName = Path.Combine(fullDestinationDirectoryPath, file);
|
||||
|
||||
}
|
||||
|
||||
if (!entry.IsDirectory)
|
||||
{
|
||||
destinationFileName = Path.GetFullPath(destinationFileName);
|
||||
|
||||
if (!destinationFileName.StartsWith(fullDestinationDirectoryPath, StringComparison.Ordinal))
|
||||
{
|
||||
throw new ExtractionException("Entry is trying to write a file outside of the destination directory.");
|
||||
}
|
||||
write(destinationFileName, options);
|
||||
}
|
||||
else if (options.ExtractFullPath && !Directory.Exists(destinationFileName))
|
||||
{
|
||||
Directory.CreateDirectory(destinationFileName);
|
||||
}
|
||||
fullDestinationDirectoryPath += Path.DirectorySeparatorChar;
|
||||
}
|
||||
|
||||
public static void WriteEntryToFile(IEntry entry, string destinationFileName,
|
||||
ExtractionOptions? options,
|
||||
Action<string, FileMode> openAndWrite)
|
||||
if (!Directory.Exists(fullDestinationDirectoryPath))
|
||||
{
|
||||
if (entry.LinkTarget != null)
|
||||
{
|
||||
if (options?.WriteSymbolicLink is null)
|
||||
{
|
||||
throw new ExtractionException("Entry is a symbolic link but ExtractionOptions.WriteSymbolicLink delegate is null");
|
||||
}
|
||||
options.WriteSymbolicLink(destinationFileName, entry.LinkTarget);
|
||||
}
|
||||
else
|
||||
{
|
||||
FileMode fm = FileMode.Create;
|
||||
options ??= new ExtractionOptions()
|
||||
{
|
||||
Overwrite = true
|
||||
};
|
||||
throw new ExtractionException(
|
||||
$"Directory does not exist to extract to: {fullDestinationDirectoryPath}"
|
||||
);
|
||||
}
|
||||
|
||||
if (!options.Overwrite)
|
||||
options ??= new ExtractionOptions() { Overwrite = true };
|
||||
|
||||
var file = Path.GetFileName(entry.Key.NotNull("Entry Key is null")).NotNull("File is null");
|
||||
file = Utility.ReplaceInvalidFileNameChars(file);
|
||||
if (options.ExtractFullPath)
|
||||
{
|
||||
var folder = Path.GetDirectoryName(entry.Key.NotNull("Entry Key is null"))
|
||||
.NotNull("Directory is null");
|
||||
var destdir = Path.GetFullPath(Path.Combine(fullDestinationDirectoryPath, folder));
|
||||
|
||||
if (!Directory.Exists(destdir))
|
||||
{
|
||||
if (!destdir.StartsWith(fullDestinationDirectoryPath, PathComparison))
|
||||
{
|
||||
fm = FileMode.CreateNew;
|
||||
throw new ExtractionException(
|
||||
"Entry is trying to create a directory outside of the destination directory."
|
||||
);
|
||||
}
|
||||
|
||||
openAndWrite(destinationFileName, fm);
|
||||
entry.PreserveExtractionOptions(destinationFileName, options);
|
||||
Directory.CreateDirectory(destdir);
|
||||
}
|
||||
destinationFileName = Path.Combine(destdir, file);
|
||||
}
|
||||
else
|
||||
{
|
||||
destinationFileName = Path.Combine(fullDestinationDirectoryPath, file);
|
||||
}
|
||||
|
||||
if (!entry.IsDirectory)
|
||||
{
|
||||
destinationFileName = Path.GetFullPath(destinationFileName);
|
||||
|
||||
if (!destinationFileName.StartsWith(fullDestinationDirectoryPath, PathComparison))
|
||||
{
|
||||
throw new ExtractionException(
|
||||
"Entry is trying to write a file outside of the destination directory."
|
||||
);
|
||||
}
|
||||
write(destinationFileName, options);
|
||||
}
|
||||
else if (options.ExtractFullPath && !Directory.Exists(destinationFileName))
|
||||
{
|
||||
Directory.CreateDirectory(destinationFileName);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public static void WriteEntryToFile(
|
||||
IEntry entry,
|
||||
string destinationFileName,
|
||||
ExtractionOptions? options,
|
||||
Action<string, FileMode> openAndWrite
|
||||
)
|
||||
{
|
||||
if (entry.LinkTarget != null)
|
||||
{
|
||||
if (options?.WriteSymbolicLink is null)
|
||||
{
|
||||
throw new ExtractionException(
|
||||
"Entry is a symbolic link but ExtractionOptions.WriteSymbolicLink delegate is null"
|
||||
);
|
||||
}
|
||||
options.WriteSymbolicLink(destinationFileName, entry.LinkTarget);
|
||||
}
|
||||
else
|
||||
{
|
||||
var fm = FileMode.Create;
|
||||
options ??= new ExtractionOptions() { Overwrite = true };
|
||||
|
||||
if (!options.Overwrite)
|
||||
{
|
||||
fm = FileMode.CreateNew;
|
||||
}
|
||||
|
||||
openAndWrite(destinationFileName, fm);
|
||||
entry.PreserveExtractionOptions(destinationFileName, options);
|
||||
}
|
||||
}
|
||||
|
||||
public static async Task WriteEntryToDirectoryAsync(
|
||||
IEntry entry,
|
||||
string destinationDirectory,
|
||||
ExtractionOptions? options,
|
||||
Func<string, ExtractionOptions?, Task> writeAsync,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
string destinationFileName;
|
||||
var fullDestinationDirectoryPath = Path.GetFullPath(destinationDirectory);
|
||||
|
||||
//check for trailing slash.
|
||||
if (
|
||||
fullDestinationDirectoryPath[fullDestinationDirectoryPath.Length - 1]
|
||||
!= Path.DirectorySeparatorChar
|
||||
)
|
||||
{
|
||||
fullDestinationDirectoryPath += Path.DirectorySeparatorChar;
|
||||
}
|
||||
|
||||
if (!Directory.Exists(fullDestinationDirectoryPath))
|
||||
{
|
||||
throw new ExtractionException(
|
||||
$"Directory does not exist to extract to: {fullDestinationDirectoryPath}"
|
||||
);
|
||||
}
|
||||
|
||||
options ??= new ExtractionOptions() { Overwrite = true };
|
||||
|
||||
var file = Path.GetFileName(entry.Key.NotNull("Entry Key is null")).NotNull("File is null");
|
||||
file = Utility.ReplaceInvalidFileNameChars(file);
|
||||
if (options.ExtractFullPath)
|
||||
{
|
||||
var folder = Path.GetDirectoryName(entry.Key.NotNull("Entry Key is null"))
|
||||
.NotNull("Directory is null");
|
||||
var destdir = Path.GetFullPath(Path.Combine(fullDestinationDirectoryPath, folder));
|
||||
|
||||
if (!Directory.Exists(destdir))
|
||||
{
|
||||
if (!destdir.StartsWith(fullDestinationDirectoryPath, PathComparison))
|
||||
{
|
||||
throw new ExtractionException(
|
||||
"Entry is trying to create a directory outside of the destination directory."
|
||||
);
|
||||
}
|
||||
|
||||
Directory.CreateDirectory(destdir);
|
||||
}
|
||||
destinationFileName = Path.Combine(destdir, file);
|
||||
}
|
||||
else
|
||||
{
|
||||
destinationFileName = Path.Combine(fullDestinationDirectoryPath, file);
|
||||
}
|
||||
|
||||
if (!entry.IsDirectory)
|
||||
{
|
||||
destinationFileName = Path.GetFullPath(destinationFileName);
|
||||
|
||||
if (!destinationFileName.StartsWith(fullDestinationDirectoryPath, PathComparison))
|
||||
{
|
||||
throw new ExtractionException(
|
||||
"Entry is trying to write a file outside of the destination directory."
|
||||
);
|
||||
}
|
||||
await writeAsync(destinationFileName, options).ConfigureAwait(false);
|
||||
}
|
||||
else if (options.ExtractFullPath && !Directory.Exists(destinationFileName))
|
||||
{
|
||||
Directory.CreateDirectory(destinationFileName);
|
||||
}
|
||||
}
|
||||
|
||||
public static async Task WriteEntryToFileAsync(
|
||||
IEntry entry,
|
||||
string destinationFileName,
|
||||
ExtractionOptions? options,
|
||||
Func<string, FileMode, Task> openAndWriteAsync,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
if (entry.LinkTarget != null)
|
||||
{
|
||||
if (options?.WriteSymbolicLink is null)
|
||||
{
|
||||
throw new ExtractionException(
|
||||
"Entry is a symbolic link but ExtractionOptions.WriteSymbolicLink delegate is null"
|
||||
);
|
||||
}
|
||||
options.WriteSymbolicLink(destinationFileName, entry.LinkTarget);
|
||||
}
|
||||
else
|
||||
{
|
||||
var fm = FileMode.Create;
|
||||
options ??= new ExtractionOptions() { Overwrite = true };
|
||||
|
||||
if (!options.Overwrite)
|
||||
{
|
||||
fm = FileMode.CreateNew;
|
||||
}
|
||||
|
||||
await openAndWriteAsync(destinationFileName, fm).ConfigureAwait(false);
|
||||
entry.PreserveExtractionOptions(destinationFileName, options);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
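Both extraction paths above normalise the target with Path.GetFullPath, append a trailing separator to the destination root, and reject any entry whose resolved path does not start with that root. A standalone sketch of that containment guard (hypothetical helper and paths; the real code also switches to case-insensitive comparison on Windows via PathComparison):

using System;
using System.IO;

class PathGuardDemo
{
    static bool IsContained(string destinationDirectory, string entryKey)
    {
        var root = Path.GetFullPath(destinationDirectory);
        if (root[root.Length - 1] != Path.DirectorySeparatorChar)
        {
            root += Path.DirectorySeparatorChar;
        }
        // Resolve the candidate path and require it to stay under the root.
        var candidate = Path.GetFullPath(Path.Combine(root, entryKey));
        return candidate.StartsWith(root, StringComparison.Ordinal);
    }

    static void Main()
    {
        Console.WriteLine(IsContained("/tmp/out", "docs/readme.txt"));  // True
        Console.WriteLine(IsContained("/tmp/out", "../../etc/passwd")); // False
    }
}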
@@ -1,40 +1,40 @@
|
||||
using System;
|
||||
using System;
|
||||
|
||||
namespace SharpCompress.Common
|
||||
namespace SharpCompress.Common;
|
||||
|
||||
public class ExtractionOptions
|
||||
{
|
||||
public class ExtractionOptions
|
||||
/// <summary>
|
||||
/// overwrite target if it exists
|
||||
/// </summary>
|
||||
public bool Overwrite { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// extract with internal directory structure
|
||||
/// </summary>
|
||||
public bool ExtractFullPath { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// preserve file time
|
||||
/// </summary>
|
||||
public bool PreserveFileTime { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// preserve windows file attributes
|
||||
/// </summary>
|
||||
public bool PreserveAttributes { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Delegate for writing symbolic links to disk.
|
||||
/// sourcePath is where the symlink is created.
|
||||
/// targetPath is what the symlink refers to.
|
||||
/// </summary>
|
||||
public delegate void SymbolicLinkWriterDelegate(string sourcePath, string targetPath);
|
||||
|
||||
public SymbolicLinkWriterDelegate WriteSymbolicLink = (sourcePath, targetPath) =>
|
||||
{
|
||||
/// <summary>
|
||||
/// overwrite target if it exists
|
||||
/// </summary>
|
||||
public bool Overwrite { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// extract with internal directory structure
|
||||
/// </summary>
|
||||
public bool ExtractFullPath { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// preserve file time
|
||||
/// </summary>
|
||||
public bool PreserveFileTime { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// preserve windows file attributes
|
||||
/// </summary>
|
||||
public bool PreserveAttributes { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Delegate for writing symbolic links to disk.
|
||||
/// sourcePath is where the symlink is created.
|
||||
/// targetPath is what the symlink refers to.
|
||||
/// </summary>
|
||||
public delegate void SymbolicLinkWriterDelegate(string sourcePath, string targetPath);
|
||||
|
||||
public SymbolicLinkWriterDelegate WriteSymbolicLink =
|
||||
(sourcePath, targetPath) =>
|
||||
{
|
||||
Console.WriteLine($"Could not write symlink {sourcePath} -> {targetPath}, for more information please see https://github.com/dotnet/runtime/issues/24271");
|
||||
};
|
||||
}
|
||||
}
|
||||
Console.WriteLine(
|
||||
$"Could not write symlink {sourcePath} -> {targetPath}, for more information please see https://github.com/dotnet/runtime/issues/24271"
|
||||
);
|
||||
};
|
||||
}
|
||||
|
||||
@@ -1,20 +1,17 @@
|
||||
using System.IO;
|
||||
using System.IO;
|
||||
|
||||
namespace SharpCompress.Common
|
||||
namespace SharpCompress.Common;
|
||||
|
||||
public abstract class FilePart
|
||||
{
|
||||
public abstract class FilePart
|
||||
{
|
||||
protected FilePart(ArchiveEncoding archiveEncoding)
|
||||
{
|
||||
ArchiveEncoding = archiveEncoding;
|
||||
}
|
||||
protected FilePart(ArchiveEncoding archiveEncoding) => ArchiveEncoding = archiveEncoding;
|
||||
|
||||
internal ArchiveEncoding ArchiveEncoding { get; }
|
||||
internal ArchiveEncoding ArchiveEncoding { get; }
|
||||
|
||||
internal abstract string FilePartName { get; }
|
||||
internal abstract string? FilePartName { get; }
|
||||
public int Index { get; set; }
|
||||
|
||||
internal abstract Stream GetCompressedStream();
|
||||
internal abstract Stream? GetRawStream();
|
||||
internal bool Skipped { get; set; }
|
||||
}
|
||||
internal abstract Stream? GetCompressedStream();
|
||||
internal abstract Stream? GetRawStream();
|
||||
internal bool Skipped { get; set; }
|
||||
}
|
||||
|
||||
@@ -1,29 +1,28 @@
|
||||
using System;
|
||||
using System;
|
||||
|
||||
namespace SharpCompress.Common
|
||||
namespace SharpCompress.Common;
|
||||
|
||||
public sealed class FilePartExtractionBeginEventArgs : EventArgs
|
||||
{
|
||||
public sealed class FilePartExtractionBeginEventArgs : EventArgs
|
||||
public FilePartExtractionBeginEventArgs(string name, long size, long compressedSize)
|
||||
{
|
||||
public FilePartExtractionBeginEventArgs(string name, long size, long compressedSize)
|
||||
{
|
||||
Name = name;
|
||||
Size = size;
|
||||
CompressedSize = compressedSize;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// File name for the part for the current entry
|
||||
/// </summary>
|
||||
public string Name { get; }
|
||||
|
||||
/// <summary>
|
||||
/// Uncompressed size of the current entry in the part
|
||||
/// </summary>
|
||||
public long Size { get; }
|
||||
|
||||
/// <summary>
|
||||
/// Compressed size of the current entry in the part
|
||||
/// </summary>
|
||||
public long CompressedSize { get; }
|
||||
Name = name;
|
||||
Size = size;
|
||||
CompressedSize = compressedSize;
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// File name for the part for the current entry
|
||||
/// </summary>
|
||||
public string Name { get; }
|
||||
|
||||
/// <summary>
|
||||
/// Uncompressed size of the current entry in the part
|
||||
/// </summary>
|
||||
public long Size { get; }
|
||||
|
||||
/// <summary>
|
||||
/// Compressed size of the current entry in the part
|
||||
/// </summary>
|
||||
public long CompressedSize { get; }
|
||||
}
|
||||
|
||||
@@ -1,108 +1,86 @@
|
||||
using System;
|
||||
|
||||
namespace SharpCompress.Common
|
||||
namespace SharpCompress.Common;
|
||||
|
||||
internal static class FlagUtility
|
||||
{
|
||||
internal static class FlagUtility
|
||||
/// <summary>
|
||||
/// Returns true if the flag is set on the specified bit field.
|
||||
/// Currently only works with 32-bit bitfields.
|
||||
/// </summary>
|
||||
/// <typeparam name="T">Enumeration with Flags attribute</typeparam>
|
||||
/// <param name="bitField">Flagged variable</param>
|
||||
/// <param name="flag">Flag to test</param>
|
||||
/// <returns></returns>
|
||||
public static bool HasFlag<T>(long bitField, T flag)
|
||||
where T : struct => HasFlag(bitField, flag);
|
||||
|
||||
/// <summary>
|
||||
/// Returns true if the flag is set on the specified bit field.
|
||||
/// Currently only works with 32-bit bitfields.
|
||||
/// </summary>
|
||||
/// <typeparam name="T">Enumeration with Flags attribute</typeparam>
|
||||
/// <param name="bitField">Flagged variable</param>
|
||||
/// <param name="flag">Flag to test</param>
|
||||
/// <returns></returns>
|
||||
public static bool HasFlag<T>(ulong bitField, T flag)
|
||||
where T : struct => HasFlag(bitField, flag);
|
||||
|
||||
/// <summary>
|
||||
/// Returns true if the flag is set on the specified bit field.
|
||||
/// Currently only works with 32-bit bitfields.
|
||||
/// </summary>
|
||||
/// <param name="bitField">Flagged variable</param>
|
||||
/// <param name="flag">Flag to test</param>
|
||||
/// <returns></returns>
|
||||
public static bool HasFlag(ulong bitField, ulong flag) => ((bitField & flag) == flag);
|
||||
|
||||
public static bool HasFlag(short bitField, short flag) => ((bitField & flag) == flag);
|
||||
|
||||
/// <summary>
|
||||
/// Returns true if the flag is set on the specified bit field.
|
||||
/// Currently only works with 32-bit bitfields.
|
||||
/// </summary>
|
||||
/// <typeparam name="T">Enumeration with Flags attribute</typeparam>
|
||||
/// <param name="bitField">Flagged variable</param>
|
||||
/// <param name="flag">Flag to test</param>
|
||||
/// <returns></returns>
|
||||
public static bool HasFlag<T>(T bitField, T flag)
|
||||
where T : struct => HasFlag(Convert.ToInt64(bitField), Convert.ToInt64(flag));
|
||||
|
||||
/// <summary>
|
||||
/// Returns true if the flag is set on the specified bit field.
|
||||
/// Currently only works with 32-bit bitfields.
|
||||
/// </summary>
|
||||
/// <param name="bitField">Flagged variable</param>
|
||||
/// <param name="flag">Flag to test</param>
|
||||
/// <returns></returns>
|
||||
public static bool HasFlag(long bitField, long flag) => ((bitField & flag) == flag);
|
||||
|
||||
/// <summary>
|
||||
/// Sets a bit-field to either on or off for the specified flag.
|
||||
/// </summary>
|
||||
/// <param name="bitField">Flagged variable</param>
|
||||
/// <param name="flag">Flag to change</param>
|
||||
/// <param name="on">bool</param>
|
||||
/// <returns>The flagged variable with the flag changed</returns>
|
||||
public static long SetFlag(long bitField, long flag, bool on)
|
||||
{
|
||||
/// <summary>
|
||||
/// Returns true if the flag is set on the specified bit field.
|
||||
/// Currently only works with 32-bit bitfields.
|
||||
/// </summary>
|
||||
/// <typeparam name="T">Enumeration with Flags attribute</typeparam>
|
||||
/// <param name="bitField">Flagged variable</param>
|
||||
/// <param name="flag">Flag to test</param>
|
||||
/// <returns></returns>
|
||||
public static bool HasFlag<T>(long bitField, T flag)
|
||||
where T : struct
|
||||
if (on)
|
||||
{
|
||||
return HasFlag(bitField, flag);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Returns true if the flag is set on the specified bit field.
|
||||
/// Currently only works with 32-bit bitfields.
|
||||
/// </summary>
|
||||
/// <typeparam name="T">Enumeration with Flags attribute</typeparam>
|
||||
/// <param name="bitField">Flagged variable</param>
|
||||
/// <param name="flag">Flag to test</param>
|
||||
/// <returns></returns>
|
||||
public static bool HasFlag<T>(ulong bitField, T flag)
|
||||
where T : struct
|
||||
{
|
||||
return HasFlag(bitField, flag);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Returns true if the flag is set on the specified bit field.
|
||||
/// Currently only works with 32-bit bitfields.
|
||||
/// </summary>
|
||||
/// <param name="bitField">Flagged variable</param>
|
||||
/// <param name="flag">Flag to test</param>
|
||||
/// <returns></returns>
|
||||
public static bool HasFlag(ulong bitField, ulong flag)
|
||||
{
|
||||
return ((bitField & flag) == flag);
|
||||
}
|
||||
|
||||
public static bool HasFlag(short bitField, short flag)
|
||||
{
|
||||
return ((bitField & flag) == flag);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Returns true if the flag is set on the specified bit field.
|
||||
/// Currently only works with 32-bit bitfields.
|
||||
/// </summary>
|
||||
/// <typeparam name="T">Enumeration with Flags attribute</typeparam>
|
||||
/// <param name="bitField">Flagged variable</param>
|
||||
/// <param name="flag">Flag to test</param>
|
||||
/// <returns></returns>
|
||||
public static bool HasFlag<T>(T bitField, T flag)
|
||||
where T : struct
|
||||
{
|
||||
return HasFlag(Convert.ToInt64(bitField), Convert.ToInt64(flag));
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Returns true if the flag is set on the specified bit field.
|
||||
/// Currently only works with 32-bit bitfields.
|
||||
/// </summary>
|
||||
/// <param name="bitField">Flagged variable</param>
|
||||
/// <param name="flag">Flag to test</param>
|
||||
/// <returns></returns>
|
||||
public static bool HasFlag(long bitField, long flag)
|
||||
{
|
||||
return ((bitField & flag) == flag);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Sets a bit-field to either on or off for the specified flag.
|
||||
/// </summary>
|
||||
/// <param name="bitField">Flagged variable</param>
|
||||
/// <param name="flag">Flag to change</param>
|
||||
/// <param name="on">bool</param>
|
||||
/// <returns>The flagged variable with the flag changed</returns>
|
||||
public static long SetFlag(long bitField, long flag, bool on)
|
||||
{
|
||||
if (on)
|
||||
{
|
||||
return bitField | flag;
|
||||
}
|
||||
return bitField & (~flag);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Sets a bit-field to either on or off for the specified flag.
|
||||
/// </summary>
|
||||
/// <typeparam name="T">Enumeration with Flags attribute</typeparam>
|
||||
/// <param name="bitField">Flagged variable</param>
|
||||
/// <param name="flag">Flag to change</param>
|
||||
/// <param name="on">bool</param>
|
||||
/// <returns>The flagged variable with the flag changed</returns>
|
||||
public static long SetFlag<T>(T bitField, T flag, bool on)
|
||||
where T : struct
|
||||
{
|
||||
return SetFlag(Convert.ToInt64(bitField), Convert.ToInt64(flag), on);
|
||||
return bitField | flag;
|
||||
}
|
||||
return bitField & (~flag);
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Sets a bit-field to either on or off for the specified flag.
|
||||
/// </summary>
|
||||
/// <typeparam name="T">Enumeration with Flags attribute</typeparam>
|
||||
/// <param name="bitField">Flagged variable</param>
|
||||
/// <param name="flag">Flag to change</param>
|
||||
/// <param name="on">bool</param>
|
||||
/// <returns>The flagged variable with the flag changed</returns>
|
||||
public static long SetFlag<T>(T bitField, T flag, bool on)
|
||||
where T : struct => SetFlag(Convert.ToInt64(bitField), Convert.ToInt64(flag), on);
|
||||
}
|
||||
|
||||
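FlagUtility reduces to three bit operations: OR to set a flag, AND with the complement to clear it, and AND-compare to test it. A small sketch with made-up flag values:

using System;

class FlagDemo
{
    const long Encrypted = 0x01;
    const long Solid = 0x04;

    static long SetFlag(long bitField, long flag, bool on) =>
        on ? bitField | flag : bitField & ~flag;

    static bool HasFlag(long bitField, long flag) => (bitField & flag) == flag;

    static void Main()
    {
        long flags = 0;
        flags = SetFlag(flags, Encrypted, true);  // 0x01
        flags = SetFlag(flags, Solid, true);      // 0x05
        flags = SetFlag(flags, Encrypted, false); // 0x04
        Console.WriteLine(HasFlag(flags, Solid));     // True
        Console.WriteLine(HasFlag(flags, Encrypted)); // False
    }
}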
@@ -2,48 +2,44 @@
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
|
||||
namespace SharpCompress.Common.GZip
|
||||
namespace SharpCompress.Common.GZip;
|
||||
|
||||
public class GZipEntry : Entry
|
||||
{
|
||||
public class GZipEntry : Entry
|
||||
private readonly GZipFilePart? _filePart;
|
||||
|
||||
internal GZipEntry(GZipFilePart? filePart) => _filePart = filePart;
|
||||
|
||||
public override CompressionType CompressionType => CompressionType.GZip;
|
||||
|
||||
public override long Crc => _filePart?.Crc ?? 0;
|
||||
|
||||
public override string? Key => _filePart?.FilePartName;
|
||||
|
||||
public override string? LinkTarget => null;
|
||||
|
||||
public override long CompressedSize => 0;
|
||||
|
||||
public override long Size => _filePart?.UncompressedSize ?? 0;
|
||||
|
||||
public override DateTime? LastModifiedTime => _filePart?.DateModified;
|
||||
|
||||
public override DateTime? CreatedTime => null;
|
||||
|
||||
public override DateTime? LastAccessedTime => null;
|
||||
|
||||
public override DateTime? ArchivedTime => null;
|
||||
|
||||
public override bool IsEncrypted => false;
|
||||
|
||||
public override bool IsDirectory => false;
|
||||
|
||||
public override bool IsSplitAfter => false;
|
||||
|
||||
internal override IEnumerable<FilePart> Parts => _filePart.Empty();
|
||||
|
||||
internal static IEnumerable<GZipEntry> GetEntries(Stream stream, OptionsBase options)
|
||||
{
|
||||
private readonly GZipFilePart _filePart;
|
||||
|
||||
internal GZipEntry(GZipFilePart filePart)
|
||||
{
|
||||
_filePart = filePart;
|
||||
}
|
||||
|
||||
public override CompressionType CompressionType => CompressionType.GZip;
|
||||
|
||||
public override long Crc => _filePart.Crc ?? 0;
|
||||
|
||||
public override string Key => _filePart.FilePartName;
|
||||
|
||||
public override string? LinkTarget => null;
|
||||
|
||||
public override long CompressedSize => 0;
|
||||
|
||||
public override long Size => _filePart.UncompressedSize ?? 0;
|
||||
|
||||
public override DateTime? LastModifiedTime => _filePart.DateModified;
|
||||
|
||||
public override DateTime? CreatedTime => null;
|
||||
|
||||
public override DateTime? LastAccessedTime => null;
|
||||
|
||||
public override DateTime? ArchivedTime => null;
|
||||
|
||||
public override bool IsEncrypted => false;
|
||||
|
||||
public override bool IsDirectory => false;
|
||||
|
||||
public override bool IsSplitAfter => false;
|
||||
|
||||
internal override IEnumerable<FilePart> Parts => _filePart.AsEnumerable<FilePart>();
|
||||
|
||||
internal static IEnumerable<GZipEntry> GetEntries(Stream stream, OptionsBase options)
|
||||
{
|
||||
yield return new GZipEntry(new GZipFilePart(stream, options.ArchiveEncoding));
|
||||
}
|
||||
yield return new GZipEntry(new GZipFilePart(stream, options.ArchiveEncoding));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
using System;
|
||||
using System;
|
||||
using System.Buffers.Binary;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
@@ -6,133 +6,132 @@ using SharpCompress.Common.Tar.Headers;
|
||||
using SharpCompress.Compressors;
|
||||
using SharpCompress.Compressors.Deflate;
|
||||
|
||||
namespace SharpCompress.Common.GZip
|
||||
namespace SharpCompress.Common.GZip;
|
||||
|
||||
internal sealed class GZipFilePart : FilePart
|
||||
{
|
||||
internal sealed class GZipFilePart : FilePart
|
||||
private string? _name;
|
||||
private readonly Stream _stream;
|
||||
|
||||
internal GZipFilePart(Stream stream, ArchiveEncoding archiveEncoding)
|
||||
: base(archiveEncoding)
|
||||
{
|
||||
private string? _name;
|
||||
private readonly Stream _stream;
|
||||
|
||||
internal GZipFilePart(Stream stream, ArchiveEncoding archiveEncoding)
|
||||
: base(archiveEncoding)
|
||||
_stream = stream;
|
||||
ReadAndValidateGzipHeader();
|
||||
if (stream.CanSeek)
|
||||
{
|
||||
_stream = stream;
|
||||
ReadAndValidateGzipHeader();
|
||||
if (stream.CanSeek)
|
||||
{
|
||||
long position = stream.Position;
|
||||
stream.Position = stream.Length - 8;
|
||||
ReadTrailer();
|
||||
stream.Position = position;
|
||||
}
|
||||
EntryStartPosition = stream.Position;
|
||||
var position = stream.Position;
|
||||
stream.Position = stream.Length - 8;
|
||||
ReadTrailer();
|
||||
stream.Position = position;
|
||||
EntryStartPosition = position;
|
||||
}
|
||||
|
||||
internal long EntryStartPosition { get; }
|
||||
|
||||
internal DateTime? DateModified { get; private set; }
|
||||
internal int? Crc { get; private set; }
|
||||
internal int? UncompressedSize { get; private set; }
|
||||
|
||||
internal override string FilePartName => _name!;
|
||||
|
||||
internal override Stream GetCompressedStream()
|
||||
else
|
||||
{
|
||||
return new DeflateStream(_stream, CompressionMode.Decompress, CompressionLevel.Default);
|
||||
}
|
||||
|
||||
internal override Stream GetRawStream()
|
||||
{
|
||||
return _stream;
|
||||
}
|
||||
|
||||
private void ReadTrailer()
|
||||
{
|
||||
// Read and potentially verify the GZIP trailer: CRC32 and size mod 2^32
|
||||
Span<byte> trailer = stackalloc byte[8];
|
||||
int n = _stream.Read(trailer);
|
||||
|
||||
Crc = BinaryPrimitives.ReadInt32LittleEndian(trailer);
|
||||
UncompressedSize = BinaryPrimitives.ReadInt32LittleEndian(trailer.Slice(4));
|
||||
}
|
||||
|
||||
private void ReadAndValidateGzipHeader()
|
||||
{
|
||||
// read the header on the first read
|
||||
Span<byte> header = stackalloc byte[10];
|
||||
int n = _stream.Read(header);
|
||||
|
||||
// workitem 8501: handle edge case (decompress empty stream)
|
||||
if (n == 0)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
if (n != 10)
|
||||
{
|
||||
throw new ZlibException("Not a valid GZIP stream.");
|
||||
}
|
||||
|
||||
if (header[0] != 0x1F || header[1] != 0x8B || header[2] != 8)
|
||||
{
|
||||
throw new ZlibException("Bad GZIP header.");
|
||||
}
|
||||
|
||||
int timet = BinaryPrimitives.ReadInt32LittleEndian(header.Slice(4));
|
||||
DateModified = TarHeader.EPOCH.AddSeconds(timet);
|
||||
if ((header[3] & 0x04) == 0x04)
|
||||
{
|
||||
// read and discard extra field
|
||||
n = _stream.Read(header.Slice(0, 2)); // 2-byte length field
|
||||
|
||||
short extraLength = (short)(header[0] + header[1] * 256);
|
||||
byte[] extra = new byte[extraLength];
|
||||
|
||||
if (!_stream.ReadFully(extra))
|
||||
{
|
||||
throw new ZlibException("Unexpected end-of-file reading GZIP header.");
|
||||
}
|
||||
n = extraLength;
|
||||
}
|
||||
if ((header[3] & 0x08) == 0x08)
|
||||
{
|
||||
_name = ReadZeroTerminatedString(_stream);
|
||||
}
|
||||
if ((header[3] & 0x10) == 0x010)
|
||||
{
|
||||
ReadZeroTerminatedString(_stream);
|
||||
}
|
||||
if ((header[3] & 0x02) == 0x02)
|
||||
{
|
||||
_stream.ReadByte(); // CRC16, ignore
|
||||
}
|
||||
}
|
||||
|
||||
private string ReadZeroTerminatedString(Stream stream)
|
||||
{
|
||||
Span<byte> buf1 = stackalloc byte[1];
|
||||
var list = new List<byte>();
|
||||
bool done = false;
|
||||
do
|
||||
{
|
||||
// workitem 7740
|
||||
int n = stream.Read(buf1);
|
||||
if (n != 1)
|
||||
{
|
||||
throw new ZlibException("Unexpected EOF reading GZIP header.");
|
||||
}
|
||||
if (buf1[0] == 0)
|
||||
{
|
||||
done = true;
|
||||
}
|
||||
else
|
||||
{
|
||||
list.Add(buf1[0]);
|
||||
}
|
||||
}
|
||||
while (!done);
|
||||
byte[] buffer = list.ToArray();
|
||||
return ArchiveEncoding.Decode(buffer);
|
||||
// For non-seekable streams, we can't read the trailer or track position.
|
||||
// Set to 0 since the stream will be read sequentially from its current position.
|
||||
EntryStartPosition = 0;
|
||||
}
|
||||
}
|
||||
|
||||
    // For non-seekable streams, we can't read the trailer or track position;
    // EntryStartPosition is set to 0 and the stream is read sequentially from its current position.
    internal long EntryStartPosition { get; }

    internal DateTime? DateModified { get; private set; }
    internal uint? Crc { get; private set; }
    internal uint? UncompressedSize { get; private set; }

    internal override string? FilePartName => _name;

    internal override Stream GetCompressedStream() =>
        new DeflateStream(_stream, CompressionMode.Decompress, CompressionLevel.Default);

    internal override Stream GetRawStream() => _stream;

    private void ReadTrailer()
    {
        // Read and potentially verify the GZIP trailer: CRC32 and size mod 2^32
        Span<byte> trailer = stackalloc byte[8];
        var n = _stream.Read(trailer);

        Crc = BinaryPrimitives.ReadUInt32LittleEndian(trailer);
        UncompressedSize = BinaryPrimitives.ReadUInt32LittleEndian(trailer.Slice(4));
    }

    private void ReadAndValidateGzipHeader()
    {
        // read the header on the first read
        Span<byte> header = stackalloc byte[10];
        var n = _stream.Read(header);

        // workitem 8501: handle edge case (decompress empty stream)
        if (n == 0)
        {
            return;
        }

        if (n != 10)
        {
            throw new ZlibException("Not a valid GZIP stream.");
        }

        if (header[0] != 0x1F || header[1] != 0x8B || header[2] != 8)
        {
            throw new ZlibException("Bad GZIP header.");
        }

        var timet = BinaryPrimitives.ReadInt32LittleEndian(header.Slice(4));
        DateModified = TarHeader.EPOCH.AddSeconds(timet);
        if ((header[3] & 0x04) == 0x04)
        {
            // read and discard extra field
            n = _stream.Read(header.Slice(0, 2)); // 2-byte length field

            var extraLength = (short)(header[0] + (header[1] * 256));
            var extra = new byte[extraLength];

            if (!_stream.ReadFully(extra))
            {
                throw new ZlibException("Unexpected end-of-file reading GZIP header.");
            }
            n = extraLength;
        }
        if ((header[3] & 0x08) == 0x08)
        {
            _name = ReadZeroTerminatedString(_stream);
        }
        if ((header[3] & 0x10) == 0x10)
        {
            ReadZeroTerminatedString(_stream);
        }
        if ((header[3] & 0x02) == 0x02)
        {
            _stream.ReadByte(); // CRC16, ignore
        }
    }

    private string ReadZeroTerminatedString(Stream stream)
    {
        Span<byte> buf1 = stackalloc byte[1];
        var list = new List<byte>();
        var done = false;
        do
        {
            // workitem 7740
            var n = stream.Read(buf1);
            if (n != 1)
            {
                throw new ZlibException("Unexpected EOF reading GZIP header.");
            }
            if (buf1[0] == 0)
            {
                done = true;
            }
            else
            {
                list.Add(buf1[0]);
            }
        } while (!done);
        var buffer = list.ToArray();
        return ArchiveEncoding.Decode(buffer);
    }
}
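
The GZIP file-part code above follows the member layout from RFC 1952: a 10-byte fixed header (ID1=0x1F, ID2=0x8B, CM=8 for deflate, a flag byte, a 4-byte little-endian MTIME, then XFL and OS), optional extra/name/comment/CRC16 fields selected by flag bits 0x04, 0x08, 0x10, and 0x02, and an 8-byte trailer holding the CRC32 and the uncompressed size mod 2^32. The sketch below is not part of the diff; it uses only BCL types to produce a real GZIP member and read back the same header and trailer fields the code above validates.

using System;
using System.Buffers.Binary;
using System.IO;
using System.IO.Compression;
using System.Text;

internal static class GzipLayoutDemo
{
    public static void Main()
    {
        // Produce a real GZIP member with the BCL so the layout can be inspected.
        byte[] payload = Encoding.ASCII.GetBytes("hello gzip");
        using var buffer = new MemoryStream();
        using (var gz = new GZipStream(buffer, CompressionMode.Compress, leaveOpen: true))
        {
            gz.Write(payload, 0, payload.Length);
        }
        byte[] member = buffer.ToArray();

        // 10-byte fixed header: ID1 ID2 CM FLG MTIME(4) XFL OS
        Span<byte> header = member.AsSpan(0, 10);
        bool magicOk = header[0] == 0x1F && header[1] == 0x8B && header[2] == 8;
        byte flags = header[3]; // 0x04 FEXTRA, 0x08 FNAME, 0x10 FCOMMENT, 0x02 FHCRC
        int mtime = BinaryPrimitives.ReadInt32LittleEndian(header.Slice(4));

        // 8-byte trailer: CRC32 then ISIZE (uncompressed length mod 2^32), both little-endian.
        Span<byte> trailer = member.AsSpan(member.Length - 8, 8);
        uint crc = BinaryPrimitives.ReadUInt32LittleEndian(trailer);
        uint isize = BinaryPrimitives.ReadUInt32LittleEndian(trailer.Slice(4));

        Console.WriteLine($"magic ok: {magicOk}, flags: 0x{flags:X2}, mtime: {mtime}");
        Console.WriteLine($"crc32: 0x{crc:X8}, isize: {isize} (payload was {payload.Length} bytes)");
    }
}
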
@@ -1,23 +1,17 @@
 using System.IO;
 using SharpCompress.Readers;
 
-namespace SharpCompress.Common.GZip
-{
-    public class GZipVolume : Volume
-    {
-        public GZipVolume(Stream stream, ReaderOptions options)
-            : base(stream, options)
-        {
-        }
-
-        public GZipVolume(FileInfo fileInfo, ReaderOptions options)
-            : base(fileInfo.OpenRead(), options)
-        {
-            options.LeaveStreamOpen = false;
-        }
-
-        public override bool IsFirstVolume => true;
-
-        public override bool IsMultiVolume => true;
-    }
-}
+namespace SharpCompress.Common.GZip;
+
+public class GZipVolume : Volume
+{
+    public GZipVolume(Stream stream, ReaderOptions? options, int index)
+        : base(stream, options, index) { }
+
+    public GZipVolume(FileInfo fileInfo, ReaderOptions options)
+        : base(fileInfo.OpenRead(), options) => options.LeaveStreamOpen = false;
+
+    public override bool IsFirstVolume => true;
+
+    public override bool IsMultiVolume => true;
+}
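
For orientation, the reworked constructors in the hunk above can be exercised roughly as follows. This is an illustrative sketch, not part of the diff; it assumes only the GZipVolume and ReaderOptions members visible in the hunk (including a parameterless ReaderOptions constructor), and the Stream overload now also takes a volume index.

using System.IO;
using SharpCompress.Common.GZip;
using SharpCompress.Readers;

internal static class GZipVolumeSketch
{
    public static GZipVolume OpenFromFile(string path)
    {
        // FileInfo overload: opens the file itself and forces the stream to be closed with the volume.
        return new GZipVolume(new FileInfo(path), new ReaderOptions());
    }

    public static GZipVolume OpenFromStream(Stream stream)
    {
        // Stream overload from the new signature: options may be null and an index (0 here) is required.
        return new GZipVolume(stream, new ReaderOptions { LeaveStreamOpen = true }, 0);
    }
}
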
@@ -1,45 +1,48 @@
 using System.IO;
 
-namespace SharpCompress.Common
-{
-    internal static class EntryExtensions
-    {
-        internal static void PreserveExtractionOptions(this IEntry entry, string destinationFileName,
-                                                       ExtractionOptions options)
-        {
-            if (options.PreserveFileTime || options.PreserveAttributes)
-            {
-                FileInfo nf = new FileInfo(destinationFileName);
-                if (!nf.Exists)
-                {
-                    return;
-                }
-
-                // update file time to original packed time
-                if (options.PreserveFileTime)
-                {
-                    if (entry.CreatedTime.HasValue)
-                    {
-                        nf.CreationTime = entry.CreatedTime.Value;
-                    }
-                    if (entry.LastModifiedTime.HasValue)
-                    {
-                        nf.LastWriteTime = entry.LastModifiedTime.Value;
-                    }
-                    if (entry.LastAccessedTime.HasValue)
-                    {
-                        nf.LastAccessTime = entry.LastAccessedTime.Value;
-                    }
-                }
-
-                if (options.PreserveAttributes)
-                {
-                    if (entry.Attrib.HasValue)
-                    {
-                        nf.Attributes = (FileAttributes)System.Enum.ToObject(typeof(FileAttributes), entry.Attrib.Value);
-                    }
-                }
-            }
-        }
-    }
-}
+namespace SharpCompress.Common;
+
+internal static class EntryExtensions
+{
+    internal static void PreserveExtractionOptions(
+        this IEntry entry,
+        string destinationFileName,
+        ExtractionOptions options
+    )
+    {
+        if (options.PreserveFileTime || options.PreserveAttributes)
+        {
+            var nf = new FileInfo(destinationFileName);
+            if (!nf.Exists)
+            {
+                return;
+            }

+            // update file time to original packed time
+            if (options.PreserveFileTime)
+            {
+                if (entry.CreatedTime.HasValue)
+                {
+                    nf.CreationTime = entry.CreatedTime.Value;
+                }
+                if (entry.LastModifiedTime.HasValue)
+                {
+                    nf.LastWriteTime = entry.LastModifiedTime.Value;
+                }
+                if (entry.LastAccessedTime.HasValue)
+                {
+                    nf.LastAccessTime = entry.LastAccessedTime.Value;
+                }
+            }

+            if (options.PreserveAttributes)
+            {
+                if (entry.Attrib.HasValue)
+                {
+                    nf.Attributes = (FileAttributes)
+                        System.Enum.ToObject(typeof(FileAttributes), entry.Attrib.Value);
+                }
+            }
+        }
+    }
+}
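
PreserveExtractionOptions above only applies metadata after a file has been written: when ExtractionOptions.PreserveFileTime or PreserveAttributes is set, it copies the entry's stored times and attribute bits onto the extracted FileInfo. The sketch below shows the consumer-facing side of that; it is illustrative only and assumes the archive helpers ArchiveFactory.Open and the WriteToDirectory extension behave as in the library's documented usage. If those helpers differ in this revision, the ExtractionOptions flags are the relevant part.

using System.Linq;
using SharpCompress.Archives;
using SharpCompress.Common;

internal static class PreserveMetadataSketch
{
    public static void ExtractAll(string archivePath, string destinationDirectory)
    {
        using var archive = ArchiveFactory.Open(archivePath);
        foreach (var entry in archive.Entries.Where(e => !e.IsDirectory))
        {
            // PreserveFileTime/PreserveAttributes trigger the PreserveExtractionOptions path above
            // after each file is written to disk.
            entry.WriteToDirectory(
                destinationDirectory,
                new ExtractionOptions
                {
                    ExtractFullPath = true,
                    Overwrite = true,
                    PreserveFileTime = true,
                    PreserveAttributes = true
                }
            );
        }
    }
}
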
@@ -1,23 +1,24 @@
 using System;
 
-namespace SharpCompress.Common
-{
-    public interface IEntry
-    {
-        CompressionType CompressionType { get; }
-        DateTime? ArchivedTime { get; }
-        long CompressedSize { get; }
-        long Crc { get; }
-        DateTime? CreatedTime { get; }
-        string Key { get; }
-        string? LinkTarget { get; }
-        bool IsDirectory { get; }
-        bool IsEncrypted { get; }
-        bool IsSplitAfter { get; }
-        bool IsSolid { get; }
-        DateTime? LastAccessedTime { get; }
-        DateTime? LastModifiedTime { get; }
-        long Size { get; }
-        int? Attrib { get; }
-    }
-}
+namespace SharpCompress.Common;
+
+public interface IEntry
+{
+    CompressionType CompressionType { get; }
+    DateTime? ArchivedTime { get; }
+    long CompressedSize { get; }
+    long Crc { get; }
+    DateTime? CreatedTime { get; }
+    string? Key { get; }
+    string? LinkTarget { get; }
+    bool IsDirectory { get; }
+    bool IsEncrypted { get; }
+    bool IsSplitAfter { get; }
+    bool IsSolid { get; }
+    int VolumeIndexFirst { get; }
+    int VolumeIndexLast { get; }
+    DateTime? LastAccessedTime { get; }
+    DateTime? LastModifiedTime { get; }
+    long Size { get; }
+    int? Attrib { get; }
+}
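
The IEntry changes above make Key nullable and add VolumeIndexFirst/VolumeIndexLast for entries that span multiple volumes. The sketch below shows how those members surface when enumerating entries; it is illustrative and assumes the familiar reader API (ReaderFactory.Open, MoveToNextEntry, Entry) rather than anything introduced in this diff.

using System;
using System.IO;
using SharpCompress.Readers;

internal static class EntryListingSketch
{
    public static void ListEntries(string archivePath)
    {
        using Stream stream = File.OpenRead(archivePath);
        using var reader = ReaderFactory.Open(stream);
        while (reader.MoveToNextEntry())
        {
            var entry = reader.Entry; // IEntry
            // Key is now nullable; multi-volume archives expose VolumeIndexFirst/VolumeIndexLast.
            Console.WriteLine(
                $"{entry.Key ?? "<unnamed>"}  size={entry.Size}  modified={entry.LastModifiedTime}  "
                    + $"volumes={entry.VolumeIndexFirst}-{entry.VolumeIndexLast}"
            );
        }
    }
}
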
@@ -1,8 +1,7 @@
-namespace SharpCompress.Common
-{
-    public interface IExtractionListener
-    {
-        void FireFilePartExtractionBegin(string name, long size, long compressedSize);
-        void FireCompressedBytesRead(long currentPartCompressedBytes, long compressedReadBytes);
-    }
-}
+namespace SharpCompress.Common;
+
+internal interface IExtractionListener
+{
+    void FireFilePartExtractionBegin(string name, long size, long compressedSize);
+    void FireCompressedBytesRead(long currentPartCompressedBytes, long compressedReadBytes);
+}
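
IExtractionListener becomes internal above, so only the library's own readers and archives implement it to report per-part extraction progress. The hypothetical class below merely mirrors the two-method shape to show what the callbacks carry; it does not implement the internal interface, and the interpretation of the byte counters (per-part versus running total) is an assumption.

using System;

// Illustrative only: mirrors the IExtractionListener callbacks without implementing the internal interface.
internal sealed class ConsoleExtractionProgress
{
    private long _currentPartCompressedSize;
    private string _currentPartName = string.Empty;

    public void FireFilePartExtractionBegin(string name, long size, long compressedSize)
    {
        _currentPartName = name;
        _currentPartCompressedSize = compressedSize;
        Console.WriteLine($"extracting {name}: {size} bytes ({compressedSize} compressed)");
    }

    public void FireCompressedBytesRead(long currentPartCompressedBytes, long compressedReadBytes)
    {
        // Assumed: currentPartCompressedBytes counts within the current part,
        // compressedReadBytes is the running total across parts.
        var percent = _currentPartCompressedSize > 0
            ? 100.0 * currentPartCompressedBytes / _currentPartCompressedSize
            : 100.0;
        Console.WriteLine($"{_currentPartName}: {percent:F1}% of compressed data read");
    }
}
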
@@ -1,8 +1,10 @@
 using System;
 
-namespace SharpCompress.Common
-{
-    public interface IVolume : IDisposable
-    {
-    }
-}
+namespace SharpCompress.Common;
+
+public interface IVolume : IDisposable
+{
+    int Index { get; }
+
+    string? FileName { get; }
+}
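
IVolume now exposes Index and FileName rather than being an empty marker interface. Assuming IArchive still exposes a Volumes collection of IVolume (an assumption; check the current API surface), the parts of a multi-volume archive could be listed like this:

using System;
using SharpCompress.Archives;

internal static class VolumeListingSketch
{
    public static void ListVolumes(string archivePath)
    {
        // Assumes IArchive exposes Volumes as IEnumerable<IVolume>; adjust if the API differs.
        using var archive = ArchiveFactory.Open(archivePath);
        foreach (var volume in archive.Volumes)
        {
            Console.WriteLine($"volume {volume.Index}: {volume.FileName ?? "<stream>"}");
        }
    }
}
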
Some files were not shown because too many files have changed in this diff.