mirror of
https://github.com/adamhathcock/sharpcompress.git
synced 2026-02-04 13:34:59 +00:00
Compare commits
789 Commits
adam/enabl
...
master
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
9def35f78a | ||
|
|
7e8005a9d8 | ||
|
|
b93ed79ef3 | ||
|
|
94716a5ba9 | ||
|
|
f67168f479 | ||
|
|
7e54f91bfa | ||
|
|
3ab4478275 | ||
|
|
8759cf08ff | ||
|
|
8cff7cb551 | ||
|
|
1b0ec2410d | ||
|
|
08d64ee8a1 | ||
|
|
eedc7c7a0f | ||
|
|
3198b32008 | ||
|
|
dff17a95e8 | ||
|
|
236ee215b9 | ||
|
|
6af612fd54 | ||
|
|
361e695380 | ||
|
|
8a8784a974 | ||
|
|
ddc01527bd | ||
|
|
e6ad44def8 | ||
|
|
9b8e3d8530 | ||
|
|
0b15b60506 | ||
|
|
46e2ea8507 | ||
|
|
62b8fc92d1 | ||
|
|
cb27b117b4 | ||
|
|
6112b2d1d9 | ||
|
|
0e2f8068a6 | ||
|
|
227e70926b | ||
|
|
86e412cf77 | ||
|
|
037b6842bf | ||
|
|
895dd02830 | ||
|
|
7112dba345 | ||
|
|
0767292bb0 | ||
|
|
b40e1a002a | ||
|
|
c096164486 | ||
|
|
d92def91b0 | ||
|
|
b48e938c98 | ||
|
|
4ed1f89866 | ||
|
|
525bcea989 | ||
|
|
6c3f7c86da | ||
|
|
595a97bd62 | ||
|
|
c9db03335b | ||
|
|
659f5d7834 | ||
|
|
42f6c77419 | ||
|
|
bcaec86514 | ||
|
|
1ca914823f | ||
|
|
be8841075a | ||
|
|
a94e319935 | ||
|
|
d60abc3f45 | ||
|
|
b994f0ab55 | ||
|
|
e2cb9f39ab | ||
|
|
58459bda12 | ||
|
|
8dfd5349f0 | ||
|
|
c770bc4788 | ||
|
|
24b4ef8780 | ||
|
|
6ddcbf2bc9 | ||
|
|
8d5d686b79 | ||
|
|
f4369e540a | ||
|
|
c219eb4abb | ||
|
|
9a7bdd39e8 | ||
|
|
484bc740d7 | ||
|
|
8a67d501a8 | ||
|
|
3c87242bd0 | ||
|
|
999124e68e | ||
|
|
db2f5c9cb9 | ||
|
|
af08a7cd54 | ||
|
|
72eaf66f05 | ||
|
|
8a3be35d67 | ||
|
|
d59e4c2a0d | ||
|
|
71655e04c4 | ||
|
|
a706a9d725 | ||
|
|
970934a40b | ||
|
|
a9c28a7b62 | ||
|
|
4d31436740 | ||
|
|
c82744c51c | ||
|
|
f0eaddc6a6 | ||
|
|
d6156f0f1e | ||
|
|
3c88c7fdd5 | ||
|
|
d11f6aefb0 | ||
|
|
010a38bb73 | ||
|
|
53f12d75db | ||
|
|
6c866324b2 | ||
|
|
a114155189 | ||
|
|
014bbc3ea4 | ||
|
|
d52facd4ab | ||
|
|
0a50386ada | ||
|
|
f64fa53ed1 | ||
|
|
335db1eb9e | ||
|
|
27fe2d807e | ||
|
|
27cf2795ef | ||
|
|
979c8d9234 | ||
|
|
04eabb7866 | ||
|
|
f4eccea20c | ||
|
|
fc63217dd0 | ||
|
|
b9fc680548 | ||
|
|
7dcc13c1f0 | ||
|
|
56d3091688 | ||
|
|
a0af0604d1 | ||
|
|
875c2d7694 | ||
|
|
8c95f863cb | ||
|
|
ddf37e82c2 | ||
|
|
a82fda98d7 | ||
|
|
44e4b1804e | ||
|
|
984ea8f46f | ||
|
|
4d84394417 | ||
|
|
507074cf72 | ||
|
|
f364b68e09 | ||
|
|
244acc0c9e | ||
|
|
def0bce221 | ||
|
|
d0823db595 | ||
|
|
73704bcd7e | ||
|
|
86c3b93fa5 | ||
|
|
e89fb211ce | ||
|
|
55100cb37a | ||
|
|
14fd880dac | ||
|
|
4ca1a7713e | ||
|
|
9caf7be928 | ||
|
|
bf4217fde6 | ||
|
|
de3cda9034 | ||
|
|
f1102dc980 | ||
|
|
f2bb81d611 | ||
|
|
41e0c151de | ||
|
|
d0f44839ff | ||
|
|
414cad1241 | ||
|
|
abe0087cfd | ||
|
|
060b1ed5dd | ||
|
|
fbc168fafe | ||
|
|
d5a8c37113 | ||
|
|
21ce9a38e6 | ||
|
|
7732fbb698 | ||
|
|
44402414a6 | ||
|
|
11b92d102a | ||
|
|
16831e1e6e | ||
|
|
3b83d08e2a | ||
|
|
b622a2ce73 | ||
|
|
c5814502f6 | ||
|
|
d9be6389ca | ||
|
|
336a8f2876 | ||
|
|
b4f949ba9b | ||
|
|
9403c12793 | ||
|
|
77c1cebefc | ||
|
|
caa7acdbc5 | ||
|
|
1522e64797 | ||
|
|
5152e3197e | ||
|
|
ae4f2c08fd | ||
|
|
9628f2dda1 | ||
|
|
65208a30c1 | ||
|
|
4c838db876 | ||
|
|
d1f6fd9af1 | ||
|
|
61c6f8403a | ||
|
|
a8f47237d7 | ||
|
|
7cbdc5b46c | ||
|
|
8b74243e79 | ||
|
|
f77a2aabab | ||
|
|
e6fb704780 | ||
|
|
c5d7407919 | ||
|
|
b9ed2b09c1 | ||
|
|
db0bb8a30d | ||
|
|
85d82e5c86 | ||
|
|
1a87075f33 | ||
|
|
8df9232171 | ||
|
|
7b7eba8cd9 | ||
|
|
169364f6ae | ||
|
|
c38f74d34c | ||
|
|
895699d22e | ||
|
|
cf901c2784 | ||
|
|
e1bbc65f5b | ||
|
|
f6faaa83ec | ||
|
|
4d3ae3a97f | ||
|
|
cc47fde57f | ||
|
|
a8d5b8e86b | ||
|
|
0a9c5bfe15 | ||
|
|
ff0769e988 | ||
|
|
3987733079 | ||
|
|
b26d38b7e4 | ||
|
|
2175cb299d | ||
|
|
8abb972f87 | ||
|
|
05bf22f518 | ||
|
|
3b5ee481c5 | ||
|
|
b54617238b | ||
|
|
44174e7b03 | ||
|
|
ecd9317ab3 | ||
|
|
884f0b702e | ||
|
|
2e95832bea | ||
|
|
97879f18b6 | ||
|
|
d74454f7e9 | ||
|
|
ce01cc7ce1 | ||
|
|
9454466be7 | ||
|
|
0e4a159998 | ||
|
|
4998676476 | ||
|
|
f359f553b3 | ||
|
|
08118f7286 | ||
|
|
408d2e6663 | ||
|
|
4c4b727bd7 | ||
|
|
8e54b10b7f | ||
|
|
f99e421115 | ||
|
|
82d56b9678 | ||
|
|
447d35267f | ||
|
|
763805e03a | ||
|
|
cd70a7760e | ||
|
|
ec7c359341 | ||
|
|
cc59c1960a | ||
|
|
1cc80e7675 | ||
|
|
cfe59fc515 | ||
|
|
2180df3318 | ||
|
|
29f4c7fe2e | ||
|
|
d5f9815561 | ||
|
|
6e5e47f041 | ||
|
|
b0fde2b8c7 | ||
|
|
4b9b20de42 | ||
|
|
f7c91bb26f | ||
|
|
4b34dd61d3 | ||
|
|
c958d184d0 | ||
|
|
0de5c59a77 | ||
|
|
3b10be53b5 | ||
|
|
5336eb6fe6 | ||
|
|
9fa686b8f9 | ||
|
|
2012077fb0 | ||
|
|
302cf2e14f | ||
|
|
b9fccbd691 | ||
|
|
bbbbc8810a | ||
|
|
c7da19f3a5 | ||
|
|
e919930cf6 | ||
|
|
2906529080 | ||
|
|
75cc36849b | ||
|
|
63e124e72f | ||
|
|
394d982168 | ||
|
|
f4ce4cbad8 | ||
|
|
491beabe03 | ||
|
|
f5d83c0e33 | ||
|
|
d2cb792d91 | ||
|
|
52fef492a5 | ||
|
|
a5300f3383 | ||
|
|
cab3e7d498 | ||
|
|
405dbb30cd | ||
|
|
9bb670ad19 | ||
|
|
bbba2e6c7a | ||
|
|
0b2158f74c | ||
|
|
5c06b8c48f | ||
|
|
810df8a18b | ||
|
|
63736efcac | ||
|
|
3e219fa9ec | ||
|
|
33b6447c18 | ||
|
|
ec310c87de | ||
|
|
c55a383112 | ||
|
|
227fec66ad | ||
|
|
38eec23e07 | ||
|
|
437271c6a2 | ||
|
|
81a2060c75 | ||
|
|
5e90cfd6c5 | ||
|
|
2d597e6e43 | ||
|
|
a410f73bf3 | ||
|
|
b41296194f | ||
|
|
bf7416753a | ||
|
|
7fbd751d27 | ||
|
|
85b28dfe68 | ||
|
|
779fba5deb | ||
|
|
2756b1f6f8 | ||
|
|
7b76858ae1 | ||
|
|
84b5b5a717 | ||
|
|
ebfa16f09f | ||
|
|
c1d240b516 | ||
|
|
5c4719f4a9 | ||
|
|
95d2278d8b | ||
|
|
e63ee57ef0 | ||
|
|
775efa1b26 | ||
|
|
3677b4b193 | ||
|
|
c32f4b4f2a | ||
|
|
8d34f88ca6 | ||
|
|
ca4cf25a1f | ||
|
|
4fa976b478 | ||
|
|
767f3a4985 | ||
|
|
ddc08e068e | ||
|
|
a1a86cdde8 | ||
|
|
fc85f1fa2c | ||
|
|
0b8081f320 | ||
|
|
0b5371d986 | ||
|
|
cdca909d84 | ||
|
|
ec7d2e357d | ||
|
|
1c0183ef11 | ||
|
|
9cf2b3129c | ||
|
|
9a4e864f5e | ||
|
|
4df952db1b | ||
|
|
1b4cedfa13 | ||
|
|
6d6103afd6 | ||
|
|
d727d76299 | ||
|
|
0502ff545e | ||
|
|
fce4a96718 | ||
|
|
38203fb950 | ||
|
|
65dba509e0 | ||
|
|
0615d17b8b | ||
|
|
c1f8580d89 | ||
|
|
c5a6f900df | ||
|
|
3807c3ce2a | ||
|
|
d2f328af01 | ||
|
|
c3ffcf4fe8 | ||
|
|
95c409d979 | ||
|
|
dadf9e71bb | ||
|
|
05ebf22009 | ||
|
|
f4b1780d8a | ||
|
|
921cff00a5 | ||
|
|
64a09eb0f8 | ||
|
|
3a636531e8 | ||
|
|
292da90184 | ||
|
|
90c8ff8650 | ||
|
|
0f37049aad | ||
|
|
3fb07d129f | ||
|
|
8d0ac5062f | ||
|
|
b2d1505e5c | ||
|
|
a35e65ee42 | ||
|
|
d1fcf31f7e | ||
|
|
17cd934b5b | ||
|
|
ae614cd3fe | ||
|
|
47037a4b9d | ||
|
|
507b1e35d8 | ||
|
|
2839e1d33f | ||
|
|
ef0b9d525c | ||
|
|
01e6e04a78 | ||
|
|
8c876c70af | ||
|
|
a7d6d6493e | ||
|
|
b6cc95af73 | ||
|
|
bdcc1d32c2 | ||
|
|
90d91cc7c2 | ||
|
|
ec83cf588f | ||
|
|
4f0a2e3c95 | ||
|
|
3747a27109 | ||
|
|
b501bac54a | ||
|
|
7aec98d652 | ||
|
|
406b198e0e | ||
|
|
8e42296c3a | ||
|
|
60e5220bd0 | ||
|
|
0f37cbfd0b | ||
|
|
541fd136d5 | ||
|
|
60d42ca9c3 | ||
|
|
5c947bccc7 | ||
|
|
fbdefc17c1 | ||
|
|
1425c6ff0d | ||
|
|
e038aea694 | ||
|
|
87ccbf329d | ||
|
|
9dcf384263 | ||
|
|
ac0716ddeb | ||
|
|
b9792ca491 | ||
|
|
c3fd42057a | ||
|
|
39d85ff4f6 | ||
|
|
fbce3e77ba | ||
|
|
66e9de2685 | ||
|
|
321520408b | ||
|
|
68451bd75f | ||
|
|
486fdf118b | ||
|
|
bd3cda0617 | ||
|
|
725503d1ce | ||
|
|
be045c4f15 | ||
|
|
fd968b3f78 | ||
|
|
833dd7b3a2 | ||
|
|
b9258ad496 | ||
|
|
0678318dde | ||
|
|
7116c0d098 | ||
|
|
2fde8436fb | ||
|
|
61ecd6475f | ||
|
|
64b209a772 | ||
|
|
48dbdbfed5 | ||
|
|
cf50311b9c | ||
|
|
e4d8582a2a | ||
|
|
b825e15406 | ||
|
|
b8e5ee45eb | ||
|
|
9f20a9e7d2 | ||
|
|
201521d814 | ||
|
|
18bb3cba11 | ||
|
|
af951d6f6a | ||
|
|
e5fe92bf90 | ||
|
|
b1aca7c305 | ||
|
|
c0a0cc4a44 | ||
|
|
7a49eb9e93 | ||
|
|
5aa0610882 | ||
|
|
41ed4c8186 | ||
|
|
90a33ce6b0 | ||
|
|
12574798e1 | ||
|
|
83b11254db | ||
|
|
b25493fd29 | ||
|
|
bb66100486 | ||
|
|
9bd86f64c9 | ||
|
|
3ebf97dd49 | ||
|
|
bfcdeb3784 | ||
|
|
77015224f6 | ||
|
|
372ecb77d0 | ||
|
|
05642cbdc6 | ||
|
|
feece3d788 | ||
|
|
94adb77e9e | ||
|
|
909d36c237 | ||
|
|
e1c8aa226d | ||
|
|
1a71c01fd4 | ||
|
|
54640548ed | ||
|
|
2327679f23 | ||
|
|
574d9f970c | ||
|
|
235096a2eb | ||
|
|
a739fdc544 | ||
|
|
6196e26044 | ||
|
|
46a4064989 | ||
|
|
72b3948f43 | ||
|
|
5d47bfaeb6 | ||
|
|
b2f2ea65ba | ||
|
|
7afa468e15 | ||
|
|
29cb1fed12 | ||
|
|
29f8b512c4 | ||
|
|
9794c8ba72 | ||
|
|
f1b305f682 | ||
|
|
091a800c9d | ||
|
|
975f5f4b4c | ||
|
|
9d6cd930ea | ||
|
|
ea02d31096 | ||
|
|
d5913e8371 | ||
|
|
7f71f76f6e | ||
|
|
d04830ba90 | ||
|
|
caa82a6146 | ||
|
|
bcf7137073 | ||
|
|
9238cf1128 | ||
|
|
2f874ace51 | ||
|
|
2feabed297 | ||
|
|
9001e28b36 | ||
|
|
24d651d7ae | ||
|
|
8533b09091 | ||
|
|
44b7955d85 | ||
|
|
038b9f18c6 | ||
|
|
5667595587 | ||
|
|
6e0e20ba6e | ||
|
|
ec31cb9987 | ||
|
|
32d5b61c4a | ||
|
|
128c9e639f | ||
|
|
5e3f01dc03 | ||
|
|
39a0b4ce78 | ||
|
|
af719707bf | ||
|
|
8415a19912 | ||
|
|
1607d2768e | ||
|
|
c97c05a3a7 | ||
|
|
b2beea9c4e | ||
|
|
41fbaa1c28 | ||
|
|
d9274cf794 | ||
|
|
583b048046 | ||
|
|
ead5916eae | ||
|
|
d15ab92da3 | ||
|
|
1ab30f2af5 | ||
|
|
4dbe0b91f1 | ||
|
|
a972d3784e | ||
|
|
6991900eb0 | ||
|
|
d614beb9eb | ||
|
|
253a46d458 | ||
|
|
32b1ec32c6 | ||
|
|
eb2cba09b2 | ||
|
|
e79dceb67e | ||
|
|
87c38d6dab | ||
|
|
9e98d9c45c | ||
|
|
0e9a4b0511 | ||
|
|
eae25aff64 | ||
|
|
b8c06ff36e | ||
|
|
6cf2e054bf | ||
|
|
95749234f5 | ||
|
|
b976961434 | ||
|
|
e1aa727513 | ||
|
|
1f71ce1be2 | ||
|
|
cf13de6ac1 | ||
|
|
c2e01798f8 | ||
|
|
8fc8295a89 | ||
|
|
d392991764 | ||
|
|
e57e87090f | ||
|
|
c701bbbee3 | ||
|
|
2f0eb0bd4b | ||
|
|
17bde8da8a | ||
|
|
99d355e6ca | ||
|
|
c790fd21a4 | ||
|
|
bee51af48b | ||
|
|
ca743eae22 | ||
|
|
93504cf82f | ||
|
|
6d3e4e842b | ||
|
|
54b64a8c3b | ||
|
|
0e59bf39f4 | ||
|
|
8b95e0a76d | ||
|
|
48a2ad7b57 | ||
|
|
cfc6651fff | ||
|
|
b23827a8db | ||
|
|
3f9986c13c | ||
|
|
224989f19b | ||
|
|
c7010b75c1 | ||
|
|
00cfeee56e | ||
|
|
aaa97e2ce2 | ||
|
|
1d52618137 | ||
|
|
34309f17f4 | ||
|
|
220ba67faa | ||
|
|
230f96e8e8 | ||
|
|
930c8899d2 | ||
|
|
7c0cef7dd8 | ||
|
|
951ebb3fa2 | ||
|
|
2a4d098b41 | ||
|
|
5839b87f98 | ||
|
|
425a2bd680 | ||
|
|
939c2497c8 | ||
|
|
8995ba56b8 | ||
|
|
e941ab60ca | ||
|
|
48860f1349 | ||
|
|
94b2c5c593 | ||
|
|
f1d8ae5a22 | ||
|
|
e44d2093e5 | ||
|
|
9fa1201a4c | ||
|
|
7800808648 | ||
|
|
2789e86d21 | ||
|
|
afd1e39b88 | ||
|
|
aec4c738ef | ||
|
|
63ecc8c842 | ||
|
|
5f6d583521 | ||
|
|
0341984f10 | ||
|
|
fe757486ae | ||
|
|
46d480c9a1 | ||
|
|
8fe3cba7a8 | ||
|
|
aa19f4da8b | ||
|
|
a08f95326c | ||
|
|
b4c5437c92 | ||
|
|
8680e3b39e | ||
|
|
1b3002c8df | ||
|
|
394fd2e7db | ||
|
|
d83af56d28 | ||
|
|
28c93d6841 | ||
|
|
5f52fc2176 | ||
|
|
8fba579e3a | ||
|
|
40b1aadeb2 | ||
|
|
40e72ad199 | ||
|
|
618b4bbb83 | ||
|
|
1eaf3e6294 | ||
|
|
fd453e946d | ||
|
|
c294071015 | ||
|
|
c2f6055e33 | ||
|
|
5161f4df33 | ||
|
|
3396f8fe00 | ||
|
|
9291f58091 | ||
|
|
85f3b17c42 | ||
|
|
2a3086a0d7 | ||
|
|
41c3cc1a18 | ||
|
|
1b1df86a11 | ||
|
|
e0660e7775 | ||
|
|
99a6c4de88 | ||
|
|
ffa765bd97 | ||
|
|
b1696524b3 | ||
|
|
14d432e22d | ||
|
|
6a37c55085 | ||
|
|
9c1c6fff9f | ||
|
|
db8c6f4bcb | ||
|
|
ff17ecda7d | ||
|
|
692058677c | ||
|
|
1e90d69912 | ||
|
|
64a1cc68e1 | ||
|
|
0fdf9c74a8 | ||
|
|
20353f35ff | ||
|
|
e2df7894f9 | ||
|
|
7af029b5de | ||
|
|
8fc5ca5a71 | ||
|
|
aa0356de9f | ||
|
|
e44a43d2b1 | ||
|
|
8997f00b9b | ||
|
|
c5da416764 | ||
|
|
840e58fc03 | ||
|
|
7f911c5219 | ||
|
|
a887390c23 | ||
|
|
f4dddcec8e | ||
|
|
0d9d82d7e6 | ||
|
|
3a6d24b1d9 | ||
|
|
b9b159be4c | ||
|
|
40212083a5 | ||
|
|
d3428b066e | ||
|
|
94c64b2a45 | ||
|
|
0d671a0bb2 | ||
|
|
d34a47c148 | ||
|
|
5aa216bd21 | ||
|
|
8af47548fe | ||
|
|
131bd2b7b8 | ||
|
|
1993673a22 | ||
|
|
30e036f9ec | ||
|
|
0f374b27cf | ||
|
|
0d487df61b | ||
|
|
c082d4203b | ||
|
|
d4380b6bb6 | ||
|
|
095c871174 | ||
|
|
6d73c5b295 | ||
|
|
cc4d28193c | ||
|
|
fb76bd82f2 | ||
|
|
3bdaba46a9 | ||
|
|
9433e06b93 | ||
|
|
a92aaa51d5 | ||
|
|
7c3c94ed7f | ||
|
|
d41908adeb | ||
|
|
81ca15b567 | ||
|
|
b81d0fd730 | ||
|
|
3a1bb187e8 | ||
|
|
3fee14a070 | ||
|
|
5bf789ac65 | ||
|
|
be06049db3 | ||
|
|
a0435f6a60 | ||
|
|
2321e2c90b | ||
|
|
97e98d8629 | ||
|
|
d96e7362d2 | ||
|
|
7dd46fe5ed | ||
|
|
04c044cb2b | ||
|
|
cc10a12fbc | ||
|
|
8b0a1c699f | ||
|
|
15ca7c9807 | ||
|
|
2b4da7e39b | ||
|
|
31f81f38af | ||
|
|
72cf77b7c7 | ||
|
|
0fe48c647e | ||
|
|
7b06652bff | ||
|
|
434ce05416 | ||
|
|
0698031ed4 | ||
|
|
51237a34eb | ||
|
|
b8264a8131 | ||
|
|
cad923018e | ||
|
|
db94b49941 | ||
|
|
72d15d9cbf | ||
|
|
e0186eadc0 | ||
|
|
4cfa5b04af | ||
|
|
f2c54b1f8b | ||
|
|
d7d0bc6582 | ||
|
|
dd9dc2500b | ||
|
|
4efb109da8 | ||
|
|
bcf9a6bdf1 | ||
|
|
e3a25ecdc0 | ||
|
|
783521928d | ||
|
|
9a876abd31 | ||
|
|
97f58b412e | ||
|
|
4c61628078 | ||
|
|
99a8b0f750 | ||
|
|
a9017d7c25 | ||
|
|
d9e4b26648 | ||
|
|
0d03bafe49 | ||
|
|
fee15a31f9 | ||
|
|
997d3910d4 | ||
|
|
a3918cc0d7 | ||
|
|
f056986b07 | ||
|
|
59c1f02f98 | ||
|
|
3a71a2b1f8 | ||
|
|
2ef1215b49 | ||
|
|
130ac83076 | ||
|
|
dd606a0702 | ||
|
|
84cd772f50 | ||
|
|
fa1d7af22f | ||
|
|
a771ba3bc0 | ||
|
|
8b612c658d | ||
|
|
7dd0da5fd7 | ||
|
|
f7b3525c4e | ||
|
|
de83bdae48 | ||
|
|
d90b610767 | ||
|
|
2d41de6b72 | ||
|
|
f391c3caf3 | ||
|
|
9bdf150676 | ||
|
|
0c199609eb | ||
|
|
6eff9d3753 | ||
|
|
7ab16457c7 | ||
|
|
e7ad8132b5 | ||
|
|
da87e45534 | ||
|
|
2ffaef5563 | ||
|
|
55cb350d2c | ||
|
|
7fa271a1b4 | ||
|
|
c53ca372f2 | ||
|
|
75bc8501f4 | ||
|
|
1e22b47fe1 | ||
|
|
74e2dca207 | ||
|
|
a669de24b7 | ||
|
|
e1e9c449e9 | ||
|
|
60e1dc0239 | ||
|
|
10eb94fd82 | ||
|
|
ccc8587e5f | ||
|
|
53c96193c1 | ||
|
|
d4f11e00b1 | ||
|
|
321233b82c | ||
|
|
eb188051d4 | ||
|
|
a136084e11 | ||
|
|
bc06f3179d | ||
|
|
ee84d971b2 | ||
|
|
264d80ef4c | ||
|
|
dba68187ac | ||
|
|
ca4a1936b3 | ||
|
|
77c8d31a90 | ||
|
|
ab7196f86c | ||
|
|
88b3a66bf9 | ||
|
|
ea77666b4a | ||
|
|
db98e5f39b | ||
|
|
df59c5cb9d | ||
|
|
e786e95358 | ||
|
|
75ada5623c | ||
|
|
ad5c655c45 | ||
|
|
65e607454e | ||
|
|
f238be6003 | ||
|
|
dc31e4c5fa | ||
|
|
665d8cd266 | ||
|
|
8324114e84 | ||
|
|
b83e6ee4ce | ||
|
|
58bab0d310 | ||
|
|
1af51aaaba | ||
|
|
a09327b831 | ||
|
|
16543bf74c | ||
|
|
aa4cd373ac | ||
|
|
351e294362 | ||
|
|
a0c5b1cd9d | ||
|
|
df2ed1e584 | ||
|
|
b354f7a3a5 | ||
|
|
bb53d1e1c6 | ||
|
|
2aabd8d0e1 | ||
|
|
aca97c2c6c | ||
|
|
8e7d959cf4 | ||
|
|
b23f031db9 | ||
|
|
1ba529a9d5 | ||
|
|
3d29c183ef | ||
|
|
8a108b590d | ||
|
|
bca0f67344 | ||
|
|
f3dad51134 | ||
|
|
f51840829c | ||
|
|
aa1c0d0870 | ||
|
|
dee5ee6589 | ||
|
|
b799f479c4 | ||
|
|
b4352fefa5 | ||
|
|
77d06fb60e | ||
|
|
00b647457c | ||
|
|
153d10a35c | ||
|
|
06713c641e | ||
|
|
210978ec2d | ||
|
|
42f7d43139 | ||
|
|
19967f5ad7 | ||
|
|
a1de3eb47d | ||
|
|
e88841bdec | ||
|
|
c8e4915f8e | ||
|
|
a93a3f0598 | ||
|
|
084f81fc8d | ||
|
|
d148f36e87 | ||
|
|
150d9c35b7 | ||
|
|
e11198616e | ||
|
|
2f27f1e6f9 | ||
|
|
5392ca9794 | ||
|
|
46672eb583 | ||
|
|
79653eee80 | ||
|
|
16ad86c52a | ||
|
|
6b7c6be5f5 | ||
|
|
fda1c2cc79 | ||
|
|
ef2fee0ee3 | ||
|
|
e287d0811d | ||
|
|
a7164f3c9f | ||
|
|
c55060039a | ||
|
|
c68d8deddd | ||
|
|
f6eabc5db1 | ||
|
|
72d5884db6 | ||
|
|
3595c89c79 | ||
|
|
9ebbc718c5 | ||
|
|
e862480b86 | ||
|
|
1f3d8fe6f1 | ||
|
|
41ae036ab4 | ||
|
|
588d176b96 | ||
|
|
f8697120a0 | ||
|
|
1a767105e6 | ||
|
|
4067b6ed2c | ||
|
|
b272dbfd1f | ||
|
|
48be7bbf86 | ||
|
|
51e22cea71 | ||
|
|
2241e27e68 | ||
|
|
11c90ae879 | ||
|
|
cf55125202 | ||
|
|
9cefb85905 | ||
|
|
fc672da0e0 | ||
|
|
25b297b142 | ||
|
|
ab03c12fa8 | ||
|
|
3095c805ad | ||
|
|
9c18daafb8 | ||
|
|
16182417fb | ||
|
|
9af35201e4 | ||
|
|
f21b982955 | ||
|
|
b3a20d05c5 | ||
|
|
4cd024a2b2 | ||
|
|
63d08ebfd2 | ||
|
|
c696197b03 | ||
|
|
738a72228b | ||
|
|
90641f4488 | ||
|
|
a4cc7eaf9b | ||
|
|
fdca728fdc | ||
|
|
d2c4ae8cdf | ||
|
|
f3d3ac30a6 | ||
|
|
f8cc4ade8a | ||
|
|
b3975b7bbd | ||
|
|
4f1b61f5bc | ||
|
|
beeb37b4fd | ||
|
|
43aa2bad22 | ||
|
|
1b2ba921bb | ||
|
|
f543da0ea8 | ||
|
|
e60c9efa84 | ||
|
|
c52fc6f240 | ||
|
|
ee136b024a |
@@ -3,7 +3,7 @@
|
||||
"isRoot": true,
|
||||
"tools": {
|
||||
"csharpier": {
|
||||
"version": "1.1.2",
|
||||
"version": "1.2.5",
|
||||
"commands": [
|
||||
"csharpier"
|
||||
],
|
||||
|
||||
@@ -307,7 +307,6 @@ dotnet_diagnostic.CS8602.severity = error
|
||||
dotnet_diagnostic.CS8604.severity = error
|
||||
dotnet_diagnostic.CS8618.severity = error
|
||||
dotnet_diagnostic.CS0618.severity = suggestion
|
||||
dotnet_diagnostic.CS1998.severity = error
|
||||
dotnet_diagnostic.CS4014.severity = error
|
||||
dotnet_diagnostic.CS8600.severity = error
|
||||
dotnet_diagnostic.CS8603.severity = error
|
||||
@@ -368,6 +367,9 @@ dotnet_diagnostic.NX0001.severity = error
|
||||
dotnet_diagnostic.NX0002.severity = silent
|
||||
dotnet_diagnostic.NX0003.severity = silent
|
||||
|
||||
dotnet_diagnostic.VSTHRD110.severity = error
|
||||
dotnet_diagnostic.VSTHRD107.severity = error
|
||||
|
||||
##########################################
|
||||
# Styles
|
||||
##########################################
|
||||
|
||||
13
.github/COPILOT_AGENT_README.md
vendored
13
.github/COPILOT_AGENT_README.md
vendored
@@ -1,13 +0,0 @@
|
||||
# Copilot Coding Agent Configuration
|
||||
|
||||
This repository includes a minimal opt-in configuration and CI workflow to allow the GitHub Copilot coding agent to open and validate PRs.
|
||||
|
||||
- .copilot-agent.yml: opt-in config for automated agents
|
||||
- .github/workflows/dotnetcore.yml: CI runs on PRs touching the solution, source, or tests to validate changes
|
||||
- AGENTS.yml: general information for this project
|
||||
|
||||
Maintainers can adjust the allowed paths or disable the agent by editing or removing .copilot-agent.yml.
|
||||
|
||||
Notes:
|
||||
- Do not change any other files in the repository.
|
||||
- If build/test paths are different, update the workflow accordingly; this workflow targets SharpCompress.sln and the SharpCompress.Tests test project.
|
||||
7
.github/agents/copilot-agent.yml
vendored
7
.github/agents/copilot-agent.yml
vendored
@@ -1,7 +0,0 @@
|
||||
enabled: true
|
||||
agent:
|
||||
name: copilot-coding-agent
|
||||
allow:
|
||||
- paths: ["src/**/*", "tests/**/*", "README.md", "AGENTS.md"]
|
||||
actions: ["create", "modify"]
|
||||
require_review_before_merge: true
|
||||
155
.github/workflows/NUGET_RELEASE.md
vendored
Normal file
155
.github/workflows/NUGET_RELEASE.md
vendored
Normal file
@@ -0,0 +1,155 @@
|
||||
# NuGet Release Workflow
|
||||
|
||||
This document describes the automated NuGet release workflow for SharpCompress.
|
||||
|
||||
## Overview
|
||||
|
||||
The `nuget-release.yml` workflow automatically builds, tests, and publishes SharpCompress packages to NuGet.org when:
|
||||
- Changes are pushed to the `master` or `release` branch
|
||||
- A version tag (format: `MAJOR.MINOR.PATCH`) is pushed
|
||||
|
||||
The workflow runs on both Windows and Ubuntu, but only the Windows build publishes to NuGet.
|
||||
|
||||
## How It Works
|
||||
|
||||
### Version Determination
|
||||
|
||||
The workflow automatically determines the version based on whether the commit is tagged using C# code in the build project:
|
||||
|
||||
1. **Tagged Release (Stable)**:
|
||||
- If the current commit has a version tag (e.g., `0.42.1`)
|
||||
- Uses the tag as the version number
|
||||
- Published as a stable release
|
||||
|
||||
2. **Untagged Release (Prerelease)**:
|
||||
- If the current commit is NOT tagged
|
||||
- Creates a prerelease version based on the next minor version
|
||||
- Format: `{NEXT_MINOR_VERSION}-beta.{COMMIT_COUNT}`
|
||||
- Example: `0.43.0-beta.123` (if last tag is 0.42.x)
|
||||
- Published as a prerelease to NuGet.org (Windows build only)
|
||||
|
||||
### Workflow Steps
|
||||
|
||||
The workflow runs on a matrix of operating systems (Windows and Ubuntu):
|
||||
|
||||
1. **Checkout**: Fetches the repository with full history for version detection
|
||||
2. **Setup .NET**: Installs .NET 10.0
|
||||
3. **Determine Version**: Runs `determine-version` build target to check for tags and determine version
|
||||
4. **Update Version**: Runs `update-version` build target to update the version in the project file
|
||||
5. **Build and Test**: Runs the full build and test suite on both platforms
|
||||
6. **Upload Artifacts**: Uploads the generated `.nupkg` files as workflow artifacts (separate for each OS)
|
||||
7. **Push to NuGet**: (Windows only) Runs `push-to-nuget` build target to publish the package to NuGet.org using the API key
|
||||
|
||||
All version detection, file updates, and publishing logic is implemented in C# in the `build/Program.cs` file using build targets.
|
||||
|
||||
## Setup Requirements
|
||||
|
||||
### 1. NuGet API Key Secret
|
||||
|
||||
The workflow requires a `NUGET_API_KEY` secret to be configured in the repository settings:
|
||||
|
||||
1. Go to https://www.nuget.org/account/apikeys
|
||||
2. Create a new API key with "Push" permission for the SharpCompress package
|
||||
3. In GitHub, go to: **Settings** → **Secrets and variables** → **Actions**
|
||||
4. Create a new secret named `NUGET_API_KEY` with the API key value
|
||||
|
||||
### 2. Branch Protection (Recommended)
|
||||
|
||||
Consider enabling branch protection rules for the `release` branch to ensure:
|
||||
- Code reviews are required before merging
|
||||
- Status checks pass before merging
|
||||
- Only authorized users can push to the branch
|
||||
|
||||
## Usage
|
||||
|
||||
### Creating a Stable Release
|
||||
|
||||
There are two ways to trigger a stable release:
|
||||
|
||||
**Method 1: Push tag to trigger workflow**
|
||||
1. Ensure all changes are committed on the `master` or `release` branch
|
||||
2. Create and push a version tag:
|
||||
```bash
|
||||
git checkout master # or release
|
||||
git tag 0.43.0
|
||||
git push origin 0.43.0
|
||||
```
|
||||
3. The workflow will automatically trigger, build, test, and publish `SharpCompress 0.43.0` to NuGet.org (Windows build)
|
||||
|
||||
**Method 2: Tag after pushing to branch**
|
||||
1. Ensure all changes are merged and pushed to the `master` or `release` branch
|
||||
2. Create and push a version tag on the already-pushed commit:
|
||||
```bash
|
||||
git checkout master # or release
|
||||
git tag 0.43.0
|
||||
git push origin 0.43.0
|
||||
```
|
||||
3. The workflow will automatically trigger, build, test, and publish `SharpCompress 0.43.0` to NuGet.org (Windows build)
|
||||
|
||||
### Creating a Prerelease
|
||||
|
||||
1. Push changes to the `master` or `release` branch without tagging:
|
||||
```bash
|
||||
git checkout master # or release
|
||||
git push origin master # or release
|
||||
```
|
||||
2. The workflow will automatically:
|
||||
- Build and test the project on both Windows and Ubuntu
|
||||
- Publish a prerelease version like `0.43.0-beta.456` to NuGet.org (Windows build)
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### Workflow Fails to Push to NuGet
|
||||
|
||||
- **Check the API Key**: Ensure `NUGET_API_KEY` is set correctly in repository secrets
|
||||
- **Check API Key Permissions**: Verify the API key has "Push" permission for SharpCompress
|
||||
- **Check API Key Expiration**: NuGet API keys may expire; create a new one if needed
|
||||
|
||||
### Version Conflict
|
||||
|
||||
If you see "Package already exists" errors:
|
||||
- The workflow uses `--skip-duplicate` flag to handle this gracefully
|
||||
- If you need to republish the same version, delete it from NuGet.org first (if allowed)
|
||||
|
||||
### Build or Test Failures
|
||||
|
||||
- The workflow will not push to NuGet if build or tests fail
|
||||
- Check the workflow logs in GitHub Actions for details
|
||||
- Fix the issues and push again
|
||||
|
||||
## Manual Package Creation
|
||||
|
||||
If you need to create a package manually without publishing:
|
||||
|
||||
```bash
|
||||
dotnet run --project build/build.csproj -- publish
|
||||
```
|
||||
|
||||
The package will be created in the `artifacts/` directory.
|
||||
|
||||
## Build Targets
|
||||
|
||||
The workflow uses the following C# build targets defined in `build/Program.cs`:
|
||||
|
||||
- **determine-version**: Detects version from git tags and outputs VERSION and PRERELEASE variables
|
||||
- **update-version**: Updates VersionPrefix, AssemblyVersion, and FileVersion in the project file
|
||||
- **push-to-nuget**: Pushes the generated NuGet packages to NuGet.org (requires NUGET_API_KEY)
|
||||
|
||||
These targets can be run manually for testing:
|
||||
|
||||
```bash
|
||||
# Determine the version
|
||||
dotnet run --project build/build.csproj -- determine-version
|
||||
|
||||
# Update version in project file
|
||||
VERSION=0.43.0 dotnet run --project build/build.csproj -- update-version
|
||||
|
||||
# Push to NuGet (requires NUGET_API_KEY environment variable)
|
||||
NUGET_API_KEY=your-key dotnet run --project build/build.csproj -- push-to-nuget
|
||||
```
|
||||
|
||||
## Related Files
|
||||
|
||||
- `.github/workflows/nuget-release.yml` - The workflow definition
|
||||
- `build/Program.cs` - Build script with version detection and publishing logic
|
||||
- `src/SharpCompress/SharpCompress.csproj` - Project file with version information
|
||||
120
.github/workflows/TESTING.md
vendored
Normal file
120
.github/workflows/TESTING.md
vendored
Normal file
@@ -0,0 +1,120 @@
|
||||
# Testing Guide for NuGet Release Workflow
|
||||
|
||||
This document describes how to test the NuGet release workflow.
|
||||
|
||||
## Testing Strategy
|
||||
|
||||
Since this workflow publishes to NuGet.org and requires repository secrets, testing should be done carefully. The workflow runs on both Windows and Ubuntu, but only the Windows build publishes to NuGet.
|
||||
|
||||
## Pre-Testing Checklist
|
||||
|
||||
- [x] Workflow YAML syntax validated
|
||||
- [x] Version determination logic tested locally
|
||||
- [x] Version update logic tested locally
|
||||
- [x] Build script works (`dotnet run --project build/build.csproj`)
|
||||
|
||||
## Manual Testing Steps
|
||||
|
||||
### 1. Test Prerelease Publishing (Recommended First Test)
|
||||
|
||||
This tests the workflow on untagged commits to the master or release branch.
|
||||
|
||||
**Steps:**
|
||||
1. Ensure `NUGET_API_KEY` secret is configured in repository settings
|
||||
2. Create a test commit on the `master` or `release` branch (e.g., update a comment or README)
|
||||
3. Push to the `master` or `release` branch
|
||||
4. Monitor the GitHub Actions workflow at: https://github.com/adamhathcock/sharpcompress/actions
|
||||
5. Verify:
|
||||
- Workflow triggers and runs successfully on both Windows and Ubuntu
|
||||
- Version is determined correctly (e.g., `0.43.0-beta.XXX` if last tag is 0.42.x)
|
||||
- Build and tests pass on both platforms
|
||||
- Package artifacts are uploaded for both platforms
|
||||
- Package is pushed to NuGet.org as prerelease (Windows build only)
|
||||
|
||||
**Expected Outcome:**
|
||||
- A new prerelease package appears on NuGet.org: https://www.nuget.org/packages/SharpCompress/
|
||||
- Package version follows pattern: `{NEXT_MINOR_VERSION}-beta.{COMMIT_COUNT}`
|
||||
|
||||
### 2. Test Tagged Release Publishing
|
||||
|
||||
This tests the workflow when a version tag is pushed.
|
||||
|
||||
**Steps:**
|
||||
1. Prepare the `master` or `release` branch with all desired changes
|
||||
2. Create a version tag (must be a pure semantic version like `MAJOR.MINOR.PATCH`):
|
||||
```bash
|
||||
git checkout master # or release
|
||||
git tag 0.42.2
|
||||
git push origin 0.42.2
|
||||
```
|
||||
3. Monitor the GitHub Actions workflow
|
||||
4. Verify:
|
||||
- Workflow triggers and runs successfully on both Windows and Ubuntu
|
||||
- Version is determined as the tag (e.g., `0.42.2`)
|
||||
- Build and tests pass on both platforms
|
||||
- Package artifacts are uploaded for both platforms
|
||||
- Package is pushed to NuGet.org as stable release (Windows build only)
|
||||
|
||||
**Expected Outcome:**
|
||||
- A new stable release package appears on NuGet.org
|
||||
- Package version matches the tag
|
||||
|
||||
### 3. Test Duplicate Package Handling
|
||||
|
||||
This tests the `--skip-duplicate` flag behavior.
|
||||
|
||||
**Steps:**
|
||||
1. Push to the `release` branch without making changes
|
||||
2. Monitor the workflow
|
||||
3. Verify:
|
||||
- Workflow runs but NuGet push is skipped with "duplicate" message
|
||||
- No errors occur
|
||||
|
||||
### 4. Test Build Failure Handling
|
||||
|
||||
This tests that failed builds don't publish packages.
|
||||
|
||||
**Steps:**
|
||||
1. Introduce a breaking change in a test or code
|
||||
2. Push to the `release` branch
|
||||
3. Verify:
|
||||
- Workflow runs and detects the failure
|
||||
- Build or test step fails
|
||||
- NuGet push step is skipped
|
||||
- No package is published
|
||||
|
||||
## Verification
|
||||
|
||||
After each test, verify:
|
||||
|
||||
1. **GitHub Actions Logs**: Check the workflow logs for any errors or warnings
|
||||
2. **NuGet.org**: Verify the package appears with correct version and metadata
|
||||
3. **Artifacts**: Download and inspect the uploaded artifacts
|
||||
|
||||
## Rollback/Cleanup
|
||||
|
||||
If testing produces unwanted packages:
|
||||
|
||||
1. **Prerelease packages**: Can be unlisted on NuGet.org (Settings → Unlist)
|
||||
2. **Stable packages**: Cannot be deleted, only unlisted (use test versions)
|
||||
3. **Tags**: Can be deleted with:
|
||||
```bash
|
||||
git tag -d 0.42.2
|
||||
git push origin :refs/tags/0.42.2
|
||||
```
|
||||
|
||||
## Known Limitations
|
||||
|
||||
- NuGet.org does not allow re-uploading the same version
|
||||
- Deleted packages on NuGet.org reserve the version number
|
||||
- The workflow requires the `NUGET_API_KEY` secret to be set
|
||||
|
||||
## Success Criteria
|
||||
|
||||
The workflow is considered successful if:
|
||||
|
||||
- ✅ Prerelease versions are published correctly with beta suffix
|
||||
- ✅ Tagged versions are published as stable releases
|
||||
- ✅ Build and test failures prevent publishing
|
||||
- ✅ Duplicate packages are handled gracefully
|
||||
- ✅ Workflow logs are clear and informative
|
||||
25
.github/workflows/dotnetcore.yml
vendored
25
.github/workflows/dotnetcore.yml
vendored
@@ -1,25 +0,0 @@
|
||||
name: SharpCompress
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- 'master'
|
||||
pull_request:
|
||||
types: [ opened, synchronize, reopened, ready_for_review ]
|
||||
|
||||
jobs:
|
||||
build:
|
||||
runs-on: ${{ matrix.os }}
|
||||
strategy:
|
||||
matrix:
|
||||
os: [windows-latest, ubuntu-latest]
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v5
|
||||
- uses: actions/setup-dotnet@v5
|
||||
with:
|
||||
dotnet-version: 8.0.x
|
||||
- run: dotnet run --project build/build.csproj
|
||||
- uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: ${{ matrix.os }}-sharpcompress.nupkg
|
||||
path: artifacts/*
|
||||
61
.github/workflows/nuget-release.yml
vendored
Normal file
61
.github/workflows/nuget-release.yml
vendored
Normal file
@@ -0,0 +1,61 @@
|
||||
name: NuGet Release
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- 'master'
|
||||
- 'release'
|
||||
tags:
|
||||
- '[0-9]+.[0-9]+.[0-9]+'
|
||||
pull_request:
|
||||
branches:
|
||||
- 'master'
|
||||
- 'release'
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
build-and-publish:
|
||||
runs-on: ${{ matrix.os }}
|
||||
strategy:
|
||||
matrix:
|
||||
os: [windows-latest, ubuntu-latest]
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v6
|
||||
with:
|
||||
fetch-depth: 0 # Fetch all history for versioning
|
||||
|
||||
- uses: actions/setup-dotnet@v5
|
||||
with:
|
||||
dotnet-version: 10.0.x
|
||||
|
||||
# Determine version using C# build target
|
||||
- name: Determine Version
|
||||
id: version
|
||||
run: dotnet run --project build/build.csproj -- determine-version
|
||||
|
||||
# Update version in project file using C# build target
|
||||
- name: Update Version in Project
|
||||
run: dotnet run --project build/build.csproj -- update-version
|
||||
env:
|
||||
VERSION: ${{ steps.version.outputs.version }}
|
||||
|
||||
# Build and test
|
||||
- name: Build and Test
|
||||
run: dotnet run --project build/build.csproj
|
||||
|
||||
# Upload artifacts for verification
|
||||
- name: Upload NuGet Package
|
||||
uses: actions/upload-artifact@v6
|
||||
with:
|
||||
name: ${{ matrix.os }}-nuget-package
|
||||
path: artifacts/*.nupkg
|
||||
|
||||
# Push to NuGet.org using C# build target (Windows only, not on PRs)
|
||||
- name: Push to NuGet
|
||||
if: success() && matrix.os == 'windows-latest' && github.event_name != 'pull_request'
|
||||
run: dotnet run --project build/build.csproj -- push-to-nuget
|
||||
env:
|
||||
NUGET_API_KEY: ${{ secrets.NUGET_API_KEY }}
|
||||
9
.gitignore
vendored
9
.gitignore
vendored
@@ -4,18 +4,19 @@ _ReSharper.SharpCompress/
|
||||
bin/
|
||||
*.suo
|
||||
*.user
|
||||
TestArchives/Scratch/
|
||||
TestArchives/Scratch2/
|
||||
tests/TestArchives/Scratch/
|
||||
tests/TestArchives/Scratch2/
|
||||
TestResults/
|
||||
*.nupkg
|
||||
packages/*/
|
||||
project.lock.json
|
||||
tests/TestArchives/Scratch
|
||||
tests/TestArchives/*/Scratch
|
||||
tests/TestArchives/*/Scratch2
|
||||
.vs
|
||||
tools
|
||||
.vscode
|
||||
.idea/
|
||||
artifacts/
|
||||
|
||||
.DS_Store
|
||||
*.snupkg
|
||||
/tests/TestArchives/6d23a38c-f064-4ef1-ad89-b942396f53b9/Scratch
|
||||
|
||||
9
.vscode/extensions.json
vendored
Normal file
9
.vscode/extensions.json
vendored
Normal file
@@ -0,0 +1,9 @@
|
||||
{
|
||||
"recommendations": [
|
||||
"ms-dotnettools.csdevkit",
|
||||
"ms-dotnettools.csharp",
|
||||
"ms-dotnettools.vscode-dotnet-runtime",
|
||||
"csharpier.csharpier-vscode",
|
||||
"formulahendry.dotnet-test-explorer"
|
||||
]
|
||||
}
|
||||
97
.vscode/launch.json
vendored
Normal file
97
.vscode/launch.json
vendored
Normal file
@@ -0,0 +1,97 @@
|
||||
{
|
||||
"version": "0.2.0",
|
||||
"configurations": [
|
||||
{
|
||||
"name": "Debug Tests (net10.0)",
|
||||
"type": "coreclr",
|
||||
"request": "launch",
|
||||
"preLaunchTask": "build",
|
||||
"program": "dotnet",
|
||||
"args": [
|
||||
"test",
|
||||
"${workspaceFolder}/tests/SharpCompress.Test/SharpCompress.Test.csproj",
|
||||
"-f",
|
||||
"net10.0",
|
||||
"--no-build",
|
||||
"--verbosity=normal"
|
||||
],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"console": "internalConsole",
|
||||
"stopAtEntry": false
|
||||
},
|
||||
{
|
||||
"name": "Debug Specific Test (net10.0)",
|
||||
"type": "coreclr",
|
||||
"request": "launch",
|
||||
"preLaunchTask": "build",
|
||||
"program": "dotnet",
|
||||
"args": [
|
||||
"test",
|
||||
"${workspaceFolder}/tests/SharpCompress.Test/SharpCompress.Test.csproj",
|
||||
"-f",
|
||||
"net10.0",
|
||||
"--no-build",
|
||||
"--filter",
|
||||
"FullyQualifiedName~${input:testName}"
|
||||
],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"console": "internalConsole",
|
||||
"stopAtEntry": false
|
||||
},
|
||||
{
|
||||
"name": "Debug Performance Tests",
|
||||
"type": "coreclr",
|
||||
"request": "launch",
|
||||
"preLaunchTask": "build",
|
||||
"program": "dotnet",
|
||||
"args": [
|
||||
"run",
|
||||
"--project",
|
||||
"${workspaceFolder}/tests/SharpCompress.Performance/SharpCompress.Performance.csproj",
|
||||
"--no-build"
|
||||
],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"console": "internalConsole",
|
||||
"stopAtEntry": false
|
||||
},
|
||||
{
|
||||
"name": "Debug Build Script",
|
||||
"type": "coreclr",
|
||||
"request": "launch",
|
||||
"program": "dotnet",
|
||||
"args": [
|
||||
"run",
|
||||
"--project",
|
||||
"${workspaceFolder}/build/build.csproj",
|
||||
"--",
|
||||
"${input:buildTarget}"
|
||||
],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"console": "internalConsole",
|
||||
"stopAtEntry": false
|
||||
}
|
||||
],
|
||||
"inputs": [
|
||||
{
|
||||
"id": "testName",
|
||||
"type": "promptString",
|
||||
"description": "Enter test name or pattern (e.g., TestMethodName or ClassName)",
|
||||
"default": ""
|
||||
},
|
||||
{
|
||||
"id": "buildTarget",
|
||||
"type": "pickString",
|
||||
"description": "Select build target",
|
||||
"options": [
|
||||
"clean",
|
||||
"restore",
|
||||
"build",
|
||||
"test",
|
||||
"format",
|
||||
"publish",
|
||||
"default"
|
||||
],
|
||||
"default": "build"
|
||||
}
|
||||
]
|
||||
}
|
||||
29
.vscode/settings.json
vendored
Normal file
29
.vscode/settings.json
vendored
Normal file
@@ -0,0 +1,29 @@
|
||||
{
|
||||
"dotnet.defaultSolution": "SharpCompress.sln",
|
||||
"files.exclude": {
|
||||
"**/bin": true,
|
||||
"**/obj": true
|
||||
},
|
||||
"files.watcherExclude": {
|
||||
"**/bin/**": true,
|
||||
"**/obj/**": true,
|
||||
"**/artifacts/**": true
|
||||
},
|
||||
"search.exclude": {
|
||||
"**/bin": true,
|
||||
"**/obj": true,
|
||||
"**/artifacts": true
|
||||
},
|
||||
"editor.formatOnSave": false,
|
||||
"[csharp]": {
|
||||
"editor.defaultFormatter": "csharpier.csharpier-vscode",
|
||||
"editor.formatOnSave": true,
|
||||
"editor.codeActionsOnSave": {
|
||||
"source.fixAll": "explicit"
|
||||
}
|
||||
},
|
||||
"csharpier.enableDebugLogs": false,
|
||||
"omnisharp.enableRoslynAnalyzers": true,
|
||||
"omnisharp.enableEditorConfigSupport": true,
|
||||
"dotnet-test-explorer.testProjectPath": "tests/**/*.csproj"
|
||||
}
|
||||
178
.vscode/tasks.json
vendored
Normal file
178
.vscode/tasks.json
vendored
Normal file
@@ -0,0 +1,178 @@
|
||||
{
|
||||
"version": "2.0.0",
|
||||
"tasks": [
|
||||
{
|
||||
"label": "build",
|
||||
"command": "dotnet",
|
||||
"type": "process",
|
||||
"args": [
|
||||
"build",
|
||||
"${workspaceFolder}/SharpCompress.sln",
|
||||
"/property:GenerateFullPaths=true",
|
||||
"/consoleloggerparameters:NoSummary;ForceNoAlign"
|
||||
],
|
||||
"problemMatcher": "$msCompile",
|
||||
"group": {
|
||||
"kind": "build",
|
||||
"isDefault": true
|
||||
}
|
||||
},
|
||||
{
|
||||
"label": "build-release",
|
||||
"command": "dotnet",
|
||||
"type": "process",
|
||||
"args": [
|
||||
"build",
|
||||
"${workspaceFolder}/SharpCompress.sln",
|
||||
"-c",
|
||||
"Release",
|
||||
"/property:GenerateFullPaths=true",
|
||||
"/consoleloggerparameters:NoSummary;ForceNoAlign"
|
||||
],
|
||||
"problemMatcher": "$msCompile",
|
||||
"group": "build"
|
||||
},
|
||||
{
|
||||
"label": "build-library",
|
||||
"command": "dotnet",
|
||||
"type": "process",
|
||||
"args": [
|
||||
"build",
|
||||
"${workspaceFolder}/src/SharpCompress/SharpCompress.csproj",
|
||||
"/property:GenerateFullPaths=true",
|
||||
"/consoleloggerparameters:NoSummary;ForceNoAlign"
|
||||
],
|
||||
"problemMatcher": "$msCompile",
|
||||
"group": "build"
|
||||
},
|
||||
{
|
||||
"label": "restore",
|
||||
"command": "dotnet",
|
||||
"type": "process",
|
||||
"args": [
|
||||
"restore",
|
||||
"${workspaceFolder}/SharpCompress.sln"
|
||||
],
|
||||
"problemMatcher": "$msCompile"
|
||||
},
|
||||
{
|
||||
"label": "clean",
|
||||
"command": "dotnet",
|
||||
"type": "process",
|
||||
"args": [
|
||||
"clean",
|
||||
"${workspaceFolder}/SharpCompress.sln"
|
||||
],
|
||||
"problemMatcher": "$msCompile"
|
||||
},
|
||||
{
|
||||
"label": "test",
|
||||
"command": "dotnet",
|
||||
"type": "process",
|
||||
"args": [
|
||||
"test",
|
||||
"${workspaceFolder}/tests/SharpCompress.Test/SharpCompress.Test.csproj",
|
||||
"--no-build",
|
||||
"--verbosity=normal"
|
||||
],
|
||||
"problemMatcher": "$msCompile",
|
||||
"group": {
|
||||
"kind": "test",
|
||||
"isDefault": true
|
||||
},
|
||||
"dependsOn": "build"
|
||||
},
|
||||
{
|
||||
"label": "test-net10",
|
||||
"command": "dotnet",
|
||||
"type": "process",
|
||||
"args": [
|
||||
"test",
|
||||
"${workspaceFolder}/tests/SharpCompress.Test/SharpCompress.Test.csproj",
|
||||
"-f",
|
||||
"net10.0",
|
||||
"--no-build",
|
||||
"--verbosity=normal"
|
||||
],
|
||||
"problemMatcher": "$msCompile",
|
||||
"group": "test",
|
||||
"dependsOn": "build"
|
||||
},
|
||||
{
|
||||
"label": "test-net48",
|
||||
"command": "dotnet",
|
||||
"type": "process",
|
||||
"args": [
|
||||
"test",
|
||||
"${workspaceFolder}/tests/SharpCompress.Test/SharpCompress.Test.csproj",
|
||||
"-f",
|
||||
"net48",
|
||||
"--no-build",
|
||||
"--verbosity=normal"
|
||||
],
|
||||
"problemMatcher": "$msCompile",
|
||||
"group": "test",
|
||||
"dependsOn": "build"
|
||||
},
|
||||
{
|
||||
"label": "format",
|
||||
"command": "dotnet",
|
||||
"type": "process",
|
||||
"args": [
|
||||
"csharpier",
|
||||
"."
|
||||
],
|
||||
"problemMatcher": []
|
||||
},
|
||||
{
|
||||
"label": "format-check",
|
||||
"command": "dotnet",
|
||||
"type": "process",
|
||||
"args": [
|
||||
"csharpier",
|
||||
"check",
|
||||
"."
|
||||
],
|
||||
"problemMatcher": []
|
||||
},
|
||||
{
|
||||
"label": "run-build-script",
|
||||
"command": "dotnet",
|
||||
"type": "process",
|
||||
"args": [
|
||||
"run",
|
||||
"--project",
|
||||
"${workspaceFolder}/build/build.csproj"
|
||||
],
|
||||
"problemMatcher": "$msCompile"
|
||||
},
|
||||
{
|
||||
"label": "pack",
|
||||
"command": "dotnet",
|
||||
"type": "process",
|
||||
"args": [
|
||||
"pack",
|
||||
"${workspaceFolder}/src/SharpCompress/SharpCompress.csproj",
|
||||
"-c",
|
||||
"Release",
|
||||
"-o",
|
||||
"${workspaceFolder}/artifacts/"
|
||||
],
|
||||
"problemMatcher": "$msCompile",
|
||||
"dependsOn": "build-release"
|
||||
},
|
||||
{
|
||||
"label": "performance-tests",
|
||||
"command": "dotnet",
|
||||
"type": "process",
|
||||
"args": [
|
||||
"run",
|
||||
"--project",
|
||||
"${workspaceFolder}/tests/SharpCompress.Performance/SharpCompress.Performance.csproj",
|
||||
"-c",
|
||||
"Release"
|
||||
],
|
||||
"problemMatcher": "$msCompile"
|
||||
}
|
||||
]
|
||||
}
|
||||
220
AGENTS.md
220
AGENTS.md
@@ -1,19 +1,25 @@
|
||||
---
|
||||
description: 'Guidelines for building C# applications'
|
||||
description: 'Guidelines for building SharpCompress - A C# compression library'
|
||||
applyTo: '**/*.cs'
|
||||
---
|
||||
|
||||
# C# Development
|
||||
# SharpCompress Development
|
||||
|
||||
## About SharpCompress
|
||||
SharpCompress is a pure C# compression library supporting multiple archive formats (Zip, Tar, GZip, BZip2, 7Zip, Rar, LZip, XZ, ZStandard) for .NET Framework 4.62, .NET Standard 2.1, .NET 6.0, and .NET 8.0. The library provides both seekable Archive APIs and forward-only Reader/Writer APIs for streaming scenarios.
|
||||
|
||||
## C# Instructions
|
||||
- Always use the latest version C#, currently C# 13 features.
|
||||
- Write clear and concise comments for each function.
|
||||
- Follow the existing code style and patterns in the codebase.
|
||||
|
||||
## General Instructions
|
||||
- **Agents should NEVER commit to git** - Agents should stage files and leave committing to the user. Only create commits when the user explicitly requests them.
|
||||
- Make only high confidence suggestions when reviewing code changes.
|
||||
- Write code with good maintainability practices, including comments on why certain design decisions were made.
|
||||
- Handle edge cases and write clear exception handling.
|
||||
- For libraries or external dependencies, mention their usage and purpose in comments.
|
||||
- Preserve backward compatibility when making changes to public APIs.
|
||||
|
||||
## Naming Conventions
|
||||
|
||||
@@ -23,20 +29,74 @@ applyTo: '**/*.cs'
|
||||
|
||||
## Code Formatting
|
||||
|
||||
- Use CSharpier for all code formatting to ensure consistent style across the project.
|
||||
- Install CSharpier globally: `dotnet tool install -g csharpier`
|
||||
- Format files with: `dotnet csharpier format .`
|
||||
- Configure your IDE to format on save using CSharpier.
|
||||
- CSharpier configuration can be customized via `.csharpierrc` file in the project root.
|
||||
- Trust CSharpier's opinionated formatting decisions to maintain consistency.
|
||||
**Copilot agents: You MUST run the `format` task after making code changes to ensure consistency.**
|
||||
|
||||
- Use CSharpier for code formatting to ensure consistent style across the project
|
||||
- CSharpier is configured as a local tool in `.config/dotnet-tools.json`
|
||||
|
||||
### Commands
|
||||
|
||||
1. **Restore tools** (first time only):
|
||||
```bash
|
||||
dotnet tool restore
|
||||
```
|
||||
|
||||
2. **Check if files are formatted correctly** (doesn't modify files):
|
||||
```bash
|
||||
dotnet csharpier check .
|
||||
```
|
||||
- Exit code 0: All files are properly formatted
|
||||
- Exit code 1: Some files need formatting (will show which files and differences)
|
||||
|
||||
3. **Format files** (modifies files):
|
||||
```bash
|
||||
dotnet csharpier format .
|
||||
```
|
||||
- Formats all files in the project to match CSharpier style
|
||||
- Run from project root directory
|
||||
|
||||
4. **Configure your IDE** to format on save using CSharpier for the best experience
|
||||
|
||||
### Additional Notes
|
||||
- The project also uses `.editorconfig` for editor settings (indentation, encoding, etc.)
|
||||
- Let CSharpier handle code style while `.editorconfig` handles editor behavior
|
||||
- Always run `dotnet csharpier check .` before committing to verify formatting
|
||||
|
||||
## Project Setup and Structure
|
||||
|
||||
- Guide users through creating a new .NET project with the appropriate templates.
|
||||
- Explain the purpose of each generated file and folder to build understanding of the project structure.
|
||||
- Demonstrate how to organize code using feature folders or domain-driven design principles.
|
||||
- Show proper separation of concerns with models, services, and data access layers.
|
||||
- Explain the Program.cs and configuration system in ASP.NET Core 9 including environment-specific settings.
|
||||
- The project targets multiple frameworks: .NET Framework 4.62, .NET Standard 2.1, .NET 6.0, and .NET 8.0
|
||||
- Main library is in `src/SharpCompress/`
|
||||
- Tests are in `tests/SharpCompress.Test/`
|
||||
- Performance tests are in `tests/SharpCompress.Performance/`
|
||||
- Test archives are in `tests/TestArchives/`
|
||||
- Build project is in `build/`
|
||||
- Use `dotnet build` to build the solution
|
||||
- Use `dotnet test` to run tests
|
||||
- Solution file: `SharpCompress.sln`
|
||||
|
||||
### Directory Structure
|
||||
```
|
||||
src/SharpCompress/
|
||||
├── Archives/ # IArchive implementations (Zip, Tar, Rar, 7Zip, GZip)
|
||||
├── Readers/ # IReader implementations (forward-only)
|
||||
├── Writers/ # IWriter implementations (forward-only)
|
||||
├── Compressors/ # Low-level compression streams (BZip2, Deflate, LZMA, etc.)
|
||||
├── Factories/ # Format detection and factory pattern
|
||||
├── Common/ # Shared types (ArchiveType, Entry, Options)
|
||||
├── Crypto/ # Encryption implementations
|
||||
└── IO/ # Stream utilities and wrappers
|
||||
|
||||
tests/SharpCompress.Test/
|
||||
├── Zip/, Tar/, Rar/, SevenZip/, GZip/, BZip2/ # Format-specific tests
|
||||
├── TestBase.cs # Base test class with helper methods
|
||||
└── TestArchives/ # Test data (not checked into main test project)
|
||||
```
|
||||
|
||||
### Factory Pattern
|
||||
All format types implement factory interfaces (`IArchiveFactory`, `IReaderFactory`, `IWriterFactory`) for auto-detection:
|
||||
- `ReaderFactory.Open()` - Auto-detects format by probing stream
|
||||
- `WriterFactory.Open()` - Creates writer for specified `ArchiveType`
|
||||
- Factories located in: `src/SharpCompress/Factories/`
|
||||
|
||||
## Nullable Reference Types
|
||||
|
||||
@@ -44,21 +104,133 @@ applyTo: '**/*.cs'
|
||||
- Always use `is null` or `is not null` instead of `== null` or `!= null`.
|
||||
- Trust the C# null annotations and don't add null checks when the type system says a value cannot be null.
|
||||
|
||||
## SharpCompress-Specific Guidelines
|
||||
|
||||
### Supported Formats
|
||||
SharpCompress supports multiple archive and compression formats:
|
||||
- **Archive Formats**: Zip, Tar, 7Zip, Rar (read-only)
|
||||
- **Compression**: DEFLATE, BZip2, LZMA/LZMA2, PPMd, ZStandard (decompress only), Deflate64 (decompress only)
|
||||
- **Combined Formats**: Tar.GZip, Tar.BZip2, Tar.LZip, Tar.XZ, Tar.ZStandard
|
||||
- See [docs/FORMATS.md](docs/FORMATS.md) for complete format support matrix
|
||||
|
||||
### Stream Handling Rules
|
||||
- **Disposal**: As of version 0.21, SharpCompress closes wrapped streams by default
|
||||
- Use `ReaderOptions` or `WriterOptions` with `LeaveStreamOpen = true` to control stream disposal
|
||||
- Use `NonDisposingStream` wrapper when working with compression streams directly to prevent disposal
|
||||
- Always dispose of readers, writers, and archives in `using` blocks
|
||||
- For forward-only operations, use Reader/Writer APIs; for random access, use Archive APIs
|
||||
|
||||
### Async/Await Patterns
|
||||
- All I/O operations support async/await with `CancellationToken`
|
||||
- Async methods follow the naming convention: `MethodNameAsync`
|
||||
- Key async methods:
|
||||
- `WriteEntryToAsync` - Extract entry asynchronously
|
||||
- `WriteAllToDirectoryAsync` - Extract all entries asynchronously
|
||||
- `WriteAsync` - Write entry asynchronously
|
||||
- `WriteAllAsync` - Write directory asynchronously
|
||||
- `OpenEntryStreamAsync` - Open entry stream asynchronously
|
||||
- Always provide `CancellationToken` parameter in async methods
|
||||
|
||||
### Archive APIs vs Reader/Writer APIs
|
||||
- **Archive API**: Use for random access with seekable streams (e.g., `ZipArchive`, `TarArchive`)
|
||||
- **Reader API**: Use for forward-only reading on non-seekable streams (e.g., `ZipReader`, `TarReader`)
|
||||
- **Writer API**: Use for forward-only writing on streams (e.g., `ZipWriter`, `TarWriter`)
|
||||
- 7Zip only supports Archive API due to format limitations
|
||||
|
||||
### Tar-Specific Considerations
|
||||
- Tar format requires file size in the header
|
||||
- If no size is specified to TarWriter and the stream is not seekable, an exception will be thrown
|
||||
- Tar combined with compression (GZip, BZip2, LZip, XZ) is supported
|
||||
|
||||
### Zip-Specific Considerations
|
||||
- Supports Zip64 for large files (seekable streams only)
|
||||
- Supports PKWare and WinZip AES encryption
|
||||
- Multiple compression methods: None, Shrink, Reduce, Implode, DEFLATE, Deflate64, BZip2, LZMA, PPMd
|
||||
- Encrypted LZMA is not supported
|
||||
|
||||
### Performance Considerations
|
||||
- For large files, use Reader/Writer APIs with non-seekable streams to avoid loading entire file in memory
|
||||
- Leverage async I/O for better scalability
|
||||
- Consider compression level trade-offs (speed vs. size)
|
||||
- Use appropriate buffer sizes for stream operations
|
||||
|
||||
## Testing
|
||||
|
||||
- Always include test cases for critical paths of the application.
|
||||
- Guide users through creating unit tests.
|
||||
- Test with multiple archive formats when making changes to core functionality.
|
||||
- Include tests for both Archive and Reader/Writer APIs when applicable.
|
||||
- Test async operations with cancellation tokens.
|
||||
- Do not emit "Act", "Arrange" or "Assert" comments.
|
||||
- Copy existing style in nearby files for test method names and capitalization.
|
||||
- Explain integration testing approaches for API endpoints.
|
||||
- Demonstrate how to mock dependencies for effective testing.
|
||||
- Show how to test authentication and authorization logic.
|
||||
- Explain test-driven development principles as applied to API development.
|
||||
- Use test archives from `tests/TestArchives` directory for consistency.
|
||||
- Test stream disposal and `LeaveStreamOpen` behavior.
|
||||
- Test edge cases: empty archives, large files, corrupted archives, encrypted archives.
|
||||
|
||||
## Performance Optimization
|
||||
### Test Organization
|
||||
- Base class: `TestBase` - Provides `TEST_ARCHIVES_PATH`, `SCRATCH_FILES_PATH`, temp directory management
|
||||
- Framework: xUnit with AwesomeAssertions
|
||||
- Test archives: `tests/TestArchives/` - Use existing archives, don't create new ones unnecessarily
|
||||
- Match naming style of nearby test files
|
||||
|
||||
- Guide users on implementing caching strategies (in-memory, distributed, response caching).
|
||||
- Explain asynchronous programming patterns and why they matter for API performance.
|
||||
- Demonstrate pagination, filtering, and sorting for large data sets.
|
||||
- Show how to implement compression and other performance optimizations.
|
||||
- Explain how to measure and benchmark API performance.
|
||||
## Common Pitfalls
|
||||
|
||||
1. **Don't mix Archive and Reader APIs** - Archive needs seekable stream, Reader doesn't
|
||||
2. **Solid archives (Rar, 7Zip)** - Use `ExtractAllEntries()` for best performance, not individual entry extraction
|
||||
3. **Stream disposal** - Always set `LeaveStreamOpen` explicitly when needed (default is to close)
|
||||
4. **Tar + non-seekable stream** - Must provide file size or it will throw
|
||||
6. **Format detection** - Use `ReaderFactory.Open()` for auto-detection, test with actual archive files
|
||||
|
||||
### Async Struct-Copy Bug in LZMA RangeCoder
|
||||
|
||||
When implementing async methods on mutable `struct` types (like `BitEncoder` and `BitDecoder` in the LZMA RangeCoder), be aware that the async state machine copies the struct when `await` is encountered. This means mutations to struct fields after the `await` point may not persist back to the original struct stored in arrays or fields.
|
||||
|
||||
**The Bug:**
|
||||
```csharp
|
||||
// BAD: async method on mutable struct
|
||||
public async ValueTask<uint> DecodeAsync(Decoder decoder, CancellationToken cancellationToken = default)
|
||||
{
|
||||
var newBound = (decoder._range >> K_NUM_BIT_MODEL_TOTAL_BITS) * _prob;
|
||||
if (decoder._code < newBound)
|
||||
{
|
||||
decoder._range = newBound;
|
||||
_prob += (K_BIT_MODEL_TOTAL - _prob) >> K_NUM_MOVE_BITS; // Mutates _prob
|
||||
await decoder.Normalize2Async(cancellationToken).ConfigureAwait(false); // Struct gets copied here
|
||||
return 0; // Original _prob update may be lost
|
||||
}
|
||||
// ...
|
||||
}
|
||||
```
|
||||
|
||||
**The Fix:**
|
||||
Refactor async methods on mutable structs to perform all struct mutations synchronously before any `await`, or use a helper method to separate the await from the struct mutation:
|
||||
|
||||
```csharp
|
||||
// GOOD: struct mutations happen synchronously, await is conditional
|
||||
public ValueTask<uint> DecodeAsync(Decoder decoder, CancellationToken cancellationToken = default)
|
||||
{
|
||||
var newBound = (decoder._range >> K_NUM_BIT_MODEL_TOTAL_BITS) * _prob;
|
||||
if (decoder._code < newBound)
|
||||
{
|
||||
decoder._range = newBound;
|
||||
_prob += (K_BIT_MODEL_TOTAL - _prob) >> K_NUM_MOVE_BITS; // All mutations complete
|
||||
return DecodeAsyncHelper(decoder.Normalize2Async(cancellationToken), 0); // Await in helper
|
||||
}
|
||||
decoder._range -= newBound;
|
||||
decoder._code -= newBound;
|
||||
_prob -= (_prob) >> K_NUM_MOVE_BITS; // All mutations complete
|
||||
return DecodeAsyncHelper(decoder.Normalize2Async(cancellationToken), 1); // Await in helper
|
||||
}
|
||||
|
||||
private static async ValueTask<uint> DecodeAsyncHelper(ValueTask normalizeTask, uint result)
|
||||
{
|
||||
await normalizeTask.ConfigureAwait(false);
|
||||
return result;
|
||||
}
|
||||
```
|
||||
|
||||
**Why This Matters:**
|
||||
In LZMA, the `BitEncoder` and `BitDecoder` structs maintain adaptive probability models in their `_prob` field. When these structs are stored in arrays (e.g., `_models[m]`), the async state machine copy breaks the adaptive model, causing incorrect bit decoding and eventually `DataErrorException` exceptions.
|
||||
|
||||
**Related Files:**
|
||||
- `src/SharpCompress/Compressors/LZMA/RangeCoder/RangeCoderBit.Async.cs` - Fixed
|
||||
- `src/SharpCompress/Compressors/LZMA/RangeCoder/RangeCoderBitTree.Async.cs` - Uses readonly structs, so this pattern doesn't apply
|
||||
|
||||
@@ -12,5 +12,6 @@
|
||||
<RunAnalyzersDuringBuild>False</RunAnalyzersDuringBuild>
|
||||
<ManagePackageVersionsCentrally>true</ManagePackageVersionsCentrally>
|
||||
<RestorePackagesWithLockFile>true</RestorePackagesWithLockFile>
|
||||
<CentralPackageTransitivePinningEnabled>true</CentralPackageTransitivePinningEnabled>
|
||||
</PropertyGroup>
|
||||
</Project>
|
||||
|
||||
@@ -1,20 +1,24 @@
|
||||
<Project>
|
||||
<ItemGroup>
|
||||
<PackageVersion Include="Bullseye" Version="6.0.0" />
|
||||
<PackageVersion Include="AwesomeAssertions" Version="9.2.1" />
|
||||
<PackageVersion Include="Bullseye" Version="6.1.0" />
|
||||
<PackageVersion Include="AwesomeAssertions" Version="9.3.0" />
|
||||
<PackageVersion Include="Glob" Version="1.1.9" />
|
||||
<PackageVersion Include="JetBrains.Profiler.SelfApi" Version="2.5.14" />
|
||||
<PackageVersion Include="Microsoft.Bcl.AsyncInterfaces" Version="8.0.0" />
|
||||
<PackageVersion Include="Microsoft.NET.Test.Sdk" Version="18.0.0" />
|
||||
<PackageVersion Include="JetBrains.Profiler.SelfApi" Version="2.5.15" />
|
||||
<PackageVersion Include="Microsoft.Bcl.AsyncInterfaces" Version="10.0.0" />
|
||||
<PackageVersion Include="Microsoft.NET.ILLink.Task" Version="10.0.0" />
|
||||
<PackageVersion Include="Microsoft.NET.Test.Sdk" Version="18.0.1" />
|
||||
<PackageVersion Include="Mono.Posix.NETStandard" Version="1.0.0" />
|
||||
<PackageVersion Include="SimpleExec" Version="12.0.0" />
|
||||
<PackageVersion Include="SimpleExec" Version="13.0.0" />
|
||||
<PackageVersion Include="System.Text.Encoding.CodePages" Version="10.0.0" />
|
||||
<PackageVersion Include="System.Buffers" Version="4.6.1" />
|
||||
<PackageVersion Include="System.Memory" Version="4.6.3" />
|
||||
<PackageVersion Include="System.Text.Encoding.CodePages" Version="8.0.0" />
|
||||
<PackageVersion Include="xunit" Version="2.9.3" />
|
||||
<PackageVersion Include="xunit.v3" Version="3.2.1" />
|
||||
<PackageVersion Include="xunit.runner.visualstudio" Version="3.1.5" />
|
||||
<PackageVersion Include="ZstdSharp.Port" Version="0.8.6" />
|
||||
<PackageVersion Include="Microsoft.SourceLink.GitHub" Version="8.0.0" />
|
||||
<PackageVersion Include="Microsoft.NETFramework.ReferenceAssemblies" Version="1.0.3" />
|
||||
<GlobalPackageReference Include="Microsoft.SourceLink.GitHub" Version="8.0.0" />
|
||||
<GlobalPackageReference Include="Microsoft.NETFramework.ReferenceAssemblies" Version="1.0.3" />
|
||||
<GlobalPackageReference
|
||||
Include="Microsoft.VisualStudio.Threading.Analyzers"
|
||||
Version="17.14.15"
|
||||
/>
|
||||
</ItemGroup>
|
||||
</Project>
|
||||
|
||||
156 README.md
@@ -1,9 +1,11 @@
|
||||
# SharpCompress
|
||||
|
||||
SharpCompress is a compression library in pure C# for .NET Framework 4.62, .NET Standard 2.1, .NET 6.0 and NET 8.0 that can unrar, un7zip, unzip, untar unbzip2, ungzip, unlzip, unzstd with forward-only reading and file random access APIs. Write support for zip/tar/bzip2/gzip/lzip are implemented.
|
||||
SharpCompress is a compression library in pure C# for .NET Framework 4.8, .NET 8.0 and .NET 10.0 that can unrar, un7zip, unzip, untar, unbzip2, ungzip, unlzip, unzstd, unarc and unarj with forward-only reading and file random access APIs. Write support for zip/tar/bzip2/gzip/lzip is implemented.
|
||||
|
||||
The major feature is support for non-seekable streams so large files can be processed on the fly (e.g. a download stream).
|
||||
|
||||
**NEW:** All I/O operations now support async/await for improved performance and scalability. See the [USAGE.md](docs/USAGE.md#async-examples) for examples.
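As a taste (a minimal sketch, assuming the async reader API documented in docs/API.md), a forward-only extraction can be awaited end to end:

```csharp
using (var stream = File.OpenRead("archive.zip"))
using (var reader = await ReaderFactory.OpenAsyncReader(stream))
{
    while (await reader.MoveToNextEntryAsync())
    {
        if (!reader.Entry.IsDirectory)
        {
            await reader.WriteEntryToFileAsync(Path.Combine("output", reader.Entry.Key));
        }
    }
}
```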
|
||||
|
||||
GitHub Actions Build -
|
||||
[](https://github.com/adamhathcock/sharpcompress/actions/workflows/dotnetcore.yml)
|
||||
[](https://dndocs.com/d/sharpcompress/api/index.html)
|
||||
@@ -12,7 +14,7 @@ GitHub Actions Build -
|
||||
|
||||
Post Issues on Github!
|
||||
|
||||
Check the [Supported Formats](FORMATS.md) and [Basic Usage.](USAGE.md)
|
||||
Check the [Supported Formats](docs/FORMATS.md) and [Basic Usage.](docs/USAGE.md)
|
||||
|
||||
## Recommended Formats
|
||||
|
||||
@@ -36,155 +38,7 @@ Please do not email me directly to ask for help. If you think there is a real is
|
||||
|
||||
I'm always looking for help or ideas. Please submit code or email with ideas. Unfortunately, just letting me know you'd like to help is not enough because I really have no overall plan of what needs to be done. I'll definitely accept code submissions and add you as a member of the project!
|
||||
|
||||
## TODOs (always lots)
|
||||
|
||||
* RAR 5 decryption crc check support
|
||||
* 7Zip writing
|
||||
* Zip64 (Need writing and extend Reading)
|
||||
* Multi-volume Zip support.
|
||||
* ZStandard writing
|
||||
|
||||
## Version Log
|
||||
|
||||
* [Releases](https://github.com/adamhathcock/sharpcompress/releases)
|
||||
|
||||
### Version 0.18
|
||||
|
||||
* [Now on Github releases](https://github.com/adamhathcock/sharpcompress/releases/tag/0.18)
|
||||
|
||||
### Version 0.17.1
|
||||
|
||||
* Fix - [Bug Fix for .NET Core on Windows](https://github.com/adamhathcock/sharpcompress/pull/257)
|
||||
|
||||
### Version 0.17.0
|
||||
|
||||
* New - Full LZip support! Can read and write LZip files and Tars inside LZip files. [Make LZip a first class citizen. #241](https://github.com/adamhathcock/sharpcompress/issues/241)
|
||||
* New - XZ read support! Can read XZ files and Tars inside XZ files. [XZ in SharpCompress #91](https://github.com/adamhathcock/sharpcompress/issues/94)
|
||||
* Fix - [Regression - zip file writing on seekable streams always assumed stream start was 0. Introduced with Zip64 writing.](https://github.com/adamhathcock/sharpcompress/issues/244)
|
||||
* Fix - [Zip files with post-data descriptors can be properly skipped via decompression](https://github.com/adamhathcock/sharpcompress/issues/162)
|
||||
|
||||
### Version 0.16.2
|
||||
|
||||
* Fix [.NET 3.5 should support files and cryptography (was a regression from 0.16.0)](https://github.com/adamhathcock/sharpcompress/pull/251)
|
||||
* Fix [Zip per entry compression customization wrote the wrong method into the zip archive](https://github.com/adamhathcock/sharpcompress/pull/249)
|
||||
|
||||
### Version 0.16.1
|
||||
|
||||
* Fix [Preserve compression method when getting a compressed stream](https://github.com/adamhathcock/sharpcompress/pull/235)
|
||||
* Fix [RAR entry key normalization fix](https://github.com/adamhathcock/sharpcompress/issues/201)
|
||||
|
||||
### Version 0.16.0
|
||||
|
||||
* Breaking - [Progress Event Tracking rethink](https://github.com/adamhathcock/sharpcompress/pull/226)
|
||||
* Update to VS2017 - [VS2017](https://github.com/adamhathcock/sharpcompress/pull/231) - Framework targets have been changed.
|
||||
* New - [Add Zip64 writing](https://github.com/adamhathcock/sharpcompress/pull/211)
|
||||
* [Fix invalid/mismatching Zip version flags.](https://github.com/adamhathcock/sharpcompress/issues/164) - This allows nuget/System.IO.Packaging to read zip files generated by SharpCompress
|
||||
* [Fix 7Zip directory hiding](https://github.com/adamhathcock/sharpcompress/pull/215/files)
|
||||
* [Verify RAR CRC headers](https://github.com/adamhathcock/sharpcompress/pull/220)
|
||||
|
||||
### Version 0.15.2
|
||||
|
||||
* [Fix invalid headers](https://github.com/adamhathcock/sharpcompress/pull/210) - fixes an issue creating large-ish zip archives that was introduced with zip64 reading.
|
||||
|
||||
### Version 0.15.1
|
||||
|
||||
* [Zip64 extending information and ZipReader](https://github.com/adamhathcock/sharpcompress/pull/206)
|
||||
|
||||
### Version 0.15.0
|
||||
|
||||
* [Add zip64 support for ZipArchive extraction](https://github.com/adamhathcock/sharpcompress/pull/205)
|
||||
|
||||
### Version 0.14.1
|
||||
|
||||
* [.NET Assemblies aren't strong named](https://github.com/adamhathcock/sharpcompress/issues/158)
|
||||
* [Pkware encryption for Zip files didn't allow for multiple reads of an entry](https://github.com/adamhathcock/sharpcompress/issues/197)
|
||||
* [GZip Entry couldn't be read multiple times](https://github.com/adamhathcock/sharpcompress/issues/198)
|
||||
|
||||
### Version 0.14.0
|
||||
|
||||
* [Support for LZip reading in for Tars](https://github.com/adamhathcock/sharpcompress/pull/191)
|
||||
|
||||
### Version 0.13.1
|
||||
|
||||
* [Fix null password on ReaderFactory. Fix null options on SevenZipArchive](https://github.com/adamhathcock/sharpcompress/pull/188)
|
||||
* [Make PpmdProperties lazy to avoid unnecessary allocations.](https://github.com/adamhathcock/sharpcompress/pull/185)
|
||||
|
||||
### Version 0.13.0
|
||||
|
||||
* Breaking change: Big refactor of Options on API.
|
||||
* 7Zip supports Deflate
|
||||
|
||||
### Version 0.12.4
|
||||
|
||||
* Forward only zip issue fix https://github.com/adamhathcock/sharpcompress/issues/160
|
||||
* Try to fix frameworks again by copying targets from JSON.NET
|
||||
|
||||
### Version 0.12.3
|
||||
|
||||
* 7Zip fixes https://github.com/adamhathcock/sharpcompress/issues/73
|
||||
* Maybe all profiles will work with project.json now
|
||||
|
||||
### Version 0.12.2
|
||||
|
||||
* Support Profile 259 again
|
||||
|
||||
### Version 0.12.1
|
||||
|
||||
* Support Silverlight 5
|
||||
|
||||
### Version 0.12.0
|
||||
|
||||
* .NET Core RTM!
|
||||
* Bug fix for Tar long paths
|
||||
|
||||
### Version 0.11.6
|
||||
|
||||
* Bug fix for global header in Tar
|
||||
* Writers now have a leaveOpen `bool` overload. They won't close streams if not-requested to.
|
||||
|
||||
### Version 0.11.5
|
||||
|
||||
* Bug fix in Skip method
|
||||
|
||||
### Version 0.11.4
|
||||
|
||||
* SharpCompress is now endian neutral (matters for Mono platforms)
|
||||
* Fix for Inflate (need to change implementation)
|
||||
* Fixes for RAR detection
|
||||
|
||||
### Version 0.11.1
|
||||
|
||||
* Added Cancel on IReader
|
||||
* Removed .NET 2.0 support and LinqBridge dependency
|
||||
|
||||
### Version 0.11
|
||||
|
||||
* Been over a year, contains mainly fixes from contributors!
|
||||
* Possible breaking change: ArchiveEncoding is UTF8 by default now.
|
||||
* TAR supports writing long names using longlink
|
||||
* RAR Protect Header added
|
||||
|
||||
### Version 0.10.3
|
||||
|
||||
* Finally fixed Disposal issue when creating a new archive with the Archive API
|
||||
|
||||
### Version 0.10.2
|
||||
|
||||
* Fixed Rar Header reading for invalid extended time headers.
|
||||
* Windows Store assembly is now strong named
|
||||
* Known issues with Long Tar names being worked on
|
||||
* Updated to VS2013
|
||||
* Portable targets SL5 and Windows Phone 8 (up from SL4 and WP7)
|
||||
|
||||
### Version 0.10.1
|
||||
|
||||
* Fixed 7Zip extraction performance problem
|
||||
|
||||
### Version 0.10:
|
||||
|
||||
* Added support for RAR Decryption (thanks to https://github.com/hrasyid)
|
||||
* Embedded some BouncyCastle crypto classes to allow RAR Decryption and Winzip AES Decryption in Portable and Windows Store DLLs
|
||||
* Built in Release (I think)
|
||||
## Notes
|
||||
|
||||
XZ implementation based on: https://github.com/sambott/XZ.NET by @sambott
|
||||
|
||||
|
||||
@@ -18,9 +18,12 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Config", "Config", "{CDB425
|
||||
Directory.Build.props = Directory.Build.props
|
||||
global.json = global.json
|
||||
.editorconfig = .editorconfig
|
||||
.gitignore = .gitignore
|
||||
Directory.Packages.props = Directory.Packages.props
|
||||
NuGet.config = NuGet.config
|
||||
.github\workflows\dotnetcore.yml = .github\workflows\dotnetcore.yml
|
||||
.github\workflows\nuget-release.yml = .github\workflows\nuget-release.yml
|
||||
README.md = README.md
|
||||
AGENTS.md = AGENTS.md
|
||||
EndProjectSection
|
||||
EndProject
|
||||
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "SharpCompress.Performance", "tests\SharpCompress.Performance\SharpCompress.Performance.csproj", "{5BDE6DBC-9E5F-4E21-AB71-F138A3E72B17}"
|
||||
|
||||
174 USAGE.md
@@ -1,174 +0,0 @@
|
||||
# SharpCompress Usage
|
||||
|
||||
## Stream Rules (changed with 0.21)
|
||||
|
||||
When dealing with Streams, the rule should be that you don't close a stream you didn't create. This, in effect, should mean you should always put a Stream in a using block to dispose it.
|
||||
|
||||
However, the .NET Framework often has classes that will dispose streams by default to make things "easy" like the following:
|
||||
|
||||
```C#
|
||||
using (var reader = new StreamReader(File.Open("foo")))
|
||||
{
|
||||
...
|
||||
}
|
||||
```
|
||||
|
||||
In this example, the reader should get disposed. However, stream rules say that the `FileStream` created by `File.Open` should remain open, yet the .NET Framework closes it for you by default unless you use a constructor overload that leaves it open. In general, you should be writing Stream code like this:
|
||||
|
||||
```C#
|
||||
using (var fileStream = File.Open("foo"))
|
||||
using (var reader = new StreamReader(fileStream))
|
||||
{
|
||||
...
|
||||
}
|
||||
```
|
||||
|
||||
To deal with the "correct" rules as well as the expectations of users, I've decided to always close wrapped streams as of 0.21.
|
||||
|
||||
To be explicit though, consider always using the overloads that use `ReaderOptions` or `WriterOptions` and explicitly set `LeaveStreamOpen` the way you want.
|
||||
|
||||
If you are using the compression Stream classes directly and you don't want the wrapped stream to be closed, use `NonDisposingStream` as a wrapper to prevent the inner stream from being disposed. The change in 0.21 simplified a lot even though the usage is a bit more convoluted.
|
||||
|
||||
## Samples
|
||||
|
||||
Also, look over the tests for more thorough [examples](https://github.com/adamhathcock/sharpcompress/tree/master/tests/SharpCompress.Test)
|
||||
|
||||
### Create Zip Archive from multiple files
|
||||
```C#
|
||||
using(var archive = ZipArchive.Create())
|
||||
{
|
||||
archive.AddEntry("file01.txt", "C:\\file01.txt");
|
||||
archive.AddEntry("file02.txt", "C:\\file02.txt");
|
||||
...
|
||||
|
||||
archive.SaveTo("C:\\temp.zip", CompressionType.Deflate);
|
||||
}
|
||||
```
|
||||
|
||||
### Create Zip Archive from all files in a directory to a file
|
||||
|
||||
```C#
|
||||
using (var archive = ZipArchive.Create())
|
||||
{
|
||||
archive.AddAllFromDirectory("D:\\temp");
|
||||
archive.SaveTo("C:\\temp.zip", CompressionType.Deflate);
|
||||
}
|
||||
```
|
||||
|
||||
### Create Zip Archive from all files in a directory and save in memory
|
||||
|
||||
```C#
|
||||
var memoryStream = new MemoryStream();
|
||||
using (var archive = ZipArchive.Create())
|
||||
{
|
||||
archive.AddAllFromDirectory("D:\\temp");
|
||||
archive.SaveTo(memoryStream, new WriterOptions(CompressionType.Deflate)
|
||||
{
|
||||
LeaveStreamOpen = true
|
||||
});
|
||||
}
|
||||
//reset memoryStream to be usable now
|
||||
memoryStream.Position = 0;
|
||||
```
|
||||
|
||||
### Extract all files from a rar file to a directory using RarArchive
|
||||
|
||||
Note: Extracting a solid rar or 7z file needs to be done in sequential order to get acceptable decompression speed.
|
||||
It is explicitly recommended to use `ExtractAllEntries` when extracting an entire `IArchive` instead of iterating over all its `Entries`.
|
||||
Alternatively, use `IArchive.WriteToDirectory`.
|
||||
|
||||
```C#
|
||||
using (var archive = RarArchive.Open("Test.rar"))
|
||||
{
|
||||
using (var reader = archive.ExtractAllEntries())
|
||||
{
|
||||
reader.WriteAllToDirectory(@"D:\temp", new ExtractionOptions()
|
||||
{
|
||||
ExtractFullPath = true,
|
||||
Overwrite = true
|
||||
});
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Iterate over all files from a Rar file using RarArchive
|
||||
|
||||
```C#
|
||||
using (var archive = RarArchive.Open("Test.rar"))
|
||||
{
|
||||
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
|
||||
{
|
||||
Console.WriteLine($"{entry.Key}: {entry.Size} bytes");
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Use ReaderFactory to autodetect archive type and extract entries to a directory
|
||||
|
||||
```C#
|
||||
using (Stream stream = File.OpenRead("Tar.tar.bz2"))
|
||||
using (var reader = ReaderFactory.Open(stream))
|
||||
{
|
||||
while (reader.MoveToNextEntry())
|
||||
{
|
||||
if (!reader.Entry.IsDirectory)
|
||||
{
|
||||
Console.WriteLine(reader.Entry.Key);
|
||||
reader.WriteEntryToDirectory(@"C:\temp", new ExtractionOptions()
|
||||
{
|
||||
ExtractFullPath = true,
|
||||
Overwrite = true
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Use ReaderFactory to autodetect archive type and Open the entry stream
|
||||
|
||||
```C#
|
||||
using (Stream stream = File.OpenRead("Tar.tar.bz2"))
|
||||
using (var reader = ReaderFactory.Open(stream))
|
||||
{
|
||||
while (reader.MoveToNextEntry())
|
||||
{
|
||||
if (!reader.Entry.IsDirectory)
|
||||
{
|
||||
using (var entryStream = reader.OpenEntryStream())
|
||||
{
|
||||
entryStream.CopyTo(...);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Use WriterFactory to write all files from a directory in a streaming manner.
|
||||
|
||||
```C#
|
||||
using (Stream stream = File.OpenWrite("C:\\temp.tgz"))
|
||||
using (var writer = WriterFactory.Open(stream, ArchiveType.Tar, new WriterOptions(CompressionType.GZip)
|
||||
{
|
||||
LeaveStreamOpen = true
|
||||
}))
|
||||
{
|
||||
writer.WriteAll("D:\\temp", "*", SearchOption.AllDirectories);
|
||||
}
|
||||
```
|
||||
|
||||
### Extract zip which has non-utf8 encoded filename(cp932)
|
||||
|
||||
```C#
|
||||
var opts = new SharpCompress.Readers.ReaderOptions();
|
||||
var encoding = Encoding.GetEncoding(932);
|
||||
opts.ArchiveEncoding = new SharpCompress.Common.ArchiveEncoding();
|
||||
opts.ArchiveEncoding.CustomDecoder = (data, x, y) =>
|
||||
{
|
||||
return encoding.GetString(data);
|
||||
};
|
||||
var tr = SharpCompress.Archives.Zip.ZipArchive.Open("test.zip", opts);
|
||||
foreach(var entry in tr.Entries)
|
||||
{
|
||||
Console.WriteLine($"{entry.Key}");
|
||||
}
|
||||
```
|
||||
213 build/Program.cs
@@ -1,7 +1,10 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Runtime.InteropServices;
|
||||
using System.Text.RegularExpressions;
|
||||
using System.Threading.Tasks;
|
||||
using GlobExpressions;
|
||||
using static Bullseye.Targets;
|
||||
using static SimpleExec.Command;
|
||||
@@ -11,7 +14,11 @@ const string Restore = "restore";
|
||||
const string Build = "build";
|
||||
const string Test = "test";
|
||||
const string Format = "format";
|
||||
const string CheckFormat = "check-format";
|
||||
const string Publish = "publish";
|
||||
const string DetermineVersion = "determine-version";
|
||||
const string UpdateVersion = "update-version";
|
||||
const string PushToNuGet = "push-to-nuget";
|
||||
|
||||
Target(
|
||||
Clean,
|
||||
@@ -42,12 +49,20 @@ Target(
|
||||
Target(
|
||||
Format,
|
||||
() =>
|
||||
{
|
||||
Run("dotnet", "tool restore");
|
||||
Run("dotnet", "csharpier format .");
|
||||
}
|
||||
);
|
||||
Target(
|
||||
CheckFormat,
|
||||
() =>
|
||||
{
|
||||
Run("dotnet", "tool restore");
|
||||
Run("dotnet", "csharpier check .");
|
||||
}
|
||||
);
|
||||
Target(Restore, [Format], () => Run("dotnet", "restore"));
|
||||
Target(Restore, [CheckFormat], () => Run("dotnet", "restore"));
|
||||
|
||||
Target(
|
||||
Build,
|
||||
@@ -61,7 +76,7 @@ Target(
|
||||
Target(
|
||||
Test,
|
||||
[Build],
|
||||
["net8.0", "net48"],
|
||||
["net10.0", "net48"],
|
||||
framework =>
|
||||
{
|
||||
IEnumerable<string> GetFiles(string d)
|
||||
@@ -90,6 +105,200 @@ Target(
|
||||
}
|
||||
);
|
||||
|
||||
Target(
|
||||
DetermineVersion,
|
||||
async () =>
|
||||
{
|
||||
var (version, isPrerelease) = await GetVersion();
|
||||
Console.WriteLine($"VERSION={version}");
|
||||
Console.WriteLine($"PRERELEASE={isPrerelease.ToString().ToLower()}");
|
||||
|
||||
// Write to environment file for GitHub Actions
|
||||
var githubOutput = Environment.GetEnvironmentVariable("GITHUB_OUTPUT");
|
||||
if (!string.IsNullOrEmpty(githubOutput))
|
||||
{
|
||||
File.AppendAllText(githubOutput, $"version={version}\n");
|
||||
File.AppendAllText(githubOutput, $"prerelease={isPrerelease.ToString().ToLower()}\n");
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
Target(
|
||||
UpdateVersion,
|
||||
async () =>
|
||||
{
|
||||
var version = Environment.GetEnvironmentVariable("VERSION");
|
||||
if (string.IsNullOrEmpty(version))
|
||||
{
|
||||
var (detectedVersion, _) = await GetVersion();
|
||||
version = detectedVersion;
|
||||
}
|
||||
|
||||
Console.WriteLine($"Updating project file with version: {version}");
|
||||
|
||||
var projectPath = "src/SharpCompress/SharpCompress.csproj";
|
||||
var content = File.ReadAllText(projectPath);
|
||||
|
||||
// Get base version (without prerelease suffix)
|
||||
var baseVersion = version.Split('-')[0];
|
||||
|
||||
// Update VersionPrefix
|
||||
content = Regex.Replace(
|
||||
content,
|
||||
@"<VersionPrefix>[^<]*</VersionPrefix>",
|
||||
$"<VersionPrefix>{version}</VersionPrefix>"
|
||||
);
|
||||
|
||||
// Update AssemblyVersion
|
||||
content = Regex.Replace(
|
||||
content,
|
||||
@"<AssemblyVersion>[^<]*</AssemblyVersion>",
|
||||
$"<AssemblyVersion>{baseVersion}</AssemblyVersion>"
|
||||
);
|
||||
|
||||
// Update FileVersion
|
||||
content = Regex.Replace(
|
||||
content,
|
||||
@"<FileVersion>[^<]*</FileVersion>",
|
||||
$"<FileVersion>{baseVersion}</FileVersion>"
|
||||
);
|
||||
|
||||
File.WriteAllText(projectPath, content);
|
||||
Console.WriteLine($"Updated VersionPrefix to: {version}");
|
||||
Console.WriteLine($"Updated AssemblyVersion and FileVersion to: {baseVersion}");
|
||||
}
|
||||
);
|
||||
|
||||
Target(
|
||||
PushToNuGet,
|
||||
() =>
|
||||
{
|
||||
var apiKey = Environment.GetEnvironmentVariable("NUGET_API_KEY");
|
||||
if (string.IsNullOrEmpty(apiKey))
|
||||
{
|
||||
Console.WriteLine(
|
||||
"NUGET_API_KEY environment variable is not set. Skipping NuGet push."
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
var packages = Directory.GetFiles("artifacts", "*.nupkg");
|
||||
if (packages.Length == 0)
|
||||
{
|
||||
Console.WriteLine("No packages found in artifacts directory.");
|
||||
return;
|
||||
}
|
||||
|
||||
foreach (var package in packages)
|
||||
{
|
||||
Console.WriteLine($"Pushing {package} to NuGet.org");
|
||||
try
|
||||
{
|
||||
// Note: API key is passed via command line argument which is standard practice for dotnet nuget push
|
||||
// The key is already in an environment variable and not displayed in normal output
|
||||
Run(
|
||||
"dotnet",
|
||||
$"nuget push \"{package}\" --api-key {apiKey} --source https://api.nuget.org/v3/index.json --skip-duplicate"
|
||||
);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Console.WriteLine($"Failed to push {package}: {ex.Message}");
|
||||
throw;
|
||||
}
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
Target("default", [Publish], () => Console.WriteLine("Done!"));
|
||||
|
||||
await RunTargetsAndExitAsync(args);
|
||||
|
||||
static async Task<(string version, bool isPrerelease)> GetVersion()
|
||||
{
|
||||
// Check if current commit has a version tag
|
||||
var currentTag = (await GetGitOutput("tag", "--points-at HEAD"))
|
||||
.Split('\n', StringSplitOptions.RemoveEmptyEntries)
|
||||
.FirstOrDefault(tag => Regex.IsMatch(tag.Trim(), @"^\d+\.\d+\.\d+$"));
|
||||
|
||||
if (!string.IsNullOrEmpty(currentTag))
|
||||
{
|
||||
// Tagged release - use the tag as version
|
||||
var version = currentTag.Trim();
|
||||
Console.WriteLine($"Building tagged release version: {version}");
|
||||
return (version, false);
|
||||
}
|
||||
else
|
||||
{
|
||||
// Not tagged - create prerelease version
|
||||
var allTags = (await GetGitOutput("tag", "--list"))
|
||||
.Split('\n', StringSplitOptions.RemoveEmptyEntries)
|
||||
.Where(tag => Regex.IsMatch(tag.Trim(), @"^\d+\.\d+\.\d+$"))
|
||||
.Select(tag => tag.Trim())
|
||||
.ToList();
|
||||
|
||||
var lastTag = allTags.OrderBy(tag => Version.Parse(tag)).LastOrDefault() ?? "0.0.0";
|
||||
var lastVersion = Version.Parse(lastTag);
|
||||
|
||||
// Determine version increment based on branch
|
||||
var currentBranch = await GetCurrentBranch();
|
||||
Version nextVersion;
|
||||
|
||||
if (currentBranch == "release")
|
||||
{
|
||||
// Release branch: increment patch version
|
||||
nextVersion = new Version(lastVersion.Major, lastVersion.Minor, lastVersion.Build + 1);
|
||||
Console.WriteLine($"Building prerelease for release branch (patch increment)");
|
||||
}
|
||||
else
|
||||
{
|
||||
// Master or other branches: increment minor version
|
||||
nextVersion = new Version(lastVersion.Major, lastVersion.Minor + 1, 0);
|
||||
Console.WriteLine($"Building prerelease for {currentBranch} branch (minor increment)");
|
||||
}
|
||||
|
||||
// Use commit count since the last version tag if available; otherwise, fall back to total count
|
||||
var revListArgs = allTags.Any() ? $"--count {lastTag}..HEAD" : "--count HEAD";
|
||||
var commitCount = (await GetGitOutput("rev-list", revListArgs)).Trim();
|
||||
|
||||
var version = $"{nextVersion}-beta.{commitCount}";
|
||||
Console.WriteLine($"Building prerelease version: {version}");
|
||||
return (version, true);
|
||||
}
|
||||
}
|
||||
|
||||
static async Task<string> GetCurrentBranch()
|
||||
{
|
||||
// In GitHub Actions, GITHUB_REF_NAME contains the branch name
|
||||
var githubRefName = Environment.GetEnvironmentVariable("GITHUB_REF_NAME");
|
||||
if (!string.IsNullOrEmpty(githubRefName))
|
||||
{
|
||||
return githubRefName;
|
||||
}
|
||||
|
||||
// Fallback to git command for local builds
|
||||
try
|
||||
{
|
||||
var (output, _) = await ReadAsync("git", "branch --show-current");
|
||||
return output.Trim();
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Console.WriteLine($"Warning: Could not determine current branch: {ex.Message}");
|
||||
return "unknown";
|
||||
}
|
||||
}
|
||||
|
||||
static async Task<string> GetGitOutput(string command, string args)
|
||||
{
|
||||
try
|
||||
{
|
||||
// Use SimpleExec's Read to execute git commands in a cross-platform way
|
||||
var (output, _) = await ReadAsync("git", $"{command} {args}");
|
||||
return output;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
throw new Exception($"Git command failed: git {command} {args}\n{ex.Message}", ex);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
<Project Sdk="Microsoft.NET.Sdk">
|
||||
<PropertyGroup>
|
||||
<OutputType>Exe</OutputType>
|
||||
<TargetFramework>net8.0</TargetFramework>
|
||||
<TargetFramework>net10.0</TargetFramework>
|
||||
</PropertyGroup>
|
||||
<ItemGroup>
|
||||
<PackageReference Include="Bullseye" />
|
||||
|
||||
@@ -1,12 +1,12 @@
|
||||
{
|
||||
"version": 2,
|
||||
"dependencies": {
|
||||
"net8.0": {
|
||||
"net10.0": {
|
||||
"Bullseye": {
|
||||
"type": "Direct",
|
||||
"requested": "[6.0.0, )",
|
||||
"resolved": "6.0.0",
|
||||
"contentHash": "vgwwXfzs7jJrskWH7saHRMgPzziq/e86QZNWY1MnMxd7e+De7E7EX4K3C7yrvaK9y02SJoLxNxcLG/q5qUAghw=="
|
||||
"requested": "[6.1.0, )",
|
||||
"resolved": "6.1.0",
|
||||
"contentHash": "fltnAJDe0BEX5eymXGUq+il2rSUA0pHqUonNDRH2TrvRu8SkU17mYG0IVpdmG2ibtfhdjNrv4CuTCxHOwcozCA=="
|
||||
},
|
||||
"Glob": {
|
||||
"type": "Direct",
|
||||
@@ -14,11 +14,51 @@
|
||||
"resolved": "1.1.9",
|
||||
"contentHash": "AfK5+ECWYTP7G3AAdnU8IfVj+QpGjrh9GC2mpdcJzCvtQ4pnerAGwHsxJ9D4/RnhDUz2DSzd951O/lQjQby2Sw=="
|
||||
},
|
||||
"Microsoft.NETFramework.ReferenceAssemblies": {
|
||||
"type": "Direct",
|
||||
"requested": "[1.0.3, )",
|
||||
"resolved": "1.0.3",
|
||||
"contentHash": "vUc9Npcs14QsyOD01tnv/m8sQUnGTGOw1BCmKcv77LBJY7OxhJ+zJF7UD/sCL3lYNFuqmQEVlkfS4Quif6FyYg==",
|
||||
"dependencies": {
|
||||
"Microsoft.NETFramework.ReferenceAssemblies.net461": "1.0.3"
|
||||
}
|
||||
},
|
||||
"Microsoft.SourceLink.GitHub": {
|
||||
"type": "Direct",
|
||||
"requested": "[8.0.0, )",
|
||||
"resolved": "8.0.0",
|
||||
"contentHash": "G5q7OqtwIyGTkeIOAc3u2ZuV/kicQaec5EaRnc0pIeSnh9LUjj+PYQrJYBURvDt7twGl2PKA7nSN0kz1Zw5bnQ==",
|
||||
"dependencies": {
|
||||
"Microsoft.Build.Tasks.Git": "8.0.0",
|
||||
"Microsoft.SourceLink.Common": "8.0.0"
|
||||
}
|
||||
},
|
||||
"Microsoft.VisualStudio.Threading.Analyzers": {
|
||||
"type": "Direct",
|
||||
"requested": "[17.14.15, )",
|
||||
"resolved": "17.14.15",
|
||||
"contentHash": "mXQPJsbuUD2ydq4/ffd8h8tSOFCXec+2xJOVNCvXjuMOq/+5EKHq3D2m2MC2+nUaXeFMSt66VS/J4HdKBixgcw=="
|
||||
},
|
||||
"SimpleExec": {
|
||||
"type": "Direct",
|
||||
"requested": "[12.0.0, )",
|
||||
"resolved": "12.0.0",
|
||||
"contentHash": "ptxlWtxC8vM6Y6e3h9ZTxBBkOWnWrm/Sa1HT+2i1xcXY3Hx2hmKDZP5RShPf8Xr9D+ivlrXNy57ktzyH8kyt+Q=="
|
||||
"requested": "[13.0.0, )",
|
||||
"resolved": "13.0.0",
|
||||
"contentHash": "zcCR1pupa1wI1VqBULRiQKeHKKZOuJhi/K+4V5oO+rHJZlaOD53ViFo1c3PavDoMAfSn/FAXGAWpPoF57rwhYg=="
|
||||
},
|
||||
"Microsoft.Build.Tasks.Git": {
|
||||
"type": "Transitive",
|
||||
"resolved": "8.0.0",
|
||||
"contentHash": "bZKfSIKJRXLTuSzLudMFte/8CempWjVamNUR5eHJizsy+iuOuO/k2gnh7W0dHJmYY0tBf+gUErfluCv5mySAOQ=="
|
||||
},
|
||||
"Microsoft.NETFramework.ReferenceAssemblies.net461": {
|
||||
"type": "Transitive",
|
||||
"resolved": "1.0.3",
|
||||
"contentHash": "AmOJZwCqnOCNp6PPcf9joyogScWLtwy0M1WkqfEQ0M9nYwyDD7EX9ZjscKS5iYnyvteX7kzSKFCKt9I9dXA6mA=="
|
||||
},
|
||||
"Microsoft.SourceLink.Common": {
|
||||
"type": "Transitive",
|
||||
"resolved": "8.0.0",
|
||||
"contentHash": "dk9JPxTCIevS75HyEQ0E4OVAFhB2N+V9ShCXf8Q6FkUQZDkgLI12y679Nym1YqsiSysuQskT7Z+6nUf3yab6Vw=="
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
512 docs/API.md Normal file
@@ -0,0 +1,512 @@
|
||||
# API Quick Reference
|
||||
|
||||
Quick reference for commonly used SharpCompress APIs.
|
||||
|
||||
## Factory Methods
|
||||
|
||||
### Opening Archives
|
||||
|
||||
```csharp
|
||||
// Auto-detect format
|
||||
using (var reader = ReaderFactory.OpenReader(stream))
|
||||
{
|
||||
// Works with Zip, Tar, GZip, Rar, 7Zip, etc.
|
||||
}
|
||||
|
||||
// Specific format - Archive API
|
||||
using (var archive = ZipArchive.OpenArchive("file.zip"))
|
||||
using (var archive = TarArchive.OpenArchive("file.tar"))
|
||||
using (var archive = RarArchive.OpenArchive("file.rar"))
|
||||
using (var archive = SevenZipArchive.OpenArchive("file.7z"))
|
||||
using (var archive = GZipArchive.OpenArchive("file.gz"))
|
||||
|
||||
// With options
|
||||
var options = new ReaderOptions
|
||||
{
|
||||
Password = "password",
|
||||
LeaveStreamOpen = true,
|
||||
ArchiveEncoding = new ArchiveEncoding { Default = Encoding.GetEncoding(932) }
|
||||
};
|
||||
using (var archive = ZipArchive.OpenArchive("encrypted.zip", options))
|
||||
```
|
||||
|
||||
### Creating Archives
|
||||
|
||||
```csharp
|
||||
// Writer Factory
|
||||
using (var writer = WriterFactory.OpenWriter(stream, ArchiveType.Zip, CompressionType.Deflate))
|
||||
{
|
||||
// Write entries
|
||||
}
|
||||
|
||||
// Specific writer
|
||||
using (var archive = ZipArchive.CreateArchive())
|
||||
using (var archive = TarArchive.CreateArchive())
|
||||
using (var archive = GZipArchive.CreateArchive())
|
||||
|
||||
// With options
|
||||
var options = new WriterOptions(CompressionType.Deflate)
|
||||
{
|
||||
CompressionLevel = 9,
|
||||
LeaveStreamOpen = false
|
||||
};
|
||||
using (var archive = ZipArchive.CreateArchive())
|
||||
{
|
||||
archive.SaveTo("output.zip", options);
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Archive API Methods
|
||||
|
||||
### Reading/Extracting
|
||||
|
||||
```csharp
|
||||
using (var archive = ZipArchive.OpenArchive("file.zip"))
|
||||
{
|
||||
// Get all entries
|
||||
IEnumerable<IArchiveEntry> entries = archive.Entries;
|
||||
|
||||
// Find specific entry
|
||||
var entry = archive.Entries.FirstOrDefault(e => e.Key == "file.txt");
|
||||
|
||||
// Extract all
|
||||
archive.WriteToDirectory(@"C:\output", new ExtractionOptions
|
||||
{
|
||||
ExtractFullPath = true,
|
||||
Overwrite = true
|
||||
});
|
||||
|
||||
// Extract single entry
|
||||
var entry = archive.Entries.First();
|
||||
entry.WriteToFile(@"C:\output\file.txt");
|
||||
entry.WriteToFile(@"C:\output\file.txt", new ExtractionOptions { Overwrite = true });
|
||||
|
||||
// Get entry stream
|
||||
using (var stream = entry.OpenEntryStream())
|
||||
{
|
||||
stream.CopyTo(outputStream);
|
||||
}
|
||||
}
|
||||
|
||||
// Async extraction (requires IAsyncArchive)
|
||||
using (var asyncArchive = await ZipArchive.OpenAsyncArchive("file.zip"))
|
||||
{
|
||||
await asyncArchive.WriteToDirectoryAsync(
|
||||
@"C:\output",
|
||||
new ExtractionOptions { ExtractFullPath = true, Overwrite = true },
|
||||
cancellationToken: cancellationToken
|
||||
);
|
||||
}
|
||||
using (var stream = await entry.OpenEntryStreamAsync(cancellationToken))
|
||||
{
|
||||
// ...
|
||||
}
|
||||
```
|
||||
|
||||
### Entry Properties
|
||||
|
||||
```csharp
|
||||
foreach (var entry in archive.Entries)
|
||||
{
|
||||
string name = entry.Key; // Entry name/path
|
||||
long size = entry.Size; // Uncompressed size
|
||||
long compressedSize = entry.CompressedSize;
|
||||
bool isDir = entry.IsDirectory;
|
||||
DateTime? modTime = entry.LastModifiedTime;
|
||||
CompressionType compression = entry.CompressionType;
|
||||
}
|
||||
```
|
||||
|
||||
### Creating Archives
|
||||
|
||||
```csharp
|
||||
using (var archive = ZipArchive.CreateArchive())
|
||||
{
|
||||
// Add file
|
||||
archive.AddEntry("file.txt", @"C:\source\file.txt");
|
||||
|
||||
// Add multiple files
|
||||
archive.AddAllFromDirectory(@"C:\source");
|
||||
archive.AddAllFromDirectory(@"C:\source", "*.txt"); // Pattern
|
||||
|
||||
// Save to file
|
||||
archive.SaveTo("output.zip", CompressionType.Deflate);
|
||||
|
||||
// Save to stream
|
||||
archive.SaveTo(outputStream, new WriterOptions(CompressionType.Deflate)
|
||||
{
|
||||
CompressionLevel = 9,
|
||||
LeaveStreamOpen = true
|
||||
});
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Reader API Methods
|
||||
|
||||
### Forward-Only Reading
|
||||
|
||||
```csharp
|
||||
using (var stream = File.OpenRead("file.zip"))
|
||||
using (var reader = ReaderFactory.OpenReader(stream))
|
||||
{
|
||||
while (reader.MoveToNextEntry())
|
||||
{
|
||||
IArchiveEntry entry = reader.Entry;
|
||||
|
||||
if (!entry.IsDirectory)
|
||||
{
|
||||
// Extract entry
|
||||
reader.WriteEntryToDirectory(@"C:\output");
|
||||
reader.WriteEntryToFile(@"C:\output\file.txt");
|
||||
|
||||
// Or get stream
|
||||
using (var entryStream = reader.OpenEntryStream())
|
||||
{
|
||||
entryStream.CopyTo(outputStream);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Async variants (use OpenAsyncReader to get IAsyncReader)
|
||||
using (var stream = File.OpenRead("file.zip"))
|
||||
using (var reader = await ReaderFactory.OpenAsyncReader(stream))
|
||||
{
|
||||
while (await reader.MoveToNextEntryAsync())
|
||||
{
|
||||
await reader.WriteEntryToFileAsync(
|
||||
@"C:\output\" + reader.Entry.Key,
|
||||
cancellationToken: cancellationToken
|
||||
);
|
||||
}
|
||||
|
||||
// Async extraction of all entries
|
||||
await reader.WriteAllToDirectoryAsync(
|
||||
@"C:\output",
|
||||
new ExtractionOptions { ExtractFullPath = true, Overwrite = true },
|
||||
cancellationToken
|
||||
);
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Writer API Methods
|
||||
|
||||
### Creating Archives (Streaming)
|
||||
|
||||
```csharp
|
||||
using (var stream = File.Create("output.zip"))
|
||||
using (var writer = WriterFactory.OpenWriter(stream, ArchiveType.Zip, CompressionType.Deflate))
|
||||
{
|
||||
// Write single file
|
||||
using (var fileStream = File.OpenRead("source.txt"))
|
||||
{
|
||||
writer.Write("entry.txt", fileStream, DateTime.Now);
|
||||
}
|
||||
|
||||
// Write directory
|
||||
writer.WriteAll("C:\\source", "*", SearchOption.AllDirectories);
|
||||
writer.WriteAll("C:\\source", "*.txt", SearchOption.TopDirectoryOnly);
|
||||
|
||||
// Async variants
|
||||
using (var fileStream = File.OpenRead("source.txt"))
|
||||
{
|
||||
await writer.WriteAsync("entry.txt", fileStream, DateTime.Now, cancellationToken);
|
||||
}
|
||||
|
||||
await writer.WriteAllAsync("C:\\source", "*", SearchOption.AllDirectories, cancellationToken);
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Common Options
|
||||
|
||||
### ReaderOptions
|
||||
|
||||
```csharp
|
||||
var options = new ReaderOptions
|
||||
{
|
||||
Password = "password", // For encrypted archives
|
||||
LeaveStreamOpen = true, // Don't close wrapped stream
|
||||
ArchiveEncoding = new ArchiveEncoding // Custom character encoding
|
||||
{
|
||||
Default = Encoding.GetEncoding(932)
|
||||
}
|
||||
};
|
||||
using (var archive = ZipArchive.OpenArchive("file.zip", options))
|
||||
{
|
||||
// ...
|
||||
}
|
||||
```
|
||||
|
||||
### WriterOptions
|
||||
|
||||
```csharp
|
||||
var options = new WriterOptions(CompressionType.Deflate)
|
||||
{
|
||||
CompressionLevel = 9, // 0-9 for Deflate
|
||||
LeaveStreamOpen = true, // Don't close stream
|
||||
};
|
||||
archive.SaveTo("output.zip", options);
|
||||
```
|
||||
|
||||
### ExtractionOptions
|
||||
|
||||
```csharp
|
||||
var options = new ExtractionOptions
|
||||
{
|
||||
ExtractFullPath = true, // Recreate directory structure
|
||||
Overwrite = true, // Overwrite existing files
|
||||
PreserveFileTime = true // Keep original timestamps
|
||||
};
|
||||
archive.WriteToDirectory(@"C:\output", options);
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Compression Types
|
||||
|
||||
### Available Compressions
|
||||
|
||||
```csharp
|
||||
// For creating archives
|
||||
CompressionType.None // No compression (store)
|
||||
CompressionType.Deflate // DEFLATE (default for ZIP/GZip)
|
||||
CompressionType.Deflate64 // Deflate64
|
||||
CompressionType.BZip2 // BZip2
|
||||
CompressionType.LZMA // LZMA (for 7Zip, LZip, XZ)
|
||||
CompressionType.PPMd // PPMd (for ZIP)
|
||||
CompressionType.Rar // RAR compression (read-only)
|
||||
CompressionType.ZStandard // ZStandard
|
||||
// Legacy formats identified by ArchiveType (decompression only); they are not CompressionType values
ArchiveType.Arc
ArchiveType.Arj
ArchiveType.Ace
|
||||
|
||||
// For Tar archives with compression
|
||||
// Use WriterFactory to create compressed tar archives
|
||||
using (var writer = WriterFactory.OpenWriter(stream, ArchiveType.Tar, CompressionType.GZip)) // Tar.GZip
|
||||
using (var writer = WriterFactory.OpenWriter(stream, ArchiveType.Tar, CompressionType.BZip2)) // Tar.BZip2
|
||||
```
|
||||
|
||||
### Archive Types
|
||||
|
||||
```csharp
|
||||
ArchiveType.Zip
|
||||
ArchiveType.Tar
|
||||
ArchiveType.GZip
|
||||
ArchiveType.BZip2
|
||||
ArchiveType.Rar
|
||||
ArchiveType.SevenZip
|
||||
ArchiveType.XZ
|
||||
ArchiveType.ZStandard
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Patterns & Examples
|
||||
|
||||
### Extract with Error Handling
|
||||
|
||||
```csharp
|
||||
try
|
||||
{
|
||||
using (var archive = ZipArchive.OpenArchive("archive.zip",
    new ReaderOptions { Password = "password" }))
|
||||
{
|
||||
archive.WriteToDirectory(@"C:\output", new ExtractionOptions
|
||||
{
|
||||
ExtractFullPath = true,
|
||||
Overwrite = true
|
||||
});
|
||||
}
|
||||
}
|
||||
catch (PasswordRequiredException)
|
||||
{
|
||||
Console.WriteLine("Password required");
|
||||
}
|
||||
catch (InvalidArchiveException)
|
||||
{
|
||||
Console.WriteLine("Archive is invalid");
|
||||
}
|
||||
catch (SharpCompressException ex)
|
||||
{
|
||||
Console.WriteLine($"Error: {ex.Message}");
|
||||
}
|
||||
```
|
||||
|
||||
### Extract with Progress
|
||||
|
||||
```csharp
|
||||
var progress = new Progress<ProgressReport>(report =>
|
||||
{
|
||||
Console.WriteLine($"Extracting {report.EntryPath}: {report.PercentComplete}%");
|
||||
});
|
||||
|
||||
var options = new ReaderOptions { Progress = progress };
|
||||
using (var archive = ZipArchive.OpenArchive("archive.zip", options))
|
||||
{
|
||||
archive.WriteToDirectory(@"C:\output");
|
||||
}
|
||||
```
|
||||
|
||||
### Async Extract with Cancellation
|
||||
|
||||
```csharp
|
||||
var cts = new CancellationTokenSource();
|
||||
cts.CancelAfter(TimeSpan.FromMinutes(5));
|
||||
|
||||
try
|
||||
{
|
||||
using (var archive = await ZipArchive.OpenAsyncArchive("archive.zip"))
|
||||
{
|
||||
await archive.WriteToDirectoryAsync(
|
||||
@"C:\output",
|
||||
new ExtractionOptions { ExtractFullPath = true, Overwrite = true },
|
||||
cancellationToken: cts.Token
|
||||
);
|
||||
}
|
||||
}
|
||||
catch (OperationCanceledException)
|
||||
{
|
||||
Console.WriteLine("Extraction cancelled");
|
||||
}
|
||||
```
|
||||
|
||||
### Create with Custom Compression
|
||||
|
||||
```csharp
|
||||
using (var archive = ZipArchive.CreateArchive())
|
||||
{
|
||||
archive.AddAllFromDirectory(@"D:\source");
|
||||
|
||||
// Fastest
|
||||
archive.SaveTo("fast.zip", new WriterOptions(CompressionType.Deflate)
|
||||
{
|
||||
CompressionLevel = 1
|
||||
});
|
||||
|
||||
// Balanced (default)
|
||||
archive.SaveTo("normal.zip", CompressionType.Deflate);
|
||||
|
||||
// Best compression
|
||||
archive.SaveTo("best.zip", new WriterOptions(CompressionType.Deflate)
|
||||
{
|
||||
CompressionLevel = 9
|
||||
});
|
||||
}
|
||||
```
|
||||
|
||||
### Stream Processing (No File I/O)
|
||||
|
||||
```csharp
|
||||
using (var outputStream = new MemoryStream())
|
||||
using (var archive = ZipArchive.CreateArchive())
|
||||
{
|
||||
// Add content from memory
|
||||
using (var contentStream = new MemoryStream(Encoding.UTF8.GetBytes("Hello")))
|
||||
{
|
||||
archive.AddEntry("file.txt", contentStream);
|
||||
}
|
||||
|
||||
// Save to memory
|
||||
archive.SaveTo(outputStream, CompressionType.Deflate);
|
||||
|
||||
// Get bytes
|
||||
byte[] archiveBytes = outputStream.ToArray();
|
||||
}
|
||||
```
|
||||
|
||||
### Extract Specific Files
|
||||
|
||||
```csharp
|
||||
using (var archive = ZipArchive.OpenArchive("archive.zip"))
|
||||
{
|
||||
var filesToExtract = new[] { "file1.txt", "file2.txt" };
|
||||
|
||||
foreach (var entry in archive.Entries.Where(e => filesToExtract.Contains(e.Key)))
|
||||
{
|
||||
entry.WriteToFile(@"C:\output\" + entry.Key);
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### List Archive Contents
|
||||
|
||||
```csharp
|
||||
using (var archive = ZipArchive.OpenArchive("archive.zip"))
|
||||
{
|
||||
foreach (var entry in archive.Entries)
|
||||
{
|
||||
if (entry.IsDirectory)
|
||||
Console.WriteLine($"[DIR] {entry.Key}");
|
||||
else
|
||||
Console.WriteLine($"[FILE] {entry.Key} ({entry.Size} bytes)");
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Common Mistakes
|
||||
|
||||
### ✗ Wrong - Stream not disposed
|
||||
|
||||
```csharp
|
||||
var stream = File.OpenRead("archive.zip");
|
||||
var archive = ZipArchive.OpenArchive(stream);
|
||||
archive.WriteToDirectory(@"C:\output");
|
||||
// stream not disposed - leaked resource
|
||||
```
|
||||
|
||||
### ✓ Correct - Using blocks
|
||||
|
||||
```csharp
|
||||
using (var stream = File.OpenRead("archive.zip"))
|
||||
using (var archive = ZipArchive.OpenArchive(stream))
|
||||
{
|
||||
archive.WriteToDirectory(@"C:\output");
|
||||
}
|
||||
// Both properly disposed
|
||||
```
|
||||
|
||||
### ✗ Wrong - Mixing API styles
|
||||
|
||||
```csharp
|
||||
// Loading entire archive then iterating
|
||||
using (var archive = ZipArchive.OpenArchive("large.zip"))
|
||||
{
|
||||
var entries = archive.Entries.ToList(); // Loads all in memory
|
||||
foreach (var e in entries)
|
||||
{
|
||||
e.WriteToFile(...); // Then extracts each
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### ✓ Correct - Use Reader for large files
|
||||
|
||||
```csharp
|
||||
// Streaming iteration
|
||||
using (var stream = File.OpenRead("large.zip"))
|
||||
using (var reader = ReaderFactory.OpenReader(stream))
|
||||
{
|
||||
while (reader.MoveToNextEntry())
|
||||
{
|
||||
reader.WriteEntryToDirectory(@"C:\output");
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Related Documentation
|
||||
|
||||
- [USAGE.md](USAGE.md) - Complete code examples
|
||||
- [FORMATS.md](FORMATS.md) - Supported formats
|
||||
- [PERFORMANCE.md](PERFORMANCE.md) - API selection guide
|
||||
659 docs/ARCHITECTURE.md Normal file
@@ -0,0 +1,659 @@
|
||||
# SharpCompress Architecture Guide
|
||||
|
||||
This guide explains the internal architecture and design patterns of SharpCompress for contributors.
|
||||
|
||||
## Overview
|
||||
|
||||
SharpCompress is organized into three main layers:
|
||||
|
||||
```
|
||||
┌─────────────────────────────────────────┐
|
||||
│ User-Facing APIs (Top Layer) │
|
||||
│ Archive, Reader, Writer Factories │
|
||||
├─────────────────────────────────────────┤
|
||||
│ Format-Specific Implementations │
|
||||
│ ZipArchive, TarReader, GZipWriter, │
|
||||
│ RarArchive, SevenZipArchive, etc. │
|
||||
├─────────────────────────────────────────┤
|
||||
│ Compression & Crypto (Bottom Layer) │
|
||||
│ Deflate, LZMA, BZip2, AES, CRC32 │
|
||||
└─────────────────────────────────────────┘
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Directory Structure
|
||||
|
||||
### `src/SharpCompress/`
|
||||
|
||||
#### `Archives/` - Archive Implementations
|
||||
Contains `IArchive` implementations for seekable, random-access APIs.
|
||||
|
||||
**Key Files:**
|
||||
- `AbstractArchive.cs` - Base class for all archives
|
||||
- `IArchive.cs` - Archive interface definition
|
||||
- `ArchiveFactory.cs` - Factory for opening archives
|
||||
- Format-specific: `ZipArchive.cs`, `TarArchive.cs`, `RarArchive.cs`, `SevenZipArchive.cs`, `GZipArchive.cs`
|
||||
|
||||
**Use Archive API when:**
|
||||
- Stream is seekable (file, memory)
|
||||
- Need random access to entries
|
||||
- Archive fits in memory
|
||||
- Simplicity is important
|
||||
|
||||
#### `Readers/` - Reader Implementations
|
||||
Contains `IReader` implementations for forward-only, non-seekable APIs.
|
||||
|
||||
**Key Files:**
|
||||
- `AbstractReader.cs` - Base reader class
|
||||
- `IReader.cs` - Reader interface
|
||||
- `ReaderFactory.cs` - Auto-detection factory
|
||||
- `ReaderOptions.cs` - Configuration for readers
|
||||
- Format-specific: `ZipReader.cs`, `TarReader.cs`, `GZipReader.cs`, `RarReader.cs`, etc.
|
||||
|
||||
**Use Reader API when:**
|
||||
- Stream is non-seekable (network, pipe, compressed)
|
||||
- Processing large files
|
||||
- Memory is limited
|
||||
- Forward-only processing is acceptable
|
||||
|
||||
#### `Writers/` - Writer Implementations
|
||||
Contains `IWriter` implementations for forward-only writing.
|
||||
|
||||
**Key Files:**
|
||||
- `AbstractWriter.cs` - Base writer class
|
||||
- `IWriter.cs` - Writer interface
|
||||
- `WriterFactory.cs` - Factory for creating writers
|
||||
- `WriterOptions.cs` - Configuration for writers
|
||||
- Format-specific: `ZipWriter.cs`, `TarWriter.cs`, `GZipWriter.cs`
|
||||
|
||||
#### `Factories/` - Format Detection
|
||||
Factory classes for auto-detecting archive format and creating appropriate readers/writers.
|
||||
|
||||
**Key Files:**
|
||||
- `Factory.cs` - Base factory class
|
||||
- `IFactory.cs` - Factory interface
|
||||
- Format-specific: `ZipFactory.cs`, `TarFactory.cs`, `RarFactory.cs`, etc.
|
||||
|
||||
**How It Works:**
|
||||
1. `ReaderFactory.OpenReader(stream)` probes stream signatures
|
||||
2. Identifies format by magic bytes
|
||||
3. Creates appropriate reader instance
|
||||
4. Returns generic `IReader` interface (a sketch of the probing step follows below)
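To picture the probing step, here is a simplified sketch (illustrative only; the real factories implement the `Matches` signature check shown in the "Adding Support for a New Format" section below):

```csharp
using System;
using System.IO;

// Illustrative sketch of magic-byte probing, not the actual factory code.
internal static class SignatureProbe
{
    // ZIP local file header starts with 'P' 'K' 0x03 0x04.
    private static readonly byte[] ZipMagic = { 0x50, 0x4B, 0x03, 0x04 };

    public static bool LooksLikeZip(Stream stream)
    {
        Span<byte> header = stackalloc byte[4];
        var read = stream.Read(header);

        // Rewind so the selected reader sees the header again
        // (the real factories use a buffered/rewindable stream wrapper for this).
        stream.Seek(-read, SeekOrigin.Current);

        return read == ZipMagic.Length && header.SequenceEqual(ZipMagic);
    }
}
```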
|
||||
|
||||
#### `Common/` - Shared Types
|
||||
Common types, options, and enumerations used across formats.
|
||||
|
||||
**Key Files:**
|
||||
- `IEntry.cs` - Entry interface (file within archive)
|
||||
- `Entry.cs` - Entry implementation
|
||||
- `ArchiveType.cs` - Enum for archive formats
|
||||
- `CompressionType.cs` - Enum for compression methods
|
||||
- `ArchiveEncoding.cs` - Character encoding configuration
|
||||
- `ExtractionOptions.cs` - Extraction configuration
|
||||
- Format-specific headers: `Zip/Headers/`, `Tar/Headers/`, `Rar/Headers/`, etc.
|
||||
|
||||
#### `Compressors/` - Compression Algorithms
|
||||
Low-level compression streams implementing specific algorithms.
|
||||
|
||||
**Algorithms:**
|
||||
- `Deflate/` - DEFLATE compression (Zip default)
|
||||
- `BZip2/` - BZip2 compression
|
||||
- `LZMA/` - LZMA compression (7Zip, XZ, LZip)
|
||||
- `PPMd/` - Prediction by Partial Matching (Zip, 7Zip)
|
||||
- `ZStandard/` - ZStandard compression (decompression only)
|
||||
- `Xz/` - XZ format (decompression only)
|
||||
- `Rar/` - RAR-specific unpacking
|
||||
- `Arj/`, `Arc/`, `Ace/` - Legacy format decompression
|
||||
- `Filters/` - BCJ/BCJ2 filters for executable compression
|
||||
|
||||
**Each Compressor:**
|
||||
- Implements a `Stream` subclass
|
||||
- Provides both compression and decompression
|
||||
- Some are read-only (decompression only); a usage sketch follows below
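For instance, a compressor can be consumed directly as a stream wrapper. This is a minimal sketch; the `GZipStream`/`CompressionMode` names are assumed from the `Compressors.Deflate` namespace and should be checked against the source:

```csharp
using System.IO;
using SharpCompress.Compressors;
using SharpCompress.Compressors.Deflate;

// Illustrative sketch: compressors are plain Stream wrappers.
internal static class CompressorStreamExample
{
    public static void GZipFile(string inputPath, string outputPath)
    {
        using var input = File.OpenRead(inputPath);
        using var output = File.Create(outputPath);
        using var gzip = new GZipStream(output, CompressionMode.Compress);

        input.CopyTo(gzip); // bytes written to 'gzip' are deflated into 'output'
    }
}
```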
|
||||
|
||||
#### `Crypto/` - Encryption & Hashing
|
||||
Cryptographic functions and stream wrappers.
|
||||
|
||||
**Key Files:**
|
||||
- `Crc32Stream.cs` - CRC32 calculation wrapper
|
||||
- `BlockTransformer.cs` - Block cipher transformations
|
||||
- AES, PKWare, WinZip encryption implementations
|
||||
|
||||
#### `IO/` - Stream Utilities
|
||||
Stream wrappers and utilities.
|
||||
|
||||
**Key Classes:**
|
||||
- `SharpCompressStream` - Base stream class
|
||||
- `ProgressReportingStream` - Progress tracking wrapper
|
||||
- `MarkingBinaryReader` - Binary reader with position marks
|
||||
- `BufferedSubStream` - Buffered read-only substream
|
||||
- `ReadOnlySubStream` - Read-only view of parent stream
|
||||
- `NonDisposingStream` - Prevents wrapped stream disposal
|
||||
|
||||
---
|
||||
|
||||
## Design Patterns
|
||||
|
||||
### 1. Factory Pattern
|
||||
|
||||
**Purpose:** Auto-detect format and create appropriate reader/writer.
|
||||
|
||||
**Example:**
|
||||
```csharp
|
||||
// User calls factory
|
||||
using (var reader = ReaderFactory.OpenReader(stream)) // Returns IReader
|
||||
{
|
||||
while (reader.MoveToNextEntry())
|
||||
{
|
||||
// Process entry
|
||||
}
|
||||
}
|
||||
|
||||
// Behind the scenes:
|
||||
// 1. Factory.Open() probes stream signatures
|
||||
// 2. Detects format (Zip, Tar, Rar, etc.)
|
||||
// 3. Creates appropriate reader (ZipReader, TarReader, etc.)
|
||||
// 4. Returns as generic IReader interface
|
||||
```
|
||||
|
||||
**Files:**
|
||||
- `src/SharpCompress/Factories/ReaderFactory.cs`
|
||||
- `src/SharpCompress/Factories/WriterFactory.cs`
|
||||
- `src/SharpCompress/Factories/ArchiveFactory.cs`
|
||||
|
||||
### 2. Strategy Pattern
|
||||
|
||||
**Purpose:** Encapsulate compression algorithms as swappable strategies.
|
||||
|
||||
**Example:**
|
||||
```csharp
|
||||
// Different compression strategies
|
||||
CompressionType.Deflate // DEFLATE
|
||||
CompressionType.BZip2 // BZip2
|
||||
CompressionType.LZMA // LZMA
|
||||
CompressionType.PPMd // PPMd
|
||||
|
||||
// Writer uses strategy pattern
|
||||
var archive = ZipArchive.CreateArchive();
|
||||
archive.SaveTo("output.zip", CompressionType.Deflate); // Use Deflate
|
||||
archive.SaveTo("output.bz2", CompressionType.BZip2); // Use BZip2
|
||||
```
|
||||
|
||||
**Files:**
|
||||
- `src/SharpCompress/Compressors/` - Strategy implementations
|
||||
|
||||
### 3. Decorator Pattern
|
||||
|
||||
**Purpose:** Wrap streams with additional functionality.
|
||||
|
||||
**Example:**
|
||||
```csharp
|
||||
// Progress reporting decorator
|
||||
var progressStream = new ProgressReportingStream(baseStream, progressReporter);
|
||||
progressStream.Read(buffer, 0, buffer.Length); // Reports progress
|
||||
|
||||
// Non-disposing decorator
|
||||
var nonDisposingStream = new NonDisposingStream(baseStream);
|
||||
using (var compressor = new DeflateStream(nonDisposingStream))
|
||||
{
|
||||
// baseStream won't be disposed when compressor is disposed
|
||||
}
|
||||
```
|
||||
|
||||
**Files:**
|
||||
- `src/SharpCompress/IO/ProgressReportingStream.cs`
|
||||
- `src/SharpCompress/IO/NonDisposingStream.cs`
|
||||
|
||||
### 4. Template Method Pattern
|
||||
|
||||
**Purpose:** Define algorithm skeleton in base class, let subclasses fill details.
|
||||
|
||||
**Example:**
|
||||
```csharp
|
||||
// AbstractArchive defines common archive operations
|
||||
public abstract class AbstractArchive : IArchive
|
||||
{
|
||||
// Template methods
|
||||
public virtual void WriteToDirectory(string destinationDirectory, ExtractionOptions options)
|
||||
{
|
||||
// Common extraction logic
|
||||
foreach (var entry in Entries)
|
||||
{
|
||||
// Call subclass method
|
||||
entry.WriteToFile(destinationPath, options);
|
||||
}
|
||||
}
|
||||
|
||||
// Subclasses override format-specific details
|
||||
protected abstract Entry CreateEntry(EntryData data);
|
||||
}
|
||||
```
|
||||
|
||||
**Files:**
|
||||
- `src/SharpCompress/Archives/AbstractArchive.cs`
|
||||
- `src/SharpCompress/Readers/AbstractReader.cs`
|
||||
|
||||
### 5. Iterator Pattern
|
||||
|
||||
**Purpose:** Provide sequential access to entries.
|
||||
|
||||
**Example:**
|
||||
```csharp
|
||||
// Archive API - provides collection
|
||||
IEnumerable<IEntry> entries = archive.Entries;
|
||||
foreach (var entry in entries)
|
||||
{
|
||||
// Random access - entries already in memory
|
||||
}
|
||||
|
||||
// Reader API - provides iterator
|
||||
IReader reader = ReaderFactory.OpenReader(stream);
|
||||
while (reader.MoveToNextEntry())
|
||||
{
|
||||
// Forward-only iteration - one entry at a time
|
||||
var entry = reader.Entry;
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Key Interfaces
|
||||
|
||||
### IArchive - Random Access API
|
||||
|
||||
```csharp
|
||||
public interface IArchive : IDisposable
|
||||
{
|
||||
IEnumerable<IEntry> Entries { get; }
|
||||
|
||||
void WriteToDirectory(string destinationDirectory,
|
||||
ExtractionOptions options = null);
|
||||
|
||||
IEntry FirstOrDefault(Func<IEntry, bool> predicate);
|
||||
|
||||
// ... format-specific methods
|
||||
}
|
||||
```
|
||||
|
||||
**Implementations:** `ZipArchive`, `TarArchive`, `RarArchive`, `SevenZipArchive`, `GZipArchive`
|
||||
|
||||
### IReader - Forward-Only API
|
||||
|
||||
```csharp
|
||||
public interface IReader : IDisposable
|
||||
{
|
||||
IEntry Entry { get; }
|
||||
|
||||
bool MoveToNextEntry();
|
||||
|
||||
void WriteEntryToDirectory(string destinationDirectory,
|
||||
ExtractionOptions options = null);
|
||||
|
||||
Stream OpenEntryStream();
|
||||
|
||||
// ... async variants
|
||||
}
|
||||
```
|
||||
|
||||
**Implementations:** `ZipReader`, `TarReader`, `RarReader`, `GZipReader`, etc.
|
||||
|
||||
### IWriter - Writing API
|
||||
|
||||
```csharp
|
||||
public interface IWriter : IDisposable
|
||||
{
|
||||
void Write(string entryPath, Stream source,
|
||||
DateTime? modificationTime = null);
|
||||
|
||||
void WriteAll(string sourceDirectory, string searchPattern,
|
||||
SearchOption searchOption);
|
||||
|
||||
// ... async variants
|
||||
}
|
||||
```
|
||||
|
||||
**Implementations:** `ZipWriter`, `TarWriter`, `GZipWriter`
|
||||
|
||||
### IEntry - Archive Entry
|
||||
|
||||
```csharp
|
||||
public interface IEntry
|
||||
{
|
||||
string Key { get; }
|
||||
long Size { get; }
long CompressedSize { get; }
|
||||
bool IsDirectory { get; }
|
||||
DateTime? LastModifiedTime { get; }
|
||||
CompressionType CompressionType { get; }
|
||||
|
||||
void WriteToFile(string fullPath, ExtractionOptions options = null);
|
||||
void WriteToStream(Stream destinationStream);
|
||||
Stream OpenEntryStream();
|
||||
|
||||
// ... async variants
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Adding Support for a New Format
|
||||
|
||||
### Step 1: Understand the Format
|
||||
- Research format specification
|
||||
- Understand compression/encryption used
|
||||
- Study existing similar formats in codebase
|
||||
|
||||
### Step 2: Create Format Structure Classes
|
||||
|
||||
**Create:** `src/SharpCompress/Common/NewFormat/`
|
||||
|
||||
```csharp
|
||||
// Headers and data structures
|
||||
public class NewFormatHeader
|
||||
{
|
||||
public uint Magic { get; set; }
|
||||
public ushort Version { get; set; }
|
||||
// ... other fields
|
||||
|
||||
public static NewFormatHeader Read(BinaryReader reader)
|
||||
{
|
||||
// Deserialize from binary
|
||||
}
|
||||
}
|
||||
|
||||
public class NewFormatEntry
|
||||
{
|
||||
public string FileName { get; set; }
|
||||
public uint CompressedSize { get; set; }
|
||||
public uint UncompressedSize { get; set; }
|
||||
// ... other fields
|
||||
}
|
||||
```
|
||||
|
||||
### Step 3: Create Archive Implementation
|
||||
|
||||
**Create:** `src/SharpCompress/Archives/NewFormat/NewFormatArchive.cs`
|
||||
|
||||
```csharp
|
||||
public class NewFormatArchive : AbstractArchive
|
||||
{
|
||||
private NewFormatHeader _header;
|
||||
private List<NewFormatEntry> _entries;
|
||||
|
||||
public static NewFormatArchive OpenArchive(Stream stream)
|
||||
{
|
||||
var archive = new NewFormatArchive();
|
||||
archive._header = NewFormatHeader.Read(stream);
|
||||
archive.LoadEntries(stream);
|
||||
return archive;
|
||||
}
|
||||
|
||||
public override IEnumerable<IEntry> Entries => _entries.Select(e => new Entry(e));
|
||||
|
||||
protected override Stream OpenEntryStream(Entry entry)
|
||||
{
|
||||
// Return decompressed stream for entry
|
||||
}
|
||||
|
||||
// ... other abstract method implementations
|
||||
}
|
||||
```
|
||||
|
||||
### Step 4: Create Reader Implementation
|
||||
|
||||
**Create:** `src/SharpCompress/Readers/NewFormat/NewFormatReader.cs`
|
||||
|
||||
```csharp
|
||||
public class NewFormatReader : AbstractReader
|
||||
{
|
||||
private NewFormatHeader _header;
|
||||
private BinaryReader _reader;
|
||||
|
||||
public NewFormatReader(Stream stream)
|
||||
{
|
||||
_reader = new BinaryReader(stream);
|
||||
_header = NewFormatHeader.Read(_reader);
|
||||
}
|
||||
|
||||
public override bool MoveToNextEntry()
|
||||
{
|
||||
// Read next entry header
|
||||
if (!_reader.BaseStream.CanRead) return false;
|
||||
|
||||
var entryData = NewFormatEntry.Read(_reader);
|
||||
// ... set this.Entry
|
||||
return entryData != null;
|
||||
}
|
||||
|
||||
// ... other abstract method implementations
|
||||
}
|
||||
```
|
||||
|
||||
### Step 5: Create Factory
|
||||
|
||||
**Create:** `src/SharpCompress/Factories/NewFormatFactory.cs`
|
||||
|
||||
```csharp
|
||||
public class NewFormatFactory : Factory, IArchiveFactory, IReaderFactory
|
||||
{
|
||||
// Archive format magic bytes (signature)
|
||||
private static readonly byte[] NewFormatSignature = new byte[] { 0x4E, 0x46 }; // "NF"
|
||||
|
||||
public static NewFormatFactory Instance { get; } = new();
|
||||
|
||||
public IArchive CreateArchive(Stream stream)
|
||||
=> NewFormatArchive.OpenArchive(stream);
|
||||
|
||||
public IReader CreateReader(Stream stream, ReaderOptions options)
|
||||
=> new NewFormatReader(stream) { Options = options };
|
||||
|
||||
public bool Matches(Stream stream, ReadOnlySpan<byte> signature)
|
||||
=> signature.StartsWith(NewFormatSignature);
|
||||
}
|
||||
```
|
||||
|
||||
### Step 6: Register Factory
|
||||
|
||||
**Update:** `src/SharpCompress/Factories/ArchiveFactory.cs`
|
||||
|
||||
```csharp
|
||||
private static readonly IFactory[] Factories =
|
||||
{
|
||||
ZipFactory.Instance,
|
||||
TarFactory.Instance,
|
||||
RarFactory.Instance,
|
||||
SevenZipFactory.Instance,
|
||||
GZipFactory.Instance,
|
||||
NewFormatFactory.Instance, // Add here
|
||||
// ... other factories
|
||||
};
|
||||
```
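
Once the factory is registered, the new format participates in signature-based auto-detection through the generic entry points (a sketch; assumes the `ReaderFactory` surface used elsewhere in these docs):

```csharp
// The new format is now picked up by signature-based auto-detection
using (var stream = File.OpenRead("sample.newformat"))
using (var reader = ReaderFactory.OpenReader(stream))
{
    while (reader.MoveToNextEntry())
    {
        Console.WriteLine(reader.Entry.Key);
    }
}
```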
|
||||
|
||||
### Step 7: Add Tests
|
||||
|
||||
**Create:** `tests/SharpCompress.Test/NewFormat/NewFormatTests.cs`
|
||||
|
||||
```csharp
|
||||
public class NewFormatTests : TestBase
|
||||
{
|
||||
[Fact]
|
||||
public void NewFormat_Extracts_Successfully()
|
||||
{
|
||||
var archivePath = Path.Combine(TEST_ARCHIVES_PATH, "archive.newformat");
|
||||
using (var archive = NewFormatArchive.OpenArchive(archivePath))
|
||||
{
|
||||
archive.WriteToDirectory(SCRATCH_FILES_PATH);
|
||||
// Assert extraction
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void NewFormat_Reader_Works()
|
||||
{
|
||||
var archivePath = Path.Combine(TEST_ARCHIVES_PATH, "archive.newformat");
|
||||
using (var stream = File.OpenRead(archivePath))
|
||||
using (var reader = new NewFormatReader(stream))
|
||||
{
|
||||
Assert.True(reader.MoveToNextEntry());
|
||||
Assert.NotNull(reader.Entry);
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Step 8: Add Test Archives
|
||||
|
||||
Place test files in `tests/TestArchives/Archives/NewFormat/` directory.
|
||||
|
||||
### Step 9: Document
|
||||
|
||||
Update `docs/FORMATS.md` with format support information.
|
||||
|
||||
---
|
||||
|
||||
## Compression Algorithm Implementation
|
||||
|
||||
### Creating a New Compression Stream
|
||||
|
||||
**Example:** Creating `CustomStream` for a custom compression algorithm
|
||||
|
||||
```csharp
|
||||
public class CustomStream : Stream
|
||||
{
|
||||
private readonly Stream _baseStream;
|
||||
private readonly bool _leaveOpen;
|
||||
|
||||
public CustomStream(Stream baseStream, bool leaveOpen = false)
|
||||
{
|
||||
_baseStream = baseStream;
|
||||
_leaveOpen = leaveOpen;
|
||||
}
|
||||
|
||||
public override int Read(byte[] buffer, int offset, int count)
|
||||
{
|
||||
// Decompress data from _baseStream into buffer
|
||||
// Return number of decompressed bytes
|
||||
}
|
||||
|
||||
public override void Write(byte[] buffer, int offset, int count)
|
||||
{
|
||||
// Compress data from buffer into _baseStream
|
||||
}

// Stream's remaining abstract members; a pass-through/unsupported sketch so the class compiles
public override bool CanRead => _baseStream.CanRead;
public override bool CanWrite => _baseStream.CanWrite;
public override bool CanSeek => false;
public override long Length => throw new NotSupportedException();
public override long Position
{
    get => throw new NotSupportedException();
    set => throw new NotSupportedException();
}

public override void Flush() => _baseStream.Flush();
public override long Seek(long offset, SeekOrigin origin) => throw new NotSupportedException();
public override void SetLength(long value) => throw new NotSupportedException();

protected override void Dispose(bool disposing)
|
||||
{
|
||||
if (disposing && !_leaveOpen)
|
||||
{
|
||||
_baseStream?.Dispose();
|
||||
}
|
||||
base.Dispose(disposing);
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Stream Handling Best Practices
|
||||
|
||||
### Disposal Pattern
|
||||
|
||||
```csharp
|
||||
// Correct: Nested using blocks
|
||||
using (var fileStream = File.OpenRead("archive.zip"))
|
||||
using (var archive = ZipArchive.OpenArchive(fileStream))
|
||||
{
|
||||
archive.WriteToDirectory(@"C:\output");
|
||||
}
|
||||
// Both archive and fileStream properly disposed
|
||||
|
||||
// Correct: Using with options
|
||||
var options = new ReaderOptions { LeaveStreamOpen = true };
|
||||
var stream = File.OpenRead("archive.zip");
|
||||
using (var archive = ZipArchive.OpenArchive(stream, options))
|
||||
{
|
||||
archive.WriteToDirectory(@"C:\output");
|
||||
}
|
||||
stream.Dispose(); // Manually dispose if LeaveStreamOpen = true
|
||||
```
|
||||
|
||||
### NonDisposingStream Wrapper
|
||||
|
||||
```csharp
|
||||
// Prevent unwanted stream closure
|
||||
var baseStream = File.OpenRead("data.bin");
|
||||
var nonDisposing = new NonDisposingStream(baseStream);
|
||||
|
||||
using (var compressor = new DeflateStream(nonDisposing))
|
||||
{
|
||||
// Compressor won't close baseStream when disposed
|
||||
}
|
||||
|
||||
// baseStream still usable
|
||||
baseStream.Position = 0; // Works
|
||||
baseStream.Dispose(); // Manual disposal
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Performance Considerations
|
||||
|
||||
### Memory Efficiency
|
||||
|
||||
1. **Avoid loading entire archive in memory** - Use Reader API for large files
|
||||
2. **Process entries sequentially** - Especially for solid archives
|
||||
3. **Use appropriate buffer sizes** - Larger buffers for network I/O
|
||||
4. **Dispose streams promptly** - Free resources when done
|
||||
|
||||
### Algorithm Selection
|
||||
|
||||
1. **Archive API** - Fast for small archives with random access (both APIs are sketched after this list)
|
||||
2. **Reader API** - Efficient for large files or streaming
|
||||
3. **Solid archives** - Sequential extraction much faster
|
||||
4. **Compression levels** - Trade-off between speed and size
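
A side-by-side sketch of the first two points, using the `OpenArchive`/`OpenReader` entry points shown elsewhere in these docs:

```csharp
// Archive API: random access, whole archive indexed up front
using (var archive = ZipArchive.OpenArchive("small.zip"))
{
    var entry = archive.Entries.First(e => e.Key == "config.json");
    entry.WriteToFile(@"C:\output\config.json");
}

// Reader API: forward-only, one entry in memory at a time
using (var stream = File.OpenRead("large.zip"))
using (var reader = ReaderFactory.OpenReader(stream))
{
    while (reader.MoveToNextEntry())
    {
        reader.WriteEntryToDirectory(@"C:\output");
    }
}
```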
|
||||
|
||||
---
|
||||
|
||||
## Testing Guidelines
|
||||
|
||||
### Test Coverage
|
||||
|
||||
1. **Happy path** - Normal extraction works
|
||||
2. **Edge cases** - Empty archives, single file, many files
|
||||
3. **Corrupted data** - Handle gracefully
|
||||
4. **Error cases** - Missing passwords, unsupported compression
|
||||
5. **Async operations** - Both sync and async code paths
|
||||
|
||||
### Test Archives
|
||||
|
||||
- Use `tests/TestArchives/` for test data
|
||||
- Create format-specific subdirectories
|
||||
- Include encrypted, corrupted, and edge case archives
|
||||
- Don't recreate existing archives
|
||||
|
||||
### Test Patterns
|
||||
|
||||
```csharp
|
||||
[Fact]
|
||||
public void Archive_Extraction_Works()
|
||||
{
|
||||
// Arrange
|
||||
var testArchive = Path.Combine(TEST_ARCHIVES_PATH, "test.zip");
|
||||
|
||||
// Act
|
||||
using (var archive = ZipArchive.OpenArchive(testArchive))
|
||||
{
|
||||
archive.WriteToDirectory(SCRATCH_FILES_PATH);
|
||||
}
|
||||
|
||||
// Assert
|
||||
Assert.True(File.Exists(Path.Combine(SCRATCH_FILES_PATH, "file.txt")));
|
||||
}
|
||||
```
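
The coverage list above also asks for async paths; a companion sketch, assuming the `WriteToDirectoryAsync` variant described in these docs:

```csharp
[Fact]
public async Task Archive_Async_Extraction_Works()
{
    // Arrange
    var testArchive = Path.Combine(TEST_ARCHIVES_PATH, "test.zip");

    // Act
    using (var archive = ZipArchive.OpenArchive(testArchive))
    {
        await archive.WriteToDirectoryAsync(
            SCRATCH_FILES_PATH,
            new ExtractionOptions { ExtractFullPath = true, Overwrite = true },
            CancellationToken.None
        );
    }

    // Assert
    Assert.True(File.Exists(Path.Combine(SCRATCH_FILES_PATH, "file.txt")));
}
```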
|
||||
|
||||
---
|
||||
|
||||
## Related Documentation
|
||||
|
||||
- [AGENTS.md](../AGENTS.md) - Development guidelines
|
||||
- [FORMATS.md](FORMATS.md) - Supported formats
|
||||
docs/ENCODING.md (new file, 610 lines)
@@ -0,0 +1,610 @@
|
||||
# SharpCompress Character Encoding Guide
|
||||
|
||||
This guide explains how SharpCompress handles character encoding for archive entries (filenames, comments, etc.).
|
||||
|
||||
## Overview
|
||||
|
||||
Most archive formats store filenames and metadata as bytes. SharpCompress must convert these bytes to strings using the appropriate character encoding.
|
||||
|
||||
**Common Problem:** Archives created on systems with non-UTF8 encodings (especially Japanese, Chinese systems) appear with corrupted filenames when extracted on systems that assume UTF8.
|
||||
|
||||
---
|
||||
|
||||
## ArchiveEncoding Class
|
||||
|
||||
### Basic Usage
|
||||
|
||||
```csharp
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.Readers;
|
||||
|
||||
// Configure encoding before opening archive
|
||||
var options = new ReaderOptions
|
||||
{
|
||||
ArchiveEncoding = new ArchiveEncoding
|
||||
{
|
||||
Default = Encoding.GetEncoding(932) // cp932 for Japanese
|
||||
}
|
||||
};
|
||||
|
||||
using (var archive = ZipArchive.OpenArchive("japanese.zip", options))
|
||||
{
|
||||
foreach (var entry in archive.Entries)
|
||||
{
|
||||
Console.WriteLine(entry.Key); // Now shows correct characters
|
||||
}
|
||||
}
|
||||
```
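
Note for .NET Core / .NET 5+ hosts: code-page encodings such as 932, "gbk", or "big5" are not available until the `System.Text.Encoding.CodePages` provider is registered, a one-time call at application startup:

```csharp
// One-time registration so Encoding.GetEncoding(932), "gbk", "big5", etc. resolve on .NET Core / .NET 5+
// Requires a reference to the System.Text.Encoding.CodePages package
Encoding.RegisterProvider(CodePagesEncodingProvider.Instance);
```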
|
||||
|
||||
### ArchiveEncoding Properties
|
||||
|
||||
| Property | Purpose |
|
||||
|----------|---------|
|
||||
| `Default` | Default encoding for filenames (fallback) |
|
||||
| `CustomDecoder` | Custom decoding function for special cases |
|
||||
|
||||
### Setting for Different APIs
|
||||
|
||||
**Archive API:**
|
||||
```csharp
|
||||
var options = new ReaderOptions
|
||||
{
|
||||
ArchiveEncoding = new ArchiveEncoding { Default = Encoding.GetEncoding(932) }
|
||||
};
|
||||
using (var archive = ZipArchive.OpenArchive("file.zip", options))
|
||||
{
|
||||
// Use archive with correct encoding
|
||||
}
|
||||
```
|
||||
|
||||
**Reader API:**
|
||||
```csharp
|
||||
var options = new ReaderOptions
|
||||
{
|
||||
ArchiveEncoding = new ArchiveEncoding { Default = Encoding.GetEncoding(932) }
|
||||
};
|
||||
using (var stream = File.OpenRead("file.zip"))
|
||||
using (var reader = ReaderFactory.OpenReader(stream, options))
|
||||
{
|
||||
while (reader.MoveToNextEntry())
|
||||
{
|
||||
// Filenames decoded correctly
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Common Encodings
|
||||
|
||||
### Asian Encodings
|
||||
|
||||
#### cp932 (Japanese)
|
||||
```csharp
|
||||
// Windows-31J, Shift-JIS variant used on Japanese Windows
|
||||
var options = new ReaderOptions
|
||||
{
|
||||
ArchiveEncoding = new ArchiveEncoding
|
||||
{
|
||||
Default = Encoding.GetEncoding(932)
|
||||
}
|
||||
};
|
||||
using (var archive = ZipArchive.OpenArchive("japanese.zip", options))
|
||||
{
|
||||
// Correctly decodes Japanese filenames
|
||||
}
|
||||
```
|
||||
|
||||
**When to use:**
|
||||
- Archives from Japanese Windows systems
|
||||
- Files with Japanese characters in names
|
||||
|
||||
#### gb2312 (Simplified Chinese)
|
||||
```csharp
|
||||
// Simplified Chinese
|
||||
var options = new ReaderOptions
|
||||
{
|
||||
ArchiveEncoding = new ArchiveEncoding
|
||||
{
|
||||
Default = Encoding.GetEncoding("gb2312")
|
||||
}
|
||||
};
|
||||
```
|
||||
|
||||
#### gbk (Extended Simplified Chinese)
|
||||
```csharp
|
||||
// Extended Simplified Chinese (more characters than gb2312)
|
||||
var options = new ReaderOptions
|
||||
{
|
||||
ArchiveEncoding = new ArchiveEncoding
|
||||
{
|
||||
Default = Encoding.GetEncoding("gbk")
|
||||
}
|
||||
};
|
||||
```
|
||||
|
||||
#### big5 (Traditional Chinese)
|
||||
```csharp
|
||||
// Traditional Chinese (Taiwan, Hong Kong)
|
||||
var options = new ReaderOptions
|
||||
{
|
||||
ArchiveEncoding = new ArchiveEncoding
|
||||
{
|
||||
Default = Encoding.GetEncoding("big5")
|
||||
}
|
||||
};
|
||||
```
|
||||
|
||||
#### euc-jp (Japanese, Unix)
|
||||
```csharp
|
||||
// Extended Unix Code for Japanese
|
||||
var options = new ReaderOptions
|
||||
{
|
||||
ArchiveEncoding = new ArchiveEncoding
|
||||
{
|
||||
Default = Encoding.GetEncoding("euc-jp")
|
||||
}
|
||||
};
|
||||
```
|
||||
|
||||
#### euc-kr (Korean)
|
||||
```csharp
|
||||
// Extended Unix Code for Korean
|
||||
var options = new ReaderOptions
|
||||
{
|
||||
ArchiveEncoding = new ArchiveEncoding
|
||||
{
|
||||
Default = Encoding.GetEncoding("euc-kr")
|
||||
}
|
||||
};
|
||||
```
|
||||
|
||||
### Western European Encodings
|
||||
|
||||
#### iso-8859-1 (Latin-1)
|
||||
```csharp
|
||||
// Western European (includes accented characters)
|
||||
var options = new ReaderOptions
|
||||
{
|
||||
ArchiveEncoding = new ArchiveEncoding
|
||||
{
|
||||
Default = Encoding.GetEncoding("iso-8859-1")
|
||||
}
|
||||
};
|
||||
```
|
||||
|
||||
**When to use:**
|
||||
- Archives from French, German, Spanish systems
|
||||
- Files with accented characters (é, ñ, ü, etc.)
|
||||
|
||||
#### cp1252 (Windows-1252)
|
||||
```csharp
|
||||
// Windows Western European
|
||||
// Very similar to iso-8859-1 but with additional printable characters
|
||||
var options = new ReaderOptions
|
||||
{
|
||||
ArchiveEncoding = new ArchiveEncoding
|
||||
{
|
||||
Default = Encoding.GetEncoding("cp1252")
|
||||
}
|
||||
};
|
||||
```
|
||||
|
||||
**When to use:**
|
||||
- Archives from older Western European Windows systems
|
||||
- Files with smart quotes and other Windows-specific characters
|
||||
|
||||
#### iso-8859-15 (Latin-9)
|
||||
```csharp
|
||||
// Western European with Euro symbol support
|
||||
var options = new ReaderOptions
|
||||
{
|
||||
ArchiveEncoding = new ArchiveEncoding
|
||||
{
|
||||
Default = Encoding.GetEncoding("iso-8859-15")
|
||||
}
|
||||
};
|
||||
```
|
||||
|
||||
### Cyrillic Encodings
|
||||
|
||||
#### cp1251 (Windows Cyrillic)
|
||||
```csharp
|
||||
// Russian, Serbian, Bulgarian, etc.
|
||||
var options = new ReaderOptions
|
||||
{
|
||||
ArchiveEncoding = new ArchiveEncoding
|
||||
{
|
||||
Default = Encoding.GetEncoding("cp1251")
|
||||
}
|
||||
};
|
||||
```
|
||||
|
||||
#### koi8-r (KOI8 Russian)
|
||||
```csharp
|
||||
// Russian (Unix standard)
|
||||
var options = new ReaderOptions
|
||||
{
|
||||
ArchiveEncoding = new ArchiveEncoding
|
||||
{
|
||||
Default = Encoding.GetEncoding("koi8-r")
|
||||
}
|
||||
};
|
||||
```
|
||||
|
||||
### UTF Encodings (Modern)
|
||||
|
||||
#### UTF-8 (Default)
|
||||
```csharp
|
||||
// Modern standard - usually correct for new archives
|
||||
var options = new ReaderOptions
|
||||
{
|
||||
ArchiveEncoding = new ArchiveEncoding
|
||||
{
|
||||
Default = Encoding.UTF8
|
||||
}
|
||||
};
|
||||
```
|
||||
|
||||
#### UTF-16
|
||||
```csharp
|
||||
// Unicode - rarely used in archives
|
||||
var options = new ReaderOptions
|
||||
{
|
||||
ArchiveEncoding = new ArchiveEncoding
|
||||
{
|
||||
Default = Encoding.Unicode
|
||||
}
|
||||
};
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Encoding Auto-Detection
|
||||
|
||||
SharpCompress attempts to auto-detect encoding, but this isn't always reliable:
|
||||
|
||||
```csharp
|
||||
// Auto-detection (default)
|
||||
using (var archive = ZipArchive.OpenArchive("file.zip")) // Uses UTF8 by default
|
||||
{
|
||||
// May show corrupted characters if archive uses different encoding
|
||||
}
|
||||
|
||||
// Explicit encoding (more reliable)
|
||||
var options = new ReaderOptions
|
||||
{
|
||||
ArchiveEncoding = new ArchiveEncoding { Default = Encoding.GetEncoding(932) }
|
||||
};
|
||||
using (var archive = ZipArchive.OpenArchive("file.zip", options))
|
||||
{
|
||||
// Correct characters displayed
|
||||
}
|
||||
```
|
||||
|
||||
### When Manual Override is Needed
|
||||
|
||||
| Situation | Solution |
|
||||
|-----------|----------|
|
||||
| Archive shows corrupted characters | Specify the encoding explicitly |
|
||||
| Archives from specific region | Use that region's encoding |
|
||||
| Mixed encodings in archive | Use CustomDecoder |
|
||||
| Testing with international files | Try different encodings |
|
||||
|
||||
---
|
||||
|
||||
## Custom Decoder
|
||||
|
||||
For complex scenarios where a single encoding isn't sufficient:
|
||||
|
||||
### Basic Custom Decoder
|
||||
|
||||
```csharp
|
||||
var options = new ReaderOptions
|
||||
{
|
||||
ArchiveEncoding = new ArchiveEncoding
|
||||
{
|
||||
CustomDecoder = (data, offset, length) =>
|
||||
{
|
||||
// Custom decoding logic
|
||||
var bytes = new byte[length];
|
||||
Array.Copy(data, offset, bytes, 0, length);
|
||||
|
||||
// Try strict UTF8 first; a strict decoder throws on invalid byte sequences
// (the default Encoding.UTF8 silently substitutes U+FFFD and would never reach the fallback)
try
{
    return new UTF8Encoding(encoderShouldEmitUTF8Identifier: false, throwOnInvalidBytes: true)
        .GetString(bytes);
}
catch (DecoderFallbackException)
{
    // Fall back to cp932 if the bytes are not valid UTF8
    return Encoding.GetEncoding(932).GetString(bytes);
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
using (var archive = ZipArchive.OpenArchive("mixed.zip", options))
|
||||
{
|
||||
foreach (var entry in archive.Entries)
|
||||
{
|
||||
Console.WriteLine(entry.Key); // Uses custom decoder
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Advanced: Detect Encoding by Content
|
||||
|
||||
```csharp
|
||||
var options = new ReaderOptions
|
||||
{
|
||||
ArchiveEncoding = new ArchiveEncoding
|
||||
{
|
||||
CustomDecoder = DetectAndDecode
|
||||
}
|
||||
};
|
||||
|
||||
private static string DetectAndDecode(byte[] data, int offset, int length)
|
||||
{
|
||||
var bytes = new byte[length];
|
||||
Array.Copy(data, offset, bytes, 0, length);
|
||||
|
||||
// Try UTF8 (most modern archives)
|
||||
try
|
||||
{
|
||||
var str = Encoding.UTF8.GetString(bytes);
|
||||
// Verify it decoded correctly (no replacement characters)
|
||||
if (!str.Contains('\uFFFD'))
|
||||
return str;
|
||||
}
|
||||
catch { }
|
||||
|
||||
// Try cp932 (Japanese)
|
||||
try
|
||||
{
|
||||
var str = Encoding.GetEncoding(932).GetString(bytes);
|
||||
if (!str.Contains('\uFFFD'))
|
||||
return str;
|
||||
}
|
||||
catch { }
|
||||
|
||||
// Fallback to iso-8859-1 (always succeeds)
|
||||
return Encoding.GetEncoding("iso-8859-1").GetString(bytes);
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Code Examples
|
||||
|
||||
### Extract Archive with Japanese Filenames
|
||||
|
||||
```csharp
|
||||
var options = new ReaderOptions
|
||||
{
|
||||
ArchiveEncoding = new ArchiveEncoding
|
||||
{
|
||||
Default = Encoding.GetEncoding(932) // cp932
|
||||
}
|
||||
};
|
||||
|
||||
using (var archive = ZipArchive.OpenArchive("japanese_files.zip", options))
|
||||
{
|
||||
archive.WriteToDirectory(@"C:\output", new ExtractionOptions
|
||||
{
|
||||
ExtractFullPath = true,
|
||||
Overwrite = true
|
||||
});
|
||||
}
|
||||
// Files extracted with correct Japanese names
|
||||
```
|
||||
|
||||
### Extract Archive with Western European Filenames
|
||||
|
||||
```csharp
|
||||
var options = new ReaderOptions
|
||||
{
|
||||
ArchiveEncoding = new ArchiveEncoding
|
||||
{
|
||||
Default = Encoding.GetEncoding("iso-8859-1")
|
||||
}
|
||||
};
|
||||
|
||||
using (var archive = ZipArchive.OpenArchive("french_files.zip", options))
|
||||
{
|
||||
archive.WriteToDirectory(@"C:\output");
|
||||
}
|
||||
// Accented characters (é, è, ê, etc.) display correctly
|
||||
```
|
||||
|
||||
### Extract Archive with Chinese Filenames
|
||||
|
||||
```csharp
|
||||
var options = new ReaderOptions
|
||||
{
|
||||
ArchiveEncoding = new ArchiveEncoding
|
||||
{
|
||||
Default = Encoding.GetEncoding("gbk") // Simplified Chinese
|
||||
}
|
||||
};
|
||||
|
||||
using (var archive = ZipArchive.OpenArchive("chinese_files.zip", options))
|
||||
{
|
||||
archive.WriteToDirectory(@"C:\output");
|
||||
}
|
||||
```
|
||||
|
||||
### Extract Archive with Russian Filenames
|
||||
|
||||
```csharp
|
||||
var options = new ReaderOptions
|
||||
{
|
||||
ArchiveEncoding = new ArchiveEncoding
|
||||
{
|
||||
Default = Encoding.GetEncoding("cp1251") // Windows Cyrillic
|
||||
}
|
||||
};
|
||||
|
||||
using (var archive = ZipArchive.OpenArchive("russian_files.zip", options))
|
||||
{
|
||||
archive.WriteToDirectory(@"C:\output");
|
||||
}
|
||||
```
|
||||
|
||||
### Reader API with Encoding
|
||||
|
||||
```csharp
|
||||
var options = new ReaderOptions
|
||||
{
|
||||
ArchiveEncoding = new ArchiveEncoding
|
||||
{
|
||||
Default = Encoding.GetEncoding(932)
|
||||
}
|
||||
};
|
||||
|
||||
using (var stream = File.OpenRead("japanese.zip"))
|
||||
using (var reader = ReaderFactory.OpenReader(stream, options))
|
||||
{
|
||||
while (reader.MoveToNextEntry())
|
||||
{
|
||||
if (!reader.Entry.IsDirectory)
|
||||
{
|
||||
Console.WriteLine(reader.Entry.Key); // Correct characters
|
||||
reader.WriteEntryToDirectory(@"C:\output");
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Creating Archives with Correct Encoding
|
||||
|
||||
When creating archives, SharpCompress uses UTF8 by default (recommended):
|
||||
|
||||
```csharp
|
||||
// Create with UTF8 (default, recommended)
|
||||
using (var archive = ZipArchive.CreateArchive())
|
||||
{
|
||||
archive.AddAllFromDirectory(@"D:\my_files");
|
||||
archive.SaveTo("output.zip", CompressionType.Deflate);
|
||||
// Archives created with UTF8 encoding
|
||||
}
|
||||
```
|
||||
|
||||
If you need to create archives for systems that expect specific encodings:
|
||||
|
||||
```csharp
|
||||
// Note: SharpCompress Writer API uses UTF8 for encoding
|
||||
// To create archives with other encodings, consider:
|
||||
// 1. Let users on those systems create archives
|
||||
// 2. Use system tools (7-Zip, WinRAR) with desired encoding
|
||||
// 3. Post-process archives if absolutely necessary
|
||||
|
||||
// For now, recommend modern UTF8-based archives
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Troubleshooting Encoding Issues
|
||||
|
||||
### Filenames Show Question Marks (?)
|
||||
|
||||
```
|
||||
✗ Wrong encoding detected
|
||||
test文件.txt → test???.txt
|
||||
```
|
||||
|
||||
**Solution:** Specify correct encoding explicitly
|
||||
|
||||
```csharp
|
||||
var options = new ReaderOptions
|
||||
{
|
||||
ArchiveEncoding = new ArchiveEncoding
|
||||
{
|
||||
Default = Encoding.GetEncoding("gbk") // Try different encodings
|
||||
}
|
||||
};
|
||||
```
|
||||
|
||||
### Filenames Show Replacement Character (�)
|
||||
|
||||
```
|
||||
✗ Invalid bytes for selected encoding
|
||||
café.txt → caf�.txt
|
||||
```
|
||||
|
||||
**Solution:**
|
||||
1. Try a different encoding (see Common Encodings table)
|
||||
2. Use CustomDecoder with fallback encoding
|
||||
3. Archive might be corrupted
|
||||
|
||||
### Mixed Encodings in Single Archive
|
||||
|
||||
```csharp
|
||||
// Use CustomDecoder to handle mixed encodings
|
||||
var options = new ReaderOptions
|
||||
{
|
||||
ArchiveEncoding = new ArchiveEncoding
|
||||
{
|
||||
CustomDecoder = (data, offset, length) =>
|
||||
{
|
||||
// Try multiple encodings in priority order
|
||||
var bytes = new byte[length];
|
||||
Array.Copy(data, offset, bytes, 0, length);
|
||||
|
||||
foreach (var encoding in new[]
|
||||
{
|
||||
Encoding.UTF8,
|
||||
Encoding.GetEncoding(932),
|
||||
Encoding.GetEncoding("iso-8859-1")
|
||||
})
|
||||
{
|
||||
try
|
||||
{
|
||||
var str = encoding.GetString(bytes);
|
||||
if (!str.Contains('\uFFFD'))
|
||||
return str;
|
||||
}
|
||||
catch { }
|
||||
}
|
||||
|
||||
// Final fallback
|
||||
return Encoding.GetEncoding("iso-8859-1").GetString(bytes);
|
||||
}
|
||||
}
|
||||
};
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Encoding Reference Table
|
||||
|
||||
| Encoding | Code | Use Case |
|
||||
|----------|------|----------|
|
||||
| UTF-8 | (default) | Modern archives, recommended |
|
||||
| cp932 | 932 | Japanese Windows |
|
||||
| gb2312 | "gb2312" | Simplified Chinese |
|
||||
| gbk | "gbk" | Extended Simplified Chinese |
|
||||
| big5 | "big5" | Traditional Chinese |
|
||||
| iso-8859-1 | "iso-8859-1" | Western European |
|
||||
| cp1252 | "cp1252" | Windows Western European |
|
||||
| cp1251 | "cp1251" | Russian/Cyrillic |
|
||||
| euc-jp | "euc-jp" | Japanese Unix |
|
||||
| euc-kr | "euc-kr" | Korean |
|
||||
|
||||
---
|
||||
|
||||
## Best Practices
|
||||
|
||||
1. **Use UTF-8 for new archives** - Most modern systems support it
|
||||
2. **Ask the archive creator** - When receiving archives with corrupted names
|
||||
3. **Provide encoding options** - If your app handles user archives
|
||||
4. **Document your assumption** - Tell users what encoding you're using
|
||||
5. **Test with international files** - Before releasing production code
|
||||
|
||||
---
|
||||
|
||||
## Related Documentation
|
||||
|
||||
- [USAGE.md](USAGE.md#extract-zip-which-has-non-utf8-encoded-filenamecp932) - Usage examples
|
||||
@@ -10,7 +10,10 @@
|
||||
|
||||
| Archive Format | Compression Format(s) | Compress/Decompress | Archive API | Reader API | Writer API |
|
||||
| ---------------------- | ------------------------------------------------- | ------------------- | --------------- | ---------- | ------------- |
|
||||
| Rar | Rar | Decompress (1) | RarArchive | RarReader | N/A |
|
||||
| Ace | None | Decompress | N/A | AceReader | N/A |
|
||||
| Arc | None, Packed, Squeezed, Crunched | Decompress | N/A | ArcReader | N/A |
|
||||
| Arj | None | Decompress | N/A | ArjReader | N/A |
|
||||
| Rar | Rar | Decompress | RarArchive | RarReader | N/A |
|
||||
| Zip (2) | None, Shrink, Reduce, Implode, DEFLATE, Deflate64, BZip2, LZMA/LZMA2, PPMd | Both | ZipArchive | ZipReader | ZipWriter |
|
||||
| Tar | None | Both | TarArchive | TarReader | TarWriter (3) |
|
||||
| Tar.GZip | DEFLATE | Both | TarArchive | TarReader | TarWriter (3) |
|
||||
@@ -22,11 +25,28 @@
|
||||
| 7Zip (4) | LZMA, LZMA2, BZip2, PPMd, BCJ, BCJ2, Deflate | Decompress | SevenZipArchive | N/A | N/A |
|
||||
|
||||
1. SOLID Rars are only supported in the RarReader API.
|
||||
2. Zip format supports pkware and WinzipAES encryption. However, encrypted LZMA is not supported. Zip64 reading/writing is supported but only with seekable streams as the Zip spec doesn't support Zip64 data in post data descriptors. Deflate64 is only supported for reading.
|
||||
2. Zip format supports pkware and WinzipAES encryption. However, encrypted LZMA is not supported. Zip64 reading/writing is supported but only with seekable streams as the Zip spec doesn't support Zip64 data in post data descriptors. Deflate64 is only supported for reading. See [Zip Format Notes](#zip-format-notes) for details on multi-volume archives and streaming behavior.
|
||||
3. The Tar format requires a file size in the header. If no size is specified to the TarWriter and the stream is not seekable, then an exception will be thrown. A short sketch of this follows these notes.
|
||||
4. The 7Zip format doesn't allow for reading as a forward-only stream so 7Zip is only supported through the Archive API
|
||||
4. The 7Zip format doesn't allow for reading as a forward-only stream so 7Zip is only supported through the Archive API. See [7Zip Format Notes](#7zip-format-notes) for details on async extraction behavior.
|
||||
5. LZip has no support for extra data like the file name or timestamp. There is a default filename used when looking at the entry Key on the archive.
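
A minimal sketch of note 3, assuming the `WriterFactory`/`TarWriter` surface shown in USAGE.md: sources whose length is known, such as a `FileStream`, avoid the exception; a length-less source combined with a non-seekable output is what throws.

```csharp
// Note 3 in practice: TarWriter needs each entry's size up front.
// A FileStream source exposes Length, so no explicit size is required here.
using (var output = File.OpenWrite("backup.tar"))
using (var writer = WriterFactory.OpenWriter(output, ArchiveType.Tar, new WriterOptions(CompressionType.None)))
using (var source = File.OpenRead(@"C:\data\file.txt"))
{
    writer.Write("file.txt", source, DateTime.Now);
}
```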
|
||||
|
||||
### Zip Format Notes
|
||||
|
||||
- Multi-volume/split ZIP archives require ZipArchive (seekable streams) as ZipReader cannot seek across volume files.
|
||||
- ZipReader processes entries from LocalEntry headers (which include directory entries ending with `/`) and intentionally skips DirectoryEntry headers from the central directory, as they are redundant in streaming mode - all entry data comes from LocalEntry headers which ZipReader has already processed.
|
||||
|
||||
### 7Zip Format Notes
|
||||
|
||||
- **Async Extraction Performance**: When using async extraction methods (e.g., `ExtractAllEntries()` with `MoveToNextEntryAsync()`), each file creates its own decompression stream to avoid state corruption in the LZMA decoder. This is less efficient than synchronous extraction, which can reuse a single decompression stream for multiple files in the same folder.
|
||||
|
||||
**Performance Impact**: For archives with many small files in the same compression folder, async extraction will be slower than synchronous extraction because it must:
|
||||
1. Create a new LZMA decoder for each file
|
||||
2. Skip through the decompressed data to reach each file's starting position
|
||||
|
||||
**Recommendation**: For best performance with 7Zip archives, use synchronous extraction methods (`MoveToNextEntry()` and `WriteEntryToDirectory()`) when possible. Use async methods only when you need to avoid blocking the thread (e.g., in UI applications or async-only contexts).
|
||||
|
||||
**Technical Details**: 7Zip archives group files into "folders" (compression units), where all files in a folder share one continuous LZMA-compressed stream. The LZMA decoder maintains internal state (dictionary window, decoder positions) that assumes sequential, non-interruptible processing. Async operations can yield control during awaits, which would corrupt this shared state. To avoid this, async extraction creates a fresh decoder stream for each file.
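
A synchronous, sequential sketch along those lines (assuming `SevenZipArchive` follows the same `OpenArchive`/`ExtractAllEntries` naming used elsewhere in these docs):

```csharp
// Synchronous, sequential 7Zip extraction: one decoder pass over each compression folder
using (var archive = SevenZipArchive.OpenArchive("archive.7z"))
using (var reader = archive.ExtractAllEntries())
{
    while (reader.MoveToNextEntry())
    {
        if (!reader.Entry.IsDirectory)
        {
            reader.WriteEntryToDirectory(@"C:\output",
                new ExtractionOptions { ExtractFullPath = true, Overwrite = true });
        }
    }
}
```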
|
||||
|
||||
## Compression Streams
|
||||
|
||||
For those who want to directly compress/decompress bits. The single file formats are represented here as well. However, BZip2, LZip and XZ have no metadata (GZip has a little) so using them without something like a Tar file makes little sense.
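
For example, decompressing a single `.gz` payload directly (a sketch; the `SharpCompress.Compressors.Deflate.GZipStream` type and `CompressionMode` enum are assumptions here):

```csharp
using SharpCompress.Compressors;
using SharpCompress.Compressors.Deflate;

// Decompress a raw .gz payload without any archive wrapper
using (var compressed = File.OpenRead("data.bin.gz"))
using (var gzip = new GZipStream(compressed, CompressionMode.Decompress))
using (var output = File.Create("data.bin"))
{
    gzip.CopyTo(output);
}
```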
|
||||
docs/OLD_CHANGELOG.md (new file, 142 lines)
@@ -0,0 +1,142 @@
|
||||
|
||||
# Version Log
|
||||
|
||||
* [Releases](https://github.com/adamhathcock/sharpcompress/releases)
|
||||
|
||||
## Version 0.18
|
||||
|
||||
* [Now on Github releases](https://github.com/adamhathcock/sharpcompress/releases/tag/0.18)
|
||||
|
||||
## Version 0.17.1
|
||||
|
||||
* Fix - [Bug Fix for .NET Core on Windows](https://github.com/adamhathcock/sharpcompress/pull/257)
|
||||
|
||||
## Version 0.17.0
|
||||
|
||||
* New - Full LZip support! Can read and write LZip files and Tars inside LZip files. [Make LZip a first class citizen. #241](https://github.com/adamhathcock/sharpcompress/issues/241)
|
||||
* New - XZ read support! Can read XZ files and Tars inside XZ files. [XZ in SharpCompress #91](https://github.com/adamhathcock/sharpcompress/issues/94)
|
||||
* Fix - [Regression - zip file writing on seekable streams always assumed stream start was 0. Introduced with Zip64 writing.](https://github.com/adamhathcock/sharpcompress/issues/244)
|
||||
* Fix - [Zip files with post-data descriptors can be properly skipped via decompression](https://github.com/adamhathcock/sharpcompress/issues/162)
|
||||
|
||||
## Version 0.16.2
|
||||
|
||||
* Fix [.NET 3.5 should support files and cryptography (was a regression from 0.16.0)](https://github.com/adamhathcock/sharpcompress/pull/251)
|
||||
* Fix [Zip per entry compression customization wrote the wrong method into the zip archive](https://github.com/adamhathcock/sharpcompress/pull/249)
|
||||
|
||||
## Version 0.16.1
|
||||
|
||||
* Fix [Preserve compression method when getting a compressed stream](https://github.com/adamhathcock/sharpcompress/pull/235)
|
||||
* Fix [RAR entry key normalization fix](https://github.com/adamhathcock/sharpcompress/issues/201)
|
||||
|
||||
## Version 0.16.0
|
||||
|
||||
* Breaking - [Progress Event Tracking rethink](https://github.com/adamhathcock/sharpcompress/pull/226)
|
||||
* Update to VS2017 - [VS2017](https://github.com/adamhathcock/sharpcompress/pull/231) - Framework targets have been changed.
|
||||
* New - [Add Zip64 writing](https://github.com/adamhathcock/sharpcompress/pull/211)
|
||||
* [Fix invalid/mismatching Zip version flags.](https://github.com/adamhathcock/sharpcompress/issues/164) - This allows nuget/System.IO.Packaging to read zip files generated by SharpCompress
|
||||
* [Fix 7Zip directory hiding](https://github.com/adamhathcock/sharpcompress/pull/215/files)
|
||||
* [Verify RAR CRC headers](https://github.com/adamhathcock/sharpcompress/pull/220)
|
||||
|
||||
## Version 0.15.2
|
||||
|
||||
* [Fix invalid headers](https://github.com/adamhathcock/sharpcompress/pull/210) - fixes an issue creating large-ish zip archives that was introduced with zip64 reading.
|
||||
|
||||
## Version 0.15.1
|
||||
|
||||
* [Zip64 extending information and ZipReader](https://github.com/adamhathcock/sharpcompress/pull/206)
|
||||
|
||||
## Version 0.15.0
|
||||
|
||||
* [Add zip64 support for ZipArchive extraction](https://github.com/adamhathcock/sharpcompress/pull/205)
|
||||
|
||||
## Version 0.14.1
|
||||
|
||||
* [.NET Assemblies aren't strong named](https://github.com/adamhathcock/sharpcompress/issues/158)
|
||||
* [Pkware encryption for Zip files didn't allow for multiple reads of an entry](https://github.com/adamhathcock/sharpcompress/issues/197)
|
||||
* [GZip Entry couldn't be read multiple times](https://github.com/adamhathcock/sharpcompress/issues/198)
|
||||
|
||||
## Version 0.14.0
|
||||
|
||||
* [Support for LZip reading in for Tars](https://github.com/adamhathcock/sharpcompress/pull/191)
|
||||
|
||||
## Version 0.13.1
|
||||
|
||||
* [Fix null password on ReaderFactory. Fix null options on SevenZipArchive](https://github.com/adamhathcock/sharpcompress/pull/188)
|
||||
* [Make PpmdProperties lazy to avoid unnecessary allocations.](https://github.com/adamhathcock/sharpcompress/pull/185)
|
||||
|
||||
## Version 0.13.0
|
||||
|
||||
* Breaking change: Big refactor of Options on API.
|
||||
* 7Zip supports Deflate
|
||||
|
||||
## Version 0.12.4
|
||||
|
||||
* Forward only zip issue fix https://github.com/adamhathcock/sharpcompress/issues/160
|
||||
* Try to fix frameworks again by copying targets from JSON.NET
|
||||
|
||||
## Version 0.12.3
|
||||
|
||||
* 7Zip fixes https://github.com/adamhathcock/sharpcompress/issues/73
|
||||
* Maybe all profiles will work with project.json now
|
||||
|
||||
## Version 0.12.2
|
||||
|
||||
* Support Profile 259 again
|
||||
|
||||
## Version 0.12.1
|
||||
|
||||
* Support Silverlight 5
|
||||
|
||||
## Version 0.12.0
|
||||
|
||||
* .NET Core RTM!
|
||||
* Bug fix for Tar long paths
|
||||
|
||||
## Version 0.11.6
|
||||
|
||||
* Bug fix for global header in Tar
|
||||
* Writers now have a leaveOpen `bool` overload. They won't close streams if not-requested to.
|
||||
|
||||
## Version 0.11.5
|
||||
|
||||
* Bug fix in Skip method
|
||||
|
||||
## Version 0.11.4
|
||||
|
||||
* SharpCompress is now endian neutral (matters for Mono platforms)
|
||||
* Fix for Inflate (need to change implementation)
|
||||
* Fixes for RAR detection
|
||||
|
||||
## Version 0.11.1
|
||||
|
||||
* Added Cancel on IReader
|
||||
* Removed .NET 2.0 support and LinqBridge dependency
|
||||
|
||||
## Version 0.11
|
||||
|
||||
* Been over a year, contains mainly fixes from contributors!
|
||||
* Possible breaking change: ArchiveEncoding is UTF8 by default now.
|
||||
* TAR supports writing long names using longlink
|
||||
* RAR Protect Header added
|
||||
|
||||
## Version 0.10.3
|
||||
|
||||
* Finally fixed Disposal issue when creating a new archive with the Archive API
|
||||
|
||||
## Version 0.10.2
|
||||
|
||||
* Fixed Rar Header reading for invalid extended time headers.
|
||||
* Windows Store assembly is now strong named
|
||||
* Known issues with Long Tar names being worked on
|
||||
* Updated to VS2013
|
||||
* Portable targets SL5 and Windows Phone 8 (up from SL4 and WP7)
|
||||
|
||||
## Version 0.10.1
|
||||
|
||||
* Fixed 7Zip extraction performance problem
|
||||
|
||||
## Version 0.10
|
||||
|
||||
* Added support for RAR Decryption (thanks to https://github.com/hrasyid)
|
||||
* Embedded some BouncyCastle crypto classes to allow RAR Decryption and Winzip AES Decryption in Portable and Windows Store DLLs
|
||||
* Built in Release (I think)
|
||||
docs/PERFORMANCE.md (new file, 474 lines)
@@ -0,0 +1,474 @@
|
||||
# SharpCompress Performance Guide
|
||||
|
||||
This guide helps you optimize SharpCompress for performance in various scenarios.
|
||||
|
||||
## API Selection Guide
|
||||
|
||||
### Archive API vs Reader API
|
||||
|
||||
Choose the right API based on your use case:
|
||||
|
||||
| Aspect | Archive API | Reader API |
|
||||
|--------|------------|-----------|
|
||||
| **Stream Type** | Seekable only | Non-seekable OK |
|
||||
| **Memory Usage** | All entries in memory | One entry at a time |
|
||||
| **Random Access** | ✓ Yes | ✗ No |
|
||||
| **Best For** | Small-to-medium archives | Large or streaming data |
|
||||
| **Performance** | Fast for random access | Better for large files |
|
||||
|
||||
### Archive API (Fast for Random Access)
|
||||
|
||||
```csharp
|
||||
// Use when:
|
||||
// - Archive fits in memory
|
||||
// - You need random access to entries
|
||||
// - Stream is seekable (file, MemoryStream)
|
||||
|
||||
using (var archive = ZipArchive.OpenArchive("archive.zip"))
|
||||
{
|
||||
// Random access - all entries available
|
||||
var specific = archive.Entries.FirstOrDefault(e => e.Key == "file.txt");
|
||||
if (specific != null)
|
||||
{
|
||||
specific.WriteToFile(@"C:\output\file.txt");
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
**Performance Characteristics:**
|
||||
- ✓ Instant entry lookup
|
||||
- ✓ Parallel extraction possible
|
||||
- ✗ Entire archive in memory
|
||||
- ✗ Can't process while downloading
|
||||
|
||||
### Reader API (Best for Large Files)
|
||||
|
||||
```csharp
|
||||
// Use when:
|
||||
// - Processing large archives (>100 MB)
|
||||
// - Streaming from network/pipe
|
||||
// - Memory is constrained
|
||||
// - Forward-only processing is acceptable
|
||||
|
||||
using (var stream = File.OpenRead("large.zip"))
|
||||
using (var reader = ReaderFactory.OpenReader(stream))
|
||||
{
|
||||
while (reader.MoveToNextEntry())
|
||||
{
|
||||
// Process one entry at a time
|
||||
reader.WriteEntryToDirectory(@"C:\output");
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
**Performance Characteristics:**
|
||||
- ✓ Minimal memory footprint
|
||||
- ✓ Works with non-seekable streams
|
||||
- ✓ Can process while downloading
|
||||
- ✗ Forward-only (no random access)
|
||||
- ✗ Entry lookup requires iteration
|
||||
|
||||
---
|
||||
|
||||
## Buffer Sizing
|
||||
|
||||
### Understanding Buffers
|
||||
|
||||
SharpCompress uses internal buffers for reading compressed data. Buffer size affects:
|
||||
- **Speed:** Larger buffers = fewer I/O operations = faster
|
||||
- **Memory:** Larger buffers = higher memory usage
|
||||
|
||||
### Recommended Buffer Sizes
|
||||
|
||||
| Scenario | Size | Notes |
|
||||
|----------|------|-------|
|
||||
| Embedded/IoT devices | 4-8 KB | Minimal memory usage |
|
||||
| Memory-constrained | 16-32 KB | Conservative default |
|
||||
| Standard use (default) | 64 KB | Recommended default |
|
||||
| Large file streaming | 256 KB | Better throughput |
|
||||
| High-speed SSD | 512 KB - 1 MB | Maximum throughput |
|
||||
|
||||
### How Buffer Size Affects Performance
|
||||
|
||||
```csharp
|
||||
// SharpCompress manages buffers internally
|
||||
// You can't directly set buffer size, but you can:
|
||||
|
||||
// 1. Use Stream.CopyTo with explicit buffer size
|
||||
using (var entryStream = reader.OpenEntryStream())
|
||||
using (var fileStream = File.Create(@"C:\output\file.txt"))
|
||||
{
|
||||
// 64 KB buffer (default)
|
||||
entryStream.CopyTo(fileStream);
|
||||
|
||||
// Or specify larger buffer for faster copy
|
||||
entryStream.CopyTo(fileStream, bufferSize: 262144); // 256 KB
|
||||
}
|
||||
|
||||
// 2. Use custom buffer for writing
|
||||
using (var entryStream = reader.OpenEntryStream())
|
||||
using (var fileStream = File.Create(@"C:\output\file.txt"))
|
||||
{
|
||||
byte[] buffer = new byte[262144]; // 256 KB
|
||||
int bytesRead;
|
||||
while ((bytesRead = entryStream.Read(buffer, 0, buffer.Length)) > 0)
|
||||
{
|
||||
fileStream.Write(buffer, 0, bytesRead);
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Streaming Large Files
|
||||
|
||||
### Non-Seekable Stream Patterns
|
||||
|
||||
For processing archives from downloads or pipes:
|
||||
|
||||
```csharp
|
||||
// Download stream (non-seekable)
|
||||
using (var httpStream = await httpClient.GetStreamAsync(url))
|
||||
using (var reader = ReaderFactory.OpenReader(httpStream))
|
||||
{
|
||||
// Process entries as they arrive
|
||||
while (reader.MoveToNextEntry())
|
||||
{
|
||||
if (!reader.Entry.IsDirectory)
|
||||
{
|
||||
reader.WriteEntryToDirectory(@"C:\output");
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
**Performance Tips:**
|
||||
- Don't try to buffer the entire stream
|
||||
- Process entries immediately
|
||||
- Use async APIs for better responsiveness
|
||||
|
||||
### Download-Then-Extract vs Streaming
|
||||
|
||||
Choose based on your constraints:
|
||||
|
||||
| Approach | When to Use |
|
||||
|----------|------------|
|
||||
| **Download then extract** | Moderate size, need random access |
|
||||
| **Stream during download** | Large files, bandwidth limited, memory constrained |
|
||||
|
||||
```csharp
|
||||
// Download then extract (requires disk space)
|
||||
var archivePath = await DownloadFile(url, @"C:\temp\archive.zip");
|
||||
using (var archive = ZipArchive.OpenArchive(archivePath))
|
||||
{
|
||||
archive.WriteToDirectory(@"C:\output");
|
||||
}
|
||||
|
||||
// Stream during download (on-the-fly extraction)
|
||||
using (var httpStream = await httpClient.GetStreamAsync(url))
|
||||
using (var reader = ReaderFactory.OpenReader(httpStream))
|
||||
{
|
||||
while (reader.MoveToNextEntry())
|
||||
{
|
||||
reader.WriteEntryToDirectory(@"C:\output");
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Solid Archive Optimization
|
||||
|
||||
### Why Solid Archives Are Slow
|
||||
|
||||
Solid archives (Rar, 7Zip) group files together in a single compressed stream:
|
||||
|
||||
```
|
||||
Solid Archive Layout:
|
||||
[Header] [Compressed Stream] [Footer]
|
||||
├─ File1 compressed data
|
||||
├─ File2 compressed data
|
||||
├─ File3 compressed data
|
||||
└─ File4 compressed data
|
||||
```
|
||||
|
||||
Extracting File3 requires decompressing File1 and File2 first.
|
||||
|
||||
### Sequential vs Random Extraction
|
||||
|
||||
**Random Extraction (Slow):**
|
||||
```csharp
|
||||
using (var archive = RarArchive.OpenArchive("solid.rar"))
|
||||
{
|
||||
foreach (var entry in archive.Entries)
|
||||
{
|
||||
entry.WriteToFile(@"C:\output\" + entry.Key); // ✗ Slow!
|
||||
// Each entry triggers full decompression from start
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
**Sequential Extraction (Fast):**
|
||||
```csharp
|
||||
using (var archive = RarArchive.OpenArchive("solid.rar"))
|
||||
{
|
||||
// Method 1: Use WriteToDirectory (recommended)
|
||||
archive.WriteToDirectory(@"C:\output", new ExtractionOptions
|
||||
{
|
||||
ExtractFullPath = true,
|
||||
Overwrite = true
|
||||
});
|
||||
|
||||
// Method 2: Use ExtractAllEntries to get a sequential reader over the archive
using (var reader = archive.ExtractAllEntries())
{
    while (reader.MoveToNextEntry())
    {
        reader.WriteEntryToDirectory(@"C:\output");
    }
}
|
||||
|
||||
// Method 3: Use Reader API (also sequential)
|
||||
using (var reader = RarReader.Open(File.OpenRead("solid.rar")))
|
||||
{
|
||||
while (reader.MoveToNextEntry())
|
||||
{
|
||||
reader.WriteEntryToDirectory(@"C:\output");
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
**Performance Impact:**
|
||||
- Random extraction: O(n²) - very slow for many files
|
||||
- Sequential extraction: O(n) - 10-100x faster
|
||||
|
||||
### Best Practices for Solid Archives
|
||||
|
||||
1. **Always extract sequentially** when possible
|
||||
2. **Use Reader API** for large solid archives
|
||||
3. **Process entries in order** from the archive
|
||||
4. **Consider using 7Zip command-line** for scripted extractions
|
||||
|
||||
---
|
||||
|
||||
## Compression Level Trade-offs
|
||||
|
||||
### Deflate/GZip Levels
|
||||
|
||||
```csharp
|
||||
// Level 1 = Fastest, largest size
|
||||
// Level 6 = Default (balanced)
|
||||
// Level 9 = Slowest, best compression
|
||||
|
||||
// Write with different compression levels
|
||||
using (var archive = ZipArchive.CreateArchive())
|
||||
{
|
||||
archive.AddAllFromDirectory(@"D:\data");
|
||||
|
||||
// Fast compression (level 1)
|
||||
archive.SaveTo("fast.zip", new WriterOptions(CompressionType.Deflate)
|
||||
{
|
||||
CompressionLevel = 1
|
||||
});
|
||||
|
||||
// Default compression (level 6)
|
||||
archive.SaveTo("default.zip", CompressionType.Deflate);
|
||||
|
||||
// Best compression (level 9)
|
||||
archive.SaveTo("best.zip", new WriterOptions(CompressionType.Deflate)
|
||||
{
|
||||
CompressionLevel = 9
|
||||
});
|
||||
}
|
||||
```
|
||||
|
||||
**Speed vs Size:**
|
||||
| Level | Speed | Size | Use Case |
|
||||
|-------|-------|------|----------|
|
||||
| 1 | 10x | 90% | Network, streaming |
|
||||
| 6 | 1x | 75% | Default (good balance) |
|
||||
| 9 | 0.1x | 65% | Archival, static storage |
|
||||
|
||||
### BZip2 Block Size
|
||||
|
||||
```csharp
|
||||
// BZip2 block size affects memory and compression
|
||||
// 100K to 900K (default 900K)
|
||||
|
||||
// Smaller block size = lower memory, faster
|
||||
// Larger block size = better compression, slower
|
||||
|
||||
using (var archive = TarArchive.CreateArchive())
|
||||
{
|
||||
archive.AddAllFromDirectory(@"D:\data");
|
||||
|
||||
// These are preset in WriterOptions via CompressionLevel
|
||||
archive.SaveTo("archive.tar.bz2", CompressionType.BZip2);
|
||||
}
|
||||
```
|
||||
|
||||
### LZMA Settings
|
||||
|
||||
LZMA compression is very powerful but memory-intensive:
|
||||
|
||||
```csharp
|
||||
// LZMA (7Zip, .tar.lzma):
|
||||
// - Dictionary size: 16 KB to 1 GB (default 32 MB)
|
||||
// - Faster preset: smaller dictionary
|
||||
// - Better compression: larger dictionary
|
||||
|
||||
// Preset via CompressionType
|
||||
using (var archive = TarArchive.CreateArchive())
|
||||
{
|
||||
archive.AddAllFromDirectory(@"D:\data");
|
||||
archive.SaveTo("archive.tar.xz", CompressionType.LZMA); // Default settings
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Async Performance
|
||||
|
||||
### When Async Helps
|
||||
|
||||
Async is beneficial when:
|
||||
- **Long I/O operations** (network, slow disks)
|
||||
- **UI responsiveness** needed (Windows Forms, WPF, Blazor)
|
||||
- **Server applications** (ASP.NET, multiple concurrent operations)
|
||||
|
||||
```csharp
|
||||
// Async extraction (non-blocking)
|
||||
using (var archive = ZipArchive.OpenArchive("archive.zip"))
|
||||
{
|
||||
await archive.WriteToDirectoryAsync(
|
||||
@"C:\output",
|
||||
new ExtractionOptions { ExtractFullPath = true, Overwrite = true },
|
||||
cancellationToken
|
||||
);
|
||||
}
|
||||
// Thread can handle other work while I/O happens
|
||||
```
|
||||
|
||||
### When Async Doesn't Help
|
||||
|
||||
Async doesn't improve performance for:
|
||||
- **CPU-bound operations** (already fast)
|
||||
- **Local SSD I/O** (I/O is fast enough)
|
||||
- **Single-threaded scenarios** (no parallelism benefit)
|
||||
|
||||
```csharp
|
||||
// Sync extraction (simpler, same performance on fast I/O)
|
||||
using (var archive = ZipArchive.OpenArchive("archive.zip"))
|
||||
{
|
||||
archive.WriteToDirectory(
|
||||
@"C:\output",
|
||||
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
|
||||
);
|
||||
}
|
||||
// Simple and fast - no async needed
|
||||
```
|
||||
|
||||
### Cancellation Pattern
|
||||
|
||||
```csharp
|
||||
var cts = new CancellationTokenSource();
|
||||
|
||||
// Cancel after 5 minutes
|
||||
cts.CancelAfter(TimeSpan.FromMinutes(5));
|
||||
|
||||
try
|
||||
{
|
||||
using (var archive = ZipArchive.OpenArchive("archive.zip"))
|
||||
{
|
||||
await archive.WriteToDirectoryAsync(
|
||||
@"C:\output",
|
||||
new ExtractionOptions { ExtractFullPath = true, Overwrite = true },
|
||||
cts.Token
|
||||
);
|
||||
}
|
||||
}
|
||||
catch (OperationCanceledException)
|
||||
{
|
||||
Console.WriteLine("Extraction cancelled");
|
||||
// Clean up partial extraction if needed
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Practical Performance Tips
|
||||
|
||||
### 1. Choose the Right API
|
||||
|
||||
| Scenario | API | Why |
|
||||
|----------|-----|-----|
|
||||
| Small archives | Archive | Faster random access |
|
||||
| Large archives | Reader | Lower memory |
|
||||
| Streaming | Reader | Works on non-seekable streams |
|
||||
| Download streams | Reader | Async extraction while downloading |
|
||||
|
||||
### 2. Batch Operations
|
||||
|
||||
```csharp
|
||||
// ✗ Slow - reopens the same archive once per file you want
foreach (var wantedFile in wantedFiles)
{
    using (var archive = ZipArchive.OpenArchive("archive.zip"))
    {
        archive.Entries
            .First(e => e.Key == wantedFile)
            .WriteToFile(Path.Combine(@"C:\output", wantedFile));
    }
}
|
||||
|
||||
// ✓ Better - process multiple entries at once
|
||||
using (var archive = ZipArchive.OpenArchive("archive.zip"))
|
||||
{
|
||||
archive.WriteToDirectory(@"C:\output");
|
||||
}
|
||||
```
|
||||
|
||||
### 3. Profile Your Code
|
||||
|
||||
```csharp
|
||||
var sw = Stopwatch.StartNew();
|
||||
using (var archive = ZipArchive.OpenArchive("large.zip"))
|
||||
{
|
||||
archive.WriteToDirectory(@"C:\output");
|
||||
}
|
||||
sw.Stop();
|
||||
|
||||
Console.WriteLine($"Extraction took {sw.ElapsedMilliseconds}ms");
|
||||
|
||||
// Measure memory before/after
|
||||
var beforeMem = GC.GetTotalMemory(true);
|
||||
// ... do work ...
|
||||
var afterMem = GC.GetTotalMemory(true);
|
||||
Console.WriteLine($"Memory used: {(afterMem - beforeMem) / 1024 / 1024}MB");
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Troubleshooting Performance
|
||||
|
||||
### Extraction is Slow
|
||||
|
||||
1. **Check if solid archive** → Use sequential extraction
|
||||
2. **Check API** → Reader API might be faster for large files
|
||||
3. **Check compression level** → Higher levels are slower to decompress
|
||||
4. **Check I/O** → Network drives are much slower than SSD
|
||||
5. **Check buffer size** → May need larger buffers for network
|
||||
|
||||
### High Memory Usage
|
||||
|
||||
1. **Use Reader API** instead of Archive API
|
||||
2. **Process entries immediately** rather than buffering
|
||||
3. **Reduce compression level** if writing
|
||||
4. **Check for memory leaks** in your code
|
||||
|
||||
### CPU Usage at 100%
|
||||
|
||||
1. **Normal for compression** - especially with high compression levels
|
||||
2. **Consider lower level** for faster processing
|
||||
3. **Reduce parallelism** if processing multiple archives
|
||||
4. **Check if awaiting properly** in async code
|
||||
|
||||
---
|
||||
|
||||
## Related Documentation
|
||||
|
||||
- [USAGE.md](USAGE.md) - Usage examples with performance considerations
|
||||
- [FORMATS.md](FORMATS.md) - Format-specific performance notes
|
||||
docs/USAGE.md (new file, 335 lines)
@@ -0,0 +1,335 @@
|
||||
# SharpCompress Usage
|
||||
|
||||
## Async/Await Support (Beta)
|
||||
|
||||
SharpCompress now provides full async/await support for all I/O operations. All `Read`, `Write`, and extraction operations have async equivalents ending in `Async` that accept an optional `CancellationToken`. This enables better performance and scalability for I/O-bound operations.
|
||||
|
||||
**Key Async Methods:**
|
||||
- `reader.WriteEntryToAsync(stream, cancellationToken)` - Extract entry asynchronously
|
||||
- `reader.WriteAllToDirectoryAsync(path, options, cancellationToken)` - Extract all asynchronously
|
||||
- `writer.WriteAsync(filename, stream, modTime, cancellationToken)` - Write entry asynchronously
|
||||
- `writer.WriteAllAsync(directory, pattern, searchOption, cancellationToken)` - Write directory asynchronously
|
||||
- `entry.OpenEntryStreamAsync(cancellationToken)` - Open entry stream asynchronously
|
||||
|
||||
See [Async Examples](#async-examples) section below for usage patterns.
|
||||
|
||||
## Stream Rules
|
||||
|
||||
When dealing with Streams, the rule should be that you don't close a stream you didn't create. This, in effect, should mean you should always put a Stream in a using block to dispose it.
|
||||
|
||||
However, the .NET Framework often has classes that will dispose streams by default to make things "easy" like the following:
|
||||
|
||||
```C#
|
||||
using (var reader = new StreamReader(File.Open("foo", FileMode.Open)))
|
||||
{
|
||||
...
|
||||
}
|
||||
```
|
||||
|
||||
In this example, `reader` should get disposed. Stream rules say that the `FileStream` created by `File.Open` should remain open; however, the .NET Framework closes it for you by default unless you use a constructor overload that leaves it open. In general, you should be writing Stream code like this:
|
||||
|
||||
```C#
|
||||
using (var fileStream = File.Open("foo", FileMode.Open))
|
||||
using (var reader = new StreamReader(fileStream))
|
||||
{
|
||||
...
|
||||
}
|
||||
```
|
||||
|
||||
To deal with the "correct" rules as well as the expectations of users, I've decided to always close wrapped streams as of 0.21.
|
||||
|
||||
To be explicit though, consider always using the overloads that use `ReaderOptions` or `WriterOptions` and explicitly set `LeaveStreamOpen` the way you want.
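
For example (a sketch using the `ReaderOptions.LeaveStreamOpen` setting shown later in this document):

```C#
using (var fileStream = File.OpenRead("archive.zip"))
{
    using (var reader = ReaderFactory.OpenReader(fileStream, new ReaderOptions { LeaveStreamOpen = true }))
    {
        // ... read entries ...
    }

    // The reader did not close fileStream, so it is still usable here
    fileStream.Position = 0;
}
```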
|
||||
|
||||
If using Compression Stream classes directly and you don't want the wrapped stream to be closed. Use the `NonDisposingStream` as a wrapper to prevent the stream being disposed. The change in 0.21 simplified a lot even though the usage is a bit more convoluted.
|
||||
|
||||
## Samples
|
||||
|
||||
Also, look over the tests for more thorough [examples](https://github.com/adamhathcock/sharpcompress/tree/master/tests/SharpCompress.Test)
|
||||
|
||||
### Create Zip Archive from multiple files
|
||||
```C#
|
||||
using(var archive = ZipArchive.CreateArchive())
|
||||
{
|
||||
archive.AddEntry("file01.txt", "C:\\file01.txt");
|
||||
archive.AddEntry("file02.txt", "C:\\file02.txt");
|
||||
...
|
||||
|
||||
archive.SaveTo("C:\\temp.zip", CompressionType.Deflate);
|
||||
}
|
||||
```
|
||||
|
||||
### Create Zip Archive from all files in a directory to a file
|
||||
|
||||
```C#
|
||||
using (var archive = ZipArchive.CreateArchive())
|
||||
{
|
||||
archive.AddAllFromDirectory("D:\\temp");
|
||||
archive.SaveTo("C:\\temp.zip", CompressionType.Deflate);
|
||||
}
|
||||
```
|
||||
|
||||
### Create Zip Archive from all files in a directory and save in memory
|
||||
|
||||
```C#
|
||||
var memoryStream = new MemoryStream();
|
||||
using (var archive = ZipArchive.CreateArchive())
|
||||
{
|
||||
archive.AddAllFromDirectory("D:\\temp");
|
||||
archive.SaveTo(memoryStream, new WriterOptions(CompressionType.Deflate)
|
||||
{
|
||||
LeaveStreamOpen = true
|
||||
});
|
||||
}
|
||||
//reset memoryStream to be usable now
|
||||
memoryStream.Position = 0;
|
||||
```
|
||||
|
||||
### Extract all files from a rar file to a directory using RarArchive
|
||||
|
||||
Note: Extracting a solid rar or 7z file needs to be done in sequential order to get acceptable decompression speed.
|
||||
`ExtractAllEntries` is primarily intended for solid archives (like solid Rar) or 7Zip archives, where sequential extraction provides the best performance. For general/simple extraction with any supported archive type, use `archive.WriteToDirectory()` instead.
|
||||
|
||||
```C#
|
||||
using (var archive = RarArchive.OpenArchive("Test.rar"))
|
||||
{
|
||||
// Simple extraction with RarArchive; this WriteToDirectory pattern works for all archive types
|
||||
archive.WriteToDirectory(@"D:\temp", new ExtractionOptions()
|
||||
{
|
||||
ExtractFullPath = true,
|
||||
Overwrite = true
|
||||
});
|
||||
}
|
||||
```
|
||||
|
||||
### Iterate over all files from a Rar file using RarArchive
|
||||
|
||||
```C#
|
||||
using (var archive = RarArchive.OpenArchive("Test.rar"))
|
||||
{
|
||||
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
|
||||
{
|
||||
Console.WriteLine($"{entry.Key}: {entry.Size} bytes");
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Extract solid Rar or 7Zip archives with progress reporting
|
||||
|
||||
`ExtractAllEntries` only works for solid archives (Rar) or 7Zip archives. For these archive types, sequential extraction gives optimal performance; the example below extracts such an archive with progress reporting:
|
||||
|
||||
```C#
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.Readers;
|
||||
|
||||
var progress = new Progress<ProgressReport>(report =>
|
||||
{
|
||||
Console.WriteLine($"Extracting {report.EntryPath}: {report.PercentComplete}%");
|
||||
});
|
||||
|
||||
using (var archive = RarArchive.OpenArchive("archive.rar", new ReaderOptions { Progress = progress })) // Must be solid Rar or 7Zip
|
||||
{
|
||||
archive.WriteToDirectory(@"D:\output", new ExtractionOptions()
|
||||
{
|
||||
ExtractFullPath = true,
|
||||
Overwrite = true
|
||||
});
|
||||
}
|
||||
```
|
||||
|
||||
### Use ReaderFactory to autodetect archive type and extract entries to a directory
|
||||
|
||||
```C#
|
||||
using (Stream stream = File.OpenRead("Tar.tar.bz2"))
|
||||
using (var reader = ReaderFactory.OpenReader(stream))
|
||||
{
|
||||
while (reader.MoveToNextEntry())
|
||||
{
|
||||
if (!reader.Entry.IsDirectory)
|
||||
{
|
||||
Console.WriteLine(reader.Entry.Key);
|
||||
reader.WriteEntryToDirectory(@"C:\temp", new ExtractionOptions()
|
||||
{
|
||||
ExtractFullPath = true,
|
||||
Overwrite = true
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
```

### Use ReaderFactory to autodetect archive type and Open the entry stream

```C#
using (Stream stream = File.OpenRead("Tar.tar.bz2"))
using (var reader = ReaderFactory.OpenReader(stream))
{
    while (reader.MoveToNextEntry())
    {
        if (!reader.Entry.IsDirectory)
        {
            using (var entryStream = reader.OpenEntryStream())
            {
                entryStream.CopyTo(...);
            }
        }
    }
}
```

### Use WriterFactory to write all files from a directory in a streaming manner

```C#
using (Stream stream = File.OpenWrite("C:\\temp.tgz"))
using (var writer = WriterFactory.OpenWriter(stream, ArchiveType.Tar, new WriterOptions(CompressionType.GZip)
{
    LeaveStreamOpen = true
}))
{
    writer.WriteAll("D:\\temp", "*", SearchOption.AllDirectories);
}
```

### Extract a zip which has non-UTF-8 encoded filenames (cp932)

```C#
var opts = new SharpCompress.Readers.ReaderOptions();
var encoding = Encoding.GetEncoding(932);
opts.ArchiveEncoding = new SharpCompress.Common.ArchiveEncoding();
opts.ArchiveEncoding.CustomDecoder = (data, x, y) =>
{
    return encoding.GetString(data);
};
var tr = SharpCompress.Archives.Zip.ZipArchive.OpenArchive("test.zip", opts);
foreach (var entry in tr.Entries)
{
    Console.WriteLine($"{entry.Key}");
}
```
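
On .NET Core and later targets, code page 932 is not available by default, so `Encoding.GetEncoding(932)` needs the code pages provider registered once at application startup. This registration belongs to the host application (it assumes the `System.Text.Encoding.CodePages` package is referenced), not to SharpCompress:

```C#
using System.Text;

// Register legacy code pages so Encoding.GetEncoding(932) can resolve cp932
// on .NET Core / .NET 5+ (requires the System.Text.Encoding.CodePages package).
Encoding.RegisterProvider(CodePagesEncodingProvider.Instance);
```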

## Async Examples

### Async Reader Examples

**Extract single entry asynchronously:**
```C#
using (Stream stream = File.OpenRead("archive.zip"))
using (var reader = ReaderFactory.OpenReader(stream))
{
    while (reader.MoveToNextEntry())
    {
        if (!reader.Entry.IsDirectory)
        {
            using (var outputStream = File.Create("output.bin"))
            {
                // WriteEntryToAsync opens and copies the current entry itself,
                // so there is no need to call OpenEntryStream first.
                await reader.WriteEntryToAsync(outputStream, cancellationToken);
            }
        }
    }
}
```

**Extract all entries asynchronously:**
```C#
using (Stream stream = File.OpenRead("archive.tar.gz"))
using (var reader = ReaderFactory.OpenReader(stream))
{
    await reader.WriteAllToDirectoryAsync(
        @"D:\temp",
        new ExtractionOptions()
        {
            ExtractFullPath = true,
            Overwrite = true
        },
        cancellationToken
    );
}
```

**Open and process entry stream asynchronously:**
```C#
using (var archive = ZipArchive.OpenArchive("archive.zip"))
{
    foreach (var entry in archive.Entries.Where(e => !e.IsDirectory))
    {
        using (var entryStream = await entry.OpenEntryStreamAsync(cancellationToken))
        {
            // Process the decompressed stream asynchronously
            await ProcessStreamAsync(entryStream, cancellationToken);
        }
    }
}
```

### Async Writer Examples

**Write single file asynchronously:**
```C#
using (Stream archiveStream = File.OpenWrite("output.zip"))
using (var writer = WriterFactory.OpenWriter(archiveStream, ArchiveType.Zip, CompressionType.Deflate))
{
    using (Stream fileStream = File.OpenRead("input.txt"))
    {
        await writer.WriteAsync("entry.txt", fileStream, DateTime.Now, cancellationToken);
    }
}
```

**Write entire directory asynchronously:**
```C#
using (Stream stream = File.OpenWrite("backup.tar.gz"))
using (var writer = WriterFactory.OpenWriter(stream, ArchiveType.Tar, new WriterOptions(CompressionType.GZip)))
{
    await writer.WriteAllAsync(
        @"D:\files",
        "*",
        SearchOption.AllDirectories,
        cancellationToken
    );
}
```

**Write with cancellation (timeout or user-initiated):**
```C#
var cts = new CancellationTokenSource();

// Set a timeout, or cancel from the UI
cts.CancelAfter(TimeSpan.FromMinutes(5));

using (Stream stream = File.OpenWrite("archive.zip"))
using (var writer = WriterFactory.OpenWriter(stream, ArchiveType.Zip, CompressionType.Deflate))
{
    try
    {
        await writer.WriteAllAsync(@"D:\data", "*", SearchOption.AllDirectories, cts.Token);
    }
    catch (OperationCanceledException)
    {
        Console.WriteLine("Operation was cancelled");
    }
}
```

### Archive Async Examples

**Extract from archive asynchronously:**
```C#
using (var archive = ZipArchive.OpenArchive("archive.zip"))
{
    // Simple async extraction - works for all archive types
    await archive.WriteToDirectoryAsync(
        @"C:\output",
        new ExtractionOptions() { ExtractFullPath = true, Overwrite = true },
        cancellationToken
    );
}
```

**Benefits of Async Operations:**

- Non-blocking I/O for better application responsiveness
- Improved scalability for server applications
- Support for cancellation via CancellationToken
- Better resource utilization in async/await contexts
- Compatible with modern .NET async patterns
```diff
@@ -1,6 +1,6 @@
 {
   "sdk": {
-    "version": "8.0.100",
+    "version": "10.0.100",
     "rollForward": "latestFeature"
   }
 }
```
```diff
@@ -1,7 +1,7 @@
 // Copyright (c) Six Labors.
 // Licensed under the Apache License, Version 2.0.
 
-#if !NETSTANDARD2_0 && !NETSTANDARD2_1 && !NETFRAMEWORK
+#if !LEGACY_DOTNET
 #define SUPPORTS_RUNTIME_INTRINSICS
 #define SUPPORTS_HOTPATH
 #endif
```
src/SharpCompress/Archives/AbstractArchive.Async.cs (new file, 103 lines)
@@ -0,0 +1,103 @@
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.Readers;
|
||||
|
||||
namespace SharpCompress.Archives;
|
||||
|
||||
public abstract partial class AbstractArchive<TEntry, TVolume>
|
||||
where TEntry : IArchiveEntry
|
||||
where TVolume : IVolume
|
||||
{
|
||||
#region Async Support
|
||||
|
||||
// Async properties
|
||||
public virtual IAsyncEnumerable<TEntry> EntriesAsync => _lazyEntriesAsync;
|
||||
|
||||
public IAsyncEnumerable<TVolume> VolumesAsync => _lazyVolumesAsync;
|
||||
|
||||
protected virtual async IAsyncEnumerable<TEntry> LoadEntriesAsync(
|
||||
IAsyncEnumerable<TVolume> volumes
|
||||
)
|
||||
{
|
||||
foreach (var item in LoadEntries(await volumes.ToListAsync()))
|
||||
{
|
||||
yield return item;
|
||||
}
|
||||
}
|
||||
|
||||
public virtual async ValueTask DisposeAsync()
|
||||
{
|
||||
if (!_disposed)
|
||||
{
|
||||
await foreach (var v in _lazyVolumesAsync)
|
||||
{
|
||||
v.Dispose();
|
||||
}
|
||||
foreach (var v in _lazyEntriesAsync.GetLoaded().Cast<Entry>())
|
||||
{
|
||||
v.Close();
|
||||
}
|
||||
_sourceStream?.Dispose();
|
||||
|
||||
_disposed = true;
|
||||
}
|
||||
}
|
||||
|
||||
private async ValueTask EnsureEntriesLoadedAsync()
|
||||
{
|
||||
await _lazyEntriesAsync.EnsureFullyLoaded();
|
||||
await _lazyVolumesAsync.EnsureFullyLoaded();
|
||||
}
|
||||
|
||||
private async IAsyncEnumerable<IArchiveEntry> EntriesAsyncCast()
|
||||
{
|
||||
await foreach (var entry in EntriesAsync)
|
||||
{
|
||||
yield return entry;
|
||||
}
|
||||
}
|
||||
|
||||
IAsyncEnumerable<IArchiveEntry> IAsyncArchive.EntriesAsync => EntriesAsyncCast();
|
||||
|
||||
IAsyncEnumerable<IVolume> IAsyncArchive.VolumesAsync => VolumesAsyncCast();
|
||||
|
||||
private async IAsyncEnumerable<IVolume> VolumesAsyncCast()
|
||||
{
|
||||
await foreach (var volume in _lazyVolumesAsync)
|
||||
{
|
||||
yield return volume;
|
||||
}
|
||||
}
|
||||
|
||||
public async ValueTask<IAsyncReader> ExtractAllEntriesAsync()
|
||||
{
|
||||
if (!await IsSolidAsync() && Type != ArchiveType.SevenZip)
|
||||
{
|
||||
throw new SharpCompressException(
|
||||
"ExtractAllEntries can only be used on solid archives or 7Zip archives (which require random access)."
|
||||
);
|
||||
}
|
||||
await EnsureEntriesLoadedAsync();
|
||||
return await CreateReaderForSolidExtractionAsync();
|
||||
}
|
||||
|
||||
public virtual ValueTask<bool> IsSolidAsync() => new(false);
|
||||
|
||||
public async ValueTask<bool> IsCompleteAsync()
|
||||
{
|
||||
await EnsureEntriesLoadedAsync();
|
||||
return await EntriesAsync.AllAsync(x => x.IsComplete);
|
||||
}
|
||||
|
||||
public async ValueTask<long> TotalSizeAsync() =>
|
||||
await EntriesAsync.AggregateAsync(0L, (total, cf) => total + cf.CompressedSize);
|
||||
|
||||
public async ValueTask<long> TotalUncompressedSizeAsync() =>
|
||||
await EntriesAsync.AggregateAsync(0L, (total, cf) => total + cf.Size);
|
||||
|
||||
public ValueTask<bool> IsEncryptedAsync() => new(IsEncrypted);
|
||||
|
||||
#endregion
|
||||
}
|
||||
@@ -1,14 +1,13 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.IO;
|
||||
using SharpCompress.Readers;
|
||||
|
||||
namespace SharpCompress.Archives;
|
||||
|
||||
public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IArchiveExtractionListener
|
||||
public abstract partial class AbstractArchive<TEntry, TVolume> : IArchive, IAsyncArchive
|
||||
where TEntry : IArchiveEntry
|
||||
where TVolume : IVolume
|
||||
{
|
||||
@@ -17,11 +16,10 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IArchiveExtra
|
||||
private bool _disposed;
|
||||
private readonly SourceStream? _sourceStream;
|
||||
|
||||
public event EventHandler<ArchiveExtractionEventArgs<IArchiveEntry>>? EntryExtractionBegin;
|
||||
public event EventHandler<ArchiveExtractionEventArgs<IArchiveEntry>>? EntryExtractionEnd;
|
||||
// Async fields - kept in original file per refactoring rules
|
||||
private readonly LazyAsyncReadOnlyCollection<TVolume> _lazyVolumesAsync;
|
||||
private readonly LazyAsyncReadOnlyCollection<TEntry> _lazyEntriesAsync;
|
||||
|
||||
public event EventHandler<CompressedBytesReadEventArgs>? CompressedBytesRead;
|
||||
public event EventHandler<FilePartExtractionBeginEventArgs>? FilePartExtractionBegin;
|
||||
protected ReaderOptions ReaderOptions { get; }
|
||||
|
||||
internal AbstractArchive(ArchiveType type, SourceStream sourceStream)
|
||||
@@ -31,6 +29,12 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IArchiveExtra
|
||||
_sourceStream = sourceStream;
|
||||
_lazyVolumes = new LazyReadOnlyCollection<TVolume>(LoadVolumes(_sourceStream));
|
||||
_lazyEntries = new LazyReadOnlyCollection<TEntry>(LoadEntries(Volumes));
|
||||
_lazyVolumesAsync = new LazyAsyncReadOnlyCollection<TVolume>(
|
||||
LoadVolumesAsync(_sourceStream)
|
||||
);
|
||||
_lazyEntriesAsync = new LazyAsyncReadOnlyCollection<TEntry>(
|
||||
LoadEntriesAsync(_lazyVolumesAsync)
|
||||
);
|
||||
}
|
||||
|
||||
internal AbstractArchive(ArchiveType type)
|
||||
@@ -39,25 +43,16 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IArchiveExtra
|
||||
ReaderOptions = new();
|
||||
_lazyVolumes = new LazyReadOnlyCollection<TVolume>(Enumerable.Empty<TVolume>());
|
||||
_lazyEntries = new LazyReadOnlyCollection<TEntry>(Enumerable.Empty<TEntry>());
|
||||
_lazyVolumesAsync = new LazyAsyncReadOnlyCollection<TVolume>(
|
||||
AsyncEnumerableEx.Empty<TVolume>()
|
||||
);
|
||||
_lazyEntriesAsync = new LazyAsyncReadOnlyCollection<TEntry>(
|
||||
AsyncEnumerableEx.Empty<TEntry>()
|
||||
);
|
||||
}
|
||||
|
||||
public ArchiveType Type { get; }
|
||||
|
||||
void IArchiveExtractionListener.FireEntryExtractionBegin(IArchiveEntry entry) =>
|
||||
EntryExtractionBegin?.Invoke(this, new ArchiveExtractionEventArgs<IArchiveEntry>(entry));
|
||||
|
||||
void IArchiveExtractionListener.FireEntryExtractionEnd(IArchiveEntry entry) =>
|
||||
EntryExtractionEnd?.Invoke(this, new ArchiveExtractionEventArgs<IArchiveEntry>(entry));
|
||||
|
||||
private static Stream CheckStreams(Stream stream)
|
||||
{
|
||||
if (!stream.CanSeek || !stream.CanRead)
|
||||
{
|
||||
throw new ArchiveException("Archive streams must be Readable and Seekable");
|
||||
}
|
||||
return stream;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Returns an ReadOnlyCollection of all the RarArchiveEntries across the one or many parts of the RarArchive.
|
||||
/// </summary>
|
||||
@@ -77,12 +72,15 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IArchiveExtra
|
||||
/// <summary>
|
||||
/// The total size of the files as uncompressed in the archive.
|
||||
/// </summary>
|
||||
public virtual long TotalUncompressSize =>
|
||||
public virtual long TotalUncompressedSize =>
|
||||
Entries.Aggregate(0L, (total, cf) => total + cf.Size);
|
||||
|
||||
protected abstract IEnumerable<TVolume> LoadVolumes(SourceStream sourceStream);
|
||||
protected abstract IEnumerable<TEntry> LoadEntries(IEnumerable<TVolume> volumes);
|
||||
|
||||
protected virtual IAsyncEnumerable<TVolume> LoadVolumesAsync(SourceStream sourceStream) =>
|
||||
LoadVolumes(sourceStream).ToAsyncEnumerable();
|
||||
|
||||
IEnumerable<IArchiveEntry> IArchive.Entries => Entries.Cast<IArchiveEntry>();
|
||||
|
||||
IEnumerable<IVolume> IArchive.Volumes => _lazyVolumes.Cast<IVolume>();
|
||||
@@ -99,38 +97,12 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IArchiveExtra
|
||||
}
|
||||
}
|
||||
|
||||
void IArchiveExtractionListener.EnsureEntriesLoaded()
|
||||
private void EnsureEntriesLoaded()
|
||||
{
|
||||
_lazyEntries.EnsureFullyLoaded();
|
||||
_lazyVolumes.EnsureFullyLoaded();
|
||||
}
|
||||
|
||||
void IExtractionListener.FireCompressedBytesRead(
|
||||
long currentPartCompressedBytes,
|
||||
long compressedReadBytes
|
||||
) =>
|
||||
CompressedBytesRead?.Invoke(
|
||||
this,
|
||||
new CompressedBytesReadEventArgs(
|
||||
currentFilePartCompressedBytesRead: currentPartCompressedBytes,
|
||||
compressedBytesRead: compressedReadBytes
|
||||
)
|
||||
);
|
||||
|
||||
void IExtractionListener.FireFilePartExtractionBegin(
|
||||
string name,
|
||||
long size,
|
||||
long compressedSize
|
||||
) =>
|
||||
FilePartExtractionBegin?.Invoke(
|
||||
this,
|
||||
new FilePartExtractionBeginEventArgs(
|
||||
compressedSize: compressedSize,
|
||||
size: size,
|
||||
name: name
|
||||
)
|
||||
);
|
||||
|
||||
/// <summary>
|
||||
/// Use this method to extract all entries in an archive in order.
|
||||
/// This is primarily for SOLID Rar Archives or 7Zip Archives as they need to be
|
||||
@@ -146,21 +118,27 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IArchiveExtra
|
||||
{
|
||||
if (!IsSolid && Type != ArchiveType.SevenZip)
|
||||
{
|
||||
throw new InvalidOperationException(
|
||||
throw new SharpCompressException(
|
||||
"ExtractAllEntries can only be used on solid archives or 7Zip archives (which require random access)."
|
||||
);
|
||||
}
|
||||
((IArchiveExtractionListener)this).EnsureEntriesLoaded();
|
||||
EnsureEntriesLoaded();
|
||||
return CreateReaderForSolidExtraction();
|
||||
}
|
||||
|
||||
protected abstract IReader CreateReaderForSolidExtraction();
|
||||
protected abstract ValueTask<IAsyncReader> CreateReaderForSolidExtractionAsync();
|
||||
|
||||
/// <summary>
|
||||
/// Archive is SOLID (this means the Archive saved bytes by reusing information which helps for archives containing many small files).
|
||||
/// </summary>
|
||||
public virtual bool IsSolid => false;
|
||||
|
||||
/// <summary>
|
||||
/// Archive is ENCRYPTED (this means the Archive has password-protected files).
|
||||
/// </summary>
|
||||
public virtual bool IsEncrypted => false;
|
||||
|
||||
/// <summary>
|
||||
/// The archive can find all the parts of the archive needed to fully extract the archive. This forces the parsing of the entire archive.
|
||||
/// </summary>
|
||||
@@ -168,7 +146,7 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IArchiveExtra
|
||||
{
|
||||
get
|
||||
{
|
||||
((IArchiveExtractionListener)this).EnsureEntriesLoaded();
|
||||
EnsureEntriesLoaded();
|
||||
return Entries.All(x => x.IsComplete);
|
||||
}
|
||||
}
|
||||
|
||||
src/SharpCompress/Archives/AbstractWritableArchive.Async.cs (new file, 123 lines)
@@ -0,0 +1,123 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.Writers;
|
||||
|
||||
namespace SharpCompress.Archives;
|
||||
|
||||
public abstract partial class AbstractWritableArchive<TEntry, TVolume>
|
||||
where TEntry : IArchiveEntry
|
||||
where TVolume : IVolume
|
||||
{
|
||||
// Async property moved from main file
|
||||
private IAsyncEnumerable<TEntry> OldEntriesAsync =>
|
||||
base.EntriesAsync.Where(x => !removedEntries.Contains(x));
|
||||
|
||||
private async ValueTask RebuildModifiedCollectionAsync()
|
||||
{
|
||||
if (pauseRebuilding)
|
||||
{
|
||||
return;
|
||||
}
|
||||
hasModifications = true;
|
||||
newEntries.RemoveAll(v => removedEntries.Contains(v));
|
||||
modifiedEntries.Clear();
|
||||
await foreach (var entry in OldEntriesAsync)
|
||||
{
|
||||
modifiedEntries.Add(entry);
|
||||
}
|
||||
modifiedEntries.AddRange(newEntries);
|
||||
}
|
||||
|
||||
public async ValueTask RemoveEntryAsync(TEntry entry)
|
||||
{
|
||||
if (!removedEntries.Contains(entry))
|
||||
{
|
||||
removedEntries.Add(entry);
|
||||
await RebuildModifiedCollectionAsync();
|
||||
}
|
||||
}
|
||||
|
||||
private async ValueTask<bool> DoesKeyMatchExistingAsync(
|
||||
string key,
|
||||
CancellationToken cancellationToken
|
||||
)
|
||||
{
|
||||
await foreach (
|
||||
var entry in EntriesAsync.WithCancellation(cancellationToken).ConfigureAwait(false)
|
||||
)
|
||||
{
|
||||
var path = entry.Key;
|
||||
if (path is null)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
var p = path.Replace('/', '\\');
|
||||
if (p.Length > 0 && p[0] == '\\')
|
||||
{
|
||||
p = p.Substring(1);
|
||||
}
|
||||
return string.Equals(p, key, StringComparison.OrdinalIgnoreCase);
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
public async ValueTask<TEntry> AddEntryAsync(
|
||||
string key,
|
||||
Stream source,
|
||||
bool closeStream,
|
||||
long size = 0,
|
||||
DateTime? modified = null,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
if (key.Length > 0 && key[0] is '/' or '\\')
|
||||
{
|
||||
key = key.Substring(1);
|
||||
}
|
||||
if (await DoesKeyMatchExistingAsync(key, cancellationToken).ConfigureAwait(false))
|
||||
{
|
||||
throw new ArchiveException("Cannot add entry with duplicate key: " + key);
|
||||
}
|
||||
var entry = CreateEntry(key, source, size, modified, closeStream);
|
||||
newEntries.Add(entry);
|
||||
await RebuildModifiedCollectionAsync();
|
||||
return entry;
|
||||
}
|
||||
|
||||
public async ValueTask<TEntry> AddDirectoryEntryAsync(
|
||||
string key,
|
||||
DateTime? modified = null,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
if (key.Length > 0 && key[0] is '/' or '\\')
|
||||
{
|
||||
key = key.Substring(1);
|
||||
}
|
||||
if (await DoesKeyMatchExistingAsync(key, cancellationToken).ConfigureAwait(false))
|
||||
{
|
||||
throw new ArchiveException("Cannot add entry with duplicate key: " + key);
|
||||
}
|
||||
var entry = CreateDirectoryEntry(key, modified);
|
||||
newEntries.Add(entry);
|
||||
await RebuildModifiedCollectionAsync();
|
||||
return entry;
|
||||
}
|
||||
|
||||
public async ValueTask SaveToAsync(
|
||||
Stream stream,
|
||||
WriterOptions options,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
//reset streams of new entries
|
||||
newEntries.Cast<IWritableArchiveEntry>().ForEach(x => x.Stream.Seek(0, SeekOrigin.Begin));
|
||||
await SaveToAsync(stream, options, OldEntriesAsync, newEntries, cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
}
|
||||
}
|
||||
@@ -2,15 +2,18 @@ using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.IO;
|
||||
using SharpCompress.Writers;
|
||||
|
||||
namespace SharpCompress.Archives;
|
||||
|
||||
public abstract class AbstractWritableArchive<TEntry, TVolume>
|
||||
public abstract partial class AbstractWritableArchive<TEntry, TVolume>
|
||||
: AbstractArchive<TEntry, TVolume>,
|
||||
IWritableArchive
|
||||
IWritableArchive,
|
||||
IWritableAsyncArchive
|
||||
where TEntry : IArchiveEntry
|
||||
where TVolume : IVolume
|
||||
{
|
||||
@@ -94,6 +97,9 @@ public abstract class AbstractWritableArchive<TEntry, TVolume>
|
||||
DateTime? modified
|
||||
) => AddEntry(key, source, closeStream, size, modified);
|
||||
|
||||
IArchiveEntry IWritableArchive.AddDirectoryEntry(string key, DateTime? modified) =>
|
||||
AddDirectoryEntry(key, modified);
|
||||
|
||||
public TEntry AddEntry(
|
||||
string key,
|
||||
Stream source,
|
||||
@@ -134,6 +140,40 @@ public abstract class AbstractWritableArchive<TEntry, TVolume>
|
||||
return false;
|
||||
}
|
||||
|
||||
ValueTask IWritableAsyncArchive.RemoveEntryAsync(IArchiveEntry entry) =>
|
||||
RemoveEntryAsync((TEntry)entry);
|
||||
|
||||
async ValueTask<IArchiveEntry> IWritableAsyncArchive.AddEntryAsync(
|
||||
string key,
|
||||
Stream source,
|
||||
bool closeStream,
|
||||
long size,
|
||||
DateTime? modified,
|
||||
CancellationToken cancellationToken
|
||||
) => await AddEntryAsync(key, source, closeStream, size, modified, cancellationToken);
|
||||
|
||||
async ValueTask<IArchiveEntry> IWritableAsyncArchive.AddDirectoryEntryAsync(
|
||||
string key,
|
||||
DateTime? modified,
|
||||
CancellationToken cancellationToken
|
||||
) => await AddDirectoryEntryAsync(key, modified, cancellationToken);
|
||||
|
||||
public TEntry AddDirectoryEntry(string key, DateTime? modified = null)
|
||||
{
|
||||
if (key.Length > 0 && key[0] is '/' or '\\')
|
||||
{
|
||||
key = key.Substring(1);
|
||||
}
|
||||
if (DoesKeyMatchExisting(key))
|
||||
{
|
||||
throw new ArchiveException("Cannot add entry with duplicate key: " + key);
|
||||
}
|
||||
var entry = CreateDirectoryEntry(key, modified);
|
||||
newEntries.Add(entry);
|
||||
RebuildModifiedCollection();
|
||||
return entry;
|
||||
}
|
||||
|
||||
public void SaveTo(Stream stream, WriterOptions options)
|
||||
{
|
||||
//reset streams of new entries
|
||||
@@ -166,6 +206,8 @@ public abstract class AbstractWritableArchive<TEntry, TVolume>
|
||||
bool closeStream
|
||||
);
|
||||
|
||||
protected abstract TEntry CreateDirectoryEntry(string key, DateTime? modified);
|
||||
|
||||
protected abstract void SaveTo(
|
||||
Stream stream,
|
||||
WriterOptions options,
|
||||
@@ -173,6 +215,14 @@ public abstract class AbstractWritableArchive<TEntry, TVolume>
|
||||
IEnumerable<TEntry> newEntries
|
||||
);
|
||||
|
||||
protected abstract ValueTask SaveToAsync(
|
||||
Stream stream,
|
||||
WriterOptions options,
|
||||
IAsyncEnumerable<TEntry> oldEntries,
|
||||
IEnumerable<TEntry> newEntries,
|
||||
CancellationToken cancellationToken = default
|
||||
);
|
||||
|
||||
public override void Dispose()
|
||||
{
|
||||
base.Dispose();
|
||||
|
||||
src/SharpCompress/Archives/ArchiveFactory.Async.cs (new file, 157 lines)
@@ -0,0 +1,157 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.Factories;
|
||||
using SharpCompress.IO;
|
||||
using SharpCompress.Readers;
|
||||
|
||||
namespace SharpCompress.Archives;
|
||||
|
||||
public static partial class ArchiveFactory
|
||||
{
|
||||
public static async ValueTask<IAsyncArchive> OpenAsyncArchive(
|
||||
Stream stream,
|
||||
ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
readerOptions ??= new ReaderOptions();
|
||||
var factory = await FindFactoryAsync<IArchiveFactory>(stream, cancellationToken);
|
||||
return factory.OpenAsyncArchive(stream, readerOptions);
|
||||
}
|
||||
|
||||
public static ValueTask<IAsyncArchive> OpenAsyncArchive(
|
||||
string filePath,
|
||||
ReaderOptions? options = null,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
filePath.NotNullOrEmpty(nameof(filePath));
|
||||
return OpenAsyncArchive(new FileInfo(filePath), options, cancellationToken);
|
||||
}
|
||||
|
||||
public static async ValueTask<IAsyncArchive> OpenAsyncArchive(
|
||||
FileInfo fileInfo,
|
||||
ReaderOptions? options = null,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
options ??= new ReaderOptions { LeaveStreamOpen = false };
|
||||
|
||||
var factory = await FindFactoryAsync<IArchiveFactory>(fileInfo, cancellationToken);
|
||||
return factory.OpenAsyncArchive(fileInfo, options);
|
||||
}
|
||||
|
||||
public static async ValueTask<IAsyncArchive> OpenAsyncArchive(
|
||||
IEnumerable<FileInfo> fileInfos,
|
||||
ReaderOptions? options = null,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
fileInfos.NotNull(nameof(fileInfos));
|
||||
var filesArray = fileInfos.ToArray();
|
||||
if (filesArray.Length == 0)
|
||||
{
|
||||
throw new InvalidOperationException("No files to open");
|
||||
}
|
||||
|
||||
var fileInfo = filesArray[0];
|
||||
if (filesArray.Length == 1)
|
||||
{
|
||||
return await OpenAsyncArchive(fileInfo, options, cancellationToken);
|
||||
}
|
||||
|
||||
fileInfo.NotNull(nameof(fileInfo));
|
||||
options ??= new ReaderOptions { LeaveStreamOpen = false };
|
||||
|
||||
var factory = await FindFactoryAsync<IMultiArchiveFactory>(fileInfo, cancellationToken);
|
||||
return factory.OpenAsyncArchive(filesArray, options, cancellationToken);
|
||||
}
|
||||
|
||||
public static async ValueTask<IAsyncArchive> OpenAsyncArchive(
|
||||
IEnumerable<Stream> streams,
|
||||
ReaderOptions? options = null,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
streams.NotNull(nameof(streams));
|
||||
var streamsArray = streams.ToArray();
|
||||
if (streamsArray.Length == 0)
|
||||
{
|
||||
throw new InvalidOperationException("No streams");
|
||||
}
|
||||
|
||||
var firstStream = streamsArray[0];
|
||||
if (streamsArray.Length == 1)
|
||||
{
|
||||
return await OpenAsyncArchive(firstStream, options, cancellationToken);
|
||||
}
|
||||
|
||||
firstStream.NotNull(nameof(firstStream));
|
||||
options ??= new ReaderOptions();
|
||||
|
||||
var factory = await FindFactoryAsync<IMultiArchiveFactory>(firstStream, cancellationToken);
|
||||
return factory.OpenAsyncArchive(streamsArray, options);
|
||||
}
|
||||
|
||||
public static ValueTask<T> FindFactoryAsync<T>(
|
||||
string path,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
where T : IFactory
|
||||
{
|
||||
path.NotNullOrEmpty(nameof(path));
|
||||
return FindFactoryAsync<T>(new FileInfo(path), cancellationToken);
|
||||
}
|
||||
|
||||
private static async ValueTask<T> FindFactoryAsync<T>(
|
||||
FileInfo finfo,
|
||||
CancellationToken cancellationToken
|
||||
)
|
||||
where T : IFactory
|
||||
{
|
||||
finfo.NotNull(nameof(finfo));
|
||||
using Stream stream = finfo.OpenRead();
|
||||
return await FindFactoryAsync<T>(stream, cancellationToken);
|
||||
}
|
||||
|
||||
private static async ValueTask<T> FindFactoryAsync<T>(
|
||||
Stream stream,
|
||||
CancellationToken cancellationToken
|
||||
)
|
||||
where T : IFactory
|
||||
{
|
||||
stream.NotNull(nameof(stream));
|
||||
if (!stream.CanRead || !stream.CanSeek)
|
||||
{
|
||||
throw new ArgumentException("Stream should be readable and seekable");
|
||||
}
|
||||
|
||||
var factories = Factory.Factories.OfType<T>();
|
||||
|
||||
var startPosition = stream.Position;
|
||||
|
||||
foreach (var factory in factories)
|
||||
{
|
||||
stream.Seek(startPosition, SeekOrigin.Begin);
|
||||
|
||||
if (await factory.IsArchiveAsync(stream, cancellationToken: cancellationToken))
|
||||
{
|
||||
stream.Seek(startPosition, SeekOrigin.Begin);
|
||||
|
||||
return factory;
|
||||
}
|
||||
}
|
||||
|
||||
var extensions = string.Join(", ", factories.Select(item => item.Name));
|
||||
|
||||
throw new InvalidOperationException(
|
||||
$"Cannot determine compressed stream type. Supported Archive Formats: {extensions}"
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -2,6 +2,8 @@ using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.Factories;
|
||||
using SharpCompress.IO;
|
||||
@@ -9,22 +11,15 @@ using SharpCompress.Readers;
|
||||
|
||||
namespace SharpCompress.Archives;
|
||||
|
||||
public static class ArchiveFactory
|
||||
public static partial class ArchiveFactory
|
||||
{
|
||||
/// <summary>
|
||||
/// Opens an Archive for random access
|
||||
/// </summary>
|
||||
/// <param name="stream"></param>
|
||||
/// <param name="readerOptions"></param>
|
||||
/// <returns></returns>
|
||||
public static IArchive Open(Stream stream, ReaderOptions? readerOptions = null)
|
||||
public static IArchive OpenArchive(Stream stream, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
readerOptions ??= new ReaderOptions();
|
||||
stream = new SharpCompressStream(stream, bufferSize: readerOptions.BufferSize);
|
||||
return FindFactory<IArchiveFactory>(stream).Open(stream, readerOptions);
|
||||
return FindFactory<IArchiveFactory>(stream).OpenArchive(stream, readerOptions);
|
||||
}
|
||||
|
||||
public static IWritableArchive Create(ArchiveType type)
|
||||
public static IWritableArchive CreateArchive(ArchiveType type)
|
||||
{
|
||||
var factory = Factory
|
||||
.Factories.OfType<IWriteableArchiveFactory>()
|
||||
@@ -32,41 +27,29 @@ public static class ArchiveFactory
|
||||
|
||||
if (factory != null)
|
||||
{
|
||||
return factory.CreateWriteableArchive();
|
||||
return factory.CreateArchive();
|
||||
}
|
||||
|
||||
throw new NotSupportedException("Cannot create Archives of type: " + type);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Constructor expects a filepath to an existing file.
|
||||
/// </summary>
|
||||
/// <param name="filePath"></param>
|
||||
/// <param name="options"></param>
|
||||
public static IArchive Open(string filePath, ReaderOptions? options = null)
|
||||
public static IArchive OpenArchive(string filePath, ReaderOptions? options = null)
|
||||
{
|
||||
filePath.NotNullOrEmpty(nameof(filePath));
|
||||
return Open(new FileInfo(filePath), options);
|
||||
return OpenArchive(new FileInfo(filePath), options);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Constructor with a FileInfo object to an existing file.
|
||||
/// </summary>
|
||||
/// <param name="fileInfo"></param>
|
||||
/// <param name="options"></param>
|
||||
public static IArchive Open(FileInfo fileInfo, ReaderOptions? options = null)
|
||||
public static IArchive OpenArchive(FileInfo fileInfo, ReaderOptions? options = null)
|
||||
{
|
||||
options ??= new ReaderOptions { LeaveStreamOpen = false };
|
||||
|
||||
return FindFactory<IArchiveFactory>(fileInfo).Open(fileInfo, options);
|
||||
return FindFactory<IArchiveFactory>(fileInfo).OpenArchive(fileInfo, options);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Constructor with IEnumerable FileInfo objects, multi and split support.
|
||||
/// </summary>
|
||||
/// <param name="fileInfos"></param>
|
||||
/// <param name="options"></param>
|
||||
public static IArchive Open(IEnumerable<FileInfo> fileInfos, ReaderOptions? options = null)
|
||||
public static IArchive OpenArchive(
|
||||
IEnumerable<FileInfo> fileInfos,
|
||||
ReaderOptions? options = null
|
||||
)
|
||||
{
|
||||
fileInfos.NotNull(nameof(fileInfos));
|
||||
var filesArray = fileInfos.ToArray();
|
||||
@@ -78,21 +61,16 @@ public static class ArchiveFactory
|
||||
var fileInfo = filesArray[0];
|
||||
if (filesArray.Length == 1)
|
||||
{
|
||||
return Open(fileInfo, options);
|
||||
return OpenArchive(fileInfo, options);
|
||||
}
|
||||
|
||||
fileInfo.NotNull(nameof(fileInfo));
|
||||
options ??= new ReaderOptions { LeaveStreamOpen = false };
|
||||
|
||||
return FindFactory<IMultiArchiveFactory>(fileInfo).Open(filesArray, options);
|
||||
return FindFactory<IMultiArchiveFactory>(fileInfo).OpenArchive(filesArray, options);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Constructor with IEnumerable FileInfo objects, multi and split support.
|
||||
/// </summary>
|
||||
/// <param name="streams"></param>
|
||||
/// <param name="options"></param>
|
||||
public static IArchive Open(IEnumerable<Stream> streams, ReaderOptions? options = null)
|
||||
public static IArchive OpenArchive(IEnumerable<Stream> streams, ReaderOptions? options = null)
|
||||
{
|
||||
streams.NotNull(nameof(streams));
|
||||
var streamsArray = streams.ToArray();
|
||||
@@ -104,29 +82,34 @@ public static class ArchiveFactory
|
||||
var firstStream = streamsArray[0];
|
||||
if (streamsArray.Length == 1)
|
||||
{
|
||||
return Open(firstStream, options);
|
||||
return OpenArchive(firstStream, options);
|
||||
}
|
||||
|
||||
firstStream.NotNull(nameof(firstStream));
|
||||
options ??= new ReaderOptions();
|
||||
|
||||
return FindFactory<IMultiArchiveFactory>(firstStream).Open(streamsArray, options);
|
||||
return FindFactory<IMultiArchiveFactory>(firstStream).OpenArchive(streamsArray, options);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Extract to specific directory, retaining filename
|
||||
/// </summary>
|
||||
public static void WriteToDirectory(
|
||||
string sourceArchive,
|
||||
string destinationDirectory,
|
||||
ExtractionOptions? options = null
|
||||
)
|
||||
{
|
||||
using var archive = Open(sourceArchive);
|
||||
using var archive = OpenArchive(sourceArchive);
|
||||
archive.WriteToDirectory(destinationDirectory, options);
|
||||
}
|
||||
|
||||
private static T FindFactory<T>(FileInfo finfo)
|
||||
public static T FindFactory<T>(string path)
|
||||
where T : IFactory
|
||||
{
|
||||
path.NotNullOrEmpty(nameof(path));
|
||||
using Stream stream = File.OpenRead(path);
|
||||
return FindFactory<T>(stream);
|
||||
}
|
||||
|
||||
public static T FindFactory<T>(FileInfo finfo)
|
||||
where T : IFactory
|
||||
{
|
||||
finfo.NotNull(nameof(finfo));
|
||||
@@ -134,7 +117,7 @@ public static class ArchiveFactory
|
||||
return FindFactory<T>(stream);
|
||||
}
|
||||
|
||||
private static T FindFactory<T>(Stream stream)
|
||||
public static T FindFactory<T>(Stream stream)
|
||||
where T : IFactory
|
||||
{
|
||||
stream.NotNull(nameof(stream));
|
||||
@@ -166,22 +149,14 @@ public static class ArchiveFactory
|
||||
);
|
||||
}
|
||||
|
||||
public static bool IsArchive(
|
||||
string filePath,
|
||||
out ArchiveType? type,
|
||||
int bufferSize = ReaderOptions.DefaultBufferSize
|
||||
)
|
||||
public static bool IsArchive(string filePath, out ArchiveType? type)
|
||||
{
|
||||
filePath.NotNullOrEmpty(nameof(filePath));
|
||||
using Stream s = File.OpenRead(filePath);
|
||||
return IsArchive(s, out type, bufferSize);
|
||||
return IsArchive(s, out type);
|
||||
}
|
||||
|
||||
public static bool IsArchive(
|
||||
Stream stream,
|
||||
out ArchiveType? type,
|
||||
int bufferSize = ReaderOptions.DefaultBufferSize
|
||||
)
|
||||
public static bool IsArchive(Stream stream, out ArchiveType? type)
|
||||
{
|
||||
type = null;
|
||||
stream.NotNull(nameof(stream));
|
||||
@@ -208,22 +183,12 @@ public static class ArchiveFactory
|
||||
return false;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// From a passed in archive (zip, rar, 7z, 001), return all parts.
|
||||
/// </summary>
|
||||
/// <param name="part1"></param>
|
||||
/// <returns></returns>
|
||||
public static IEnumerable<string> GetFileParts(string part1)
|
||||
{
|
||||
part1.NotNullOrEmpty(nameof(part1));
|
||||
return GetFileParts(new FileInfo(part1)).Select(a => a.FullName);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// From a passed in archive (zip, rar, 7z, 001), return all parts.
|
||||
/// </summary>
|
||||
/// <param name="part1"></param>
|
||||
/// <returns></returns>
|
||||
public static IEnumerable<FileInfo> GetFileParts(FileInfo part1)
|
||||
{
|
||||
part1.NotNull(nameof(part1));
|
||||
@@ -237,7 +202,7 @@ public static class ArchiveFactory
|
||||
if (part != null)
|
||||
{
|
||||
yield return part;
|
||||
while ((part = factory.GetFilePart(i++, part1)) != null) //tests split too
|
||||
while ((part = factory.GetFilePart(i++, part1)) != null)
|
||||
{
|
||||
yield return part;
|
||||
}
|
||||
@@ -246,6 +211,4 @@ public static class ArchiveFactory
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public static IArchiveFactory AutoFactory { get; } = new AutoArchiveFactory();
|
||||
}
|
||||
|
||||
@@ -13,6 +13,7 @@ internal abstract class ArchiveVolumeFactory
|
||||
//split 001, 002 ...
|
||||
var m = Regex.Match(part1.Name, @"^(.*\.)([0-9]+)$", RegexOptions.IgnoreCase);
|
||||
if (m.Success)
|
||||
{
|
||||
item = new FileInfo(
|
||||
Path.Combine(
|
||||
part1.DirectoryName!,
|
||||
@@ -22,9 +23,13 @@ internal abstract class ArchiveVolumeFactory
|
||||
)
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
if (item != null && item.Exists)
|
||||
{
|
||||
return item;
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,30 +0,0 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.Readers;
|
||||
|
||||
namespace SharpCompress.Archives;
|
||||
|
||||
class AutoArchiveFactory : IArchiveFactory
|
||||
{
|
||||
public string Name => nameof(AutoArchiveFactory);
|
||||
|
||||
public ArchiveType? KnownArchiveType => null;
|
||||
|
||||
public IEnumerable<string> GetSupportedExtensions() => throw new NotSupportedException();
|
||||
|
||||
public bool IsArchive(
|
||||
Stream stream,
|
||||
string? password = null,
|
||||
int bufferSize = ReaderOptions.DefaultBufferSize
|
||||
) => throw new NotSupportedException();
|
||||
|
||||
public FileInfo? GetFilePart(int index, FileInfo part1) => throw new NotSupportedException();
|
||||
|
||||
public IArchive Open(Stream stream, ReaderOptions? readerOptions = null) =>
|
||||
ArchiveFactory.Open(stream, readerOptions);
|
||||
|
||||
public IArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null) =>
|
||||
ArchiveFactory.Open(fileInfo, readerOptions);
|
||||
}
|
||||
src/SharpCompress/Archives/GZip/GZipArchive.Async.cs (new file, 86 lines)
@@ -0,0 +1,86 @@
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.Common.GZip;
|
||||
using SharpCompress.IO;
|
||||
using SharpCompress.Readers;
|
||||
using SharpCompress.Readers.GZip;
|
||||
using SharpCompress.Writers;
|
||||
using SharpCompress.Writers.GZip;
|
||||
|
||||
namespace SharpCompress.Archives.GZip;
|
||||
|
||||
public partial class GZipArchive
|
||||
{
|
||||
public ValueTask SaveToAsync(string filePath, CancellationToken cancellationToken = default) =>
|
||||
SaveToAsync(new FileInfo(filePath), cancellationToken);
|
||||
|
||||
public async ValueTask SaveToAsync(
|
||||
FileInfo fileInfo,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
using var stream = fileInfo.Open(FileMode.Create, FileAccess.Write);
|
||||
await SaveToAsync(stream, new WriterOptions(CompressionType.GZip), cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
}
|
||||
|
||||
protected override async ValueTask SaveToAsync(
|
||||
Stream stream,
|
||||
WriterOptions options,
|
||||
IAsyncEnumerable<GZipArchiveEntry> oldEntries,
|
||||
IEnumerable<GZipArchiveEntry> newEntries,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
if (Entries.Count > 1)
|
||||
{
|
||||
throw new InvalidFormatException("Only one entry is allowed in a GZip Archive");
|
||||
}
|
||||
using var writer = new GZipWriter(stream, new GZipWriterOptions(options));
|
||||
await foreach (
|
||||
var entry in oldEntries.WithCancellation(cancellationToken).ConfigureAwait(false)
|
||||
)
|
||||
{
|
||||
if (!entry.IsDirectory)
|
||||
{
|
||||
using var entryStream = entry.OpenEntryStream();
|
||||
await writer
|
||||
.WriteAsync(
|
||||
entry.Key.NotNull("Entry Key is null"),
|
||||
entryStream,
|
||||
cancellationToken
|
||||
)
|
||||
.ConfigureAwait(false);
|
||||
}
|
||||
}
|
||||
foreach (var entry in newEntries.Where(x => !x.IsDirectory))
|
||||
{
|
||||
using var entryStream = entry.OpenEntryStream();
|
||||
await writer
|
||||
.WriteAsync(entry.Key.NotNull("Entry Key is null"), entryStream, cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
}
|
||||
}
|
||||
|
||||
protected override ValueTask<IAsyncReader> CreateReaderForSolidExtractionAsync()
|
||||
{
|
||||
var stream = Volumes.Single().Stream;
|
||||
stream.Position = 0;
|
||||
return new((IAsyncReader)GZipReader.OpenReader(stream));
|
||||
}
|
||||
|
||||
protected override async IAsyncEnumerable<GZipArchiveEntry> LoadEntriesAsync(
|
||||
IAsyncEnumerable<GZipVolume> volumes
|
||||
)
|
||||
{
|
||||
var stream = (await volumes.SingleAsync()).Stream;
|
||||
yield return new GZipArchiveEntry(
|
||||
this,
|
||||
await GZipFilePart.CreateAsync(stream, ReaderOptions.ArchiveEncoding)
|
||||
);
|
||||
}
|
||||
}
|
||||
src/SharpCompress/Archives/GZip/GZipArchive.Factory.cs (new file, 201 lines)
@@ -0,0 +1,201 @@
|
||||
using System;
|
||||
using System.Buffers;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.Common.GZip;
|
||||
using SharpCompress.IO;
|
||||
using SharpCompress.Readers;
|
||||
using SharpCompress.Readers.GZip;
|
||||
using SharpCompress.Writers;
|
||||
using SharpCompress.Writers.GZip;
|
||||
|
||||
namespace SharpCompress.Archives.GZip;
|
||||
|
||||
public partial class GZipArchive
|
||||
#if NET8_0_OR_GREATER
|
||||
: IWritableArchiveOpenable,
|
||||
IMultiArchiveOpenable<IWritableArchive, IWritableAsyncArchive>
|
||||
#endif
|
||||
{
|
||||
public static IWritableAsyncArchive OpenAsyncArchive(
|
||||
string path,
|
||||
ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
path.NotNullOrEmpty(nameof(path));
|
||||
return (IWritableAsyncArchive)OpenArchive(
|
||||
new FileInfo(path),
|
||||
readerOptions ?? new ReaderOptions()
|
||||
);
|
||||
}
|
||||
|
||||
public static IWritableArchive OpenArchive(string filePath, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
filePath.NotNullOrEmpty(nameof(filePath));
|
||||
return OpenArchive(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
|
||||
}
|
||||
|
||||
public static IWritableArchive OpenArchive(
|
||||
FileInfo fileInfo,
|
||||
ReaderOptions? readerOptions = null
|
||||
)
|
||||
{
|
||||
fileInfo.NotNull(nameof(fileInfo));
|
||||
return new GZipArchive(
|
||||
new SourceStream(
|
||||
fileInfo,
|
||||
i => ArchiveVolumeFactory.GetFilePart(i, fileInfo),
|
||||
readerOptions ?? new ReaderOptions()
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
public static IWritableArchive OpenArchive(
|
||||
IEnumerable<FileInfo> fileInfos,
|
||||
ReaderOptions? readerOptions = null
|
||||
)
|
||||
{
|
||||
fileInfos.NotNull(nameof(fileInfos));
|
||||
var files = fileInfos.ToArray();
|
||||
return new GZipArchive(
|
||||
new SourceStream(
|
||||
files[0],
|
||||
i => i < files.Length ? files[i] : null,
|
||||
readerOptions ?? new ReaderOptions()
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
public static IWritableArchive OpenArchive(
|
||||
IEnumerable<Stream> streams,
|
||||
ReaderOptions? readerOptions = null
|
||||
)
|
||||
{
|
||||
streams.NotNull(nameof(streams));
|
||||
var strms = streams.ToArray();
|
||||
return new GZipArchive(
|
||||
new SourceStream(
|
||||
strms[0],
|
||||
i => i < strms.Length ? strms[i] : null,
|
||||
readerOptions ?? new ReaderOptions()
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
public static IWritableArchive OpenArchive(Stream stream, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
stream.NotNull(nameof(stream));
|
||||
|
||||
if (stream is not { CanSeek: true })
|
||||
{
|
||||
throw new ArgumentException("Stream must be seekable", nameof(stream));
|
||||
}
|
||||
|
||||
return new GZipArchive(
|
||||
new SourceStream(stream, _ => null, readerOptions ?? new ReaderOptions())
|
||||
);
|
||||
}
|
||||
|
||||
public static IWritableAsyncArchive OpenAsyncArchive(
|
||||
Stream stream,
|
||||
ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
return (IWritableAsyncArchive)OpenArchive(stream, readerOptions);
|
||||
}
|
||||
|
||||
public static IWritableAsyncArchive OpenAsyncArchive(
|
||||
FileInfo fileInfo,
|
||||
ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
return (IWritableAsyncArchive)OpenArchive(fileInfo, readerOptions);
|
||||
}
|
||||
|
||||
public static IWritableAsyncArchive OpenAsyncArchive(
|
||||
IReadOnlyList<Stream> streams,
|
||||
ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
return (IWritableAsyncArchive)OpenArchive(streams, readerOptions);
|
||||
}
|
||||
|
||||
public static IWritableAsyncArchive OpenAsyncArchive(
|
||||
IReadOnlyList<FileInfo> fileInfos,
|
||||
ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
return (IWritableAsyncArchive)OpenArchive(fileInfos, readerOptions);
|
||||
}
|
||||
|
||||
public static IWritableArchive CreateArchive() => new GZipArchive();
|
||||
|
||||
public static IWritableAsyncArchive CreateAsyncArchive() => new GZipArchive();
|
||||
|
||||
public static bool IsGZipFile(string filePath) => IsGZipFile(new FileInfo(filePath));
|
||||
|
||||
public static bool IsGZipFile(FileInfo fileInfo)
|
||||
{
|
||||
if (!fileInfo.Exists)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
using Stream stream = fileInfo.OpenRead();
|
||||
return IsGZipFile(stream);
|
||||
}
|
||||
|
||||
public static bool IsGZipFile(Stream stream)
|
||||
{
|
||||
Span<byte> header = stackalloc byte[10];
|
||||
|
||||
if (!stream.ReadFully(header))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
if (header[0] != 0x1F || header[1] != 0x8B || header[2] != 8)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
public static async ValueTask<bool> IsGZipFileAsync(
|
||||
Stream stream,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
var header = ArrayPool<byte>.Shared.Rent(10);
|
||||
try
|
||||
{
|
||||
await stream.ReadFullyAsync(header, 0, 10, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
if (header[0] != 0x1F || header[1] != 0x8B || header[2] != 8)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
finally
|
||||
{
|
||||
ArrayPool<byte>.Shared.Return(header);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -2,6 +2,8 @@ using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.Common.GZip;
|
||||
using SharpCompress.IO;
|
||||
@@ -12,122 +14,20 @@ using SharpCompress.Writers.GZip;
|
||||
|
||||
namespace SharpCompress.Archives.GZip;
|
||||
|
||||
public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
|
||||
public partial class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
|
||||
{
|
||||
/// <summary>
|
||||
/// Constructor expects a filepath to an existing file.
|
||||
/// </summary>
|
||||
/// <param name="filePath"></param>
|
||||
/// <param name="readerOptions"></param>
|
||||
public static GZipArchive Open(string filePath, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
filePath.NotNullOrEmpty(nameof(filePath));
|
||||
return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Constructor with a FileInfo object to an existing file.
|
||||
/// </summary>
|
||||
/// <param name="fileInfo"></param>
|
||||
/// <param name="readerOptions"></param>
|
||||
public static GZipArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
fileInfo.NotNull(nameof(fileInfo));
|
||||
return new GZipArchive(
|
||||
new SourceStream(
|
||||
fileInfo,
|
||||
i => ArchiveVolumeFactory.GetFilePart(i, fileInfo),
|
||||
readerOptions ?? new ReaderOptions()
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Constructor with all file parts passed in
|
||||
/// </summary>
|
||||
/// <param name="fileInfos"></param>
|
||||
/// <param name="readerOptions"></param>
|
||||
public static GZipArchive Open(
|
||||
IEnumerable<FileInfo> fileInfos,
|
||||
ReaderOptions? readerOptions = null
|
||||
)
|
||||
{
|
||||
fileInfos.NotNull(nameof(fileInfos));
|
||||
var files = fileInfos.ToArray();
|
||||
return new GZipArchive(
|
||||
new SourceStream(
|
||||
files[0],
|
||||
i => i < files.Length ? files[i] : null,
|
||||
readerOptions ?? new ReaderOptions()
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Constructor with all stream parts passed in
|
||||
/// </summary>
|
||||
/// <param name="streams"></param>
|
||||
/// <param name="readerOptions"></param>
|
||||
public static GZipArchive Open(IEnumerable<Stream> streams, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
streams.NotNull(nameof(streams));
|
||||
var strms = streams.ToArray();
|
||||
return new GZipArchive(
|
||||
new SourceStream(
|
||||
strms[0],
|
||||
i => i < strms.Length ? strms[i] : null,
|
||||
readerOptions ?? new ReaderOptions()
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Takes a seekable Stream as a source
|
||||
/// </summary>
|
||||
/// <param name="stream"></param>
|
||||
/// <param name="readerOptions"></param>
|
||||
public static GZipArchive Open(Stream stream, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
stream.NotNull(nameof(stream));
|
||||
|
||||
if (stream is not { CanSeek: true })
|
||||
{
|
||||
throw new ArgumentException("Stream must be seekable", nameof(stream));
|
||||
}
|
||||
|
||||
return new GZipArchive(
|
||||
new SourceStream(stream, _ => null, readerOptions ?? new ReaderOptions())
|
||||
);
|
||||
}
|
||||
|
||||
public static GZipArchive Create() => new();
|
||||
|
||||
/// <summary>
|
||||
/// Constructor with a SourceStream able to handle FileInfo and Streams.
|
||||
/// </summary>
|
||||
/// <param name="sourceStream"></param>
|
||||
private GZipArchive(SourceStream sourceStream)
|
||||
: base(ArchiveType.GZip, sourceStream) { }
|
||||
|
||||
internal GZipArchive()
|
||||
: base(ArchiveType.GZip) { }
|
||||
|
||||
protected override IEnumerable<GZipVolume> LoadVolumes(SourceStream sourceStream)
|
||||
{
|
||||
sourceStream.LoadAllParts();
|
||||
return sourceStream.Streams.Select(a => new GZipVolume(a, ReaderOptions, 0));
|
||||
}
|
||||
|
||||
public static bool IsGZipFile(string filePath) => IsGZipFile(new FileInfo(filePath));
|
||||
|
||||
public static bool IsGZipFile(FileInfo fileInfo)
|
||||
{
|
||||
if (!fileInfo.Exists)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
using Stream stream = fileInfo.OpenRead();
|
||||
return IsGZipFile(stream);
|
||||
}
|
||||
|
||||
public void SaveTo(string filePath) => SaveTo(new FileInfo(filePath));
|
||||
|
||||
public void SaveTo(FileInfo fileInfo)
|
||||
@@ -136,28 +36,6 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
|
||||
SaveTo(stream, new WriterOptions(CompressionType.GZip));
|
||||
}
|
||||
|
||||
public static bool IsGZipFile(Stream stream)
|
||||
{
|
||||
// read the header on the first read
|
||||
Span<byte> header = stackalloc byte[10];
|
||||
|
||||
// workitem 8501: handle edge case (decompress empty stream)
|
||||
if (!stream.ReadFully(header))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
if (header[0] != 0x1F || header[1] != 0x8B || header[2] != 8)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
internal GZipArchive()
|
||||
: base(ArchiveType.GZip) { }
|
||||
|
||||
protected override GZipArchiveEntry CreateEntryInternal(
|
||||
string filePath,
|
||||
Stream source,
|
||||
@@ -173,6 +51,11 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
|
||||
return new GZipWritableArchiveEntry(this, source, filePath, size, modified, closeStream);
|
||||
}
|
||||
|
||||
protected override GZipArchiveEntry CreateDirectoryEntry(
|
||||
string directoryPath,
|
||||
DateTime? modified
|
||||
) => throw new NotSupportedException("GZip archives do not support directory entries.");
|
||||
|
||||
protected override void SaveTo(
|
||||
Stream stream,
|
||||
WriterOptions options,
|
||||
@@ -201,7 +84,7 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
|
||||
var stream = volumes.Single().Stream;
|
||||
yield return new GZipArchiveEntry(
|
||||
this,
|
||||
new GZipFilePart(stream, ReaderOptions.ArchiveEncoding)
|
||||
GZipFilePart.Create(stream, ReaderOptions.ArchiveEncoding)
|
||||
);
|
||||
}
|
||||
|
||||
@@ -209,6 +92,6 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
|
||||
{
|
||||
var stream = Volumes.Single().Stream;
|
||||
stream.Position = 0;
|
||||
return GZipReader.Open(stream);
|
||||
return GZipReader.OpenReader(stream);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,7 @@
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common.GZip;
|
||||
|
||||
namespace SharpCompress.Archives.GZip;
|
||||
@@ -13,13 +15,20 @@ public class GZipArchiveEntry : GZipEntry, IArchiveEntry
|
||||
{
|
||||
//this is to reset the stream to be read multiple times
|
||||
var part = (GZipFilePart)Parts.Single();
|
||||
if (part.GetRawStream().Position != part.EntryStartPosition)
|
||||
var rawStream = part.GetRawStream();
|
||||
if (rawStream.CanSeek && rawStream.Position != part.EntryStartPosition)
|
||||
{
|
||||
part.GetRawStream().Position = part.EntryStartPosition;
|
||||
rawStream.Position = part.EntryStartPosition;
|
||||
}
|
||||
return Parts.Single().GetCompressedStream().NotNull();
|
||||
}
|
||||
|
||||
public ValueTask<Stream> OpenEntryStreamAsync(CancellationToken cancellationToken = default)
|
||||
{
|
||||
// GZip synchronous implementation is fast enough, just wrap it
|
||||
return new(OpenEntryStream());
|
||||
}
|
||||
|
||||
#region IArchiveEntry Members
|
||||
|
||||
public IArchive Archive { get; }
|
||||
|
||||
@@ -58,7 +58,7 @@ internal sealed class GZipWritableArchiveEntry : GZipArchiveEntry, IWritableArch
|
||||
{
|
||||
//ensure new stream is at the start, this could be reset
|
||||
stream.Seek(0, SeekOrigin.Begin);
|
||||
return SharpCompressStream.Create(stream, leaveOpen: true);
|
||||
return new NonDisposingStream(stream);
|
||||
}
|
||||
|
||||
internal override void Close()
|
||||
|
||||
@@ -7,12 +7,6 @@ namespace SharpCompress.Archives;
|
||||
|
||||
public interface IArchive : IDisposable
|
||||
{
|
||||
event EventHandler<ArchiveExtractionEventArgs<IArchiveEntry>> EntryExtractionBegin;
|
||||
event EventHandler<ArchiveExtractionEventArgs<IArchiveEntry>> EntryExtractionEnd;
|
||||
|
||||
event EventHandler<CompressedBytesReadEventArgs> CompressedBytesRead;
|
||||
event EventHandler<FilePartExtractionBeginEventArgs> FilePartExtractionBegin;
|
||||
|
||||
IEnumerable<IArchiveEntry> Entries { get; }
|
||||
IEnumerable<IVolume> Volumes { get; }
|
||||
|
||||
@@ -44,5 +38,10 @@ public interface IArchive : IDisposable
|
||||
/// <summary>
|
||||
/// The total size of the files as uncompressed in the archive.
|
||||
/// </summary>
|
||||
long TotalUncompressSize { get; }
|
||||
long TotalUncompressedSize { get; }
|
||||
|
||||
/// <summary>
|
||||
/// Returns whether the archive is encrypted.
|
||||
/// </summary>
|
||||
bool IsEncrypted { get; }
|
||||
}
|
||||
|
||||
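Usage sketch (not part of the diff): the IArchive hunk above renames TotalUncompressSize to TotalUncompressedSize and drops the extraction events. A minimal consumer, assuming an IArchive instance obtained from one of the OpenArchive factory methods introduced elsewhere in this change set:

using System;
using SharpCompress.Archives;

internal static class ArchiveSummaryExample
{
    public static void Print(IArchive archive)
    {
        // TotalUncompressSize was renamed to TotalUncompressedSize.
        Console.WriteLine($"Total uncompressed size: {archive.TotalUncompressedSize} bytes");

        foreach (var entry in archive.Entries)
        {
            if (!entry.IsDirectory)
            {
                Console.WriteLine($"{entry.Key}: {entry.Size} bytes");
            }
        }
    }
}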
@@ -1,4 +1,6 @@
|
||||
using System.IO;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
|
||||
namespace SharpCompress.Archives;
|
||||
@@ -11,6 +13,12 @@ public interface IArchiveEntry : IEntry
|
||||
/// </summary>
|
||||
Stream OpenEntryStream();
|
||||
|
||||
/// <summary>
|
||||
/// Opens the current entry as a stream that will decompress as it is read asynchronously.
|
||||
/// Read the entire stream or use SkipEntry on EntryStream.
|
||||
/// </summary>
|
||||
ValueTask<Stream> OpenEntryStreamAsync(CancellationToken cancellationToken = default);
|
||||
|
||||
/// <summary>
|
||||
/// The archive can find all the parts of the archive needed to extract this entry.
|
||||
/// </summary>
|
||||
|
||||
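Usage sketch (not part of the diff): IArchiveEntry gains OpenEntryStreamAsync. A small copy helper; the explicit buffer size is only there to stay on the widely available CopyToAsync overload:

using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Archives;

internal static class EntryStreamExample
{
    public static async Task CopyEntryAsync(
        IArchiveEntry entry,
        Stream destination,
        CancellationToken cancellationToken)
    {
        // Read the entire stream, as the doc comment above recommends.
        using Stream source = await entry.OpenEntryStreamAsync(cancellationToken);
        await source.CopyToAsync(destination, 81920, cancellationToken);
    }
}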
@@ -1,4 +1,7 @@
|
||||
using System;
|
||||
using System.IO;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.IO;
|
||||
|
||||
@@ -6,61 +9,155 @@ namespace SharpCompress.Archives;
|
||||
|
||||
public static class IArchiveEntryExtensions
|
||||
{
|
||||
public static void WriteTo(this IArchiveEntry archiveEntry, Stream streamToWriteTo)
|
||||
/// <param name="archiveEntry">The archive entry to extract.</param>
|
||||
extension(IArchiveEntry archiveEntry)
|
||||
{
|
||||
if (archiveEntry.IsDirectory)
|
||||
/// <summary>
|
||||
/// Extract entry to the specified stream.
|
||||
/// </summary>
|
||||
/// <param name="streamToWriteTo">The stream to write the entry content to.</param>
|
||||
/// <param name="progress">Optional progress reporter for tracking extraction progress.</param>
|
||||
public void WriteTo(Stream streamToWriteTo, IProgress<ProgressReport>? progress = null)
|
||||
{
|
||||
throw new ExtractionException("Entry is a file directory and cannot be extracted.");
|
||||
if (archiveEntry.IsDirectory)
|
||||
{
|
||||
throw new ExtractionException("Entry is a file directory and cannot be extracted.");
|
||||
}
|
||||
|
||||
using var entryStream = archiveEntry.OpenEntryStream();
|
||||
var sourceStream = WrapWithProgress(entryStream, archiveEntry, progress);
|
||||
sourceStream.CopyTo(streamToWriteTo, Constants.BufferSize);
|
||||
}
|
||||
|
||||
var streamListener = (IArchiveExtractionListener)archiveEntry.Archive;
|
||||
streamListener.EnsureEntriesLoaded();
|
||||
streamListener.FireEntryExtractionBegin(archiveEntry);
|
||||
streamListener.FireFilePartExtractionBegin(
|
||||
archiveEntry.Key ?? "Key",
|
||||
archiveEntry.Size,
|
||||
archiveEntry.CompressedSize
|
||||
);
|
||||
var entryStream = archiveEntry.OpenEntryStream();
|
||||
using (entryStream)
|
||||
/// <summary>
|
||||
/// Extract entry to the specified stream asynchronously.
|
||||
/// </summary>
|
||||
/// <param name="streamToWriteTo">The stream to write the entry content to.</param>
|
||||
/// <param name="cancellationToken">Cancellation token.</param>
|
||||
/// <param name="progress">Optional progress reporter for tracking extraction progress.</param>
|
||||
public async ValueTask WriteToAsync(
|
||||
Stream streamToWriteTo,
|
||||
IProgress<ProgressReport>? progress = null,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
using Stream s = new ListeningStream(streamListener, entryStream);
|
||||
s.CopyTo(streamToWriteTo);
|
||||
if (archiveEntry.IsDirectory)
|
||||
{
|
||||
throw new ExtractionException("Entry is a file directory and cannot be extracted.");
|
||||
}
|
||||
|
||||
using var entryStream = await archiveEntry.OpenEntryStreamAsync(cancellationToken);
|
||||
var sourceStream = WrapWithProgress(entryStream, archiveEntry, progress);
|
||||
await sourceStream
|
||||
.CopyToAsync(streamToWriteTo, Constants.BufferSize, cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
}
|
||||
streamListener.FireEntryExtractionEnd(archiveEntry);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Extract to specific directory, retaining filename
|
||||
/// </summary>
|
||||
public static void WriteToDirectory(
|
||||
this IArchiveEntry entry,
|
||||
string destinationDirectory,
|
||||
ExtractionOptions? options = null
|
||||
) =>
|
||||
ExtractionMethods.WriteEntryToDirectory(
|
||||
entry,
|
||||
destinationDirectory,
|
||||
options,
|
||||
entry.WriteToFile
|
||||
);
|
||||
private static Stream WrapWithProgress(
|
||||
Stream source,
|
||||
IArchiveEntry entry,
|
||||
IProgress<ProgressReport>? progress
|
||||
)
|
||||
{
|
||||
if (progress is null)
|
||||
{
|
||||
return source;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Extract to specific file
|
||||
/// </summary>
|
||||
public static void WriteToFile(
|
||||
this IArchiveEntry entry,
|
||||
string destinationFileName,
|
||||
ExtractionOptions? options = null
|
||||
) =>
|
||||
ExtractionMethods.WriteEntryToFile(
|
||||
entry,
|
||||
destinationFileName,
|
||||
options,
|
||||
(x, fm) =>
|
||||
{
|
||||
using var fs = File.Open(destinationFileName, fm);
|
||||
entry.WriteTo(fs);
|
||||
}
|
||||
var entryPath = entry.Key ?? string.Empty;
|
||||
var totalBytes = GetEntrySizeSafe(entry);
|
||||
return new ProgressReportingStream(
|
||||
source,
|
||||
progress,
|
||||
entryPath,
|
||||
totalBytes,
|
||||
leaveOpen: true
|
||||
);
|
||||
}
|
||||
|
||||
private static long? GetEntrySizeSafe(IArchiveEntry entry)
|
||||
{
|
||||
try
|
||||
{
|
||||
var size = entry.Size;
|
||||
return size >= 0 ? size : null;
|
||||
}
|
||||
catch (NotImplementedException)
|
||||
{
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
extension(IArchiveEntry entry)
|
||||
{
|
||||
/// <summary>
|
||||
/// Extract to specific directory, retaining filename
|
||||
/// </summary>
|
||||
public void WriteToDirectory(
|
||||
string destinationDirectory,
|
||||
ExtractionOptions? options = null
|
||||
) =>
|
||||
ExtractionMethods.WriteEntryToDirectory(
|
||||
entry,
|
||||
destinationDirectory,
|
||||
options,
|
||||
entry.WriteToFile
|
||||
);
|
||||
|
||||
/// <summary>
|
||||
/// Extract to specific directory asynchronously, retaining filename
|
||||
/// </summary>
|
||||
public async ValueTask WriteToDirectoryAsync(
|
||||
string destinationDirectory,
|
||||
ExtractionOptions? options = null,
|
||||
CancellationToken cancellationToken = default
|
||||
) =>
|
||||
await ExtractionMethods
|
||||
.WriteEntryToDirectoryAsync(
|
||||
entry,
|
||||
destinationDirectory,
|
||||
options,
|
||||
entry.WriteToFileAsync,
|
||||
cancellationToken
|
||||
)
|
||||
.ConfigureAwait(false);
|
||||
|
||||
/// <summary>
|
||||
/// Extract to specific file
|
||||
/// </summary>
|
||||
public void WriteToFile(string destinationFileName, ExtractionOptions? options = null) =>
|
||||
ExtractionMethods.WriteEntryToFile(
|
||||
entry,
|
||||
destinationFileName,
|
||||
options,
|
||||
(x, fm) =>
|
||||
{
|
||||
using var fs = File.Open(destinationFileName, fm);
|
||||
entry.WriteTo(fs);
|
||||
}
|
||||
);
|
||||
|
||||
/// <summary>
|
||||
/// Extract to specific file asynchronously
|
||||
/// </summary>
|
||||
public async ValueTask WriteToFileAsync(
|
||||
string destinationFileName,
|
||||
ExtractionOptions? options = null,
|
||||
CancellationToken cancellationToken = default
|
||||
) =>
|
||||
await ExtractionMethods
|
||||
.WriteEntryToFileAsync(
|
||||
entry,
|
||||
destinationFileName,
|
||||
options,
|
||||
async (x, fm, ct) =>
|
||||
{
|
||||
using var fs = File.Open(destinationFileName, fm);
|
||||
await entry.WriteToAsync(fs, null, ct).ConfigureAwait(false);
|
||||
},
|
||||
cancellationToken
|
||||
)
|
||||
.ConfigureAwait(false);
|
||||
}
|
||||
}
|
||||
|
||||
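Usage sketch (not part of the diff): the WriteTo/WriteToAsync extension members above take an optional IProgress&lt;ProgressReport&gt;. The namespace of ProgressReport is assumed to be SharpCompress.Common, and the report is only observed through ToString() since its members are not shown in this diff:

using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Common;

internal static class EntryProgressExample
{
    public static async Task ExtractAsync(
        IArchiveEntry entry,
        string destinationFile,
        CancellationToken cancellationToken)
    {
        var progress = new Progress<ProgressReport>(report => Console.WriteLine(report));

        using var output = File.Create(destinationFile);
        // The synchronous counterpart is entry.WriteTo(output, progress).
        await entry.WriteToAsync(output, progress, cancellationToken);
    }
}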
@@ -1,8 +1,6 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Threading;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.Readers;
|
||||
|
||||
@@ -10,76 +8,66 @@ namespace SharpCompress.Archives;
|
||||
|
||||
public static class IArchiveExtensions
|
||||
{
|
||||
/// <summary>
|
||||
/// Extract to specific directory, retaining filename
|
||||
/// </summary>
|
||||
public static void WriteToDirectory(
|
||||
this IArchive archive,
|
||||
string destinationDirectory,
|
||||
ExtractionOptions? options = null
|
||||
)
|
||||
extension(IArchive archive)
|
||||
{
|
||||
using var reader = archive.ExtractAllEntries();
|
||||
reader.WriteAllToDirectory(destinationDirectory, options);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Extracts the archive to the destination directory. Directories will be created as needed.
|
||||
/// </summary>
|
||||
/// <param name="archive">The archive to extract.</param>
|
||||
/// <param name="destination">The folder to extract into.</param>
|
||||
/// <param name="progressReport">Optional progress report callback.</param>
|
||||
/// <param name="cancellationToken">Optional cancellation token.</param>
|
||||
public static void ExtractToDirectory(
|
||||
this IArchive archive,
|
||||
string destination,
|
||||
Action<double>? progressReport = null,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
// Prepare for progress reporting
|
||||
var totalBytes = archive.TotalUncompressSize;
|
||||
var bytesRead = 0L;
|
||||
|
||||
// Tracking for created directories.
|
||||
var seenDirectories = new HashSet<string>();
|
||||
|
||||
// Extract
|
||||
foreach (var entry in archive.Entries)
|
||||
/// <summary>
|
||||
/// Extract to specific directory with progress reporting
|
||||
/// </summary>
|
||||
/// <param name="destinationDirectory">The folder to extract into.</param>
|
||||
/// <param name="options">Extraction options.</param>
|
||||
/// <param name="progress">Optional progress reporter for tracking extraction progress.</param>
|
||||
public void WriteToDirectory(
|
||||
string destinationDirectory,
|
||||
ExtractionOptions? options = null,
|
||||
IProgress<ProgressReport>? progress = null
|
||||
)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
|
||||
if (entry.IsDirectory)
|
||||
if (archive.IsSolid || archive.Type == ArchiveType.SevenZip)
|
||||
{
|
||||
var dirPath = Path.Combine(destination, entry.Key.NotNull("Entry Key is null"));
|
||||
if (
|
||||
Path.GetDirectoryName(dirPath + "/") is { } emptyDirectory
|
||||
&& seenDirectories.Add(dirPath)
|
||||
)
|
||||
{
|
||||
Directory.CreateDirectory(emptyDirectory);
|
||||
}
|
||||
continue;
|
||||
using var reader = archive.ExtractAllEntries();
|
||||
reader.WriteAllToDirectory(destinationDirectory, options);
|
||||
}
|
||||
|
||||
// Create each directory if not already created
|
||||
var path = Path.Combine(destination, entry.Key.NotNull("Entry Key is null"));
|
||||
if (Path.GetDirectoryName(path) is { } directory)
|
||||
else
|
||||
{
|
||||
if (!Directory.Exists(directory) && !seenDirectories.Contains(directory))
|
||||
{
|
||||
Directory.CreateDirectory(directory);
|
||||
seenDirectories.Add(directory);
|
||||
}
|
||||
archive.WriteToDirectoryInternal(destinationDirectory, options, progress);
|
||||
}
|
||||
}
|
||||
|
||||
// Write file
|
||||
using var fs = File.OpenWrite(path);
|
||||
entry.WriteTo(fs);
|
||||
private void WriteToDirectoryInternal(
|
||||
string destinationDirectory,
|
||||
ExtractionOptions? options,
|
||||
IProgress<ProgressReport>? progress
|
||||
)
|
||||
{
|
||||
var totalBytes = archive.TotalUncompressedSize;
|
||||
var bytesRead = 0L;
|
||||
var seenDirectories = new HashSet<string>();
|
||||
|
||||
// Update progress
|
||||
bytesRead += entry.Size;
|
||||
progressReport?.Invoke(bytesRead / (double)totalBytes);
|
||||
foreach (var entry in archive.Entries)
|
||||
{
|
||||
if (entry.IsDirectory)
|
||||
{
|
||||
var dirPath = Path.Combine(
|
||||
destinationDirectory,
|
||||
entry.Key.NotNull("Entry Key is null")
|
||||
);
|
||||
if (
|
||||
Path.GetDirectoryName(dirPath + "/") is { } parentDirectory
|
||||
&& seenDirectories.Add(dirPath)
|
||||
)
|
||||
{
|
||||
Directory.CreateDirectory(parentDirectory);
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
entry.WriteToDirectory(destinationDirectory, options);
|
||||
|
||||
bytesRead += entry.Size;
|
||||
progress?.Report(
|
||||
new ProgressReport(entry.Key ?? string.Empty, bytesRead, totalBytes)
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
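Usage sketch (not part of the diff): IArchive.WriteToDirectory now accepts an optional progress reporter and routes solid and 7Zip archives through ExtractAllEntries. The option names below come from the existing ExtractionOptions type, not from this diff:

using System;
using SharpCompress.Archives;
using SharpCompress.Common;

internal static class ArchiveProgressExample
{
    public static void ExtractAll(IArchive archive, string destinationDirectory)
    {
        var progress = new Progress<ProgressReport>(report => Console.WriteLine(report));

        archive.WriteToDirectory(
            destinationDirectory,
            new ExtractionOptions { ExtractFullPath = true, Overwrite = true },
            progress);
    }
}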
@@ -1,10 +0,0 @@
|
||||
using SharpCompress.Common;
|
||||
|
||||
namespace SharpCompress.Archives;
|
||||
|
||||
internal interface IArchiveExtractionListener : IExtractionListener
|
||||
{
|
||||
void EnsureEntriesLoaded();
|
||||
void FireEntryExtractionBegin(IArchiveEntry entry);
|
||||
void FireEntryExtractionEnd(IArchiveEntry entry);
|
||||
}
|
||||
@@ -1,4 +1,5 @@
|
||||
using System.IO;
|
||||
using System.Threading;
|
||||
using SharpCompress.Factories;
|
||||
using SharpCompress.Readers;
|
||||
|
||||
@@ -24,12 +25,27 @@ public interface IArchiveFactory : IFactory
|
||||
/// </summary>
|
||||
/// <param name="stream">An open, readable and seekable stream.</param>
|
||||
/// <param name="readerOptions">reading options.</param>
|
||||
IArchive Open(Stream stream, ReaderOptions? readerOptions = null);
|
||||
IArchive OpenArchive(Stream stream, ReaderOptions? readerOptions = null);
|
||||
|
||||
/// <summary>
|
||||
/// Opens an Archive for random access asynchronously.
|
||||
/// </summary>
|
||||
/// <param name="stream">An open, readable and seekable stream.</param>
|
||||
/// <param name="readerOptions">reading options.</param>
|
||||
IAsyncArchive OpenAsyncArchive(Stream stream, ReaderOptions? readerOptions = null);
|
||||
|
||||
/// <summary>
|
||||
/// Constructor with a FileInfo object to an existing file.
|
||||
/// </summary>
|
||||
/// <param name="fileInfo">the file to open.</param>
|
||||
/// <param name="readerOptions">reading options.</param>
|
||||
IArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null);
|
||||
IArchive OpenArchive(FileInfo fileInfo, ReaderOptions? readerOptions = null);
|
||||
|
||||
/// <summary>
|
||||
/// Opens an Archive from a FileInfo object asynchronously.
|
||||
/// </summary>
|
||||
/// <param name="fileInfo">the file to open.</param>
|
||||
/// <param name="readerOptions">reading options.</param>
|
||||
/// <param name="cancellationToken">Cancellation token.</param>
|
||||
IAsyncArchive OpenAsyncArchive(FileInfo fileInfo, ReaderOptions? readerOptions = null);
|
||||
}
|
||||
|
||||
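Usage sketch (not part of the diff): IArchiveFactory renames Open to OpenArchive and adds OpenAsyncArchive. Opening from a FileInfo through an injected factory:

using System.IO;
using SharpCompress.Archives;
using SharpCompress.Readers;

internal static class FactoryOpenExample
{
    public static IArchive OpenFromFile(IArchiveFactory factory, string path)
    {
        // Open(...) has been renamed to OpenArchive(...);
        // the asynchronous counterpart is OpenAsyncArchive(...).
        return factory.OpenArchive(new FileInfo(path), new ReaderOptions());
    }
}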
src/SharpCompress/Archives/IArchiveOpenable.cs (new file)
@@ -0,0 +1,40 @@
|
||||
#if NET8_0_OR_GREATER
|
||||
using System.IO;
|
||||
using System.Threading;
|
||||
using SharpCompress.Readers;
|
||||
|
||||
namespace SharpCompress.Archives;
|
||||
|
||||
public interface IArchiveOpenable<TSync, TASync>
|
||||
where TSync : IArchive
|
||||
where TASync : IAsyncArchive
|
||||
{
|
||||
public static abstract TSync OpenArchive(string filePath, ReaderOptions? readerOptions = null);
|
||||
|
||||
public static abstract TSync OpenArchive(
|
||||
FileInfo fileInfo,
|
||||
ReaderOptions? readerOptions = null
|
||||
);
|
||||
|
||||
public static abstract TSync OpenArchive(Stream stream, ReaderOptions? readerOptions = null);
|
||||
|
||||
public static abstract TASync OpenAsyncArchive(
|
||||
string path,
|
||||
ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default
|
||||
);
|
||||
|
||||
public static abstract TASync OpenAsyncArchive(
|
||||
Stream stream,
|
||||
ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default
|
||||
);
|
||||
|
||||
public static abstract TASync OpenAsyncArchive(
|
||||
FileInfo fileInfo,
|
||||
ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default
|
||||
);
|
||||
}
|
||||
|
||||
#endif
|
||||
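Usage sketch (not part of the diff): IArchiveOpenable exposes static abstract factory members, so generic code can open archives without naming a concrete type. The helper below is hypothetical; it relies on RarArchive implementing IArchiveOpenable&lt;IRarArchive, IRarAsyncArchive&gt; as shown later in this diff, and requires .NET 8 or newer:

#if NET8_0_OR_GREATER
using SharpCompress.Archives;
using SharpCompress.Archives.Rar;
using SharpCompress.Readers;

internal static class GenericOpenExample
{
    // TArchive supplies the static factory members declared by IArchiveOpenable.
    public static TSync OpenFromPath<TArchive, TSync, TAsync>(
        string path,
        ReaderOptions? readerOptions = null)
        where TArchive : IArchiveOpenable<TSync, TAsync>
        where TSync : IArchive
        where TAsync : IAsyncArchive
        => TArchive.OpenArchive(path, readerOptions);

    // Example instantiation using the RarArchive implementation from this diff.
    public static IRarArchive OpenRar(string path) =>
        OpenFromPath<RarArchive, IRarArchive, IRarAsyncArchive>(path);
}
#endif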
src/SharpCompress/Archives/IAsyncArchive.cs (new file)
@@ -0,0 +1,48 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.Readers;
|
||||
|
||||
namespace SharpCompress.Archives;
|
||||
|
||||
public interface IAsyncArchive : IAsyncDisposable
|
||||
{
|
||||
IAsyncEnumerable<IArchiveEntry> EntriesAsync { get; }
|
||||
IAsyncEnumerable<IVolume> VolumesAsync { get; }
|
||||
|
||||
ArchiveType Type { get; }
|
||||
|
||||
/// <summary>
|
||||
/// Use this method to extract all entries in an archive in order.
|
||||
/// This is primarily for SOLID Rar Archives or 7Zip Archives as they need to be
|
||||
/// extracted sequentially for the best performance.
|
||||
/// </summary>
|
||||
ValueTask<IAsyncReader> ExtractAllEntriesAsync();
|
||||
|
||||
/// <summary>
|
||||
/// Archive is SOLID (the archive reuses compression state across entries, which saves space when it contains many small files).
/// Rar archives may be SOLID; all 7Zip archives are treated as SOLID.
|
||||
/// </summary>
|
||||
ValueTask<bool> IsSolidAsync();
|
||||
|
||||
/// <summary>
|
||||
/// This checks to see if all the known entries have IsComplete = true
|
||||
/// </summary>
|
||||
ValueTask<bool> IsCompleteAsync();
|
||||
|
||||
/// <summary>
|
||||
/// The total size of the files compressed in the archive.
|
||||
/// </summary>
|
||||
ValueTask<long> TotalSizeAsync();
|
||||
|
||||
/// <summary>
|
||||
/// The total size of the files as uncompressed in the archive.
|
||||
/// </summary>
|
||||
ValueTask<long> TotalUncompressedSizeAsync();
|
||||
|
||||
/// <summary>
|
||||
/// Returns whether the archive is encrypted.
|
||||
/// </summary>
|
||||
ValueTask<bool> IsEncryptedAsync();
|
||||
}
|
||||
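Usage sketch (not part of the diff): consuming the new IAsyncArchive surface with await foreach and the asynchronous size query:

using System;
using System.Threading.Tasks;
using SharpCompress.Archives;

internal static class AsyncArchiveExample
{
    public static async ValueTask ListAsync(IAsyncArchive archive)
    {
        Console.WriteLine($"Uncompressed total: {await archive.TotalUncompressedSizeAsync()} bytes");

        await foreach (var entry in archive.EntriesAsync)
        {
            Console.WriteLine(entry.Key);
        }
    }
}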
src/SharpCompress/Archives/IAsyncArchiveExtensions.cs (new file)
@@ -0,0 +1,92 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.Readers;
|
||||
|
||||
namespace SharpCompress.Archives;
|
||||
|
||||
public static class IAsyncArchiveExtensions
|
||||
{
|
||||
extension(IAsyncArchive archive)
|
||||
{
|
||||
/// <summary>
|
||||
/// Extract to specific directory asynchronously with progress reporting and cancellation support
|
||||
/// </summary>
|
||||
/// <param name="archive">The archive to extract.</param>
|
||||
/// <param name="destinationDirectory">The folder to extract into.</param>
|
||||
/// <param name="options">Extraction options.</param>
|
||||
/// <param name="progress">Optional progress reporter for tracking extraction progress.</param>
|
||||
/// <param name="cancellationToken">Optional cancellation token.</param>
|
||||
public async ValueTask WriteToDirectoryAsync(
|
||||
string destinationDirectory,
|
||||
ExtractionOptions? options = null,
|
||||
IProgress<ProgressReport>? progress = null,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
if (await archive.IsSolidAsync() || archive.Type == ArchiveType.SevenZip)
|
||||
{
|
||||
await using var reader = await archive.ExtractAllEntriesAsync();
|
||||
await reader.WriteAllToDirectoryAsync(
|
||||
destinationDirectory,
|
||||
options,
|
||||
cancellationToken
|
||||
);
|
||||
}
|
||||
else
|
||||
{
|
||||
await archive.WriteToDirectoryAsyncInternal(
|
||||
destinationDirectory,
|
||||
options,
|
||||
progress,
|
||||
cancellationToken
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
private async ValueTask WriteToDirectoryAsyncInternal(
|
||||
string destinationDirectory,
|
||||
ExtractionOptions? options,
|
||||
IProgress<ProgressReport>? progress,
|
||||
CancellationToken cancellationToken
|
||||
)
|
||||
{
|
||||
var totalBytes = await archive.TotalUncompressedSizeAsync();
|
||||
var bytesRead = 0L;
|
||||
var seenDirectories = new HashSet<string>();
|
||||
|
||||
await foreach (var entry in archive.EntriesAsync.WithCancellation(cancellationToken))
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
|
||||
if (entry.IsDirectory)
|
||||
{
|
||||
var dirPath = Path.Combine(
|
||||
destinationDirectory,
|
||||
entry.Key.NotNull("Entry Key is null")
|
||||
);
|
||||
if (
|
||||
Path.GetDirectoryName(dirPath + "/") is { } parentDirectory
|
||||
&& seenDirectories.Add(dirPath)
|
||||
)
|
||||
{
|
||||
Directory.CreateDirectory(parentDirectory);
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
await entry
|
||||
.WriteToDirectoryAsync(destinationDirectory, options, cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
|
||||
bytesRead += entry.Size;
|
||||
progress?.Report(
|
||||
new ProgressReport(entry.Key ?? string.Empty, bytesRead, totalBytes)
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
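Usage sketch (not part of the diff): whole-archive asynchronous extraction with progress and cancellation, using the parameter names declared in the extension above:

using System;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Common;

internal static class AsyncExtractionExample
{
    public static ValueTask ExtractAllAsync(
        IAsyncArchive archive,
        string destinationDirectory,
        CancellationToken cancellationToken) =>
        archive.WriteToDirectoryAsync(
            destinationDirectory,
            options: null,
            progress: new Progress<ProgressReport>(report => Console.WriteLine(report)),
            cancellationToken: cancellationToken);
}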
@@ -1,5 +1,6 @@
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Threading;
|
||||
using SharpCompress.Factories;
|
||||
using SharpCompress.Readers;
|
||||
|
||||
@@ -25,12 +26,34 @@ public interface IMultiArchiveFactory : IFactory
|
||||
/// </summary>
|
||||
/// <param name="streams"></param>
|
||||
/// <param name="readerOptions">reading options.</param>
|
||||
IArchive Open(IReadOnlyList<Stream> streams, ReaderOptions? readerOptions = null);
|
||||
IArchive OpenArchive(IReadOnlyList<Stream> streams, ReaderOptions? readerOptions = null);
|
||||
|
||||
/// <summary>
|
||||
/// Opens a multi-part archive from streams asynchronously.
|
||||
/// </summary>
|
||||
/// <param name="streams"></param>
|
||||
/// <param name="readerOptions">reading options.</param>
|
||||
IAsyncArchive OpenAsyncArchive(
|
||||
IReadOnlyList<Stream> streams,
|
||||
ReaderOptions? readerOptions = null
|
||||
);
|
||||
|
||||
/// <summary>
|
||||
/// Constructor with IEnumerable FileInfo objects, multi and split support.
|
||||
/// </summary>
|
||||
/// <param name="fileInfos"></param>
|
||||
/// <param name="readerOptions">reading options.</param>
|
||||
IArchive Open(IReadOnlyList<FileInfo> fileInfos, ReaderOptions? readerOptions = null);
|
||||
IArchive OpenArchive(IReadOnlyList<FileInfo> fileInfos, ReaderOptions? readerOptions = null);
|
||||
|
||||
/// <summary>
|
||||
/// Opens a multi-part archive from files asynchronously.
|
||||
/// </summary>
|
||||
/// <param name="fileInfos"></param>
|
||||
/// <param name="readerOptions">reading options.</param>
|
||||
/// <param name="cancellationToken">Cancellation token.</param>
|
||||
IAsyncArchive OpenAsyncArchive(
|
||||
IReadOnlyList<FileInfo> fileInfos,
|
||||
ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default
|
||||
);
|
||||
}
|
||||
|
||||
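Usage sketch (not part of the diff): opening a multi-part archive through the renamed IMultiArchiveFactory.OpenArchive overload that takes a list of streams; the caller is assumed to already know the volume paths:

using System.Collections.Generic;
using System.IO;
using System.Linq;
using SharpCompress.Archives;

internal static class MultiPartOpenExample
{
    public static IArchive OpenVolumes(IMultiArchiveFactory factory, IEnumerable<string> volumePaths)
    {
        IReadOnlyList<Stream> streams = volumePaths
            .Select(path => (Stream)File.OpenRead(path))
            .ToList();

        // Open(...) has been renamed to OpenArchive(...).
        return factory.OpenArchive(streams);
    }
}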
src/SharpCompress/Archives/IMultiArchiveOpenable.cs (new file)
@@ -0,0 +1,35 @@
|
||||
#if NET8_0_OR_GREATER
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Threading;
|
||||
using SharpCompress.Readers;
|
||||
|
||||
namespace SharpCompress.Archives;
|
||||
|
||||
public interface IMultiArchiveOpenable<TSync, TASync>
|
||||
where TSync : IArchive
|
||||
where TASync : IAsyncArchive
|
||||
{
|
||||
public static abstract TSync OpenArchive(
|
||||
IEnumerable<FileInfo> fileInfos,
|
||||
ReaderOptions? readerOptions = null
|
||||
);
|
||||
|
||||
public static abstract TSync OpenArchive(
|
||||
IEnumerable<Stream> streams,
|
||||
ReaderOptions? readerOptions = null
|
||||
);
|
||||
|
||||
public static abstract TASync OpenAsyncArchive(
|
||||
IReadOnlyList<Stream> streams,
|
||||
ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default
|
||||
);
|
||||
|
||||
public static abstract TASync OpenAsyncArchive(
|
||||
IReadOnlyList<FileInfo> fileInfos,
|
||||
ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default
|
||||
);
|
||||
}
|
||||
#endif
|
||||
@@ -1,13 +1,22 @@
|
||||
using System;
|
||||
using System.IO;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Writers;
|
||||
|
||||
namespace SharpCompress.Archives;
|
||||
|
||||
public interface IWritableArchive : IArchive
|
||||
public interface IWritableArchiveCommon
|
||||
{
|
||||
void RemoveEntry(IArchiveEntry entry);
|
||||
/// <summary>
|
||||
/// Use this to pause entry rebuilding when adding large collections of entries. Dispose when complete. A using statement is recommended.
|
||||
/// </summary>
|
||||
/// <returns>An IDisposable that resumes entry rebuilding when disposed.</returns>
|
||||
IDisposable PauseEntryRebuilding();
|
||||
}
|
||||
|
||||
public interface IWritableArchive : IArchive, IWritableArchiveCommon
|
||||
{
|
||||
IArchiveEntry AddEntry(
|
||||
string key,
|
||||
Stream source,
|
||||
@@ -16,11 +25,53 @@ public interface IWritableArchive : IArchive
|
||||
DateTime? modified = null
|
||||
);
|
||||
|
||||
IArchiveEntry AddDirectoryEntry(string key, DateTime? modified = null);
|
||||
|
||||
/// <summary>
|
||||
/// Saves the archive to the specified stream using the given writer options.
|
||||
/// </summary>
|
||||
void SaveTo(Stream stream, WriterOptions options);
|
||||
|
||||
/// <summary>
|
||||
/// Use this to pause entry rebuilding when adding large collections of entries. Dispose when complete. A using statement is recommended.
|
||||
/// Removes the specified entry from the archive.
|
||||
/// </summary>
|
||||
/// <returns>An IDisposable that resumes entry rebuilding when disposed.</returns>
|
||||
IDisposable PauseEntryRebuilding();
|
||||
void RemoveEntry(IArchiveEntry entry);
|
||||
}
|
||||
|
||||
public interface IWritableAsyncArchive : IAsyncArchive, IWritableArchiveCommon
|
||||
{
|
||||
/// <summary>
|
||||
/// Asynchronously saves the archive to the specified stream using the given writer options.
|
||||
/// </summary>
|
||||
ValueTask SaveToAsync(
|
||||
Stream stream,
|
||||
WriterOptions options,
|
||||
CancellationToken cancellationToken = default
|
||||
);
|
||||
|
||||
/// <summary>
|
||||
/// Asynchronously adds an entry to the archive with the specified key, source stream, and options.
|
||||
/// </summary>
|
||||
ValueTask<IArchiveEntry> AddEntryAsync(
|
||||
string key,
|
||||
Stream source,
|
||||
bool closeStream,
|
||||
long size = 0,
|
||||
DateTime? modified = null,
|
||||
CancellationToken cancellationToken = default
|
||||
);
|
||||
|
||||
/// <summary>
|
||||
/// Asynchronously adds a directory entry to the archive with the specified key and modification time.
|
||||
/// </summary>
|
||||
ValueTask<IArchiveEntry> AddDirectoryEntryAsync(
|
||||
string key,
|
||||
DateTime? modified = null,
|
||||
CancellationToken cancellationToken = default
|
||||
);
|
||||
|
||||
/// <summary>
|
||||
/// Removes the specified entry from the archive.
|
||||
/// </summary>
|
||||
ValueTask RemoveEntryAsync(IArchiveEntry entry);
|
||||
}
|
||||
|
||||
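Usage sketch (not part of the diff): the writable surface is split into IWritableArchive and IWritableAsyncArchive. Building an archive through the async members, assuming a seekable payload stream and an archive instance created elsewhere:

using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Common;
using SharpCompress.Writers;

internal static class AsyncWriteExample
{
    public static async ValueTask BuildAsync(
        IWritableAsyncArchive archive,
        Stream payload,
        Stream output,
        CancellationToken cancellationToken)
    {
        await archive.AddDirectoryEntryAsync("docs", DateTime.UtcNow, cancellationToken);

        // payload.Length assumes the source stream is seekable.
        await archive.AddEntryAsync(
            "docs/readme.txt",
            payload,
            closeStream: false,
            size: payload.Length,
            modified: DateTime.UtcNow,
            cancellationToken: cancellationToken);

        await archive.SaveToAsync(output, new WriterOptions(CompressionType.Deflate), cancellationToken);
    }
}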
@@ -1,86 +1,70 @@
|
||||
using System;
|
||||
using System;
|
||||
using System.IO;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.Writers;
|
||||
|
||||
namespace SharpCompress.Archives;
|
||||
|
||||
public static class IWritableArchiveExtensions
|
||||
{
|
||||
public static void AddEntry(
|
||||
this IWritableArchive writableArchive,
|
||||
string entryPath,
|
||||
string filePath
|
||||
)
|
||||
extension(IWritableArchive writableArchive)
|
||||
{
|
||||
var fileInfo = new FileInfo(filePath);
|
||||
if (!fileInfo.Exists)
|
||||
public void AddAllFromDirectory(
|
||||
string filePath,
|
||||
string searchPattern = "*.*",
|
||||
SearchOption searchOption = SearchOption.AllDirectories
|
||||
)
|
||||
{
|
||||
throw new FileNotFoundException("Could not AddEntry: " + filePath);
|
||||
}
|
||||
writableArchive.AddEntry(
|
||||
entryPath,
|
||||
new FileInfo(filePath).OpenRead(),
|
||||
true,
|
||||
fileInfo.Length,
|
||||
fileInfo.LastWriteTime
|
||||
);
|
||||
}
|
||||
|
||||
public static void SaveTo(
|
||||
this IWritableArchive writableArchive,
|
||||
string filePath,
|
||||
WriterOptions options
|
||||
) => writableArchive.SaveTo(new FileInfo(filePath), options);
|
||||
|
||||
public static void SaveTo(
|
||||
this IWritableArchive writableArchive,
|
||||
FileInfo fileInfo,
|
||||
WriterOptions options
|
||||
)
|
||||
{
|
||||
using var stream = fileInfo.Open(FileMode.Create, FileAccess.Write);
|
||||
writableArchive.SaveTo(stream, options);
|
||||
}
|
||||
|
||||
public static void AddAllFromDirectory(
|
||||
this IWritableArchive writableArchive,
|
||||
string filePath,
|
||||
string searchPattern = "*.*",
|
||||
SearchOption searchOption = SearchOption.AllDirectories
|
||||
)
|
||||
{
|
||||
using (writableArchive.PauseEntryRebuilding())
|
||||
{
|
||||
foreach (var path in Directory.EnumerateFiles(filePath, searchPattern, searchOption))
|
||||
using (writableArchive.PauseEntryRebuilding())
|
||||
{
|
||||
var fileInfo = new FileInfo(path);
|
||||
writableArchive.AddEntry(
|
||||
path.Substring(filePath.Length),
|
||||
fileInfo.OpenRead(),
|
||||
true,
|
||||
fileInfo.Length,
|
||||
fileInfo.LastWriteTime
|
||||
);
|
||||
foreach (
|
||||
var path in Directory.EnumerateFiles(filePath, searchPattern, searchOption)
|
||||
)
|
||||
{
|
||||
var fileInfo = new FileInfo(path);
|
||||
writableArchive.AddEntry(
|
||||
path.Substring(filePath.Length),
|
||||
fileInfo.OpenRead(),
|
||||
true,
|
||||
fileInfo.Length,
|
||||
fileInfo.LastWriteTime
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public static IArchiveEntry AddEntry(
|
||||
this IWritableArchive writableArchive,
|
||||
string key,
|
||||
FileInfo fileInfo
|
||||
)
|
||||
{
|
||||
if (!fileInfo.Exists)
|
||||
public IArchiveEntry AddEntry(string key, string file) =>
|
||||
writableArchive.AddEntry(key, new FileInfo(file));
|
||||
|
||||
public IArchiveEntry AddEntry(
|
||||
string key,
|
||||
Stream source,
|
||||
long size = 0,
|
||||
DateTime? modified = null
|
||||
) => writableArchive.AddEntry(key, source, false, size, modified);
|
||||
|
||||
public IArchiveEntry AddEntry(string key, FileInfo fileInfo)
|
||||
{
|
||||
throw new ArgumentException("FileInfo does not exist.");
|
||||
if (!fileInfo.Exists)
|
||||
{
|
||||
throw new ArgumentException("FileInfo does not exist.");
|
||||
}
|
||||
return writableArchive.AddEntry(
|
||||
key,
|
||||
fileInfo.OpenRead(),
|
||||
true,
|
||||
fileInfo.Length,
|
||||
fileInfo.LastWriteTime
|
||||
);
|
||||
}
|
||||
|
||||
public void SaveTo(string filePath, WriterOptions? options = null) =>
|
||||
writableArchive.SaveTo(new FileInfo(filePath), options ?? new(CompressionType.Deflate));
|
||||
|
||||
public void SaveTo(FileInfo fileInfo, WriterOptions? options = null)
|
||||
{
|
||||
using var stream = fileInfo.Open(FileMode.Create, FileAccess.Write);
|
||||
writableArchive.SaveTo(stream, options ?? new(CompressionType.Deflate));
|
||||
}
|
||||
return writableArchive.AddEntry(
|
||||
key,
|
||||
fileInfo.OpenRead(),
|
||||
true,
|
||||
fileInfo.Length,
|
||||
fileInfo.LastWriteTime
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
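Usage sketch (not part of the diff): the extension members above bundle directory import and saving; SaveTo now falls back to CompressionType.Deflate when no WriterOptions are supplied:

using SharpCompress.Archives;

internal static class BulkAddExample
{
    public static void PackDirectory(IWritableArchive archive, string sourceDirectory, string targetFile)
    {
        // AddAllFromDirectory pauses entry rebuilding internally while adding files.
        archive.AddAllFromDirectory(sourceDirectory);

        // Options default to CompressionType.Deflate when omitted.
        archive.SaveTo(targetFile);
    }
}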
src/SharpCompress/Archives/IWritableArchiveOpenable.cs (new file)
@@ -0,0 +1,10 @@
|
||||
#if NET8_0_OR_GREATER
|
||||
namespace SharpCompress.Archives;
|
||||
|
||||
public interface IWritableArchiveOpenable
|
||||
: IArchiveOpenable<IWritableArchive, IWritableAsyncArchive>
|
||||
{
|
||||
public static abstract IWritableArchive CreateArchive();
|
||||
public static abstract IWritableAsyncArchive CreateAsyncArchive();
|
||||
}
|
||||
#endif
|
||||
@@ -0,0 +1,86 @@
|
||||
using System;
|
||||
using System.IO;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.Writers;
|
||||
|
||||
namespace SharpCompress.Archives;
|
||||
|
||||
public static class IWritableAsyncArchiveExtensions
|
||||
{
|
||||
extension(IWritableAsyncArchive writableArchive)
|
||||
{
|
||||
public async ValueTask AddAllFromDirectoryAsync(
|
||||
string filePath,
|
||||
string searchPattern = "*.*",
|
||||
SearchOption searchOption = SearchOption.AllDirectories
|
||||
)
|
||||
{
|
||||
using (writableArchive.PauseEntryRebuilding())
|
||||
{
|
||||
foreach (
|
||||
var path in Directory.EnumerateFiles(filePath, searchPattern, searchOption)
|
||||
)
|
||||
{
|
||||
var fileInfo = new FileInfo(path);
|
||||
await writableArchive.AddEntryAsync(
|
||||
path.Substring(filePath.Length),
|
||||
fileInfo.OpenRead(),
|
||||
true,
|
||||
fileInfo.Length,
|
||||
fileInfo.LastWriteTime
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public ValueTask<IArchiveEntry> AddEntryAsync(string key, string file) =>
|
||||
writableArchive.AddEntryAsync(key, new FileInfo(file));
|
||||
|
||||
public ValueTask<IArchiveEntry> AddEntryAsync(
|
||||
string key,
|
||||
Stream source,
|
||||
long size = 0,
|
||||
DateTime? modified = null
|
||||
) => writableArchive.AddEntryAsync(key, source, false, size, modified);
|
||||
|
||||
public ValueTask<IArchiveEntry> AddEntryAsync(string key, FileInfo fileInfo)
|
||||
{
|
||||
if (!fileInfo.Exists)
|
||||
{
|
||||
throw new ArgumentException("FileInfo does not exist.");
|
||||
}
|
||||
return writableArchive.AddEntryAsync(
|
||||
key,
|
||||
fileInfo.OpenRead(),
|
||||
true,
|
||||
fileInfo.Length,
|
||||
fileInfo.LastWriteTime
|
||||
);
|
||||
}
|
||||
|
||||
public ValueTask SaveToAsync(
|
||||
string filePath,
|
||||
WriterOptions? options = null,
|
||||
CancellationToken cancellationToken = default
|
||||
) =>
|
||||
writableArchive.SaveToAsync(
|
||||
new FileInfo(filePath),
|
||||
options ?? new(CompressionType.Deflate),
|
||||
cancellationToken
|
||||
);
|
||||
|
||||
public async ValueTask SaveToAsync(
|
||||
FileInfo fileInfo,
|
||||
WriterOptions? options = null,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
using var stream = fileInfo.Open(FileMode.Create, FileAccess.Write);
|
||||
await writableArchive
|
||||
.SaveToAsync(stream, options ?? new(CompressionType.Deflate), cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -16,5 +16,5 @@ public interface IWriteableArchiveFactory : Factories.IFactory
|
||||
/// Creates a new, empty archive, ready to be written.
|
||||
/// </summary>
|
||||
/// <returns>A new, empty writable archive.</returns>
|
||||
IWritableArchive CreateWriteableArchive();
|
||||
IWritableArchive CreateArchive();
|
||||
}
|
||||
|
||||
@@ -36,4 +36,7 @@ internal class FileInfoRarArchiveVolume : RarVolume
|
||||
new FileInfoRarFilePart(this, ReaderOptions.Password, markHeader, fileHeader, FileInfo);
|
||||
|
||||
internal override IEnumerable<RarFilePart> ReadFileParts() => FileParts;
|
||||
|
||||
internal override IAsyncEnumerable<RarFilePart> ReadFilePartsAsync() =>
|
||||
FileParts.ToAsyncEnumerable();
|
||||
}
|
||||
|
||||
src/SharpCompress/Archives/Rar/RarArchive.Async.cs (new file)
@@ -0,0 +1,53 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Archives.Rar;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.Common.Rar;
|
||||
using SharpCompress.IO;
|
||||
using SharpCompress.Readers;
|
||||
using SharpCompress.Readers.Rar;
|
||||
|
||||
namespace SharpCompress.Archives.Rar;
|
||||
|
||||
public partial class RarArchive
|
||||
{
|
||||
public override async ValueTask DisposeAsync()
|
||||
{
|
||||
if (!_disposed)
|
||||
{
|
||||
if (UnpackV1.IsValueCreated && UnpackV1.Value is IDisposable unpackV1)
|
||||
{
|
||||
unpackV1.Dispose();
|
||||
}
|
||||
|
||||
_disposed = true;
|
||||
await base.DisposeAsync();
|
||||
}
|
||||
}
|
||||
|
||||
protected override async ValueTask<IAsyncReader> CreateReaderForSolidExtractionAsync()
|
||||
{
|
||||
if (await this.IsMultipartVolumeAsync())
|
||||
{
|
||||
var streams = await VolumesAsync
|
||||
.Select(volume =>
|
||||
{
|
||||
volume.Stream.Position = 0;
|
||||
return volume.Stream;
|
||||
})
|
||||
.ToListAsync();
|
||||
return (RarReader)RarReader.OpenReader(streams, ReaderOptions);
|
||||
}
|
||||
|
||||
var stream = (await VolumesAsync.FirstAsync()).Stream;
|
||||
stream.Position = 0;
|
||||
return (RarReader)RarReader.OpenReader(stream, ReaderOptions);
|
||||
}
|
||||
|
||||
public override async ValueTask<bool> IsSolidAsync() =>
|
||||
await (await VolumesAsync.CastAsync<RarVolume>().FirstAsync()).IsSolidArchiveAsync();
|
||||
}
|
||||
@@ -1,18 +1,36 @@
|
||||
using System.Linq;
|
||||
using System.Linq;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common.Rar;
|
||||
|
||||
namespace SharpCompress.Archives.Rar;
|
||||
|
||||
public static class RarArchiveExtensions
|
||||
{
|
||||
/// <summary>
|
||||
/// RarArchive is the first volume of a multi-part archive. If MultipartVolume is true and IsFirstVolume is false then the first volume file must be missing.
|
||||
/// </summary>
|
||||
public static bool IsFirstVolume(this RarArchive archive) =>
|
||||
archive.Volumes.First().IsFirstVolume;
|
||||
extension(IRarArchive archive)
|
||||
{
|
||||
/// <summary>
|
||||
/// RarArchive is the first volume of a multi-part archive. If MultipartVolume is true and IsFirstVolume is false then the first volume file must be missing.
|
||||
/// </summary>
|
||||
public bool IsFirstVolume() => archive.Volumes.Cast<RarVolume>().First().IsFirstVolume;
|
||||
|
||||
/// <summary>
|
||||
/// RarArchive is part of a multi-part archive.
|
||||
/// </summary>
|
||||
public static bool IsMultipartVolume(this RarArchive archive) =>
|
||||
archive.Volumes.First().IsMultiVolume;
|
||||
/// <summary>
|
||||
/// RarArchive is part of a multi-part archive.
|
||||
/// </summary>
|
||||
public bool IsMultipartVolume() => archive.Volumes.Cast<RarVolume>().First().IsMultiVolume;
|
||||
}
|
||||
|
||||
extension(IRarAsyncArchive archive)
|
||||
{
|
||||
/// <summary>
|
||||
/// RarArchive is the first volume of a multi-part archive. If MultipartVolume is true and IsFirstVolume is false then the first volume file must be missing.
|
||||
/// </summary>
|
||||
public async ValueTask<bool> IsFirstVolumeAsync() =>
|
||||
(await archive.VolumesAsync.CastAsync<RarVolume>().FirstAsync()).IsFirstVolume;
|
||||
|
||||
/// <summary>
|
||||
/// RarArchive is part of a multi-part archive.
|
||||
/// </summary>
|
||||
public async ValueTask<bool> IsMultipartVolumeAsync() =>
|
||||
(await archive.VolumesAsync.CastAsync<RarVolume>().FirstAsync()).IsMultiVolume;
|
||||
}
|
||||
}
|
||||
|
||||
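Usage sketch (not part of the diff): the volume-inspection helpers are now extension members on IRarArchive, with async counterparts on IRarAsyncArchive:

using System;
using SharpCompress.Archives.Rar;

internal static class RarVolumeInfoExample
{
    public static void Describe(IRarArchive archive)
    {
        // Per the doc comments above, this combination means the first volume file is missing.
        if (archive.IsMultipartVolume() && !archive.IsFirstVolume())
        {
            Console.WriteLine("Multi-part archive opened without its first volume.");
        }
    }
}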
src/SharpCompress/Archives/Rar/RarArchive.Factory.cs (new file)
@@ -0,0 +1,187 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.Common.Rar;
|
||||
using SharpCompress.Common.Rar.Headers;
|
||||
using SharpCompress.Compressors.Rar;
|
||||
using SharpCompress.IO;
|
||||
using SharpCompress.Readers;
|
||||
using SharpCompress.Readers.Rar;
|
||||
|
||||
namespace SharpCompress.Archives.Rar;
|
||||
|
||||
public partial class RarArchive
|
||||
#if NET8_0_OR_GREATER
|
||||
: IArchiveOpenable<IRarArchive, IRarAsyncArchive>,
|
||||
IMultiArchiveOpenable<IRarArchive, IRarAsyncArchive>
|
||||
#endif
|
||||
{
|
||||
public static IRarAsyncArchive OpenAsyncArchive(
|
||||
string path,
|
||||
ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
path.NotNullOrEmpty(nameof(path));
|
||||
return (IRarAsyncArchive)OpenArchive(new FileInfo(path), readerOptions);
|
||||
}
|
||||
|
||||
public static IRarArchive OpenArchive(string filePath, ReaderOptions? options = null)
|
||||
{
|
||||
filePath.NotNullOrEmpty(nameof(filePath));
|
||||
var fileInfo = new FileInfo(filePath);
|
||||
return new RarArchive(
|
||||
new SourceStream(
|
||||
fileInfo,
|
||||
i => RarArchiveVolumeFactory.GetFilePart(i, fileInfo),
|
||||
options ?? new ReaderOptions()
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
public static IRarArchive OpenArchive(FileInfo fileInfo, ReaderOptions? options = null)
|
||||
{
|
||||
fileInfo.NotNull(nameof(fileInfo));
|
||||
return new RarArchive(
|
||||
new SourceStream(
|
||||
fileInfo,
|
||||
i => RarArchiveVolumeFactory.GetFilePart(i, fileInfo),
|
||||
options ?? new ReaderOptions()
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
public static IRarArchive OpenArchive(Stream stream, ReaderOptions? options = null)
|
||||
{
|
||||
stream.NotNull(nameof(stream));
|
||||
|
||||
if (stream is not { CanSeek: true })
|
||||
{
|
||||
throw new ArgumentException("Stream must be seekable", nameof(stream));
|
||||
}
|
||||
|
||||
return new RarArchive(new SourceStream(stream, _ => null, options ?? new ReaderOptions()));
|
||||
}
|
||||
|
||||
public static IRarArchive OpenArchive(
|
||||
IEnumerable<FileInfo> fileInfos,
|
||||
ReaderOptions? readerOptions = null
|
||||
)
|
||||
{
|
||||
fileInfos.NotNull(nameof(fileInfos));
|
||||
var files = fileInfos.ToArray();
|
||||
return new RarArchive(
|
||||
new SourceStream(
|
||||
files[0],
|
||||
i => i < files.Length ? files[i] : null,
|
||||
readerOptions ?? new ReaderOptions()
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
public static IRarArchive OpenArchive(
|
||||
IEnumerable<Stream> streams,
|
||||
ReaderOptions? readerOptions = null
|
||||
)
|
||||
{
|
||||
streams.NotNull(nameof(streams));
|
||||
var strms = streams.ToArray();
|
||||
return new RarArchive(
|
||||
new SourceStream(
|
||||
strms[0],
|
||||
i => i < strms.Length ? strms[i] : null,
|
||||
readerOptions ?? new ReaderOptions()
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
public static IRarAsyncArchive OpenAsyncArchive(
|
||||
Stream stream,
|
||||
ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
return (IRarAsyncArchive)OpenArchive(stream, readerOptions);
|
||||
}
|
||||
|
||||
public static IRarAsyncArchive OpenAsyncArchive(
|
||||
FileInfo fileInfo,
|
||||
ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
return (IRarAsyncArchive)OpenArchive(fileInfo, readerOptions);
|
||||
}
|
||||
|
||||
public static IRarAsyncArchive OpenAsyncArchive(
|
||||
IReadOnlyList<Stream> streams,
|
||||
ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
return (IRarAsyncArchive)OpenArchive(streams, readerOptions);
|
||||
}
|
||||
|
||||
public static IRarAsyncArchive OpenAsyncArchive(
|
||||
IReadOnlyList<FileInfo> fileInfos,
|
||||
ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
return (IRarAsyncArchive)OpenArchive(fileInfos, readerOptions);
|
||||
}
|
||||
|
||||
public static bool IsRarFile(string filePath) => IsRarFile(new FileInfo(filePath));
|
||||
|
||||
public static bool IsRarFile(FileInfo fileInfo)
|
||||
{
|
||||
if (!fileInfo.Exists)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
using Stream stream = fileInfo.OpenRead();
|
||||
return IsRarFile(stream);
|
||||
}
|
||||
|
||||
public static bool IsRarFile(Stream stream, ReaderOptions? options = null)
|
||||
{
|
||||
try
|
||||
{
|
||||
MarkHeader.Read(stream, true, false);
|
||||
return true;
|
||||
}
|
||||
catch
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
public static async ValueTask<bool> IsRarFileAsync(
|
||||
Stream stream,
|
||||
ReaderOptions? options = null,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
try
|
||||
{
|
||||
await MarkHeader
|
||||
.ReadAsync(stream, true, false, cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
return true;
|
||||
}
|
||||
catch
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
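Usage sketch (not part of the diff): the static factory methods move into RarArchive.Factory.cs and are renamed to OpenArchive/OpenAsyncArchive, while IsRarFile keeps its name:

using System;
using SharpCompress.Archives.Rar;

internal static class RarOpenExample
{
    public static void Inspect(string path)
    {
        if (!RarArchive.IsRarFile(path))
        {
            Console.WriteLine("Not a RAR file.");
            return;
        }

        using var archive = RarArchive.OpenArchive(path);
        Console.WriteLine(archive.IsSolid ? "Solid RAR archive" : "Regular RAR archive");
    }
}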
@@ -2,6 +2,8 @@ using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.Common.Rar;
|
||||
using SharpCompress.Common.Rar.Headers;
|
||||
@@ -12,17 +14,26 @@ using SharpCompress.Readers.Rar;
|
||||
|
||||
namespace SharpCompress.Archives.Rar;
|
||||
|
||||
public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
|
||||
public interface IRarArchiveCommon
|
||||
{
|
||||
int MinVersion { get; }
|
||||
int MaxVersion { get; }
|
||||
}
|
||||
|
||||
public interface IRarArchive : IArchive, IRarArchiveCommon { }
|
||||
|
||||
public interface IRarAsyncArchive : IAsyncArchive, IRarArchiveCommon { }
|
||||
|
||||
public partial class RarArchive
|
||||
: AbstractArchive<RarArchiveEntry, RarVolume>,
|
||||
IRarArchive,
|
||||
IRarAsyncArchive
|
||||
{
|
||||
private bool _disposed;
|
||||
internal Lazy<IRarUnpack> UnpackV2017 { get; } =
|
||||
new(() => new Compressors.Rar.UnpackV2017.Unpack());
|
||||
internal Lazy<IRarUnpack> UnpackV1 { get; } = new(() => new Compressors.Rar.UnpackV1.Unpack());
|
||||
|
||||
/// <summary>
|
||||
/// Constructor with a SourceStream able to handle FileInfo and Streams.
|
||||
/// </summary>
|
||||
/// <param name="sourceStream"></param>
|
||||
private RarArchive(SourceStream sourceStream)
|
||||
: base(ArchiveType.Rar, sourceStream) { }
|
||||
|
||||
@@ -43,12 +54,17 @@ public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
|
||||
protected override IEnumerable<RarArchiveEntry> LoadEntries(IEnumerable<RarVolume> volumes) =>
|
||||
RarArchiveEntryFactory.GetEntries(this, volumes, ReaderOptions);
|
||||
|
||||
// Simple async property - kept in original file
|
||||
protected override IAsyncEnumerable<RarArchiveEntry> LoadEntriesAsync(
|
||||
IAsyncEnumerable<RarVolume> volumes
|
||||
) => RarArchiveEntryFactory.GetEntriesAsync(this, volumes, ReaderOptions);
|
||||
|
||||
protected override IEnumerable<RarVolume> LoadVolumes(SourceStream sourceStream)
|
||||
{
|
||||
sourceStream.LoadAllParts(); //request all streams
|
||||
sourceStream.LoadAllParts();
|
||||
var streams = sourceStream.Streams.ToArray();
|
||||
var i = 0;
|
||||
if (streams.Length > 1 && IsRarFile(streams[1], ReaderOptions)) //test part 2 - true = multipart not split
|
||||
if (streams.Length > 1 && IsRarFile(streams[1], ReaderOptions))
|
||||
{
|
||||
sourceStream.IsVolumes = true;
|
||||
streams[1].Position = 0;
|
||||
@@ -61,7 +77,6 @@ public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
|
||||
));
|
||||
}
|
||||
|
||||
//split mode or single file
|
||||
return new StreamRarArchiveVolume(sourceStream, ReaderOptions, i++).AsEnumerable();
|
||||
}
|
||||
|
||||
@@ -74,135 +89,19 @@ public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
|
||||
volume.Stream.Position = 0;
|
||||
return volume.Stream;
|
||||
});
|
||||
return RarReader.Open(streams, ReaderOptions);
|
||||
return (RarReader)RarReader.OpenReader(streams, ReaderOptions);
|
||||
}
|
||||
|
||||
var stream = Volumes.First().Stream;
|
||||
stream.Position = 0;
|
||||
return RarReader.Open(stream, ReaderOptions);
|
||||
return (RarReader)RarReader.OpenReader(stream, ReaderOptions);
|
||||
}
|
||||
|
||||
public override bool IsSolid => Volumes.First().IsSolidArchive;
|
||||
|
||||
public override bool IsEncrypted => Entries.First(x => !x.IsDirectory).IsEncrypted;
|
||||
|
||||
public virtual int MinVersion => Volumes.First().MinVersion;
|
||||
|
||||
public virtual int MaxVersion => Volumes.First().MaxVersion;
|
||||
|
||||
#region Creation
|
||||
/// <summary>
|
||||
/// Constructor with a FileInfo object to an existing file.
|
||||
/// </summary>
|
||||
/// <param name="filePath"></param>
|
||||
/// <param name="options"></param>
|
||||
public static RarArchive Open(string filePath, ReaderOptions? options = null)
|
||||
{
|
||||
filePath.NotNullOrEmpty(nameof(filePath));
|
||||
var fileInfo = new FileInfo(filePath);
|
||||
return new RarArchive(
|
||||
new SourceStream(
|
||||
fileInfo,
|
||||
i => RarArchiveVolumeFactory.GetFilePart(i, fileInfo),
|
||||
options ?? new ReaderOptions()
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Constructor with a FileInfo object to an existing file.
|
||||
/// </summary>
|
||||
/// <param name="fileInfo"></param>
|
||||
/// <param name="options"></param>
|
||||
public static RarArchive Open(FileInfo fileInfo, ReaderOptions? options = null)
|
||||
{
|
||||
fileInfo.NotNull(nameof(fileInfo));
|
||||
return new RarArchive(
|
||||
new SourceStream(
|
||||
fileInfo,
|
||||
i => RarArchiveVolumeFactory.GetFilePart(i, fileInfo),
|
||||
options ?? new ReaderOptions()
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Takes a seekable Stream as a source
|
||||
/// </summary>
|
||||
/// <param name="stream"></param>
|
||||
/// <param name="options"></param>
|
||||
public static RarArchive Open(Stream stream, ReaderOptions? options = null)
|
||||
{
|
||||
stream.NotNull(nameof(stream));
|
||||
|
||||
if (stream is not { CanSeek: true })
|
||||
{
|
||||
throw new ArgumentException("Stream must be seekable", nameof(stream));
|
||||
}
|
||||
|
||||
return new RarArchive(new SourceStream(stream, _ => null, options ?? new ReaderOptions()));
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Constructor with all file parts passed in
|
||||
/// </summary>
|
||||
/// <param name="fileInfos"></param>
|
||||
/// <param name="readerOptions"></param>
|
||||
public static RarArchive Open(
|
||||
IEnumerable<FileInfo> fileInfos,
|
||||
ReaderOptions? readerOptions = null
|
||||
)
|
||||
{
|
||||
fileInfos.NotNull(nameof(fileInfos));
|
||||
var files = fileInfos.ToArray();
|
||||
return new RarArchive(
|
||||
new SourceStream(
|
||||
files[0],
|
||||
i => i < files.Length ? files[i] : null,
|
||||
readerOptions ?? new ReaderOptions()
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Constructor with all stream parts passed in
|
||||
/// </summary>
|
||||
/// <param name="streams"></param>
|
||||
/// <param name="readerOptions"></param>
|
||||
public static RarArchive Open(IEnumerable<Stream> streams, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
streams.NotNull(nameof(streams));
|
||||
var strms = streams.ToArray();
|
||||
return new RarArchive(
|
||||
new SourceStream(
|
||||
strms[0],
|
||||
i => i < strms.Length ? strms[i] : null,
|
||||
readerOptions ?? new ReaderOptions()
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
public static bool IsRarFile(string filePath) => IsRarFile(new FileInfo(filePath));
|
||||
|
||||
public static bool IsRarFile(FileInfo fileInfo)
|
||||
{
|
||||
if (!fileInfo.Exists)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
using Stream stream = fileInfo.OpenRead();
|
||||
return IsRarFile(stream);
|
||||
}
|
||||
|
||||
public static bool IsRarFile(Stream stream, ReaderOptions? options = null)
|
||||
{
|
||||
try
|
||||
{
|
||||
MarkHeader.Read(stream, true, false);
|
||||
return true;
|
||||
}
|
||||
catch
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
|
||||
src/SharpCompress/Archives/Rar/RarArchiveEntry.Async.cs (new file)
@@ -0,0 +1,43 @@
|
||||
using System.IO;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.Common.Rar;
|
||||
using SharpCompress.Common.Rar.Headers;
|
||||
using SharpCompress.Compressors.Rar;
|
||||
using SharpCompress.Readers;
|
||||
|
||||
namespace SharpCompress.Archives.Rar;
|
||||
|
||||
public partial class RarArchiveEntry
|
||||
{
|
||||
public async ValueTask<Stream> OpenEntryStreamAsync(
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
RarStream stream;
|
||||
if (IsRarV3)
|
||||
{
|
||||
stream = new RarStream(
|
||||
archive.UnpackV1.Value,
|
||||
FileHeader,
|
||||
await MultiVolumeReadOnlyAsyncStream.Create(
|
||||
Parts.ToAsyncEnumerable().CastAsync<RarFilePart>()
|
||||
)
|
||||
);
|
||||
}
|
||||
else
|
||||
{
|
||||
stream = new RarStream(
|
||||
archive.UnpackV2017.Value,
|
||||
FileHeader,
|
||||
await MultiVolumeReadOnlyAsyncStream.Create(
|
||||
Parts.ToAsyncEnumerable().CastAsync<RarFilePart>()
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
await stream.InitializeAsync(cancellationToken);
|
||||
return stream;
|
||||
}
|
||||
}
|
||||
@@ -2,6 +2,8 @@ using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.Common.Rar;
|
||||
using SharpCompress.Common.Rar.Headers;
|
||||
@@ -10,7 +12,7 @@ using SharpCompress.Readers;
|
||||
|
||||
namespace SharpCompress.Archives.Rar;
|
||||
|
||||
public class RarArchiveEntry : RarEntry, IArchiveEntry
|
||||
public partial class RarArchiveEntry : RarEntry, IArchiveEntry
|
||||
{
|
||||
private readonly ICollection<RarFilePart> parts;
|
||||
private readonly RarArchive archive;
|
||||
@@ -68,20 +70,26 @@ public class RarArchiveEntry : RarEntry, IArchiveEntry
|
||||
|
||||
public Stream OpenEntryStream()
|
||||
{
|
||||
RarStream stream;
|
||||
if (IsRarV3)
|
||||
{
|
||||
return new RarStream(
|
||||
stream = new RarStream(
|
||||
archive.UnpackV1.Value,
|
||||
FileHeader,
|
||||
new MultiVolumeReadOnlyStream(Parts.Cast<RarFilePart>(), archive)
|
||||
new MultiVolumeReadOnlyStream(Parts.Cast<RarFilePart>())
|
||||
);
|
||||
}
|
||||
else
|
||||
{
|
||||
stream = new RarStream(
|
||||
archive.UnpackV2017.Value,
|
||||
FileHeader,
|
||||
new MultiVolumeReadOnlyStream(Parts.Cast<RarFilePart>())
|
||||
);
|
||||
}
|
||||
|
||||
return new RarStream(
|
||||
archive.UnpackV2017.Value,
|
||||
FileHeader,
|
||||
new MultiVolumeReadOnlyStream(Parts.Cast<RarFilePart>(), archive)
|
||||
);
|
||||
stream.Initialize();
|
||||
return stream;
|
||||
}
|
||||
|
||||
public bool IsComplete
|
||||
|
||||
@@ -17,6 +17,19 @@ internal static class RarArchiveEntryFactory
|
||||
}
|
||||
}
|
||||
|
||||
private static async IAsyncEnumerable<RarFilePart> GetFilePartsAsync(
|
||||
IAsyncEnumerable<RarVolume> parts
|
||||
)
|
||||
{
|
||||
await foreach (var rarPart in parts)
|
||||
{
|
||||
await foreach (var fp in rarPart.ReadFilePartsAsync())
|
||||
{
|
||||
yield return fp;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private static IEnumerable<IEnumerable<RarFilePart>> GetMatchedFileParts(
|
||||
IEnumerable<RarVolume> parts
|
||||
)
|
||||
@@ -38,6 +51,27 @@ internal static class RarArchiveEntryFactory
|
||||
}
|
||||
}
|
||||
|
||||
private static async IAsyncEnumerable<IEnumerable<RarFilePart>> GetMatchedFilePartsAsync(
|
||||
IAsyncEnumerable<RarVolume> parts
|
||||
)
|
||||
{
|
||||
var groupedParts = new List<RarFilePart>();
|
||||
await foreach (var fp in GetFilePartsAsync(parts))
|
||||
{
|
||||
groupedParts.Add(fp);
|
||||
|
||||
if (!fp.FileHeader.IsSplitAfter)
|
||||
{
|
||||
yield return groupedParts;
|
||||
groupedParts = new List<RarFilePart>();
|
||||
}
|
||||
}
|
||||
if (groupedParts.Count > 0)
|
||||
{
|
||||
yield return groupedParts;
|
||||
}
|
||||
}
|
||||
|
||||
internal static IEnumerable<RarArchiveEntry> GetEntries(
|
||||
RarArchive archive,
|
||||
IEnumerable<RarVolume> rarParts,
|
||||
@@ -49,4 +83,16 @@ internal static class RarArchiveEntryFactory
|
||||
yield return new RarArchiveEntry(archive, groupedParts, readerOptions);
|
||||
}
|
||||
}
|
||||
|
||||
internal static async IAsyncEnumerable<RarArchiveEntry> GetEntriesAsync(
|
||||
RarArchive archive,
|
||||
IAsyncEnumerable<RarVolume> rarParts,
|
||||
ReaderOptions readerOptions
|
||||
)
|
||||
{
|
||||
await foreach (var groupedParts in GetMatchedFilePartsAsync(rarParts))
|
||||
{
|
||||
yield return new RarArchiveEntry(archive, groupedParts, readerOptions);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -13,6 +13,7 @@ internal static class RarArchiveVolumeFactory
        //new style rar - ..part1 | /part01 | part001 ....
        var m = Regex.Match(part1.Name, @"^(.*\.part)([0-9]+)(\.rar)$", RegexOptions.IgnoreCase);
        if (m.Success)
        {
            item = new FileInfo(
                Path.Combine(
                    part1.DirectoryName!,
@@ -23,11 +24,13 @@ internal static class RarArchiveVolumeFactory
                )
            )
        );
        }
        else
        {
            //old style - ...rar, .r00, .r01 ...
            m = Regex.Match(part1.Name, @"^(.*\.)([r-z{])(ar|[0-9]+)$", RegexOptions.IgnoreCase);
            if (m.Success)
            {
                item = new FileInfo(
                    Path.Combine(
                        part1.DirectoryName!,
@@ -40,12 +43,17 @@ internal static class RarArchiveVolumeFactory
                )
            )
        );
            }
            else //split .001, .002 ....
            {
                return ArchiveVolumeFactory.GetFilePart(index, part1);
            }
        }

        if (item != null && item.Exists)
        {
            return item;
        }

        return null; //no more items
    }
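An illustrative helper (not part of the library) that restates the two naming schemes the regexes above recognise; ".001"-style split volumes fall through to ArchiveVolumeFactory, exactly as in the diff.

using System;
using System.Text.RegularExpressions;

internal static class RarVolumeNameSketch
{
    public static string? NextVolumeName(string name)
    {
        var m = Regex.Match(name, @"^(.*\.part)([0-9]+)(\.rar)$", RegexOptions.IgnoreCase);
        if (m.Success) // new style: archive.part01.rar -> archive.part02.rar
        {
            var next = (int.Parse(m.Groups[2].Value) + 1).ToString()
                .PadLeft(m.Groups[2].Value.Length, '0');
            return m.Groups[1].Value + next + m.Groups[3].Value;
        }
        m = Regex.Match(name, @"^(.*\.)r([0-9]+)$", RegexOptions.IgnoreCase);
        if (m.Success) // old style: archive.r00 -> archive.r01 (archive.rar comes first)
        {
            var next = (int.Parse(m.Groups[2].Value) + 1).ToString()
                .PadLeft(m.Groups[2].Value.Length, '0');
            return m.Groups[1].Value + "r" + next;
        }
        return null; // split .001/.002 volumes are handled elsewhere
    }
}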
@@ -14,6 +14,9 @@ internal class StreamRarArchiveVolume : RarVolume

    internal override IEnumerable<RarFilePart> ReadFileParts() => GetVolumeFileParts();

    internal override IAsyncEnumerable<RarFilePart> ReadFilePartsAsync() =>
        GetVolumeFilePartsAsync();

    internal override RarFilePart CreateFilePart(MarkHeader markHeader, FileHeader fileHeader) =>
        new SeekableFilePart(markHeader, fileHeader, Index, Stream, ReaderOptions.Password);
}
src/SharpCompress/Archives/SevenZip/SevenZipArchive.Async.cs (new file, 73 lines)
@@ -0,0 +1,73 @@
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.Common.SevenZip;
|
||||
using SharpCompress.IO;
|
||||
using SharpCompress.Readers;
|
||||
|
||||
namespace SharpCompress.Archives.SevenZip;
|
||||
|
||||
public partial class SevenZipArchive
|
||||
{
|
||||
private async ValueTask LoadFactoryAsync(
|
||||
Stream stream,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
if (_database is null)
|
||||
{
|
||||
stream.Position = 0;
|
||||
var reader = new ArchiveReader();
|
||||
await reader.OpenAsync(
|
||||
stream,
|
||||
lookForHeader: ReaderOptions.LookForHeader,
|
||||
cancellationToken
|
||||
);
|
||||
_database = await reader.ReadDatabaseAsync(
|
||||
new PasswordProvider(ReaderOptions.Password),
|
||||
cancellationToken
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
protected override async IAsyncEnumerable<SevenZipArchiveEntry> LoadEntriesAsync(
|
||||
IAsyncEnumerable<SevenZipVolume> volumes
|
||||
)
|
||||
{
|
||||
var stream = (await volumes.SingleAsync()).Stream;
|
||||
await LoadFactoryAsync(stream);
|
||||
if (_database is null)
|
||||
{
|
||||
yield break;
|
||||
}
|
||||
var entries = new SevenZipArchiveEntry[_database._files.Count];
|
||||
for (var i = 0; i < _database._files.Count; i++)
|
||||
{
|
||||
var file = _database._files[i];
|
||||
entries[i] = new SevenZipArchiveEntry(
|
||||
this,
|
||||
new SevenZipFilePart(stream, _database, i, file, ReaderOptions.ArchiveEncoding)
|
||||
);
|
||||
}
|
||||
foreach (var group in entries.Where(x => !x.IsDirectory).GroupBy(x => x.FilePart.Folder))
|
||||
{
|
||||
var isSolid = false;
|
||||
foreach (var entry in group)
|
||||
{
|
||||
entry.IsSolid = isSolid;
|
||||
isSolid = true;
|
||||
}
|
||||
}
|
||||
|
||||
foreach (var entry in entries)
|
||||
{
|
||||
yield return entry;
|
||||
}
|
||||
}
|
||||
|
||||
protected override ValueTask<IAsyncReader> CreateReaderForSolidExtractionAsync() =>
|
||||
new(new SevenZipReader(ReaderOptions, this));
|
||||
}
|
||||
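A hedged usage sketch of the async loading path above: OpenAsyncArchive comes from the factory partial shown next, while the EntriesAsync member name used here is an assumption about the IAsyncArchive surface.

using System;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Archives.SevenZip;

internal static class SevenZipAsyncListingSketch
{
    public static async Task ListAsync(string path, CancellationToken ct = default)
    {
        var archive = SevenZipArchive.OpenAsyncArchive(path, cancellationToken: ct);
        await foreach (var entry in archive.EntriesAsync.WithCancellation(ct)) // assumed member name
        {
            Console.WriteLine($"{entry.Key} ({entry.Size} bytes)");
        }
    }
}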
src/SharpCompress/Archives/SevenZip/SevenZipArchive.Factory.cs (new file, 210 lines)
@@ -0,0 +1,210 @@
|
||||
using System;
|
||||
using System.Buffers;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.IO;
|
||||
using SharpCompress.Readers;
|
||||
|
||||
namespace SharpCompress.Archives.SevenZip;
|
||||
|
||||
public partial class SevenZipArchive
|
||||
#if NET8_0_OR_GREATER
|
||||
: IArchiveOpenable<IArchive, IAsyncArchive>,
|
||||
IMultiArchiveOpenable<IArchive, IAsyncArchive>
|
||||
#endif
|
||||
{
|
||||
public static IAsyncArchive OpenAsyncArchive(
|
||||
string path,
|
||||
ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
path.NotNullOrEmpty("path");
|
||||
return (IAsyncArchive)OpenArchive(new FileInfo(path), readerOptions ?? new ReaderOptions());
|
||||
}
|
||||
|
||||
public static IArchive OpenArchive(string filePath, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
filePath.NotNullOrEmpty("filePath");
|
||||
return OpenArchive(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
|
||||
}
|
||||
|
||||
public static IArchive OpenArchive(FileInfo fileInfo, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
fileInfo.NotNull("fileInfo");
|
||||
return new SevenZipArchive(
|
||||
new SourceStream(
|
||||
fileInfo,
|
||||
i => ArchiveVolumeFactory.GetFilePart(i, fileInfo),
|
||||
readerOptions ?? new ReaderOptions()
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
public static IArchive OpenArchive(
|
||||
IEnumerable<FileInfo> fileInfos,
|
||||
ReaderOptions? readerOptions = null
|
||||
)
|
||||
{
|
||||
fileInfos.NotNull(nameof(fileInfos));
|
||||
var files = fileInfos.ToArray();
|
||||
return new SevenZipArchive(
|
||||
new SourceStream(
|
||||
files[0],
|
||||
i => i < files.Length ? files[i] : null,
|
||||
readerOptions ?? new ReaderOptions()
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
public static IArchive OpenArchive(
|
||||
IEnumerable<Stream> streams,
|
||||
ReaderOptions? readerOptions = null
|
||||
)
|
||||
{
|
||||
streams.NotNull(nameof(streams));
|
||||
var strms = streams.ToArray();
|
||||
return new SevenZipArchive(
|
||||
new SourceStream(
|
||||
strms[0],
|
||||
i => i < strms.Length ? strms[i] : null,
|
||||
readerOptions ?? new ReaderOptions()
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
public static IArchive OpenArchive(Stream stream, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
stream.NotNull("stream");
|
||||
|
||||
if (stream is not { CanSeek: true })
|
||||
{
|
||||
throw new ArgumentException("Stream must be seekable", nameof(stream));
|
||||
}
|
||||
|
||||
return new SevenZipArchive(
|
||||
new SourceStream(stream, _ => null, readerOptions ?? new ReaderOptions())
|
||||
);
|
||||
}
|
||||
|
||||
public static IAsyncArchive OpenAsyncArchive(
|
||||
Stream stream,
|
||||
ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
return (IAsyncArchive)OpenArchive(stream, readerOptions);
|
||||
}
|
||||
|
||||
public static IAsyncArchive OpenAsyncArchive(
|
||||
FileInfo fileInfo,
|
||||
ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
return (IAsyncArchive)OpenArchive(fileInfo, readerOptions);
|
||||
}
|
||||
|
||||
public static IAsyncArchive OpenAsyncArchive(
|
||||
IReadOnlyList<Stream> streams,
|
||||
ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
return (IAsyncArchive)OpenArchive(streams, readerOptions);
|
||||
}
|
||||
|
||||
public static IAsyncArchive OpenAsyncArchive(
|
||||
IReadOnlyList<FileInfo> fileInfos,
|
||||
ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
return (IAsyncArchive)OpenArchive(fileInfos, readerOptions);
|
||||
}
|
||||
|
||||
public static bool IsSevenZipFile(string filePath) => IsSevenZipFile(new FileInfo(filePath));
|
||||
|
||||
public static bool IsSevenZipFile(FileInfo fileInfo)
|
||||
{
|
||||
if (!fileInfo.Exists)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
using Stream stream = fileInfo.OpenRead();
|
||||
return IsSevenZipFile(stream);
|
||||
}
|
||||
|
||||
public static bool IsSevenZipFile(Stream stream)
|
||||
{
|
||||
try
|
||||
{
|
||||
return SignatureMatch(stream);
|
||||
}
|
||||
catch
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
public static async ValueTask<bool> IsSevenZipFileAsync(
|
||||
Stream stream,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
try
|
||||
{
|
||||
return await SignatureMatchAsync(stream, cancellationToken);
|
||||
}
|
||||
catch
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
private static ReadOnlySpan<byte> Signature => [(byte)'7', (byte)'z', 0xBC, 0xAF, 0x27, 0x1C];
|
||||
|
||||
private static bool SignatureMatch(Stream stream)
|
||||
{
|
||||
var buffer = ArrayPool<byte>.Shared.Rent(6);
|
||||
try
|
||||
{
|
||||
stream.ReadExact(buffer, 0, 6);
|
||||
return buffer.AsSpan().Slice(0, 6).SequenceEqual(Signature);
|
||||
}
|
||||
finally
|
||||
{
|
||||
ArrayPool<byte>.Shared.Return(buffer);
|
||||
}
|
||||
}
|
||||
|
||||
private static async ValueTask<bool> SignatureMatchAsync(
|
||||
Stream stream,
|
||||
CancellationToken cancellationToken
|
||||
)
|
||||
{
|
||||
var buffer = ArrayPool<byte>.Shared.Rent(6);
|
||||
try
|
||||
{
|
||||
if (!await stream.ReadFullyAsync(buffer, 0, 6, cancellationToken).ConfigureAwait(false))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
return buffer.AsSpan().Slice(0, 6).SequenceEqual(Signature);
|
||||
}
|
||||
finally
|
||||
{
|
||||
ArrayPool<byte>.Shared.Return(buffer);
|
||||
}
|
||||
}
|
||||
}
|
||||
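A usage sketch for the detection helpers above. SignatureMatchAsync consumes 6 bytes, so rewinding before opening is the caller's responsibility here (an assumption about intended use); the namespace of IAsyncArchive is also assumed.

using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Archives.SevenZip;

internal static class SevenZipProbeSketch
{
    public static async ValueTask<IAsyncArchive?> TryOpenAsync(Stream stream, CancellationToken ct = default)
    {
        if (!stream.CanSeek)
        {
            return null; // OpenArchive requires a seekable stream
        }
        if (!await SevenZipArchive.IsSevenZipFileAsync(stream, ct))
        {
            return null;
        }
        stream.Position = 0; // rewind past the 6-byte signature probe
        return SevenZipArchive.OpenAsyncArchive(stream, cancellationToken: ct);
    }
}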
@@ -2,6 +2,8 @@ using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.Common.SevenZip;
|
||||
using SharpCompress.Compressors.LZMA.Utilites;
|
||||
@@ -10,99 +12,10 @@ using SharpCompress.Readers;
|
||||
|
||||
namespace SharpCompress.Archives.SevenZip;
|
||||
|
||||
public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVolume>
|
||||
public partial class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVolume>
|
||||
{
|
||||
private ArchiveDatabase? _database;
|
||||
|
||||
/// <summary>
|
||||
/// Constructor expects a filepath to an existing file.
|
||||
/// </summary>
|
||||
/// <param name="filePath"></param>
|
||||
/// <param name="readerOptions"></param>
|
||||
public static SevenZipArchive Open(string filePath, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
filePath.NotNullOrEmpty("filePath");
|
||||
return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Constructor with a FileInfo object to an existing file.
|
||||
/// </summary>
|
||||
/// <param name="fileInfo"></param>
|
||||
/// <param name="readerOptions"></param>
|
||||
public static SevenZipArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
fileInfo.NotNull("fileInfo");
|
||||
return new SevenZipArchive(
|
||||
new SourceStream(
|
||||
fileInfo,
|
||||
i => ArchiveVolumeFactory.GetFilePart(i, fileInfo),
|
||||
readerOptions ?? new ReaderOptions()
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Constructor with all file parts passed in
|
||||
/// </summary>
|
||||
/// <param name="fileInfos"></param>
|
||||
/// <param name="readerOptions"></param>
|
||||
public static SevenZipArchive Open(
|
||||
IEnumerable<FileInfo> fileInfos,
|
||||
ReaderOptions? readerOptions = null
|
||||
)
|
||||
{
|
||||
fileInfos.NotNull(nameof(fileInfos));
|
||||
var files = fileInfos.ToArray();
|
||||
return new SevenZipArchive(
|
||||
new SourceStream(
|
||||
files[0],
|
||||
i => i < files.Length ? files[i] : null,
|
||||
readerOptions ?? new ReaderOptions()
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Constructor with all stream parts passed in
|
||||
/// </summary>
|
||||
/// <param name="streams"></param>
|
||||
/// <param name="readerOptions"></param>
|
||||
public static SevenZipArchive Open(
|
||||
IEnumerable<Stream> streams,
|
||||
ReaderOptions? readerOptions = null
|
||||
)
|
||||
{
|
||||
streams.NotNull(nameof(streams));
|
||||
var strms = streams.ToArray();
|
||||
return new SevenZipArchive(
|
||||
new SourceStream(
|
||||
strms[0],
|
||||
i => i < strms.Length ? strms[i] : null,
|
||||
readerOptions ?? new ReaderOptions()
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Takes a seekable Stream as a source
|
||||
/// </summary>
|
||||
/// <param name="stream"></param>
|
||||
/// <param name="readerOptions"></param>
|
||||
public static SevenZipArchive Open(Stream stream, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
stream.NotNull("stream");
|
||||
|
||||
if (stream is not { CanSeek: true })
|
||||
{
|
||||
throw new ArgumentException("Stream must be seekable", nameof(stream));
|
||||
}
|
||||
|
||||
return new SevenZipArchive(
|
||||
new SourceStream(stream, _ => null, readerOptions ?? new ReaderOptions())
|
||||
);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Constructor with a SourceStream able to handle FileInfo and Streams.
|
||||
/// </summary>
|
||||
@@ -116,18 +29,6 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
|
||||
return new SevenZipVolume(sourceStream, ReaderOptions, 0).AsEnumerable(); //simple single volume or split, multivolume not supported
|
||||
}
|
||||
|
||||
public static bool IsSevenZipFile(string filePath) => IsSevenZipFile(new FileInfo(filePath));
|
||||
|
||||
public static bool IsSevenZipFile(FileInfo fileInfo)
|
||||
{
|
||||
if (!fileInfo.Exists)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
using Stream stream = fileInfo.OpenRead();
|
||||
return IsSevenZipFile(stream);
|
||||
}
|
||||
|
||||
internal SevenZipArchive()
|
||||
: base(ArchiveType.SevenZip) { }
|
||||
|
||||
@@ -135,32 +36,45 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
|
||||
IEnumerable<SevenZipVolume> volumes
|
||||
)
|
||||
{
|
||||
var stream = volumes.Single().Stream;
|
||||
LoadFactory(stream);
|
||||
if (_database is null)
|
||||
foreach (var volume in volumes)
|
||||
{
|
||||
return Enumerable.Empty<SevenZipArchiveEntry>();
|
||||
}
|
||||
var entries = new SevenZipArchiveEntry[_database._files.Count];
|
||||
for (var i = 0; i < _database._files.Count; i++)
|
||||
{
|
||||
var file = _database._files[i];
|
||||
entries[i] = new SevenZipArchiveEntry(
|
||||
this,
|
||||
new SevenZipFilePart(stream, _database, i, file, ReaderOptions.ArchiveEncoding)
|
||||
);
|
||||
}
|
||||
foreach (var group in entries.Where(x => !x.IsDirectory).GroupBy(x => x.FilePart.Folder))
|
||||
{
|
||||
var isSolid = false;
|
||||
foreach (var entry in group)
|
||||
LoadFactory(volume.Stream);
|
||||
if (_database is null)
|
||||
{
|
||||
entry.IsSolid = isSolid;
|
||||
isSolid = true; //mark others in this group as solid - same as rar behaviour.
|
||||
yield break;
|
||||
}
|
||||
var entries = new SevenZipArchiveEntry[_database._files.Count];
|
||||
for (var i = 0; i < _database._files.Count; i++)
|
||||
{
|
||||
var file = _database._files[i];
|
||||
entries[i] = new SevenZipArchiveEntry(
|
||||
this,
|
||||
new SevenZipFilePart(
|
||||
volume.Stream,
|
||||
_database,
|
||||
i,
|
||||
file,
|
||||
ReaderOptions.ArchiveEncoding
|
||||
)
|
||||
);
|
||||
}
|
||||
foreach (
|
||||
var group in entries.Where(x => !x.IsDirectory).GroupBy(x => x.FilePart.Folder)
|
||||
)
|
||||
{
|
||||
var isSolid = false;
|
||||
foreach (var entry in group)
|
||||
{
|
||||
entry.IsSolid = isSolid;
|
||||
isSolid = true;
|
||||
}
|
||||
}
|
||||
|
||||
foreach (var entry in entries)
|
||||
{
|
||||
yield return entry;
|
||||
}
|
||||
}
|
||||
|
||||
return entries;
|
||||
}
|
||||
|
||||
private void LoadFactory(Stream stream)
|
||||
@@ -174,28 +88,6 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
|
||||
}
|
||||
}
|
||||
|
||||
public static bool IsSevenZipFile(Stream stream)
|
||||
{
|
||||
try
|
||||
{
|
||||
return SignatureMatch(stream);
|
||||
}
|
||||
catch
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
private static ReadOnlySpan<byte> Signature =>
|
||||
new byte[] { (byte)'7', (byte)'z', 0xBC, 0xAF, 0x27, 0x1C };
|
||||
|
||||
private static bool SignatureMatch(Stream stream)
|
||||
{
|
||||
var reader = new BinaryReader(stream);
|
||||
ReadOnlySpan<byte> signatureBytes = reader.ReadBytes(6);
|
||||
return signatureBytes.SequenceEqual(Signature);
|
||||
}
|
||||
|
||||
protected override IReader CreateReaderForSolidExtraction() =>
|
||||
new SevenZipReader(ReaderOptions, this);
|
||||
|
||||
@@ -205,18 +97,39 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
|
||||
.GroupBy(x => x.FilePart.Folder)
|
||||
.Any(folder => folder.Count() > 1);
|
||||
|
||||
public override bool IsEncrypted => Entries.First(x => !x.IsDirectory).IsEncrypted;
|
||||
|
||||
public override long TotalSize =>
|
||||
_database?._packSizes.Aggregate(0L, (total, packSize) => total + packSize) ?? 0;
|
||||
|
||||
private sealed class SevenZipReader : AbstractReader<SevenZipEntry, SevenZipVolume>
|
||||
internal sealed class SevenZipReader : AbstractReader<SevenZipEntry, SevenZipVolume>
|
||||
{
|
||||
private readonly SevenZipArchive _archive;
|
||||
private SevenZipEntry? _currentEntry;
|
||||
private Stream? _currentFolderStream;
|
||||
private CFolder? _currentFolder;
|
||||
private Stream? _currentStream;
|
||||
private CFileItem? _currentItem;
|
||||
|
||||
/// <summary>
|
||||
/// Enables internal diagnostics for tests.
|
||||
/// When disabled (default), diagnostics properties return null to avoid exposing internal state.
|
||||
/// </summary>
|
||||
internal bool DiagnosticsEnabled { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Current folder instance used to decide whether the solid folder stream should be reused.
|
||||
/// Only available when <see cref="DiagnosticsEnabled"/> is true.
|
||||
/// </summary>
|
||||
internal object? DiagnosticsCurrentFolder => DiagnosticsEnabled ? _currentFolder : null;
|
||||
|
||||
/// <summary>
|
||||
/// Current shared folder stream instance.
|
||||
/// Only available when <see cref="DiagnosticsEnabled"/> is true.
|
||||
/// </summary>
|
||||
internal Stream? DiagnosticsCurrentFolderStream =>
|
||||
DiagnosticsEnabled ? _currentFolderStream : null;
|
||||
|
||||
internal SevenZipReader(ReaderOptions readerOptions, SevenZipArchive archive)
|
||||
: base(readerOptions, ArchiveType.SevenZip) => this._archive = archive;
|
||||
: base(readerOptions, ArchiveType.SevenZip, false) => this._archive = archive;
|
||||
|
||||
public override SevenZipVolume Volume => _archive.Volumes.Single();
|
||||
|
||||
@@ -226,40 +139,164 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
|
||||
stream.Position = 0;
|
||||
foreach (var dir in entries.Where(x => x.IsDirectory))
|
||||
{
|
||||
_currentEntry = dir;
|
||||
yield return dir;
|
||||
}
|
||||
foreach (
|
||||
var group in entries.Where(x => !x.IsDirectory).GroupBy(x => x.FilePart.Folder)
|
||||
)
|
||||
// For solid archives (entries in the same folder share a compressed stream),
|
||||
// we must iterate entries sequentially and maintain the folder stream state
|
||||
// across entries in the same folder to avoid recreating the decompression
|
||||
// stream for each file, which breaks contiguous streaming.
|
||||
foreach (var entry in entries.Where(x => !x.IsDirectory))
|
||||
{
|
||||
_currentFolder = group.Key;
|
||||
if (group.Key is null)
|
||||
{
|
||||
_currentStream = Stream.Null;
|
||||
}
|
||||
else
|
||||
{
|
||||
_currentStream = _archive._database?.GetFolderStream(
|
||||
stream,
|
||||
_currentFolder,
|
||||
new PasswordProvider(Options.Password)
|
||||
);
|
||||
}
|
||||
foreach (var entry in group)
|
||||
{
|
||||
_currentItem = entry.FilePart.Header;
|
||||
yield return entry;
|
||||
}
|
||||
_currentEntry = entry;
|
||||
yield return entry;
|
||||
}
|
||||
}
|
||||
|
||||
protected override EntryStream GetEntryStream() =>
|
||||
CreateEntryStream(
|
||||
new ReadOnlySubStream(
|
||||
_currentStream.NotNull("currentStream is not null"),
|
||||
_currentItem?.Size ?? 0
|
||||
)
|
||||
protected override EntryStream GetEntryStream(bool useSyncOverAsyncDispose)
|
||||
{
|
||||
var entry = _currentEntry.NotNull("currentEntry is not null");
|
||||
if (entry.IsDirectory)
|
||||
{
|
||||
return CreateEntryStream(Stream.Null, false);
|
||||
}
|
||||
|
||||
var folder = entry.FilePart.Folder;
|
||||
|
||||
// Check if we're starting a new folder - dispose old folder stream if needed
|
||||
if (folder != _currentFolder)
|
||||
{
|
||||
_currentFolderStream?.Dispose();
|
||||
_currentFolderStream = null;
|
||||
_currentFolder = folder;
|
||||
}
|
||||
|
||||
// Create the folder stream once per folder
|
||||
if (_currentFolderStream is null)
|
||||
{
|
||||
_currentFolderStream = _archive._database!.GetFolderStream(
|
||||
_archive.Volumes.Single().Stream,
|
||||
folder!,
|
||||
_archive._database.PasswordProvider
|
||||
);
|
||||
}
|
||||
|
||||
// Wrap with SyncOnlyStream to work around LZMA async bugs
|
||||
// Return a ReadOnlySubStream that reads from the shared folder stream
|
||||
return CreateEntryStream(
|
||||
new SyncOnlyStream(
|
||||
new ReadOnlySubStream(_currentFolderStream, entry.Size, leaveOpen: true)
|
||||
),
|
||||
useSyncOverAsyncDispose
|
||||
);
|
||||
}
|
||||
|
||||
public override void Dispose()
|
||||
{
|
||||
_currentFolderStream?.Dispose();
|
||||
_currentFolderStream = null;
|
||||
base.Dispose();
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// WORKAROUND: Forces async operations to use synchronous equivalents.
|
||||
/// This is necessary because the LZMA decoder has bugs in its async implementation
|
||||
/// that cause state corruption (IndexOutOfRangeException, DataErrorException).
|
||||
///
|
||||
/// The proper fix would be to repair the LZMA decoder's async methods
|
||||
/// (LzmaStream.ReadAsync, Decoder.CodeAsync, OutWindow async operations),
|
||||
/// but that requires deep changes to the decoder state machine.
|
||||
/// </summary>
|
||||
private sealed class SyncOnlyStream : Stream
|
||||
{
|
||||
private readonly Stream _baseStream;
|
||||
|
||||
public SyncOnlyStream(Stream baseStream) => _baseStream = baseStream;
|
||||
|
||||
public override bool CanRead => _baseStream.CanRead;
|
||||
public override bool CanSeek => _baseStream.CanSeek;
|
||||
public override bool CanWrite => _baseStream.CanWrite;
|
||||
public override long Length => _baseStream.Length;
|
||||
public override long Position
|
||||
{
|
||||
get => _baseStream.Position;
|
||||
set => _baseStream.Position = value;
|
||||
}
|
||||
|
||||
public override void Flush() => _baseStream.Flush();
|
||||
|
||||
public override int Read(byte[] buffer, int offset, int count) =>
|
||||
_baseStream.Read(buffer, offset, count);
|
||||
|
||||
public override long Seek(long offset, SeekOrigin origin) =>
|
||||
_baseStream.Seek(offset, origin);
|
||||
|
||||
public override void SetLength(long value) => _baseStream.SetLength(value);
|
||||
|
||||
public override void Write(byte[] buffer, int offset, int count) =>
|
||||
_baseStream.Write(buffer, offset, count);
|
||||
|
||||
// Force async operations to use sync equivalents to avoid LZMA decoder bugs
|
||||
public override Task<int> ReadAsync(
|
||||
byte[] buffer,
|
||||
int offset,
|
||||
int count,
|
||||
CancellationToken cancellationToken
|
||||
)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
return Task.FromResult(_baseStream.Read(buffer, offset, count));
|
||||
}
|
||||
|
||||
public override Task WriteAsync(
|
||||
byte[] buffer,
|
||||
int offset,
|
||||
int count,
|
||||
CancellationToken cancellationToken
|
||||
)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
_baseStream.Write(buffer, offset, count);
|
||||
return Task.CompletedTask;
|
||||
}
|
||||
|
||||
public override Task FlushAsync(CancellationToken cancellationToken)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
_baseStream.Flush();
|
||||
return Task.CompletedTask;
|
||||
}
|
||||
|
||||
#if !LEGACY_DOTNET
|
||||
public override ValueTask<int> ReadAsync(
|
||||
Memory<byte> buffer,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
return new ValueTask<int>(_baseStream.Read(buffer.Span));
|
||||
}
|
||||
|
||||
public override ValueTask WriteAsync(
|
||||
ReadOnlyMemory<byte> buffer,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
_baseStream.Write(buffer.Span);
|
||||
return ValueTask.CompletedTask;
|
||||
}
|
||||
#endif
|
||||
|
||||
protected override void Dispose(bool disposing)
|
||||
{
|
||||
if (disposing)
|
||||
{
|
||||
_baseStream.Dispose();
|
||||
}
|
||||
base.Dispose(disposing);
|
||||
}
|
||||
}
|
||||
|
||||
private class PasswordProvider : IPasswordProvider
|
||||
|
||||
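The reader changes above exist because entries of a solid 7z folder share one decompression stream, so they must be read sequentially rather than through random-access OpenEntryStream calls. A hedged extraction sketch; ExtractAllEntries, MoveToNextEntry and WriteEntryTo are the conventional sharpcompress reader API and are assumptions relative to this diff.

using System.IO;
using SharpCompress.Archives.SevenZip;

internal static class SolidSevenZipExtractionSketch
{
    public static void ExtractAll(SevenZipArchive archive, string outputDir)
    {
        using var reader = archive.ExtractAllEntries(); // assumed wrapper over the SevenZipReader above
        while (reader.MoveToNextEntry())
        {
            if (reader.Entry.IsDirectory)
            {
                continue;
            }
            var target = Path.Combine(outputDir, reader.Entry.Key!);
            Directory.CreateDirectory(Path.GetDirectoryName(target)!);
            using var output = File.Create(target);
            reader.WriteEntryTo(output); // sequential reads reuse the shared folder stream
        }
    }
}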
@@ -1,4 +1,6 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.SevenZip;

namespace SharpCompress.Archives.SevenZip;
@@ -10,6 +12,10 @@ public class SevenZipArchiveEntry : SevenZipEntry, IArchiveEntry

    public Stream OpenEntryStream() => FilePart.GetCompressedStream();

    public async ValueTask<Stream> OpenEntryStreamAsync(
        CancellationToken cancellationToken = default
    ) => (await FilePart.GetCompressedStreamAsync(cancellationToken)).NotNull();

    public IArchive Archive { get; }

    public bool IsComplete => true;
src/SharpCompress/Archives/Tar/TarArchive.Async.cs (new file, 161 lines)
@@ -0,0 +1,161 @@
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.Common.Tar;
|
||||
using SharpCompress.Common.Tar.Headers;
|
||||
using SharpCompress.IO;
|
||||
using SharpCompress.Readers;
|
||||
using SharpCompress.Readers.Tar;
|
||||
using SharpCompress.Writers;
|
||||
using SharpCompress.Writers.Tar;
|
||||
|
||||
namespace SharpCompress.Archives.Tar;
|
||||
|
||||
public partial class TarArchive
|
||||
{
|
||||
protected override async ValueTask SaveToAsync(
|
||||
Stream stream,
|
||||
WriterOptions options,
|
||||
IAsyncEnumerable<TarArchiveEntry> oldEntries,
|
||||
IEnumerable<TarArchiveEntry> newEntries,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
using var writer = new TarWriter(stream, new TarWriterOptions(options));
|
||||
await foreach (
|
||||
var entry in oldEntries.WithCancellation(cancellationToken).ConfigureAwait(false)
|
||||
)
|
||||
{
|
||||
if (entry.IsDirectory)
|
||||
{
|
||||
await writer
|
||||
.WriteDirectoryAsync(
|
||||
entry.Key.NotNull("Entry Key is null"),
|
||||
entry.LastModifiedTime,
|
||||
cancellationToken
|
||||
)
|
||||
.ConfigureAwait(false);
|
||||
}
|
||||
else
|
||||
{
|
||||
using var entryStream = entry.OpenEntryStream();
|
||||
await writer
|
||||
.WriteAsync(
|
||||
entry.Key.NotNull("Entry Key is null"),
|
||||
entryStream,
|
||||
entry.LastModifiedTime,
|
||||
entry.Size,
|
||||
cancellationToken
|
||||
)
|
||||
.ConfigureAwait(false);
|
||||
}
|
||||
}
|
||||
foreach (var entry in newEntries)
|
||||
{
|
||||
if (entry.IsDirectory)
|
||||
{
|
||||
await writer
|
||||
.WriteDirectoryAsync(
|
||||
entry.Key.NotNull("Entry Key is null"),
|
||||
entry.LastModifiedTime,
|
||||
cancellationToken
|
||||
)
|
||||
.ConfigureAwait(false);
|
||||
}
|
||||
else
|
||||
{
|
||||
using var entryStream = entry.OpenEntryStream();
|
||||
await writer
|
||||
.WriteAsync(
|
||||
entry.Key.NotNull("Entry Key is null"),
|
||||
entryStream,
|
||||
entry.LastModifiedTime,
|
||||
entry.Size,
|
||||
cancellationToken
|
||||
)
|
||||
.ConfigureAwait(false);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
protected override ValueTask<IAsyncReader> CreateReaderForSolidExtractionAsync()
|
||||
{
|
||||
var stream = Volumes.Single().Stream;
|
||||
stream.Position = 0;
|
||||
return new((IAsyncReader)TarReader.OpenReader(stream));
|
||||
}
|
||||
|
||||
protected override async IAsyncEnumerable<TarArchiveEntry> LoadEntriesAsync(
|
||||
IAsyncEnumerable<TarVolume> volumes
|
||||
)
|
||||
{
|
||||
var stream = (await volumes.SingleAsync()).Stream;
|
||||
if (stream.CanSeek)
|
||||
{
|
||||
stream.Position = 0;
|
||||
}
|
||||
|
||||
// Always use async header reading in LoadEntriesAsync for consistency
|
||||
{
|
||||
// Use async header reading for async-only streams
|
||||
TarHeader? previousHeader = null;
|
||||
await foreach (
|
||||
var header in TarHeaderFactory.ReadHeaderAsync(
|
||||
StreamingMode.Seekable,
|
||||
stream,
|
||||
ReaderOptions.ArchiveEncoding
|
||||
)
|
||||
)
|
||||
{
|
||||
if (header != null)
|
||||
{
|
||||
if (header.EntryType == EntryType.LongName)
|
||||
{
|
||||
previousHeader = header;
|
||||
}
|
||||
else
|
||||
{
|
||||
if (previousHeader != null)
|
||||
{
|
||||
var entry = new TarArchiveEntry(
|
||||
this,
|
||||
new TarFilePart(previousHeader, stream),
|
||||
CompressionType.None
|
||||
);
|
||||
|
||||
var oldStreamPos = stream.Position;
|
||||
|
||||
using (var entryStream = entry.OpenEntryStream())
|
||||
{
|
||||
using var memoryStream = new MemoryStream();
|
||||
await entryStream.CopyToAsync(memoryStream);
|
||||
memoryStream.Position = 0;
|
||||
var bytes = memoryStream.ToArray();
|
||||
|
||||
header.Name = ReaderOptions
|
||||
.ArchiveEncoding.Decode(bytes)
|
||||
.TrimNulls();
|
||||
}
|
||||
|
||||
stream.Position = oldStreamPos;
|
||||
|
||||
previousHeader = null;
|
||||
}
|
||||
yield return new TarArchiveEntry(
|
||||
this,
|
||||
new TarFilePart(header, stream),
|
||||
CompressionType.None
|
||||
);
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
throw new IncompleteArchiveException("Failed to read TAR header");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
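A sketch of the TarWriter async calls used by SaveToAsync above, outside the archive class. The TarWriterOptions(CompressionType, finalizeArchiveOnClose) constructor is the conventional one and an assumption relative to the WriterOptions-based constructor in the diff; the content stream must report Length.

using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Writers.Tar;

internal static class TarAsyncWritingSketch
{
    public static async Task WriteAsync(Stream output, string entryName, Stream content, CancellationToken ct)
    {
        using var writer = new TarWriter(
            output,
            new TarWriterOptions(CompressionType.None, finalizeArchiveOnClose: true)
        );
        await writer.WriteDirectoryAsync("docs", DateTime.UtcNow, ct).ConfigureAwait(false);
        await writer.WriteAsync(entryName, content, DateTime.UtcNow, content.Length, ct).ConfigureAwait(false);
    }
}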
src/SharpCompress/Archives/Tar/TarArchive.Factory.cs (new file, 198 lines)
@@ -0,0 +1,198 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.Common.Tar.Headers;
|
||||
using SharpCompress.IO;
|
||||
using SharpCompress.Readers;
|
||||
|
||||
namespace SharpCompress.Archives.Tar;
|
||||
|
||||
public partial class TarArchive
|
||||
#if NET8_0_OR_GREATER
|
||||
: IWritableArchiveOpenable,
|
||||
IMultiArchiveOpenable<IWritableArchive, IWritableAsyncArchive>
|
||||
#endif
|
||||
{
|
||||
public static IWritableArchive OpenArchive(string filePath, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
filePath.NotNullOrEmpty(nameof(filePath));
|
||||
return OpenArchive(new FileInfo(filePath), readerOptions);
|
||||
}
|
||||
|
||||
public static IWritableArchive OpenArchive(
|
||||
FileInfo fileInfo,
|
||||
ReaderOptions? readerOptions = null
|
||||
)
|
||||
{
|
||||
fileInfo.NotNull(nameof(fileInfo));
|
||||
return new TarArchive(
|
||||
new SourceStream(
|
||||
fileInfo,
|
||||
i => ArchiveVolumeFactory.GetFilePart(i, fileInfo),
|
||||
readerOptions ?? new ReaderOptions() { LeaveStreamOpen = false }
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
public static IWritableArchive OpenArchive(
|
||||
IEnumerable<FileInfo> fileInfos,
|
||||
ReaderOptions? readerOptions = null
|
||||
)
|
||||
{
|
||||
fileInfos.NotNull(nameof(fileInfos));
|
||||
var files = fileInfos.ToArray();
|
||||
return new TarArchive(
|
||||
new SourceStream(
|
||||
files[0],
|
||||
i => i < files.Length ? files[i] : null,
|
||||
readerOptions ?? new ReaderOptions() { LeaveStreamOpen = false }
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
public static IWritableArchive OpenArchive(
|
||||
IEnumerable<Stream> streams,
|
||||
ReaderOptions? readerOptions = null
|
||||
)
|
||||
{
|
||||
streams.NotNull(nameof(streams));
|
||||
var strms = streams.ToArray();
|
||||
return new TarArchive(
|
||||
new SourceStream(
|
||||
strms[0],
|
||||
i => i < strms.Length ? strms[i] : null,
|
||||
readerOptions ?? new ReaderOptions()
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
public static IWritableArchive OpenArchive(Stream stream, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
stream.NotNull(nameof(stream));
|
||||
|
||||
if (stream is not { CanSeek: true })
|
||||
{
|
||||
throw new ArgumentException("Stream must be seekable", nameof(stream));
|
||||
}
|
||||
|
||||
return new TarArchive(
|
||||
new SourceStream(stream, i => null, readerOptions ?? new ReaderOptions())
|
||||
);
|
||||
}
|
||||
|
||||
public static IWritableAsyncArchive OpenAsyncArchive(
|
||||
Stream stream,
|
||||
ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
return (IWritableAsyncArchive)OpenArchive(stream, readerOptions);
|
||||
}
|
||||
|
||||
public static IWritableAsyncArchive OpenAsyncArchive(
|
||||
string path,
|
||||
ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
return (IWritableAsyncArchive)OpenArchive(new FileInfo(path), readerOptions);
|
||||
}
|
||||
|
||||
public static IWritableAsyncArchive OpenAsyncArchive(
|
||||
FileInfo fileInfo,
|
||||
ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
return (IWritableAsyncArchive)OpenArchive(fileInfo, readerOptions);
|
||||
}
|
||||
|
||||
public static IWritableAsyncArchive OpenAsyncArchive(
|
||||
IReadOnlyList<Stream> streams,
|
||||
ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
return (IWritableAsyncArchive)OpenArchive(streams, readerOptions);
|
||||
}
|
||||
|
||||
public static IWritableAsyncArchive OpenAsyncArchive(
|
||||
IReadOnlyList<FileInfo> fileInfos,
|
||||
ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
return (IWritableAsyncArchive)OpenArchive(fileInfos, readerOptions);
|
||||
}
|
||||
|
||||
public static bool IsTarFile(string filePath) => IsTarFile(new FileInfo(filePath));
|
||||
|
||||
public static bool IsTarFile(FileInfo fileInfo)
|
||||
{
|
||||
if (!fileInfo.Exists)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
using Stream stream = fileInfo.OpenRead();
|
||||
return IsTarFile(stream);
|
||||
}
|
||||
|
||||
public static bool IsTarFile(Stream stream)
|
||||
{
|
||||
try
|
||||
{
|
||||
var tarHeader = new TarHeader(new ArchiveEncoding());
|
||||
var reader = new BinaryReader(stream, Encoding.UTF8, false);
|
||||
var readSucceeded = tarHeader.Read(reader);
|
||||
var isEmptyArchive =
|
||||
tarHeader.Name?.Length == 0
|
||||
&& tarHeader.Size == 0
|
||||
&& Enum.IsDefined(typeof(EntryType), tarHeader.EntryType);
|
||||
return readSucceeded || isEmptyArchive;
|
||||
}
|
||||
catch (Exception)
|
||||
{
|
||||
// Catch all exceptions during tar header reading to determine if this is a valid tar file
|
||||
// Invalid tar files or corrupted streams will throw various exceptions
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
public static async ValueTask<bool> IsTarFileAsync(
|
||||
Stream stream,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
try
|
||||
{
|
||||
var tarHeader = new TarHeader(new ArchiveEncoding());
|
||||
var reader = new AsyncBinaryReader(stream, false);
|
||||
var readSucceeded = await tarHeader.ReadAsync(reader);
|
||||
var isEmptyArchive =
|
||||
tarHeader.Name?.Length == 0
|
||||
&& tarHeader.Size == 0
|
||||
&& Enum.IsDefined(typeof(EntryType), tarHeader.EntryType);
|
||||
return readSucceeded || isEmptyArchive;
|
||||
}
|
||||
catch (Exception)
|
||||
{
|
||||
// Catch all exceptions during tar header reading to determine if this is a valid tar file
|
||||
// Invalid tar files or corrupted streams will throw various exceptions
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
public static IWritableArchive CreateArchive() => new TarArchive();
|
||||
|
||||
public static IWritableAsyncArchive CreateAsyncArchive() => new TarArchive();
|
||||
}
|
||||
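A small usage sketch of the async detection helper above; any rewind after the probe is the caller's responsibility.

using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Archives.Tar;

internal static class TarProbeSketch
{
    public static async Task<bool> LooksLikeTarAsync(string path, CancellationToken ct = default)
    {
        using var stream = File.OpenRead(path);
        return await TarArchive.IsTarFileAsync(stream, ct);
    }
}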
@@ -2,6 +2,8 @@ using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.Common.Tar;
|
||||
using SharpCompress.Common.Tar.Headers;
|
||||
@@ -13,132 +15,14 @@ using SharpCompress.Writers.Tar;
|
||||
|
||||
namespace SharpCompress.Archives.Tar;
|
||||
|
||||
public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
|
||||
public partial class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
|
||||
{
|
||||
/// <summary>
|
||||
/// Constructor expects a filepath to an existing file.
|
||||
/// </summary>
|
||||
/// <param name="filePath"></param>
|
||||
/// <param name="readerOptions"></param>
|
||||
public static TarArchive Open(string filePath, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
filePath.NotNullOrEmpty(nameof(filePath));
|
||||
return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Constructor with a FileInfo object to an existing file.
|
||||
/// </summary>
|
||||
/// <param name="fileInfo"></param>
|
||||
/// <param name="readerOptions"></param>
|
||||
public static TarArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
fileInfo.NotNull(nameof(fileInfo));
|
||||
return new TarArchive(
|
||||
new SourceStream(
|
||||
fileInfo,
|
||||
i => ArchiveVolumeFactory.GetFilePart(i, fileInfo),
|
||||
readerOptions ?? new ReaderOptions()
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Constructor with all file parts passed in
|
||||
/// </summary>
|
||||
/// <param name="fileInfos"></param>
|
||||
/// <param name="readerOptions"></param>
|
||||
public static TarArchive Open(
|
||||
IEnumerable<FileInfo> fileInfos,
|
||||
ReaderOptions? readerOptions = null
|
||||
)
|
||||
{
|
||||
fileInfos.NotNull(nameof(fileInfos));
|
||||
var files = fileInfos.ToArray();
|
||||
return new TarArchive(
|
||||
new SourceStream(
|
||||
files[0],
|
||||
i => i < files.Length ? files[i] : null,
|
||||
readerOptions ?? new ReaderOptions()
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Constructor with all stream parts passed in
|
||||
/// </summary>
|
||||
/// <param name="streams"></param>
|
||||
/// <param name="readerOptions"></param>
|
||||
public static TarArchive Open(IEnumerable<Stream> streams, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
streams.NotNull(nameof(streams));
|
||||
var strms = streams.ToArray();
|
||||
return new TarArchive(
|
||||
new SourceStream(
|
||||
strms[0],
|
||||
i => i < strms.Length ? strms[i] : null,
|
||||
readerOptions ?? new ReaderOptions()
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Takes a seekable Stream as a source
|
||||
/// </summary>
|
||||
/// <param name="stream"></param>
|
||||
/// <param name="readerOptions"></param>
|
||||
public static TarArchive Open(Stream stream, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
stream.NotNull(nameof(stream));
|
||||
|
||||
if (stream is not { CanSeek: true })
|
||||
{
|
||||
throw new ArgumentException("Stream must be seekable", nameof(stream));
|
||||
}
|
||||
|
||||
return new TarArchive(
|
||||
new SourceStream(stream, i => null, readerOptions ?? new ReaderOptions())
|
||||
);
|
||||
}
|
||||
|
||||
public static bool IsTarFile(string filePath) => IsTarFile(new FileInfo(filePath));
|
||||
|
||||
public static bool IsTarFile(FileInfo fileInfo)
|
||||
{
|
||||
if (!fileInfo.Exists)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
using Stream stream = fileInfo.OpenRead();
|
||||
return IsTarFile(stream);
|
||||
}
|
||||
|
||||
public static bool IsTarFile(Stream stream)
|
||||
{
|
||||
try
|
||||
{
|
||||
var tarHeader = new TarHeader(new ArchiveEncoding());
|
||||
var readSucceeded = tarHeader.Read(new BinaryReader(stream));
|
||||
var isEmptyArchive =
|
||||
tarHeader.Name?.Length == 0
|
||||
&& tarHeader.Size == 0
|
||||
&& Enum.IsDefined(typeof(EntryType), tarHeader.EntryType);
|
||||
return readSucceeded || isEmptyArchive;
|
||||
}
|
||||
catch { }
|
||||
return false;
|
||||
}
|
||||
|
||||
protected override IEnumerable<TarVolume> LoadVolumes(SourceStream sourceStream)
|
||||
{
|
||||
sourceStream.NotNull("SourceStream is null").LoadAllParts(); //request all streams
|
||||
return new TarVolume(sourceStream, ReaderOptions, 1).AsEnumerable(); //simple single volume or split, multivolume not supported
|
||||
sourceStream.NotNull("SourceStream is null").LoadAllParts();
|
||||
return new TarVolume(sourceStream, ReaderOptions, 1).AsEnumerable();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Constructor with a SourceStream able to handle FileInfo and Streams.
|
||||
/// </summary>
|
||||
/// <param name="sourceStream"></param>
|
||||
private TarArchive(SourceStream sourceStream)
|
||||
: base(ArchiveType.Tar, sourceStream) { }
|
||||
|
||||
@@ -148,6 +32,10 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
|
||||
protected override IEnumerable<TarArchiveEntry> LoadEntries(IEnumerable<TarVolume> volumes)
|
||||
{
|
||||
var stream = volumes.Single().Stream;
|
||||
if (stream.CanSeek)
|
||||
{
|
||||
stream.Position = 0;
|
||||
}
|
||||
TarHeader? previousHeader = null;
|
||||
foreach (
|
||||
var header in TarHeaderFactory.ReadHeader(
|
||||
@@ -178,7 +66,7 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
|
||||
using (var entryStream = entry.OpenEntryStream())
|
||||
{
|
||||
using var memoryStream = new MemoryStream();
|
||||
entryStream.CopyTo(memoryStream);
|
||||
entryStream.CopyTo(memoryStream, Constants.BufferSize);
|
||||
memoryStream.Position = 0;
|
||||
var bytes = memoryStream.ToArray();
|
||||
|
||||
@@ -203,8 +91,6 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
|
||||
}
|
||||
}
|
||||
|
||||
public static TarArchive Create() => new();
|
||||
|
||||
protected override TarArchiveEntry CreateEntryInternal(
|
||||
string filePath,
|
||||
Stream source,
|
||||
@@ -222,6 +108,11 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
|
||||
closeStream
|
||||
);
|
||||
|
||||
protected override TarArchiveEntry CreateDirectoryEntry(
|
||||
string directoryPath,
|
||||
DateTime? modified
|
||||
) => new TarWritableArchiveEntry(this, directoryPath, modified);
|
||||
|
||||
protected override void SaveTo(
|
||||
Stream stream,
|
||||
WriterOptions options,
|
||||
@@ -230,15 +121,25 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
|
||||
)
|
||||
{
|
||||
using var writer = new TarWriter(stream, new TarWriterOptions(options));
|
||||
foreach (var entry in oldEntries.Concat(newEntries).Where(x => !x.IsDirectory))
|
||||
foreach (var entry in oldEntries.Concat(newEntries))
|
||||
{
|
||||
using var entryStream = entry.OpenEntryStream();
|
||||
writer.Write(
|
||||
entry.Key.NotNull("Entry Key is null"),
|
||||
entryStream,
|
||||
entry.LastModifiedTime,
|
||||
entry.Size
|
||||
);
|
||||
if (entry.IsDirectory)
|
||||
{
|
||||
writer.WriteDirectory(
|
||||
entry.Key.NotNull("Entry Key is null"),
|
||||
entry.LastModifiedTime
|
||||
);
|
||||
}
|
||||
else
|
||||
{
|
||||
using var entryStream = entry.OpenEntryStream();
|
||||
writer.Write(
|
||||
entry.Key.NotNull("Entry Key is null"),
|
||||
entryStream,
|
||||
entry.LastModifiedTime,
|
||||
entry.Size
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -246,6 +147,6 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
|
||||
{
|
||||
var stream = Volumes.Single().Stream;
|
||||
stream.Position = 0;
|
||||
return TarReader.Open(stream);
|
||||
return TarReader.OpenReader(stream);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,7 @@
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Tar;

@@ -12,6 +14,9 @@ public class TarArchiveEntry : TarEntry, IArchiveEntry

    public virtual Stream OpenEntryStream() => Parts.Single().GetCompressedStream().NotNull();

    public ValueTask<Stream> OpenEntryStreamAsync(CancellationToken cancellationToken = default) =>
        new(OpenEntryStream());

    #region IArchiveEntry Members

    public IArchive Archive { get; }
@@ -9,7 +9,8 @@ namespace SharpCompress.Archives.Tar;
internal sealed class TarWritableArchiveEntry : TarArchiveEntry, IWritableArchiveEntry
{
    private readonly bool closeStream;
    private readonly Stream stream;
    private readonly Stream? stream;
    private readonly bool isDirectory;

    internal TarWritableArchiveEntry(
        TarArchive archive,
@@ -27,6 +28,22 @@ internal sealed class TarWritableArchiveEntry : TarArchiveEntry, IWritableArchiv
        Size = size;
        LastModifiedTime = lastModified;
        this.closeStream = closeStream;
        isDirectory = false;
    }

    internal TarWritableArchiveEntry(
        TarArchive archive,
        string directoryPath,
        DateTime? lastModified
    )
        : base(archive, null, CompressionType.None)
    {
        stream = null;
        Key = directoryPath;
        Size = 0;
        LastModifiedTime = lastModified;
        closeStream = false;
        isDirectory = true;
    }

    public override long Crc => 0;
@@ -47,23 +64,27 @@ internal sealed class TarWritableArchiveEntry : TarArchiveEntry, IWritableArchiv

    public override bool IsEncrypted => false;

    public override bool IsDirectory => false;
    public override bool IsDirectory => isDirectory;

    public override bool IsSplitAfter => false;

    internal override IEnumerable<FilePart> Parts => throw new NotImplementedException();
    Stream IWritableArchiveEntry.Stream => stream;
    Stream IWritableArchiveEntry.Stream => stream ?? Stream.Null;

    public override Stream OpenEntryStream()
    {
        if (stream is null)
        {
            return Stream.Null;
        }
        //ensure new stream is at the start, this could be reset
        stream.Seek(0, SeekOrigin.Begin);
        return SharpCompressStream.Create(stream, leaveOpen: true);
        return new NonDisposingStream(stream);
    }

    internal override void Close()
    {
        if (closeStream)
        if (closeStream && stream is not null)
        {
            stream.Dispose();
        }
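The directory constructor above leans on a null-object stream: directory entries expose Stream.Null, so callers can push every entry through the same copy path and a directory simply contributes zero bytes. A two-line sketch of that caller-side effect:

using System.IO;
using SharpCompress.Archives.Tar;

internal static class TarEntryCopySketch
{
    public static void CopyEntry(TarArchiveEntry entry, Stream destination)
    {
        using var source = entry.OpenEntryStream(); // Stream.Null for directory entries
        source.CopyTo(destination);                 // copies nothing for a directory
    }
}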
src/SharpCompress/Archives/Zip/ZipArchive.Async.cs (new file, 132 lines)
@@ -0,0 +1,132 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.Common.Zip;
|
||||
using SharpCompress.Common.Zip.Headers;
|
||||
using SharpCompress.IO;
|
||||
using SharpCompress.Readers;
|
||||
using SharpCompress.Writers;
|
||||
using SharpCompress.Writers.Zip;
|
||||
|
||||
namespace SharpCompress.Archives.Zip;
|
||||
|
||||
public partial class ZipArchive
|
||||
{
|
||||
protected override async IAsyncEnumerable<ZipArchiveEntry> LoadEntriesAsync(
|
||||
IAsyncEnumerable<ZipVolume> volumes
|
||||
)
|
||||
{
|
||||
var vols = await volumes.ToListAsync();
|
||||
var volsArray = vols.ToArray();
|
||||
|
||||
await foreach (
|
||||
var h in headerFactory.NotNull().ReadSeekableHeaderAsync(volsArray.Last().Stream)
|
||||
)
|
||||
{
|
||||
if (h != null)
|
||||
{
|
||||
switch (h.ZipHeaderType)
|
||||
{
|
||||
case ZipHeaderType.DirectoryEntry:
|
||||
{
|
||||
var deh = (DirectoryEntryHeader)h;
|
||||
Stream s;
|
||||
if (
|
||||
deh.RelativeOffsetOfEntryHeader + deh.CompressedSize
|
||||
> volsArray[deh.DiskNumberStart].Stream.Length
|
||||
)
|
||||
{
|
||||
var v = volsArray.Skip(deh.DiskNumberStart).ToArray();
|
||||
s = new SourceStream(
|
||||
v[0].Stream,
|
||||
i => i < v.Length ? v[i].Stream : null,
|
||||
new ReaderOptions() { LeaveStreamOpen = true }
|
||||
);
|
||||
}
|
||||
else
|
||||
{
|
||||
s = volsArray[deh.DiskNumberStart].Stream;
|
||||
}
|
||||
|
||||
yield return new ZipArchiveEntry(
|
||||
this,
|
||||
new SeekableZipFilePart(headerFactory.NotNull(), deh, s)
|
||||
);
|
||||
}
|
||||
break;
|
||||
case ZipHeaderType.DirectoryEnd:
|
||||
{
|
||||
var bytes = ((DirectoryEndHeader)h).Comment ?? Array.Empty<byte>();
|
||||
volsArray.Last().Comment = ReaderOptions.ArchiveEncoding.Decode(bytes);
|
||||
yield break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
protected override async ValueTask SaveToAsync(
|
||||
Stream stream,
|
||||
WriterOptions options,
|
||||
IAsyncEnumerable<ZipArchiveEntry> oldEntries,
|
||||
IEnumerable<ZipArchiveEntry> newEntries,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
using var writer = new ZipWriter(stream, new ZipWriterOptions(options));
|
||||
await foreach (
|
||||
var entry in oldEntries.WithCancellation(cancellationToken).ConfigureAwait(false)
|
||||
)
|
||||
{
|
||||
if (entry.IsDirectory)
|
||||
{
|
||||
await writer
|
||||
.WriteDirectoryAsync(
|
||||
entry.Key.NotNull("Entry Key is null"),
|
||||
entry.LastModifiedTime,
|
||||
cancellationToken
|
||||
)
|
||||
.ConfigureAwait(false);
|
||||
}
|
||||
else
|
||||
{
|
||||
using var entryStream = entry.OpenEntryStream();
|
||||
await writer
|
||||
.WriteAsync(
|
||||
entry.Key.NotNull("Entry Key is null"),
|
||||
entryStream,
|
||||
cancellationToken
|
||||
)
|
||||
.ConfigureAwait(false);
|
||||
}
|
||||
}
|
||||
foreach (var entry in newEntries)
|
||||
{
|
||||
if (entry.IsDirectory)
|
||||
{
|
||||
await writer
|
||||
.WriteDirectoryAsync(
|
||||
entry.Key.NotNull("Entry Key is null"),
|
||||
entry.LastModifiedTime,
|
||||
cancellationToken
|
||||
)
|
||||
.ConfigureAwait(false);
|
||||
}
|
||||
else
|
||||
{
|
||||
using var entryStream = entry.OpenEntryStream();
|
||||
await writer
|
||||
.WriteAsync(
|
||||
entry.Key.NotNull("Entry Key is null"),
|
||||
entryStream,
|
||||
cancellationToken
|
||||
)
|
||||
.ConfigureAwait(false);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
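A hedged sketch of driving the async save path above from the outside. OpenAsyncArchive(string, ...) appears in the factory partial below; exposing SaveToAsync as a public member of the writable async archive is an assumption, since the diff only shows the protected override.

using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Archives.Zip;
using SharpCompress.Common;
using SharpCompress.Writers;

internal static class ZipAsyncRepackSketch
{
    public static async Task RepackAsync(string inputPath, string outputPath, CancellationToken ct)
    {
        var archive = ZipArchive.OpenAsyncArchive(inputPath, cancellationToken: ct);
        using var output = File.Create(outputPath);
        await archive.SaveToAsync(output, new WriterOptions(CompressionType.Deflate), ct); // assumed public wrapper
    }
}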
src/SharpCompress/Archives/Zip/ZipArchive.Factory.cs (new file, 287 lines)
@@ -0,0 +1,287 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.Common.Zip;
|
||||
using SharpCompress.Common.Zip.Headers;
|
||||
using SharpCompress.IO;
|
||||
using SharpCompress.Readers;
|
||||
|
||||
namespace SharpCompress.Archives.Zip;
|
||||
|
||||
public partial class ZipArchive
|
||||
#if NET8_0_OR_GREATER
|
||||
: IWritableArchiveOpenable,
|
||||
IMultiArchiveOpenable<IWritableArchive, IWritableAsyncArchive>
|
||||
#endif
|
||||
{
|
||||
public static IWritableArchive OpenArchive(string filePath, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
filePath.NotNullOrEmpty(nameof(filePath));
|
||||
return OpenArchive(new FileInfo(filePath), readerOptions);
|
||||
}
|
||||
|
||||
public static IWritableArchive OpenArchive(
|
||||
FileInfo fileInfo,
|
||||
ReaderOptions? readerOptions = null
|
||||
)
|
||||
{
|
||||
fileInfo.NotNull(nameof(fileInfo));
|
||||
return new ZipArchive(
|
||||
new SourceStream(
|
||||
fileInfo,
|
||||
i => ZipArchiveVolumeFactory.GetFilePart(i, fileInfo),
|
||||
readerOptions ?? new ReaderOptions() { LeaveStreamOpen = false }
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
public static IWritableArchive OpenArchive(
|
||||
IEnumerable<FileInfo> fileInfos,
|
||||
ReaderOptions? readerOptions = null
|
||||
)
|
||||
{
|
||||
fileInfos.NotNull(nameof(fileInfos));
|
||||
var files = fileInfos.ToArray();
|
||||
return new ZipArchive(
|
||||
new SourceStream(
|
||||
files[0],
|
||||
i => i < files.Length ? files[i] : null,
|
||||
readerOptions ?? new ReaderOptions() { LeaveStreamOpen = false }
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
public static IWritableArchive OpenArchive(
|
||||
IEnumerable<Stream> streams,
|
||||
ReaderOptions? readerOptions = null
|
||||
)
|
||||
{
|
||||
streams.NotNull(nameof(streams));
|
||||
var strms = streams.ToArray();
|
||||
return new ZipArchive(
|
||||
new SourceStream(
|
||||
strms[0],
|
||||
i => i < strms.Length ? strms[i] : null,
|
||||
readerOptions ?? new ReaderOptions()
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
public static IWritableArchive OpenArchive(Stream stream, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
stream.NotNull(nameof(stream));
|
||||
|
||||
if (stream is not { CanSeek: true })
|
||||
{
|
||||
throw new ArgumentException("Stream must be seekable", nameof(stream));
|
||||
}
|
||||
|
||||
return new ZipArchive(
|
||||
new SourceStream(stream, i => null, readerOptions ?? new ReaderOptions())
|
||||
);
|
||||
}
|
||||
|
||||
public static IWritableAsyncArchive OpenAsyncArchive(
|
||||
string path,
|
||||
ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
return (IWritableAsyncArchive)OpenArchive(path, readerOptions);
|
||||
}
|
||||
|
||||
public static IWritableAsyncArchive OpenAsyncArchive(
|
||||
Stream stream,
|
||||
ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
return (IWritableAsyncArchive)OpenArchive(stream, readerOptions);
|
||||
}
|
||||
|
||||
public static IWritableAsyncArchive OpenAsyncArchive(
|
||||
FileInfo fileInfo,
|
||||
ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
return (IWritableAsyncArchive)OpenArchive(fileInfo, readerOptions);
|
||||
}
|
||||
|
||||
public static IWritableAsyncArchive OpenAsyncArchive(
|
||||
IReadOnlyList<Stream> streams,
|
||||
ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
return (IWritableAsyncArchive)OpenArchive(streams, readerOptions);
|
||||
}
|
||||
|
||||
public static IWritableAsyncArchive OpenAsyncArchive(
|
||||
IReadOnlyList<FileInfo> fileInfos,
|
||||
ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
return (IWritableAsyncArchive)OpenArchive(fileInfos, readerOptions);
|
||||
}
|
||||
|
||||
public static bool IsZipFile(string filePath, string? password = null) =>
|
||||
IsZipFile(new FileInfo(filePath), password);
|
||||
|
||||
public static bool IsZipFile(FileInfo fileInfo, string? password = null)
|
||||
{
|
||||
if (!fileInfo.Exists)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
using Stream stream = fileInfo.OpenRead();
|
||||
return IsZipFile(stream, password);
|
||||
}
|
||||
|
||||
public static bool IsZipFile(Stream stream, string? password = null)
|
||||
{
|
||||
var headerFactory = new StreamingZipHeaderFactory(password, new ArchiveEncoding(), null);
|
||||
try
|
||||
{
|
||||
var header = headerFactory
|
||||
.ReadStreamHeader(stream)
|
||||
.FirstOrDefault(x => x.ZipHeaderType != ZipHeaderType.Split);
|
||||
if (header is null)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
return Enum.IsDefined(typeof(ZipHeaderType), header.ZipHeaderType);
|
||||
}
|
||||
catch (CryptographicException)
|
||||
{
|
||||
return true;
|
||||
}
|
||||
catch
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
public static bool IsZipMulti(Stream stream, string? password = null)
|
||||
{
|
||||
var headerFactory = new StreamingZipHeaderFactory(password, new ArchiveEncoding(), null);
|
||||
try
|
||||
{
|
||||
var header = headerFactory
|
||||
.ReadStreamHeader(stream)
|
||||
.FirstOrDefault(x => x.ZipHeaderType != ZipHeaderType.Split);
|
||||
if (header is null)
|
||||
{
|
||||
if (stream.CanSeek)
|
||||
{
|
||||
var z = new SeekableZipHeaderFactory(password, new ArchiveEncoding());
|
||||
var x = z.ReadSeekableHeader(stream).FirstOrDefault();
|
||||
return x?.ZipHeaderType == ZipHeaderType.DirectoryEntry;
|
||||
}
|
||||
else
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return Enum.IsDefined(typeof(ZipHeaderType), header.ZipHeaderType);
|
||||
}
|
||||
catch (CryptographicException)
|
||||
{
|
||||
return true;
|
||||
}
|
||||
catch
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
public static async ValueTask<bool> IsZipFileAsync(
|
||||
Stream stream,
|
||||
string? password = null,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
var headerFactory = new StreamingZipHeaderFactory(password, new ArchiveEncoding(), null);
|
||||
try
|
||||
{
|
||||
var header = await headerFactory
|
||||
.ReadStreamHeaderAsync(stream)
|
||||
.Where(x => x.ZipHeaderType != ZipHeaderType.Split)
|
||||
.FirstOrDefaultAsync(cancellationToken);
|
||||
if (header is null)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
return Enum.IsDefined(typeof(ZipHeaderType), header.ZipHeaderType);
|
||||
}
|
||||
catch (CryptographicException)
|
||||
{
|
||||
return true;
|
||||
}
|
||||
catch
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
public static IWritableArchive CreateArchive() => new ZipArchive();
|
||||
|
||||
public static IWritableAsyncArchive CreateAsyncArchive() => new ZipArchive();
|
||||
|
||||
public static async ValueTask<bool> IsZipMultiAsync(
|
||||
Stream stream,
|
||||
string? password = null,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
var headerFactory = new StreamingZipHeaderFactory(password, new ArchiveEncoding(), null);
|
||||
try
|
||||
{
|
||||
var header = headerFactory
|
||||
.ReadStreamHeader(stream)
|
||||
.FirstOrDefault(x => x.ZipHeaderType != ZipHeaderType.Split);
|
||||
if (header is null)
|
||||
{
|
||||
if (stream.CanSeek)
|
||||
{
|
||||
var z = new SeekableZipHeaderFactory(password, new ArchiveEncoding());
|
||||
ZipHeader? x = null;
|
||||
await foreach (
|
||||
var h in z.ReadSeekableHeaderAsync(stream)
|
||||
.WithCancellation(cancellationToken)
|
||||
)
|
||||
{
|
||||
x = h;
|
||||
break;
|
||||
}
|
||||
return x?.ZipHeaderType == ZipHeaderType.DirectoryEntry;
|
||||
}
|
||||
else
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return Enum.IsDefined(typeof(ZipHeaderType), header.ZipHeaderType);
|
||||
}
|
||||
catch (CryptographicException)
|
||||
{
|
||||
return true;
|
||||
}
|
||||
catch
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -2,6 +2,8 @@ using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.Common.Zip;
|
||||
using SharpCompress.Common.Zip.Headers;
|
||||
@@ -14,21 +16,12 @@ using SharpCompress.Writers.Zip;
|
||||
|
||||
namespace SharpCompress.Archives.Zip;
|
||||
|
||||
public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
|
||||
public partial class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
|
||||
{
|
||||
private readonly SeekableZipHeaderFactory? headerFactory;
|
||||
|
||||
/// <summary>
|
||||
/// Gets or sets the compression level applied to files added to the archive,
|
||||
/// if the compression method is set to deflate
|
||||
/// </summary>
|
||||
public CompressionLevel DeflateCompressionLevel { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Constructor with a SourceStream able to handle FileInfo and Streams.
|
||||
/// </summary>
|
||||
/// <param name="sourceStream"></param>
|
||||
/// <param name="options"></param>
|
||||
internal ZipArchive(SourceStream sourceStream)
|
||||
: base(ArchiveType.Zip, sourceStream) =>
|
||||
headerFactory = new SeekableZipHeaderFactory(
|
||||
@@ -36,219 +29,39 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
|
||||
sourceStream.ReaderOptions.ArchiveEncoding
|
||||
);
|
||||
|
||||
/// <summary>
|
||||
/// Constructor expects a filepath to an existing file.
|
||||
/// </summary>
|
||||
/// <param name="filePath"></param>
|
||||
/// <param name="readerOptions"></param>
|
||||
public static ZipArchive Open(string filePath, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
filePath.NotNullOrEmpty(nameof(filePath));
|
||||
return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Constructor with a FileInfo object to an existing file.
|
||||
/// </summary>
|
||||
/// <param name="fileInfo"></param>
|
||||
/// <param name="readerOptions"></param>
|
||||
public static ZipArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
fileInfo.NotNull(nameof(fileInfo));
|
||||
return new ZipArchive(
|
||||
new SourceStream(
|
||||
fileInfo,
|
||||
i => ZipArchiveVolumeFactory.GetFilePart(i, fileInfo),
|
||||
readerOptions ?? new ReaderOptions()
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Constructor with all file parts passed in
|
||||
/// </summary>
|
||||
/// <param name="fileInfos"></param>
|
||||
/// <param name="readerOptions"></param>
|
||||
public static ZipArchive Open(
|
||||
IEnumerable<FileInfo> fileInfos,
|
||||
ReaderOptions? readerOptions = null
|
||||
)
|
||||
{
|
||||
fileInfos.NotNull(nameof(fileInfos));
|
||||
var files = fileInfos.ToArray();
|
||||
return new ZipArchive(
|
||||
new SourceStream(
|
||||
files[0],
|
||||
i => i < files.Length ? files[i] : null,
|
||||
readerOptions ?? new ReaderOptions()
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Constructor with all stream parts passed in
|
||||
/// </summary>
|
||||
/// <param name="streams"></param>
|
||||
/// <param name="readerOptions"></param>
|
||||
public static ZipArchive Open(IEnumerable<Stream> streams, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
streams.NotNull(nameof(streams));
|
||||
var strms = streams.ToArray();
|
||||
return new ZipArchive(
|
||||
new SourceStream(
|
||||
strms[0],
|
||||
i => i < strms.Length ? strms[i] : null,
|
||||
readerOptions ?? new ReaderOptions()
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Takes a seekable Stream as a source
|
||||
/// </summary>
|
||||
/// <param name="stream"></param>
|
||||
/// <param name="readerOptions"></param>
|
||||
public static ZipArchive Open(Stream stream, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
stream.NotNull(nameof(stream));
|
||||
|
||||
if (stream is not { CanSeek: true })
|
||||
{
|
||||
throw new ArgumentException("Stream must be seekable", nameof(stream));
|
||||
}
|
||||
|
||||
return new ZipArchive(
|
||||
new SourceStream(stream, i => null, readerOptions ?? new ReaderOptions())
|
||||
);
|
||||
}
|
||||
|
||||
public static bool IsZipFile(
|
||||
string filePath,
|
||||
string? password = null,
|
||||
int bufferSize = ReaderOptions.DefaultBufferSize
|
||||
) => IsZipFile(new FileInfo(filePath), password, bufferSize);
|
||||
|
||||
public static bool IsZipFile(
|
||||
FileInfo fileInfo,
|
||||
string? password = null,
|
||||
int bufferSize = ReaderOptions.DefaultBufferSize
|
||||
)
|
||||
{
|
||||
if (!fileInfo.Exists)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
using Stream stream = fileInfo.OpenRead();
|
||||
return IsZipFile(stream, password, bufferSize);
|
||||
}
|
||||
|
||||
public static bool IsZipFile(
|
||||
Stream stream,
|
||||
string? password = null,
|
||||
int bufferSize = ReaderOptions.DefaultBufferSize
|
||||
)
|
||||
{
|
||||
var headerFactory = new StreamingZipHeaderFactory(password, new ArchiveEncoding(), null);
|
||||
try
|
||||
{
|
||||
if (stream is not SharpCompressStream)
|
||||
{
|
||||
stream = new SharpCompressStream(stream, bufferSize: bufferSize);
|
||||
}
|
||||
|
||||
var header = headerFactory
|
||||
.ReadStreamHeader(stream)
|
||||
.FirstOrDefault(x => x.ZipHeaderType != ZipHeaderType.Split);
|
||||
if (header is null)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
return Enum.IsDefined(typeof(ZipHeaderType), header.ZipHeaderType);
|
||||
}
|
||||
catch (CryptographicException)
|
||||
{
|
||||
return true;
|
||||
}
|
||||
catch
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
public static bool IsZipMulti(
|
||||
Stream stream,
|
||||
string? password = null,
|
||||
int bufferSize = ReaderOptions.DefaultBufferSize
|
||||
)
|
||||
{
|
||||
var headerFactory = new StreamingZipHeaderFactory(password, new ArchiveEncoding(), null);
|
||||
try
|
||||
{
|
||||
if (stream is not SharpCompressStream)
|
||||
{
|
||||
stream = new SharpCompressStream(stream, bufferSize: bufferSize);
|
||||
}
|
||||
|
||||
var header = headerFactory
|
||||
.ReadStreamHeader(stream)
|
||||
.FirstOrDefault(x => x.ZipHeaderType != ZipHeaderType.Split);
|
||||
if (header is null)
|
||||
{
|
||||
if (stream.CanSeek) //could be multipart. Test for central directory - might not be z64 safe
|
||||
{
|
||||
var z = new SeekableZipHeaderFactory(password, new ArchiveEncoding());
|
||||
var x = z.ReadSeekableHeader(stream).FirstOrDefault();
|
||||
return x?.ZipHeaderType == ZipHeaderType.DirectoryEntry;
|
||||
}
|
||||
else
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return Enum.IsDefined(typeof(ZipHeaderType), header.ZipHeaderType);
|
||||
}
|
||||
catch (CryptographicException)
|
||||
{
|
||||
return true;
|
||||
}
|
||||
catch
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
internal ZipArchive()
|
||||
: base(ArchiveType.Zip) { }
|
||||
|
||||
protected override IEnumerable<ZipVolume> LoadVolumes(SourceStream stream)
|
||||
{
|
||||
stream.LoadAllParts(); //request all streams
|
||||
stream.Position = 0;
|
||||
stream.LoadAllParts();
|
||||
//stream.Position = 0;
|
||||
|
||||
var streams = stream.Streams.ToList();
|
||||
var idx = 0;
|
||||
if (streams.Count() > 1) //test part 2 - true = multipart not split
|
||||
if (streams.Count() > 1)
|
||||
{
|
||||
streams[1].Position += 4; //skip the POST_DATA_DESCRIPTOR to prevent an exception
|
||||
var isZip = IsZipFile(streams[1], ReaderOptions.Password, ReaderOptions.BufferSize);
|
||||
streams[1].Position -= 4;
|
||||
//check if second stream is zip header without changing position
|
||||
var headerProbeStream = streams[1];
|
||||
var startPosition = headerProbeStream.Position;
|
||||
headerProbeStream.Position = startPosition + 4;
|
||||
var isZip = IsZipFile(headerProbeStream, ReaderOptions.Password);
|
||||
headerProbeStream.Position = startPosition;
|
||||
if (isZip)
|
||||
{
|
||||
stream.IsVolumes = true;
|
||||
|
||||
var tmp = streams[0]; //arcs as zip, z01 ... swap the zip the end
|
||||
var tmp = streams[0];
|
||||
streams.RemoveAt(0);
|
||||
streams.Add(tmp);
|
||||
|
||||
//streams[0].Position = 4; //skip the POST_DATA_DESCRIPTOR to prevent an exception
|
||||
return streams.Select(a => new ZipVolume(a, ReaderOptions, idx++));
|
||||
}
|
||||
}
|
||||
|
||||
//split mode or single file
|
||||
return new ZipVolume(stream, ReaderOptions, idx++).AsEnumerable();
|
||||
}
|
||||
|
||||
internal ZipArchive()
|
||||
: base(ArchiveType.Zip) { }
|
||||
|
||||
protected override IEnumerable<ZipArchiveEntry> LoadEntries(IEnumerable<ZipVolume> volumes)
|
||||
{
|
||||
var vols = volumes.ToArray();
|
||||
@@ -306,14 +119,24 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
|
||||
)
|
||||
{
|
||||
using var writer = new ZipWriter(stream, new ZipWriterOptions(options));
|
||||
foreach (var entry in oldEntries.Concat(newEntries).Where(x => !x.IsDirectory))
|
||||
foreach (var entry in oldEntries.Concat(newEntries))
|
||||
{
|
||||
using var entryStream = entry.OpenEntryStream();
|
||||
writer.Write(
|
||||
entry.Key.NotNull("Entry Key is null"),
|
||||
entryStream,
|
||||
entry.LastModifiedTime
|
||||
);
|
||||
if (entry.IsDirectory)
|
||||
{
|
||||
writer.WriteDirectory(
|
||||
entry.Key.NotNull("Entry Key is null"),
|
||||
entry.LastModifiedTime
|
||||
);
|
||||
}
|
||||
else
|
||||
{
|
||||
using var entryStream = entry.OpenEntryStream();
|
||||
writer.Write(
|
||||
entry.Key.NotNull("Entry Key is null"),
|
||||
entryStream,
|
||||
entry.LastModifiedTime
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -325,12 +148,22 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
|
||||
bool closeStream
|
||||
) => new ZipWritableArchiveEntry(this, source, filePath, size, modified, closeStream);
|
||||
|
||||
public static ZipArchive Create() => new();
|
||||
protected override ZipArchiveEntry CreateDirectoryEntry(
|
||||
string directoryPath,
|
||||
DateTime? modified
|
||||
) => new ZipWritableArchiveEntry(this, directoryPath, modified);
|
||||
|
||||
protected override IReader CreateReaderForSolidExtraction()
|
||||
{
|
||||
var stream = Volumes.Single().Stream;
|
||||
((IStreamStack)stream).StackSeek(0);
|
||||
return ZipReader.Open(stream, ReaderOptions, Entries);
|
||||
//stream.Position = 0;
|
||||
return ZipReader.OpenReader(stream, ReaderOptions, Entries);
|
||||
}
|
||||
|
||||
protected override ValueTask<IAsyncReader> CreateReaderForSolidExtractionAsync()
|
||||
{
|
||||
var stream = Volumes.Single().Stream;
|
||||
stream.Position = 0;
|
||||
return new((IAsyncReader)ZipReader.OpenReader(stream));
|
||||
}
|
||||
}
|
||||
|
||||
22
src/SharpCompress/Archives/Zip/ZipArchiveEntry.Async.cs
Normal file
22
src/SharpCompress/Archives/Zip/ZipArchiveEntry.Async.cs
Normal file
@@ -0,0 +1,22 @@
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common.Zip;
|
||||
|
||||
namespace SharpCompress.Archives.Zip;
|
||||
|
||||
public partial class ZipArchiveEntry
|
||||
{
|
||||
public async ValueTask<Stream> OpenEntryStreamAsync(
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
var part = Parts.Single();
|
||||
if (part is SeekableZipFilePart seekablePart)
|
||||
{
|
||||
return (await seekablePart.GetCompressedStreamAsync(cancellationToken)).NotNull();
|
||||
}
|
||||
return OpenEntryStream();
|
||||
}
|
||||
}
|
||||
@@ -1,10 +1,12 @@
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common.Zip;
|
||||
|
||||
namespace SharpCompress.Archives.Zip;
|
||||
|
||||
public class ZipArchiveEntry : ZipEntry, IArchiveEntry
|
||||
public partial class ZipArchiveEntry : ZipEntry, IArchiveEntry
|
||||
{
|
||||
internal ZipArchiveEntry(ZipArchive archive, SeekableZipFilePart? part)
|
||||
: base(part) => Archive = archive;
|
||||
|
||||
@@ -14,6 +14,7 @@ internal static class ZipArchiveVolumeFactory
|
||||
//new style .zip, z01.. | .zipx, zx01 - if the numbers go beyond 99 then they use 100 ...1000 etc
|
||||
var m = Regex.Match(part1.Name, @"^(.*\.)(zipx?|zx?[0-9]+)$", RegexOptions.IgnoreCase);
|
||||
if (m.Success)
|
||||
{
|
||||
item = new FileInfo(
|
||||
Path.Combine(
|
||||
part1.DirectoryName!,
|
||||
@@ -24,11 +25,16 @@ internal static class ZipArchiveVolumeFactory
|
||||
)
|
||||
)
|
||||
);
|
||||
}
|
||||
else //split - 001, 002 ...
|
||||
{
|
||||
return ArchiveVolumeFactory.GetFilePart(index, part1);
|
||||
}
|
||||
|
||||
if (item != null && item.Exists)
|
||||
{
|
||||
return item;
|
||||
}
|
||||
|
||||
return null; //no more items
|
||||
}
|
||||
|
||||
@@ -9,7 +9,8 @@ namespace SharpCompress.Archives.Zip;
|
||||
internal class ZipWritableArchiveEntry : ZipArchiveEntry, IWritableArchiveEntry
|
||||
{
|
||||
private readonly bool closeStream;
|
||||
private readonly Stream stream;
|
||||
private readonly Stream? stream;
|
||||
private readonly bool isDirectory;
|
||||
private bool isDisposed;
|
||||
|
||||
internal ZipWritableArchiveEntry(
|
||||
@@ -27,6 +28,22 @@ internal class ZipWritableArchiveEntry : ZipArchiveEntry, IWritableArchiveEntry
|
||||
Size = size;
|
||||
LastModifiedTime = lastModified;
|
||||
this.closeStream = closeStream;
|
||||
isDirectory = false;
|
||||
}
|
||||
|
||||
internal ZipWritableArchiveEntry(
|
||||
ZipArchive archive,
|
||||
string directoryPath,
|
||||
DateTime? lastModified
|
||||
)
|
||||
: base(archive, null)
|
||||
{
|
||||
stream = null;
|
||||
Key = directoryPath;
|
||||
Size = 0;
|
||||
LastModifiedTime = lastModified;
|
||||
closeStream = false;
|
||||
isDirectory = true;
|
||||
}
|
||||
|
||||
public override long Crc => 0;
|
||||
@@ -47,24 +64,28 @@ internal class ZipWritableArchiveEntry : ZipArchiveEntry, IWritableArchiveEntry
|
||||
|
||||
public override bool IsEncrypted => false;
|
||||
|
||||
public override bool IsDirectory => false;
|
||||
public override bool IsDirectory => isDirectory;
|
||||
|
||||
public override bool IsSplitAfter => false;
|
||||
|
||||
internal override IEnumerable<FilePart> Parts => throw new NotImplementedException();
|
||||
|
||||
Stream IWritableArchiveEntry.Stream => stream;
|
||||
Stream IWritableArchiveEntry.Stream => stream ?? Stream.Null;
|
||||
|
||||
public override Stream OpenEntryStream()
|
||||
{
|
||||
if (stream is null)
|
||||
{
|
||||
return Stream.Null;
|
||||
}
|
||||
//ensure new stream is at the start, this could be reset
|
||||
stream.Seek(0, SeekOrigin.Begin);
|
||||
return SharpCompressStream.Create(stream, leaveOpen: true);
|
||||
return new NonDisposingStream(stream);
|
||||
}
|
||||
|
||||
internal override void Close()
|
||||
{
|
||||
if (closeStream && !isDisposed)
|
||||
if (closeStream && !isDisposed && stream is not null)
|
||||
{
|
||||
stream.Dispose();
|
||||
isDisposed = true;
|
||||
|
||||
@@ -1,7 +1,8 @@
|
||||
using System;
|
||||
using System.Runtime.CompilerServices;
|
||||
|
||||
[assembly: CLSCompliant(true)]
|
||||
// CLSCompliant(false) is required because ZStandard integration uses unsafe code
|
||||
[assembly: CLSCompliant(false)]
|
||||
[assembly: InternalsVisibleTo(
|
||||
"SharpCompress.Test,PublicKey=0024000004800000940000000602000000240000525341310004000001000100158bebf1433f76dffc356733c138babea7a47536c65ed8009b16372c6f4edbb20554db74a62687f56b97c20a6ce8c4b123280279e33c894e7b3aa93ab3c573656fde4db576cfe07dba09619ead26375b25d2c4a8e43f7be257d712b0dd2eb546f67adb09281338618a58ac834fc038dd7e2740a7ab3591826252e4f4516306dc"
|
||||
)]
|
||||
|
||||
65
src/SharpCompress/Common/Ace/AceCrc.cs
Normal file
65
src/SharpCompress/Common/Ace/AceCrc.cs
Normal file
@@ -0,0 +1,65 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Threading.Tasks;
|
||||
|
||||
namespace SharpCompress.Common.Ace
|
||||
{
|
||||
public class AceCrc
|
||||
{
|
||||
// CRC-32 lookup table (standard polynomial 0xEDB88320, reflected)
|
||||
private static readonly uint[] Crc32Table = GenerateTable();
|
||||
|
||||
private static uint[] GenerateTable()
|
||||
{
|
||||
var table = new uint[256];
|
||||
|
||||
for (int i = 0; i < 256; i++)
|
||||
{
|
||||
uint crc = (uint)i;
|
||||
|
||||
for (int j = 0; j < 8; j++)
|
||||
{
|
||||
if ((crc & 1) != 0)
|
||||
{
|
||||
crc = (crc >> 1) ^ 0xEDB88320u;
|
||||
}
|
||||
else
|
||||
{
|
||||
crc >>= 1;
|
||||
}
|
||||
}
|
||||
|
||||
table[i] = crc;
|
||||
}
|
||||
|
||||
return table;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Calculate ACE CRC-32 checksum.
|
||||
/// ACE CRC-32 uses standard CRC-32 polynomial (0xEDB88320, reflected)
|
||||
/// with init=0xFFFFFFFF but NO final XOR.
|
||||
/// </summary>
|
||||
public static uint AceCrc32(ReadOnlySpan<byte> data)
|
||||
{
|
||||
uint crc = 0xFFFFFFFFu;
|
||||
|
||||
foreach (byte b in data)
|
||||
{
|
||||
crc = (crc >> 8) ^ Crc32Table[(crc ^ b) & 0xFF];
|
||||
}
|
||||
|
||||
return crc; // No final XOR for ACE
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// ACE CRC-16 is the lower 16 bits of the ACE CRC-32.
|
||||
/// </summary>
|
||||
public static ushort AceCrc16(ReadOnlySpan<byte> data)
|
||||
{
|
||||
return (ushort)(AceCrc32(data) & 0xFFFF);
|
||||
}
|
||||
}
|
||||
}
|
||||
68
src/SharpCompress/Common/Ace/AceEntry.cs
Normal file
68
src/SharpCompress/Common/Ace/AceEntry.cs
Normal file
@@ -0,0 +1,68 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common.Ace.Headers;
|
||||
|
||||
namespace SharpCompress.Common.Ace
|
||||
{
|
||||
public class AceEntry : Entry
|
||||
{
|
||||
private readonly AceFilePart _filePart;
|
||||
|
||||
internal AceEntry(AceFilePart filePart)
|
||||
{
|
||||
_filePart = filePart;
|
||||
}
|
||||
|
||||
public override long Crc
|
||||
{
|
||||
get
|
||||
{
|
||||
if (_filePart == null)
|
||||
{
|
||||
return 0;
|
||||
}
|
||||
return _filePart.Header.Crc32;
|
||||
}
|
||||
}
|
||||
|
||||
public override string? Key => _filePart?.Header.Filename;
|
||||
|
||||
public override string? LinkTarget => null;
|
||||
|
||||
public override long CompressedSize => _filePart?.Header.PackedSize ?? 0;
|
||||
|
||||
public override CompressionType CompressionType
|
||||
{
|
||||
get
|
||||
{
|
||||
if (_filePart.Header.CompressionType == Headers.CompressionType.Stored)
|
||||
{
|
||||
return CompressionType.None;
|
||||
}
|
||||
return CompressionType.AceLZ77;
|
||||
}
|
||||
}
|
||||
|
||||
public override long Size => _filePart?.Header.OriginalSize ?? 0;
|
||||
|
||||
public override DateTime? LastModifiedTime => _filePart.Header.DateTime;
|
||||
|
||||
public override DateTime? CreatedTime => null;
|
||||
|
||||
public override DateTime? LastAccessedTime => null;
|
||||
|
||||
public override DateTime? ArchivedTime => null;
|
||||
|
||||
public override bool IsEncrypted => _filePart.Header.IsFileEncrypted;
|
||||
|
||||
public override bool IsDirectory => _filePart.Header.IsDirectory;
|
||||
|
||||
public override bool IsSplitAfter => false;
|
||||
|
||||
internal override IEnumerable<FilePart> Parts => _filePart.Empty();
|
||||
}
|
||||
}
|
||||
52
src/SharpCompress/Common/Ace/AceFilePart.cs
Normal file
52
src/SharpCompress/Common/Ace/AceFilePart.cs
Normal file
@@ -0,0 +1,52 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common.Ace.Headers;
|
||||
using SharpCompress.IO;
|
||||
|
||||
namespace SharpCompress.Common.Ace
|
||||
{
|
||||
public class AceFilePart : FilePart
|
||||
{
|
||||
private readonly Stream _stream;
|
||||
internal AceFileHeader Header { get; set; }
|
||||
|
||||
internal AceFilePart(AceFileHeader localAceHeader, Stream seekableStream)
|
||||
: base(localAceHeader.ArchiveEncoding)
|
||||
{
|
||||
_stream = seekableStream;
|
||||
Header = localAceHeader;
|
||||
}
|
||||
|
||||
internal override string? FilePartName => Header.Filename;
|
||||
|
||||
internal override Stream GetCompressedStream()
|
||||
{
|
||||
if (_stream != null)
|
||||
{
|
||||
Stream compressedStream;
|
||||
switch (Header.CompressionType)
|
||||
{
|
||||
case Headers.CompressionType.Stored:
|
||||
compressedStream = new ReadOnlySubStream(
|
||||
_stream,
|
||||
Header.DataStartPosition,
|
||||
Header.PackedSize
|
||||
);
|
||||
break;
|
||||
default:
|
||||
throw new NotSupportedException(
|
||||
"CompressionMethod: " + Header.CompressionQuality
|
||||
);
|
||||
}
|
||||
return compressedStream;
|
||||
}
|
||||
return _stream.NotNull();
|
||||
}
|
||||
|
||||
internal override Stream? GetRawStream() => _stream;
|
||||
}
|
||||
}
|
||||
35
src/SharpCompress/Common/Ace/AceVolume.cs
Normal file
35
src/SharpCompress/Common/Ace/AceVolume.cs
Normal file
@@ -0,0 +1,35 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common.Arj;
|
||||
using SharpCompress.Readers;
|
||||
|
||||
namespace SharpCompress.Common.Ace
|
||||
{
|
||||
public class AceVolume : Volume
|
||||
{
|
||||
public AceVolume(Stream stream, ReaderOptions readerOptions, int index = 0)
|
||||
: base(stream, readerOptions, index) { }
|
||||
|
||||
public override bool IsFirstVolume
|
||||
{
|
||||
get { return true; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// ArjArchive is part of a multi-part archive.
|
||||
/// </summary>
|
||||
public override bool IsMultiVolume
|
||||
{
|
||||
get { return false; }
|
||||
}
|
||||
|
||||
internal IEnumerable<AceFilePart> GetVolumeFileParts()
|
||||
{
|
||||
return new List<AceFilePart>();
|
||||
}
|
||||
}
|
||||
}
|
||||
111
src/SharpCompress/Common/Ace/Headers/AceFileHeader.Async.cs
Normal file
111
src/SharpCompress/Common/Ace/Headers/AceFileHeader.Async.cs
Normal file
@@ -0,0 +1,111 @@
|
||||
using System;
|
||||
using System.IO;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common.Arc;
|
||||
|
||||
namespace SharpCompress.Common.Ace.Headers;
|
||||
|
||||
public sealed partial class AceFileHeader
|
||||
{
|
||||
/// <summary>
|
||||
/// Asynchronously reads the next file entry header from the stream.
|
||||
/// Returns null if no more entries or end of archive.
|
||||
/// Supports both ACE 1.0 and ACE 2.0 formats.
|
||||
/// </summary>
|
||||
public override async ValueTask<AceHeader?> ReadAsync(
|
||||
Stream stream,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
var headerData = await ReadHeaderAsync(stream, cancellationToken);
|
||||
if (headerData.Length == 0)
|
||||
{
|
||||
return null;
|
||||
}
|
||||
int offset = 0;
|
||||
|
||||
// Header type (1 byte)
|
||||
HeaderType = headerData[offset++];
|
||||
|
||||
// Skip recovery record headers (ACE 2.0 feature)
|
||||
if (HeaderType == (byte)SharpCompress.Common.Ace.Headers.AceHeaderType.RECOVERY32)
|
||||
{
|
||||
// Skip to next header
|
||||
return null;
|
||||
}
|
||||
|
||||
if (HeaderType != (byte)SharpCompress.Common.Ace.Headers.AceHeaderType.FILE)
|
||||
{
|
||||
// Unknown header type - skip
|
||||
return null;
|
||||
}
|
||||
|
||||
// Header flags (2 bytes)
|
||||
HeaderFlags = BitConverter.ToUInt16(headerData, offset);
|
||||
offset += 2;
|
||||
|
||||
// Packed size (4 bytes)
|
||||
PackedSize = BitConverter.ToUInt32(headerData, offset);
|
||||
offset += 4;
|
||||
|
||||
// Original size (4 bytes)
|
||||
OriginalSize = BitConverter.ToUInt32(headerData, offset);
|
||||
offset += 4;
|
||||
|
||||
// File date/time in DOS format (4 bytes)
|
||||
var dosDateTime = BitConverter.ToUInt32(headerData, offset);
|
||||
DateTime = ConvertDosDateTime(dosDateTime);
|
||||
offset += 4;
|
||||
|
||||
// File attributes (4 bytes)
|
||||
Attributes = (int)BitConverter.ToUInt32(headerData, offset);
|
||||
offset += 4;
|
||||
|
||||
// CRC32 (4 bytes)
|
||||
Crc32 = BitConverter.ToUInt32(headerData, offset);
|
||||
offset += 4;
|
||||
|
||||
// Compression type (1 byte)
|
||||
byte compressionType = headerData[offset++];
|
||||
CompressionType = GetCompressionType(compressionType);
|
||||
|
||||
// Compression quality/parameter (1 byte)
|
||||
byte compressionQuality = headerData[offset++];
|
||||
CompressionQuality = GetCompressionQuality(compressionQuality);
|
||||
|
||||
// Parameters (2 bytes)
|
||||
Parameters = BitConverter.ToUInt16(headerData, offset);
|
||||
offset += 2;
|
||||
|
||||
// Reserved (2 bytes) - skip
|
||||
offset += 2;
|
||||
|
||||
// Filename length (2 bytes)
|
||||
var filenameLength = BitConverter.ToUInt16(headerData, offset);
|
||||
offset += 2;
|
||||
|
||||
// Filename
|
||||
if (offset + filenameLength <= headerData.Length)
|
||||
{
|
||||
Filename = ArchiveEncoding.Decode(headerData, offset, filenameLength);
|
||||
offset += filenameLength;
|
||||
}
|
||||
|
||||
// Handle comment if present
|
||||
if ((HeaderFlags & SharpCompress.Common.Ace.Headers.HeaderFlags.COMMENT) != 0)
|
||||
{
|
||||
// Comment length (2 bytes)
|
||||
if (offset + 2 <= headerData.Length)
|
||||
{
|
||||
ushort commentLength = BitConverter.ToUInt16(headerData, offset);
|
||||
offset += 2 + commentLength; // Skip comment
|
||||
}
|
||||
}
|
||||
|
||||
// Store the data start position
|
||||
DataStartPosition = stream.Position;
|
||||
|
||||
return this;
|
||||
}
|
||||
}
|
||||
175
src/SharpCompress/Common/Ace/Headers/AceFileHeader.cs
Normal file
175
src/SharpCompress/Common/Ace/Headers/AceFileHeader.cs
Normal file
@@ -0,0 +1,175 @@
|
||||
using System;
|
||||
using System.Buffers.Binary;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using System.Xml.Linq;
|
||||
using SharpCompress.Common.Arc;
|
||||
|
||||
namespace SharpCompress.Common.Ace.Headers
|
||||
{
|
||||
/// <summary>
|
||||
/// ACE file entry header
|
||||
/// </summary>
|
||||
public sealed partial class AceFileHeader : AceHeader
|
||||
{
|
||||
public long DataStartPosition { get; private set; }
|
||||
public long PackedSize { get; set; }
|
||||
public long OriginalSize { get; set; }
|
||||
public DateTime DateTime { get; set; }
|
||||
public int Attributes { get; set; }
|
||||
public uint Crc32 { get; set; }
|
||||
public CompressionType CompressionType { get; set; }
|
||||
public CompressionQuality CompressionQuality { get; set; }
|
||||
public ushort Parameters { get; set; }
|
||||
public string Filename { get; set; } = string.Empty;
|
||||
public List<byte> Comment { get; set; } = new();
|
||||
|
||||
/// <summary>
|
||||
/// File data offset in the archive
|
||||
/// </summary>
|
||||
public ulong DataOffset { get; set; }
|
||||
|
||||
public bool IsDirectory => (Attributes & 0x10) != 0;
|
||||
|
||||
public bool IsContinuedFromPrev =>
|
||||
(HeaderFlags & SharpCompress.Common.Ace.Headers.HeaderFlags.CONTINUED_PREV) != 0;
|
||||
|
||||
public bool IsContinuedToNext =>
|
||||
(HeaderFlags & SharpCompress.Common.Ace.Headers.HeaderFlags.CONTINUED_NEXT) != 0;
|
||||
|
||||
public int DictionarySize
|
||||
{
|
||||
get
|
||||
{
|
||||
int bits = Parameters & 0x0F;
|
||||
return bits < 10 ? 1024 : 1 << bits;
|
||||
}
|
||||
}
|
||||
|
||||
public AceFileHeader(IArchiveEncoding archiveEncoding)
|
||||
: base(archiveEncoding, AceHeaderType.FILE) { }
|
||||
|
||||
/// <summary>
|
||||
/// Reads the next file entry header from the stream.
|
||||
/// Returns null if no more entries or end of archive.
|
||||
/// Supports both ACE 1.0 and ACE 2.0 formats.
|
||||
/// </summary>
|
||||
public override AceHeader? Read(Stream stream)
|
||||
{
|
||||
var headerData = ReadHeader(stream);
|
||||
if (headerData.Length == 0)
|
||||
{
|
||||
return null;
|
||||
}
|
||||
int offset = 0;
|
||||
|
||||
// Header type (1 byte)
|
||||
HeaderType = headerData[offset++];
|
||||
|
||||
// Skip recovery record headers (ACE 2.0 feature)
|
||||
if (HeaderType == (byte)SharpCompress.Common.Ace.Headers.AceHeaderType.RECOVERY32)
|
||||
{
|
||||
// Skip to next header
|
||||
return null;
|
||||
}
|
||||
|
||||
if (HeaderType != (byte)SharpCompress.Common.Ace.Headers.AceHeaderType.FILE)
|
||||
{
|
||||
// Unknown header type - skip
|
||||
return null;
|
||||
}
|
||||
|
||||
// Header flags (2 bytes)
|
||||
HeaderFlags = BitConverter.ToUInt16(headerData, offset);
|
||||
offset += 2;
|
||||
|
||||
// Packed size (4 bytes)
|
||||
PackedSize = BitConverter.ToUInt32(headerData, offset);
|
||||
offset += 4;
|
||||
|
||||
// Original size (4 bytes)
|
||||
OriginalSize = BitConverter.ToUInt32(headerData, offset);
|
||||
offset += 4;
|
||||
|
||||
// File date/time in DOS format (4 bytes)
|
||||
var dosDateTime = BitConverter.ToUInt32(headerData, offset);
|
||||
DateTime = ConvertDosDateTime(dosDateTime);
|
||||
offset += 4;
|
||||
|
||||
// File attributes (4 bytes)
|
||||
Attributes = (int)BitConverter.ToUInt32(headerData, offset);
|
||||
offset += 4;
|
||||
|
||||
// CRC32 (4 bytes)
|
||||
Crc32 = BitConverter.ToUInt32(headerData, offset);
|
||||
offset += 4;
|
||||
|
||||
// Compression type (1 byte)
|
||||
byte compressionType = headerData[offset++];
|
||||
CompressionType = GetCompressionType(compressionType);
|
||||
|
||||
// Compression quality/parameter (1 byte)
|
||||
byte compressionQuality = headerData[offset++];
|
||||
CompressionQuality = GetCompressionQuality(compressionQuality);
|
||||
|
||||
// Parameters (2 bytes)
|
||||
Parameters = BitConverter.ToUInt16(headerData, offset);
|
||||
offset += 2;
|
||||
|
||||
// Reserved (2 bytes) - skip
|
||||
offset += 2;
|
||||
|
||||
// Filename length (2 bytes)
|
||||
var filenameLength = BitConverter.ToUInt16(headerData, offset);
|
||||
offset += 2;
|
||||
|
||||
// Filename
|
||||
if (offset + filenameLength <= headerData.Length)
|
||||
{
|
||||
Filename = ArchiveEncoding.Decode(headerData, offset, filenameLength);
|
||||
offset += filenameLength;
|
||||
}
|
||||
|
||||
// Handle comment if present
|
||||
if ((HeaderFlags & SharpCompress.Common.Ace.Headers.HeaderFlags.COMMENT) != 0)
|
||||
{
|
||||
// Comment length (2 bytes)
|
||||
if (offset + 2 <= headerData.Length)
|
||||
{
|
||||
ushort commentLength = BitConverter.ToUInt16(headerData, offset);
|
||||
offset += 2 + commentLength; // Skip comment
|
||||
}
|
||||
}
|
||||
|
||||
// Store the data start position
|
||||
DataStartPosition = stream.Position;
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
// ReadAsync moved to AceFileHeader.Async.cs
|
||||
|
||||
public CompressionType GetCompressionType(byte value) =>
|
||||
value switch
|
||||
{
|
||||
0 => CompressionType.Stored,
|
||||
1 => CompressionType.Lz77,
|
||||
2 => CompressionType.Blocked,
|
||||
_ => CompressionType.Unknown,
|
||||
};
|
||||
|
||||
public CompressionQuality GetCompressionQuality(byte value) =>
|
||||
value switch
|
||||
{
|
||||
0 => CompressionQuality.None,
|
||||
1 => CompressionQuality.Fastest,
|
||||
2 => CompressionQuality.Fast,
|
||||
3 => CompressionQuality.Normal,
|
||||
4 => CompressionQuality.Good,
|
||||
5 => CompressionQuality.Best,
|
||||
_ => CompressionQuality.Unknown,
|
||||
};
|
||||
}
|
||||
}
|
||||
69
src/SharpCompress/Common/Ace/Headers/AceHeader.Async.cs
Normal file
69
src/SharpCompress/Common/Ace/Headers/AceHeader.Async.cs
Normal file
@@ -0,0 +1,69 @@
|
||||
using System;
|
||||
using System.IO;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
|
||||
namespace SharpCompress.Common.Ace.Headers;
|
||||
|
||||
public abstract partial class AceHeader
|
||||
{
|
||||
public abstract ValueTask<AceHeader?> ReadAsync(
|
||||
Stream reader,
|
||||
CancellationToken cancellationToken = default
|
||||
);
|
||||
|
||||
public async ValueTask<byte[]> ReadHeaderAsync(
|
||||
Stream stream,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
// Read header CRC (2 bytes) and header size (2 bytes)
|
||||
var headerBytes = new byte[4];
|
||||
if (!await stream.ReadFullyAsync(headerBytes, 0, 4, cancellationToken))
|
||||
{
|
||||
return Array.Empty<byte>();
|
||||
}
|
||||
|
||||
HeaderCrc = BitConverter.ToUInt16(headerBytes, 0); // CRC for validation
|
||||
HeaderSize = BitConverter.ToUInt16(headerBytes, 2);
|
||||
if (HeaderSize == 0)
|
||||
{
|
||||
return Array.Empty<byte>();
|
||||
}
|
||||
|
||||
// Read the header data
|
||||
var body = new byte[HeaderSize];
|
||||
if (!await stream.ReadFullyAsync(body, 0, HeaderSize, cancellationToken))
|
||||
{
|
||||
return Array.Empty<byte>();
|
||||
}
|
||||
|
||||
// Verify crc
|
||||
var checksum = AceCrc.AceCrc16(body);
|
||||
if (checksum != HeaderCrc)
|
||||
{
|
||||
throw new InvalidDataException("Header checksum is invalid");
|
||||
}
|
||||
return body;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Asynchronously checks if the stream is an ACE archive
|
||||
/// </summary>
|
||||
/// <param name="stream">The stream to read from</param>
|
||||
/// <param name="cancellationToken">Cancellation token</param>
|
||||
/// <returns>True if the stream is an ACE archive, false otherwise</returns>
|
||||
public static async ValueTask<bool> IsArchiveAsync(
|
||||
Stream stream,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
var bytes = new byte[14];
|
||||
if (!await stream.ReadFullyAsync(bytes, 0, 14, cancellationToken))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
return CheckMagicBytes(bytes, 7);
|
||||
}
|
||||
}
|
||||
157
src/SharpCompress/Common/Ace/Headers/AceHeader.cs
Normal file
157
src/SharpCompress/Common/Ace/Headers/AceHeader.cs
Normal file
@@ -0,0 +1,157 @@
|
||||
using System;
|
||||
using System.IO;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common.Arj.Headers;
|
||||
using SharpCompress.Crypto;
|
||||
|
||||
namespace SharpCompress.Common.Ace.Headers
|
||||
{
|
||||
/// <summary>
|
||||
/// Header type constants
|
||||
/// </summary>
|
||||
public enum AceHeaderType
|
||||
{
|
||||
MAIN = 0,
|
||||
FILE = 1,
|
||||
RECOVERY32 = 2,
|
||||
RECOVERY64A = 3,
|
||||
RECOVERY64B = 4,
|
||||
}
|
||||
|
||||
public abstract partial class AceHeader
|
||||
{
|
||||
// ACE signature: bytes at offset 7 should be "**ACE**"
|
||||
private static readonly byte[] AceSignature =
|
||||
[
|
||||
(byte)'*',
|
||||
(byte)'*',
|
||||
(byte)'A',
|
||||
(byte)'C',
|
||||
(byte)'E',
|
||||
(byte)'*',
|
||||
(byte)'*',
|
||||
];
|
||||
|
||||
public AceHeader(IArchiveEncoding archiveEncoding, AceHeaderType type)
|
||||
{
|
||||
AceHeaderType = type;
|
||||
ArchiveEncoding = archiveEncoding;
|
||||
}
|
||||
|
||||
public IArchiveEncoding ArchiveEncoding { get; }
|
||||
public AceHeaderType AceHeaderType { get; }
|
||||
|
||||
public ushort HeaderFlags { get; set; }
|
||||
public ushort HeaderCrc { get; set; }
|
||||
public ushort HeaderSize { get; set; }
|
||||
public byte HeaderType { get; set; }
|
||||
|
||||
public bool IsFileEncrypted =>
|
||||
(HeaderFlags & SharpCompress.Common.Ace.Headers.HeaderFlags.FILE_ENCRYPTED) != 0;
|
||||
public bool Is64Bit =>
|
||||
(HeaderFlags & SharpCompress.Common.Ace.Headers.HeaderFlags.MEMORY_64BIT) != 0;
|
||||
|
||||
public bool IsSolid =>
|
||||
(HeaderFlags & SharpCompress.Common.Ace.Headers.HeaderFlags.SOLID_MAIN) != 0;
|
||||
|
||||
public bool IsMultiVolume =>
|
||||
(HeaderFlags & SharpCompress.Common.Ace.Headers.HeaderFlags.MULTIVOLUME) != 0;
|
||||
|
||||
public abstract AceHeader? Read(Stream reader);
|
||||
|
||||
// Async methods moved to AceHeader.Async.cs
|
||||
|
||||
public byte[] ReadHeader(Stream stream)
|
||||
{
|
||||
// Read header CRC (2 bytes) and header size (2 bytes)
|
||||
var headerBytes = new byte[4];
|
||||
if (!stream.ReadFully(headerBytes))
|
||||
{
|
||||
return Array.Empty<byte>();
|
||||
}
|
||||
|
||||
HeaderCrc = BitConverter.ToUInt16(headerBytes, 0); // CRC for validation
|
||||
HeaderSize = BitConverter.ToUInt16(headerBytes, 2);
|
||||
if (HeaderSize == 0)
|
||||
{
|
||||
return Array.Empty<byte>();
|
||||
}
|
||||
|
||||
// Read the header data
|
||||
var body = new byte[HeaderSize];
|
||||
if (!stream.ReadFully(body))
|
||||
{
|
||||
return Array.Empty<byte>();
|
||||
}
|
||||
|
||||
// Verify crc
|
||||
var checksum = AceCrc.AceCrc16(body);
|
||||
if (checksum != HeaderCrc)
|
||||
{
|
||||
throw new InvalidDataException("Header checksum is invalid");
|
||||
}
|
||||
return body;
|
||||
}
|
||||
|
||||
public static bool IsArchive(Stream stream)
|
||||
{
|
||||
// ACE files have a specific signature
|
||||
// First two bytes are typically 0x60 0xEA (signature bytes)
|
||||
// At offset 7, there should be "**ACE**" (7 bytes)
|
||||
var bytes = new byte[14];
|
||||
if (stream.Read(bytes, 0, 14) != 14)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
// Check for "**ACE**" at offset 7
|
||||
return CheckMagicBytes(bytes, 7);
|
||||
}
|
||||
|
||||
protected static bool CheckMagicBytes(byte[] headerBytes, int offset)
|
||||
{
|
||||
// Check for "**ACE**" at specified offset
|
||||
for (int i = 0; i < AceSignature.Length; i++)
|
||||
{
|
||||
if (headerBytes[offset + i] != AceSignature[i])
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
protected DateTime ConvertDosDateTime(uint dosDateTime)
|
||||
{
|
||||
try
|
||||
{
|
||||
int second = (int)(dosDateTime & 0x1F) * 2;
|
||||
int minute = (int)((dosDateTime >> 5) & 0x3F);
|
||||
int hour = (int)((dosDateTime >> 11) & 0x1F);
|
||||
int day = (int)((dosDateTime >> 16) & 0x1F);
|
||||
int month = (int)((dosDateTime >> 21) & 0x0F);
|
||||
int year = (int)((dosDateTime >> 25) & 0x7F) + 1980;
|
||||
|
||||
if (
|
||||
day < 1
|
||||
|| day > 31
|
||||
|| month < 1
|
||||
|| month > 12
|
||||
|| hour > 23
|
||||
|| minute > 59
|
||||
|| second > 59
|
||||
)
|
||||
{
|
||||
return DateTime.MinValue;
|
||||
}
|
||||
|
||||
return new DateTime(year, month, day, hour, minute, second);
|
||||
}
|
||||
catch
|
||||
{
|
||||
return DateTime.MinValue;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
83
src/SharpCompress/Common/Ace/Headers/AceMainHeader.Async.cs
Normal file
83
src/SharpCompress/Common/Ace/Headers/AceMainHeader.Async.cs
Normal file
@@ -0,0 +1,83 @@
|
||||
using System;
|
||||
using System.Buffers.Binary;
|
||||
using System.IO;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Crypto;
|
||||
|
||||
namespace SharpCompress.Common.Ace.Headers;
|
||||
|
||||
public sealed partial class AceMainHeader
|
||||
{
|
||||
/// <summary>
|
||||
/// Asynchronously reads the main archive header from the stream.
|
||||
/// Returns header if this is a valid ACE archive.
|
||||
/// Supports both ACE 1.0 and ACE 2.0 formats.
|
||||
/// </summary>
|
||||
public override async ValueTask<AceHeader?> ReadAsync(
|
||||
Stream stream,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
var headerData = await ReadHeaderAsync(stream, cancellationToken);
|
||||
if (headerData.Length == 0)
|
||||
{
|
||||
return null;
|
||||
}
|
||||
int offset = 0;
|
||||
|
||||
// Header type should be 0 for main header
|
||||
if (headerData[offset++] != HeaderType)
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
// Header flags (2 bytes)
|
||||
HeaderFlags = BitConverter.ToUInt16(headerData, offset);
|
||||
offset += 2;
|
||||
|
||||
// Skip signature "**ACE**" (7 bytes)
|
||||
if (!CheckMagicBytes(headerData, offset))
|
||||
{
|
||||
throw new InvalidDataException("Invalid ACE archive signature.");
|
||||
}
|
||||
offset += 7;
|
||||
|
||||
// ACE version (1 byte) - 10 for ACE 1.0, 20 for ACE 2.0
|
||||
AceVersion = headerData[offset++];
|
||||
ExtractVersion = headerData[offset++];
|
||||
|
||||
// Host OS (1 byte)
|
||||
if (offset < headerData.Length)
|
||||
{
|
||||
var hostOsByte = headerData[offset++];
|
||||
HostOS = hostOsByte <= 11 ? (HostOS)hostOsByte : HostOS.Unknown;
|
||||
}
|
||||
// Volume number (1 byte)
|
||||
VolumeNumber = headerData[offset++];
|
||||
|
||||
// Creation date/time (4 bytes)
|
||||
var dosDateTime = BitConverter.ToUInt32(headerData, offset);
|
||||
DateTime = ConvertDosDateTime(dosDateTime);
|
||||
offset += 4;
|
||||
|
||||
// Reserved fields (8 bytes)
|
||||
if (offset + 8 <= headerData.Length)
|
||||
{
|
||||
offset += 8;
|
||||
}
|
||||
|
||||
// Skip additional fields based on flags
|
||||
// Handle comment if present
|
||||
if ((HeaderFlags & SharpCompress.Common.Ace.Headers.HeaderFlags.COMMENT) != 0)
|
||||
{
|
||||
if (offset + 2 <= headerData.Length)
|
||||
{
|
||||
ushort commentLength = BitConverter.ToUInt16(headerData, offset);
|
||||
offset += 2 + commentLength;
|
||||
}
|
||||
}
|
||||
|
||||
return this;
|
||||
}
|
||||
}
|
||||
101
src/SharpCompress/Common/Ace/Headers/AceMainHeader.cs
Normal file
101
src/SharpCompress/Common/Ace/Headers/AceMainHeader.cs
Normal file
@@ -0,0 +1,101 @@
|
||||
using System;
|
||||
using System.Buffers.Binary;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common.Ace.Headers;
|
||||
using SharpCompress.Common.Zip.Headers;
|
||||
using SharpCompress.Crypto;
|
||||
|
||||
namespace SharpCompress.Common.Ace.Headers
|
||||
{
|
||||
/// <summary>
|
||||
/// ACE main archive header
|
||||
/// </summary>
|
||||
public sealed partial class AceMainHeader : AceHeader
|
||||
{
|
||||
public byte ExtractVersion { get; set; }
|
||||
public byte CreatorVersion { get; set; }
|
||||
public HostOS HostOS { get; set; }
|
||||
public byte VolumeNumber { get; set; }
|
||||
public DateTime DateTime { get; set; }
|
||||
public string Advert { get; set; } = string.Empty;
|
||||
public List<byte> Comment { get; set; } = new();
|
||||
public byte AceVersion { get; private set; }
|
||||
|
||||
public AceMainHeader(IArchiveEncoding archiveEncoding)
|
||||
: base(archiveEncoding, AceHeaderType.MAIN) { }
|
||||
|
||||
/// <summary>
|
||||
/// Reads the main archive header from the stream.
|
||||
/// Returns header if this is a valid ACE archive.
|
||||
/// Supports both ACE 1.0 and ACE 2.0 formats.
|
||||
/// </summary>
|
||||
public override AceHeader? Read(Stream stream)
|
||||
{
|
||||
var headerData = ReadHeader(stream);
|
||||
if (headerData.Length == 0)
|
||||
{
|
||||
return null;
|
||||
}
|
||||
int offset = 0;
|
||||
|
||||
// Header type should be 0 for main header
|
||||
if (headerData[offset++] != HeaderType)
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
// Header flags (2 bytes)
|
||||
HeaderFlags = BitConverter.ToUInt16(headerData, offset);
|
||||
offset += 2;
|
||||
|
||||
// Skip signature "**ACE**" (7 bytes)
|
||||
if (!CheckMagicBytes(headerData, offset))
|
||||
{
|
||||
throw new InvalidDataException("Invalid ACE archive signature.");
|
||||
}
|
||||
offset += 7;
|
||||
|
||||
// ACE version (1 byte) - 10 for ACE 1.0, 20 for ACE 2.0
|
||||
AceVersion = headerData[offset++];
|
||||
ExtractVersion = headerData[offset++];
|
||||
|
||||
// Host OS (1 byte)
|
||||
if (offset < headerData.Length)
|
||||
{
|
||||
var hostOsByte = headerData[offset++];
|
||||
HostOS = hostOsByte <= 11 ? (HostOS)hostOsByte : HostOS.Unknown;
|
||||
}
|
||||
// Volume number (1 byte)
|
||||
VolumeNumber = headerData[offset++];
|
||||
|
||||
// Creation date/time (4 bytes)
|
||||
var dosDateTime = BitConverter.ToUInt32(headerData, offset);
|
||||
DateTime = ConvertDosDateTime(dosDateTime);
|
||||
offset += 4;
|
||||
|
||||
// Reserved fields (8 bytes)
|
||||
if (offset + 8 <= headerData.Length)
|
||||
{
|
||||
offset += 8;
|
||||
}
|
||||
|
||||
// Skip additional fields based on flags
|
||||
// Handle comment if present
|
||||
if ((HeaderFlags & SharpCompress.Common.Ace.Headers.HeaderFlags.COMMENT) != 0)
|
||||
{
|
||||
if (offset + 2 <= headerData.Length)
|
||||
{
|
||||
ushort commentLength = BitConverter.ToUInt16(headerData, offset);
|
||||
offset += 2 + commentLength;
|
||||
}
|
||||
}
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
// ReadAsync moved to AceMainHeader.Async.cs
|
||||
}
|
||||
}
|
||||
16
src/SharpCompress/Common/Ace/Headers/CompressionQuality.cs
Normal file
16
src/SharpCompress/Common/Ace/Headers/CompressionQuality.cs
Normal file
@@ -0,0 +1,16 @@
|
||||
namespace SharpCompress.Common.Ace.Headers
|
||||
{
|
||||
/// <summary>
|
||||
/// Compression quality
|
||||
/// </summary>
|
||||
public enum CompressionQuality
|
||||
{
|
||||
None,
|
||||
Fastest,
|
||||
Fast,
|
||||
Normal,
|
||||
Good,
|
||||
Best,
|
||||
Unknown,
|
||||
}
|
||||
}
|
||||
13
src/SharpCompress/Common/Ace/Headers/CompressionType.cs
Normal file
13
src/SharpCompress/Common/Ace/Headers/CompressionType.cs
Normal file
@@ -0,0 +1,13 @@
|
||||
namespace SharpCompress.Common.Ace.Headers
|
||||
{
|
||||
/// <summary>
|
||||
/// Compression types
|
||||
/// </summary>
|
||||
public enum CompressionType
|
||||
{
|
||||
Stored,
|
||||
Lz77,
|
||||
Blocked,
|
||||
Unknown,
|
||||
}
|
||||
}
|
||||
33
src/SharpCompress/Common/Ace/Headers/HeaderFlags.cs
Normal file
33
src/SharpCompress/Common/Ace/Headers/HeaderFlags.cs
Normal file
@@ -0,0 +1,33 @@
|
||||
namespace SharpCompress.Common.Ace.Headers
|
||||
{
|
||||
/// <summary>
|
||||
/// Header flags (main + file, overlapping meanings)
|
||||
/// </summary>
|
||||
public static class HeaderFlags
|
||||
{
|
||||
// Shared / low bits
|
||||
public const ushort ADDSIZE = 0x0001; // extra size field present
|
||||
public const ushort COMMENT = 0x0002; // comment present
|
||||
public const ushort MEMORY_64BIT = 0x0004;
|
||||
public const ushort AV_STRING = 0x0008; // AV string present
|
||||
public const ushort SOLID = 0x0010; // solid file
|
||||
public const ushort LOCKED = 0x0020;
|
||||
public const ushort PROTECTED = 0x0040;
|
||||
|
||||
// Main header specific
|
||||
public const ushort V20FORMAT = 0x0100;
|
||||
public const ushort SFX = 0x0200;
|
||||
public const ushort LIMITSFXJR = 0x0400;
|
||||
public const ushort MULTIVOLUME = 0x0800;
|
||||
public const ushort ADVERT = 0x1000;
|
||||
public const ushort RECOVERY = 0x2000;
|
||||
public const ushort LOCKED_MAIN = 0x4000;
|
||||
public const ushort SOLID_MAIN = 0x8000;
|
||||
|
||||
// File header specific (same bits, different meaning)
|
||||
public const ushort NTSECURITY = 0x0400;
|
||||
public const ushort CONTINUED_PREV = 0x1000;
|
||||
public const ushort CONTINUED_NEXT = 0x2000;
|
||||
public const ushort FILE_ENCRYPTED = 0x4000; // file encrypted (file header)
|
||||
}
|
||||
}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user