Compare commits


274 Commits
2.5.0 ... 2.6.0

Author SHA1 Message Date
Matt Nadareski
0fcedfafbe Bump version to 2.6 2023-01-06 13:04:52 -08:00
Matt Nadareski
d075ce4ebd Relicense to MIT
I am relicensing this code with the permission of all code contributors to this port of the code. All code that this is based on is unaffected by this.
2023-01-06 11:43:50 -08:00
Matt Nadareski
3c6fcb4e4d Update to UnshieldSharp 1.6.9 2023-01-06 11:23:29 -08:00
Matt Nadareski
5d37b7947e Use license expression 2023-01-06 10:57:55 -08:00
Matt Nadareski
537b5c83f2 Add commit hash to artifacts 2023-01-06 10:56:49 -08:00
Matt Nadareski
44d2c90aea Update to WiseUnpacker 1.0.4 2023-01-05 23:19:44 -08:00
Matt Nadareski
af0c984b08 Skip trying to parse null debug data 2023-01-05 22:42:54 -08:00
Matt Nadareski
e9f01c7a10 Make ImpulseReactor check safer for null values 2023-01-05 22:30:56 -08:00
TheRogueArchivist
0c468e3489 Fix typo in SafeDisc detection (#224)
* Fix typo in SafeDisc detection.
2023-01-05 20:34:05 -08:00
TheRogueArchivist
aff43b7625 Add encrypted Link Data Security file detection (#223)
* Add encrypted Link Data Security file (LDSCRYPT) detection.

* Update CD-Cops notes.
2023-01-05 14:55:21 -08:00
Matt Nadareski
dc53ebd378 Try outputting directly to the correct folder 2023-01-05 13:15:06 -08:00
Matt Nadareski
7a7518d92a Builders not Builder 2023-01-05 13:13:22 -08:00
Matt Nadareski
ee182b8ea7 Remove old Nuget remnants, fix relative path 2023-01-05 13:07:45 -08:00
Matt Nadareski
c1bf48480c Avoid moves, reference directly 2023-01-05 13:02:25 -08:00
Matt Nadareski
b5bb88553c Try quotes again, just differently 2023-01-05 12:57:12 -08:00
Matt Nadareski
9b9fc09bdf Add trailing slash for mv 2023-01-05 12:00:46 -08:00
Matt Nadareski
5709c88232 Disable Nuget, rely on dotnet instead 2023-01-05 12:00:13 -08:00
Matt Nadareski
522c9fba08 Try using different mv command 2023-01-05 11:55:14 -08:00
Matt Nadareski
3a7b15f231 Add Nuget packages to auto-build 2023-01-05 11:43:18 -08:00
TheRogueArchivist
cb2c96ef7d Add basic HyperTech CrackProof detection (#222)
* Add basic HyperTech CrackProof detection

* Add super basic, incomplete HyperTech CrackProof detection.

* Address PR comments
2023-01-05 11:24:40 -08:00
Matt Nadareski
f89f691ee3 Archive scanning is not content scanning 2023-01-05 11:00:29 -08:00
Matt Nadareski
a0f59b774a Enable content scanning flag in Test 2023-01-05 10:54:34 -08:00
Matt Nadareski
65adf2109d Add content scanning flag in Scanner/Test 2023-01-05 10:53:06 -08:00
Matt Nadareski
92da1695bf Add path scanning flag to Test 2023-01-05 10:49:01 -08:00
Matt Nadareski
e46011beff Add path scanning flag in Scanner 2023-01-05 10:46:13 -08:00
Matt Nadareski
e733dea80e CD-Cops path checks are case-sensitive 2023-01-05 10:38:11 -08:00
Matt Nadareski
6a27161322 Add WiseInst for old NE Wise 2023-01-04 23:53:46 -08:00
Matt Nadareski
6bb4193167 Hook up NE Wise extraction thru WiseUnpacker again 2023-01-04 23:31:52 -08:00
Matt Nadareski
52c34250ff Add NE Wise note for later 2023-01-04 23:13:45 -08:00
Matt Nadareski
cd72cccbaa Fix incorrect printer output 2023-01-04 23:00:00 -08:00
Matt Nadareski
ad9689440a Use NE format property too (nw) 2023-01-04 22:53:52 -08:00
Matt Nadareski
dd193352e2 Remove unused directory creation 2023-01-04 22:44:14 -08:00
Matt Nadareski
a376041be4 Separate out Wise PE extraction 2023-01-04 22:43:11 -08:00
Matt Nadareski
7d14eb35ba Support PKZIP-compressed Wise installer data 2023-01-04 22:37:44 -08:00
Matt Nadareski
fdad3b0c87 Print first 16 bytes of unknown resources 2023-01-04 20:29:41 -08:00
Matt Nadareski
8ebaa59b5f Add precursor position check for segment validity 2023-01-04 20:24:31 -08:00
Matt Nadareski
6037be404c Fix bad LaserLok check 2023-01-04 20:22:20 -08:00
Matt Nadareski
1ead65126f WIP bzip2 code 2023-01-04 20:19:15 -08:00
Matt Nadareski
cfa1bc8875 Port ADPCM compression from stormlib 2023-01-04 11:03:03 -08:00
Matt Nadareski
cfbac7a9ab Ensure required field marked as such 2023-01-04 10:27:37 -08:00
Matt Nadareski
6f62777033 Split files for clarity 2023-01-04 10:23:03 -08:00
Matt Nadareski
0a523d74d5 x, not e 2023-01-04 10:13:36 -08:00
Matt Nadareski
907d0eb724 Enable experimental extraction feature 2023-01-04 10:10:42 -08:00
Matt Nadareski
edcf20885c Extraction is possible 2023-01-04 10:05:21 -08:00
Matt Nadareski
bb838c5b1f Fix error for small executables 2023-01-03 23:43:34 -08:00
Matt Nadareski
fa90618200 Remove one more note in README 2023-01-03 23:14:24 -08:00
Matt Nadareski
b421d8f818 Order of operations is important 2023-01-03 23:01:43 -08:00
Matt Nadareski
c1151b7e93 Minor cleanup and bugfixing 2023-01-03 22:32:22 -08:00
Matt Nadareski
b1d6b1be9c Fix errantly deleted line in csproj 2023-01-03 22:24:41 -08:00
Matt Nadareski
cb45405e09 Remove remnants of Dtf and LibMSPackSharp 2023-01-03 22:21:29 -08:00
Matt Nadareski
6ef9f2856a Remove Dtf and LibMSPackSharp 2023-01-03 22:15:53 -08:00
Matt Nadareski
c14300ffbb Port MS-ZIP for MS-CAB 2023-01-03 22:11:57 -08:00
Matt Nadareski
42b4c40d87 Port LZX init and decompression 2023-01-03 19:00:21 -08:00
Matt Nadareski
1c78dac79f Port LZX read lengths 2023-01-03 16:11:30 -08:00
Matt Nadareski
bb6a045dd3 Port LZX macros 2023-01-03 16:08:50 -08:00
Matt Nadareski
b273d8314a Migrate some easy LZX stuff 2023-01-03 14:46:30 -08:00
Matt Nadareski
015b895177 Start trying to get Quantum archives to extract 2023-01-03 13:53:44 -08:00
Matt Nadareski
fce9ce4eb4 Handle unknown data better 2023-01-03 10:59:41 -08:00
Matt Nadareski
e6463adb65 Add Quantum to README 2023-01-03 10:46:09 -08:00
Matt Nadareski
b07fd29753 Minor tweaks from issues found during extraction 2023-01-03 10:42:57 -08:00
Matt Nadareski
2416a035c7 Add Quantum archive models/builder/wrapper 2023-01-03 09:28:16 -08:00
Matt Nadareski
c2c125fd29 Add extract all to BFPK 2023-01-03 09:19:35 -08:00
Matt Nadareski
580bf0494d Fix writing cabinet file data 2023-01-03 00:19:03 -08:00
Matt Nadareski
eefc52d1dd Be safer with uneven lengths 2023-01-03 00:14:42 -08:00
Matt Nadareski
c28de855e2 Take slightly more academic approach 2023-01-03 00:12:31 -08:00
Matt Nadareski
5e2d7505da Consistency tweaks (nw) 2023-01-02 23:54:40 -08:00
Matt Nadareski
f550c96541 Take some cues from libmspack 2023-01-02 23:35:09 -08:00
Matt Nadareski
d222eec1b1 I want to get off the Quantum ride 2023-01-02 22:03:36 -08:00
Matt Nadareski
be129262da Next level is a set of values, not a single one 2023-01-02 15:04:00 -08:00
Matt Nadareski
87080c906d Port more of the easy stuff over 2023-01-02 15:01:24 -08:00
Matt Nadareski
cdeaf09ed6 Start doing a better job with MSZIP 2023-01-02 11:19:06 -08:00
Matt Nadareski
b17f8dac7a Start fixing Quantum with safeguards, hook up 2023-01-02 10:17:27 -08:00
Matt Nadareski
ec758e5c18 Finalize Quantum port to C# (pass 1) 2023-01-02 00:03:03 -08:00
Matt Nadareski
350f9630df Convert macros to methods, mostly 2023-01-01 23:45:13 -08:00
Matt Nadareski
557c760197 Move Quantum macro notes to Compression 2023-01-01 22:06:09 -08:00
Matt Nadareski
895f40414d Some Quantum things to models / compression 2023-01-01 22:02:54 -08:00
Matt Nadareski
96fbb38b1c Bring in the rest of CAB code commented 2023-01-01 21:34:56 -08:00
Matt Nadareski
650d01d7be Update README with SharpZipLib 2022-12-31 11:44:49 -08:00
Matt Nadareski
3977342a67 Fix empty-not-null data 2022-12-31 11:39:35 -08:00
Matt Nadareski
8a61f01e1b Handle last blocks more efficiently 2022-12-31 11:35:22 -08:00
Matt Nadareski
75a4371f36 Handle MS-CAB compression type masking better 2022-12-31 11:21:29 -08:00
Matt Nadareski
86ee4786a0 Add mostly-working MS-ZIP based on zlib 2022-12-31 10:57:49 -08:00
Matt Nadareski
ef710463ae Port more CAB code (nw, commented out) 2022-12-30 23:07:41 -08:00
Matt Nadareski
e6b153bcbd Add last models from headers 2022-12-30 21:25:46 -08:00
Matt Nadareski
9c9eb8ca7b Align to correct boundary, look for executables 2022-12-30 14:00:10 -08:00
Matt Nadareski
295f438ff1 Forgot the after_build tag 2022-12-30 11:15:06 -08:00
TheRogueArchivist
cc1ad3e690 Update Cenega ProtectDVD (#221)
* Add new export check for Cenega ProtectDVD.

* Update notes and add link to DRML.
2022-12-30 09:54:44 -08:00
Matt Nadareski
3ebb3822dd Revert changes to launch.json 2022-12-30 09:38:11 -08:00
Matt Nadareski
6e22bd4c8d Fix hidden resource parsing 2022-12-30 09:35:35 -08:00
Matt Nadareski
1027956892 Lock section names, scan for hidden resources 2022-12-30 09:09:42 -08:00
Matt Nadareski
a46d52ddbb Shout out DRML in README 2022-12-29 23:34:00 -08:00
Matt Nadareski
fd5e78eb8a Open the path to future .NET versions 2022-12-29 23:01:28 -08:00
Matt Nadareski
8a326cbb91 Fix build zipfile names 2022-12-29 22:15:55 -08:00
Matt Nadareski
09a7893021 Try to get better at artifacts 2022-12-29 22:01:07 -08:00
Matt Nadareski
9c10af58d8 Remove AnyCPU from configuration 2022-12-29 21:27:45 -08:00
Matt Nadareski
81fbe251ba Specify all files in publish directories 2022-12-29 21:22:08 -08:00
Matt Nadareski
5203f0ea57 Attempt to fix AppVeyor build issues 2022-12-29 21:13:42 -08:00
Matt Nadareski
51644c2178 Update to latest LibMSPackSharp 2022-12-29 21:11:12 -08:00
Matt Nadareski
f326a20019 Runtime identifiers, explicit .NET Framework 4.8 2022-12-29 21:06:14 -08:00
Matt Nadareski
daea4ea460 Add CrypKey PE content checks 2022-12-28 23:28:38 -08:00
Matt Nadareski
c9c14bcebf Add note of where to find implementation 2022-12-28 23:07:23 -08:00
Matt Nadareski
9dc21c01f1 Add Microsoft LZ-compressed files support 2022-12-28 22:54:56 -08:00
Matt Nadareski
555dbd592c Re-disable MS-CAB extraction on .NET 6.0 2022-12-28 22:31:46 -08:00
Matt Nadareski
5e027b75b2 Update README with CExe extraction support 2022-12-28 22:21:18 -08:00
Matt Nadareski
01a504dab7 Remove unused using 2022-12-28 22:20:32 -08:00
Matt Nadareski
70eeaaac28 Replace direct LZ ports with cleaned versions 2022-12-28 22:18:23 -08:00
Matt Nadareski
0397d529bb Add LZ decompression to CExe 2022-12-28 17:24:55 -08:00
Matt Nadareski
25bed747f2 First ported implementation of LZ 2022-12-28 17:24:30 -08:00
Matt Nadareski
d17b90e782 Fix constant strings 2022-12-28 17:15:42 -08:00
Matt Nadareski
7868e22a95 Fix constant strings 2022-12-28 17:10:42 -08:00
Matt Nadareski
81f6e2057e Add note for future work 2022-12-28 15:37:09 -08:00
Matt Nadareski
1c3e37ee2b Add and use XZP constants 2022-12-28 15:35:54 -08:00
Matt Nadareski
27756db621 Add and use WAD constants 2022-12-28 15:30:22 -08:00
Matt Nadareski
7f71b04ef7 Add and use VPK constants 2022-12-28 15:27:10 -08:00
Matt Nadareski
c48522e6c0 Add and use VBSP constants 2022-12-28 15:22:16 -08:00
Matt Nadareski
fbf629dd8b Add and use SGA constants 2022-12-28 15:17:34 -08:00
Matt Nadareski
404b2889ff Add SFFS constants 2022-12-28 15:13:38 -08:00
Matt Nadareski
5ccb9d16a8 Add and use PE constants 2022-12-28 15:09:31 -08:00
Matt Nadareski
66b562f24b Add and use PAK constants 2022-12-28 15:03:41 -08:00
Matt Nadareski
ef25b88717 Get CExe prepped for LZ 2022-12-28 15:01:59 -08:00
Matt Nadareski
f22f7273a9 Add and use MZ constants 2022-12-28 14:50:48 -08:00
Matt Nadareski
ea9902c946 Add and use MS-CAB constants 2022-12-28 14:40:40 -08:00
Matt Nadareski
051c38c6df Add and use MoPaQ constants 2022-12-28 14:39:22 -08:00
Matt Nadareski
6b832026b4 Add and use NE constants 2022-12-28 14:26:12 -08:00
Matt Nadareski
81d7151f8f Create new Compression library (nw) 2022-12-28 14:18:50 -08:00
Matt Nadareski
283672e909 Add and use NCF constants 2022-12-28 14:02:09 -08:00
Matt Nadareski
94dfba4b4f Add LZ models 2022-12-28 10:46:33 -08:00
Matt Nadareski
2b66efd11b Add LE/LX constants 2022-12-28 10:31:28 -08:00
Matt Nadareski
14fb3f5758 Add and use IS-CAB constants 2022-12-28 10:21:19 -08:00
Matt Nadareski
9354f0f092 Add and use GCF constants 2022-12-28 10:14:00 -08:00
Matt Nadareski
c3636a0743 Add and use BSP constants 2022-12-28 10:04:10 -08:00
Matt Nadareski
e07b66812c Add and use BFPK constants 2022-12-28 09:57:22 -08:00
Matt Nadareski
c1d231db60 Create Compression models subfolder 2022-12-28 09:47:25 -08:00
TheRogueArchivist
757ab1f228 Fix FreeLock False Positive (#219)
* Fix FreeLock False Positive
2022-12-27 23:30:46 -08:00
TheRogueArchivist
6e42e7fb6b Remove dummy Roxxe file (#217)
* Remove dummy Roxxe file. The notes have been moved to DRML.
2022-12-27 23:13:53 -08:00
TheRogueArchivist
92f88efd4e Fix false positives in Bitpool and LaserLok (#218)
* Fix false positives in Bitpool and LaserLok.
2022-12-27 23:13:47 -08:00
Matt Nadareski
7ba2194d97 Add CExe extraction (partial) 2022-12-27 23:12:52 -08:00
Matt Nadareski
a5f2e2f5c8 Use slightly different zlib port 2022-12-27 22:25:49 -08:00
Matt Nadareski
8658c24ef0 Be more overzealous when locking 2022-12-27 22:25:16 -08:00
Matt Nadareski
0cded076e4 Fix invocation of scannable classes 2022-12-27 22:11:01 -08:00
Matt Nadareski
135c0b6d38 Use slightly different zlib port 2022-12-27 21:47:56 -08:00
Matt Nadareski
769fe42a7a Add TAR skeleton models, for kicks 2022-12-27 21:46:42 -08:00
Matt Nadareski
ab6fcd73e0 Seal as many of the models as possible 2022-12-27 17:12:55 -08:00
Matt Nadareski
3278420d72 Create placeholder for IS-CAB info printing 2022-12-27 16:56:00 -08:00
Matt Nadareski
98395387a7 Add IS-CAB models and builder 2022-12-27 16:54:06 -08:00
TheRogueArchivist
180a097213 Remove dummy Alcatraz file (#215)
* Remove dummy Alcatraz file. The notes have been moved to DRML.
2022-12-27 11:00:17 -08:00
TheRogueArchivist
18ac00080a Move Bitpool comments into DRML (#213)
* Move Bitpool comments into DRML, tidying up BOS.
2022-12-27 11:00:00 -08:00
Matt Nadareski
4da713702a Add header matchers for Wise 2022-12-27 10:53:28 -08:00
Matt Nadareski
68be17de66 Fix SGA builder and wrapper 2022-12-27 10:19:11 -08:00
Matt Nadareski
18cdf9d7ed Use new SGA extraction 2022-12-27 01:07:46 -08:00
Matt Nadareski
c389ea1e49 Add SGA extraction 2022-12-27 00:55:24 -08:00
Matt Nadareski
1f65b0352d Add SGA wrapper 2022-12-26 23:22:03 -08:00
Matt Nadareski
0e63b6638c Do some more work on MSZIP (nw) 2022-12-26 21:39:52 -08:00
Matt Nadareski
fcda1f119b Split MS-CAB wrapper 2022-12-26 15:04:17 -08:00
Matt Nadareski
bb130849ee Fix build with missed changes 2022-12-26 14:52:12 -08:00
Matt Nadareski
7b209eec6c Update to latest LibMSPackSharp 2022-12-26 14:46:17 -08:00
Matt Nadareski
9e7a84d2d6 Remove generic "Valve" 2022-12-26 14:45:49 -08:00
Matt Nadareski
baf43ea307 Remove remnants of HLLibSharp 2022-12-26 14:42:41 -08:00
Matt Nadareski
f524f0da3e Remove HLLibSharp as submodule 2022-12-26 14:37:59 -08:00
Matt Nadareski
49781c47a9 Add executables to support table 2022-12-26 13:01:37 -08:00
Matt Nadareski
aa11ce807a Better container support matrix 2022-12-26 12:58:03 -08:00
Matt Nadareski
702115c55a Make interfaces public 2022-12-26 12:36:09 -08:00
Matt Nadareski
17cb1bf9b0 Add XZP wrapper, extraction, and use it 2022-12-26 12:33:58 -08:00
Matt Nadareski
590c4e0a23 Make extension checks better 2022-12-26 11:43:04 -08:00
Matt Nadareski
fd6196a880 Add WAD wrapper, extraction, and use it 2022-12-26 11:34:17 -08:00
Matt Nadareski
2875f7ff7a Add VBSP wrapper, extraction, and use it 2022-12-26 10:58:16 -08:00
Matt Nadareski
50fe127a8d Add PAK wrapper, extraction, and use it 2022-12-26 10:26:26 -08:00
Matt Nadareski
94ebe5b707 Add NCF wrapper 2022-12-25 23:33:52 -08:00
Matt Nadareski
dc3914e976 Use new GCF extraction code 2022-12-25 22:55:48 -08:00
Matt Nadareski
ef2f037909 Add GCF extraction 2022-12-25 22:53:01 -08:00
Matt Nadareski
374f286585 Add GCF wrapper 2022-12-25 21:27:06 -08:00
Matt Nadareski
34cd933f78 Reset launch.json 2022-12-24 22:31:21 -08:00
Matt Nadareski
f8d533e592 Use new BSP extraction code 2022-12-24 22:30:29 -08:00
Matt Nadareski
21263cf0fd Add BSP extraction 2022-12-24 22:02:30 -08:00
Matt Nadareski
1a62ac2006 Add BSP wrapper 2022-12-24 20:15:58 -08:00
Matt Nadareski
8f3d4d5fb2 Use new VPK extraction code 2022-12-24 15:31:38 -08:00
Matt Nadareski
81902455ff Fix VPK extraction 2022-12-24 15:25:56 -08:00
Matt Nadareski
fbb33d9ef8 Add VPK to info 2022-12-24 14:24:34 -08:00
Matt Nadareski
b025c7a7fa Add VPK wrapper (nw) 2022-12-24 13:49:03 -08:00
Matt Nadareski
d8aec5aa97 Migrate recent HLLib work to BOS
It's better suited for the model of BOS and not an update to HLLibSharp
2022-12-24 12:57:10 -08:00
Matt Nadareski
3a862f343c Add headers and extensions for Valve 2022-12-23 23:27:14 -08:00
Matt Nadareski
69724cfb1c Sync to newest HLLibSharp 2022-12-23 23:13:45 -08:00
Matt Nadareski
f75cdea678 Sync to newest HLLibSharp 2022-12-23 23:04:40 -08:00
Matt Nadareski
32e650eff5 Update README comment 2022-12-23 15:46:42 -08:00
Matt Nadareski
ab022f9049 Sync to newest HLLibSharp 2022-12-23 15:26:48 -08:00
Matt Nadareski
7962e148fa Sync to newest HLLibSharp 2022-12-23 14:29:34 -08:00
Matt Nadareski
efdf3a0691 Sync to newest HLLibSharp 2022-12-23 13:13:57 -08:00
Matt Nadareski
000ab5e856 Sync to newest HLLibSharp 2022-12-23 12:36:48 -08:00
Matt Nadareski
4873133c92 Sync to newest HLLibSharp 2022-12-23 11:10:55 -08:00
Matt Nadareski
360bbef43a Remove redundant note in README 2022-12-22 22:31:45 -08:00
Matt Nadareski
9a21f4987d Sync to newest HLLibSharp 2022-12-22 22:24:13 -08:00
Matt Nadareski
63948767ef Use more granular file opening 2022-12-22 22:03:32 -08:00
Matt Nadareski
e2098f6f71 Disable StormLibSharp for .NET 6.0 2022-12-22 21:58:26 -08:00
Matt Nadareski
609c30da38 Sync to newest HLLibSharp 2022-12-22 21:51:33 -08:00
Matt Nadareski
0896842268 Builder -> Builders 2022-12-22 16:02:10 -08:00
Matt Nadareski
ec9506b9eb Sync to newest HLLibSharp 2022-12-22 15:53:50 -08:00
Matt Nadareski
526526975c Sync to newest HLLibSharp 2022-12-22 15:18:51 -08:00
Matt Nadareski
098734c471 Remove outdated note 2022-12-22 13:06:18 -08:00
Matt Nadareski
47df62534e Add back ActiveMARK entry point checks 2022-12-20 14:40:43 -08:00
Matt Nadareski
7295001892 Read entry point data in safe way 2022-12-20 14:19:48 -08:00
Matt Nadareski
7c820b7fd2 Start re-adding entry point 2022-12-20 13:03:25 -08:00
Matt Nadareski
36429cc1e9 Fix build 2022-12-20 12:21:34 -08:00
Matt Nadareski
a1c22ca9da Make a couple things consistent 2022-12-20 11:52:50 -08:00
Matt Nadareski
0c37932631 Cleanup and notes for 3P-Lock 2022-12-20 11:26:22 -08:00
Matt Nadareski
dbf1f6dcca Integrate changes from TheRogueArchivist 2022-12-19 23:53:52 -08:00
Matt Nadareski
7bb26c0faf Add PE checks to ByteShield 2022-12-19 23:49:34 -08:00
Matt Nadareski
31a0b55556 Add new info to developer guide 2022-12-19 23:28:12 -08:00
Matt Nadareski
64cc4785ca Add PE checks to SoftLock 2022-12-19 21:39:24 -08:00
Matt Nadareski
28391de50c Confirm SoftLock path checks 2022-12-19 20:49:10 -08:00
Matt Nadareski
b2ed69ab78 Add 7-zip SFX detection 2022-12-18 14:18:35 -08:00
Matt Nadareski
2f08940927 Simplify Gefest checks 2022-12-17 22:52:35 -08:00
Matt Nadareski
70928227e4 Fix launch.json 2022-12-17 22:02:12 -08:00
Matt Nadareski
546bd70418 Start fixing MSZIP decoding (nw) 2022-12-17 22:01:00 -08:00
Matt Nadareski
f2521a0110 Write MS-CAB file extraction (nw) 2022-12-16 23:11:06 -08:00
Matt Nadareski
02b83513a1 Hook up MSZIP decompression (nw) 2022-12-16 22:58:07 -08:00
Matt Nadareski
f26b2ff61b Migrate MSZIP to wrapper, where possible 2022-12-16 22:41:36 -08:00
Matt Nadareski
4d26535d07 Add BitStream type and add BitArray extensions 2022-12-16 21:59:46 -08:00
Matt Nadareski
01a365033e Migrate some Quantum stuff to models 2022-12-16 10:19:32 -08:00
Matt Nadareski
318a89a4bc Move some LZX things to models 2022-12-16 09:48:52 -08:00
Matt Nadareski
b20f22fb92 Subfolder it 2022-12-16 00:19:18 -08:00
Matt Nadareski
fedf76e534 Migrate some MSZIP pieces to Models 2022-12-15 23:51:12 -08:00
Matt Nadareski
e0e16292eb ASN.1 and OID to its own library 2022-12-15 22:07:12 -08:00
Matt Nadareski
b3c0e48bdd Address some MPQ issues 2022-12-15 16:50:24 -08:00
Matt Nadareski
aded5ee03a Stream safety and better streams 2022-12-15 14:20:27 -08:00
Matt Nadareski
16e71c910e Use MemoryStream in builders 2022-12-15 13:37:34 -08:00
Matt Nadareski
bbe234b459 Use MemoryStream in wrappers 2022-12-15 12:41:08 -08:00
Matt Nadareski
715b9eb156 Fix MS-CAB info 2022-12-15 12:20:06 -08:00
Matt Nadareski
4cc441afcf Get rid of code duplication 2022-12-15 00:13:24 -08:00
Matt Nadareski
f79cd759bd Add SFFS models, no encryption 2022-12-14 23:16:37 -08:00
Matt Nadareski
1b232e4405 Fix wrapper printing, add to info 2022-12-14 23:06:09 -08:00
Matt Nadareski
6d43afb258 Use wrapper in BFPK scans 2022-12-14 23:01:06 -08:00
Matt Nadareski
a47c778b0e Add BFPK wrapper 2022-12-14 22:58:18 -08:00
Matt Nadareski
ddb82842bc Add BFPK builder 2022-12-14 22:41:17 -08:00
Matt Nadareski
199914b19f Remove BinaryReader from BFPK 2022-12-14 22:28:35 -08:00
Matt Nadareski
9fadd84597 Add unused BFPK models 2022-12-14 22:25:35 -08:00
Matt Nadareski
95dd670c7c Add format note to SFFS, fix magic 2022-12-14 22:15:43 -08:00
Matt Nadareski
adc9def0c9 Slight MoPaQ builder cleanup 2022-12-14 22:07:11 -08:00
Matt Nadareski
8dcc9d9b0e Add BET/HET parsing to MoPaQ (nw) 2022-12-14 22:06:31 -08:00
Matt Nadareski
b5177b16ea Add hi-block table parsing to MoPaQ (nw) 2022-12-14 21:59:48 -08:00
Matt Nadareski
f9b4693aae Add block table parsing to MoPaQ (nw) 2022-12-14 21:48:22 -08:00
Matt Nadareski
f2a479e35c Update SafeDisc with better finding 2022-12-14 21:31:51 -08:00
Matt Nadareski
1f40c2e052 Fix locking exception 2022-12-14 21:30:53 -08:00
Matt Nadareski
b5c8d05814 Add CodeView debug parsing/finding 2022-12-14 21:07:02 -08:00
Matt Nadareski
f99634bc08 Add generic debug check to SafeDisc 2022-12-14 20:57:26 -08:00
Matt Nadareski
ab88e2f553 Add NB10 debug data type 2022-12-14 20:56:13 -08:00
Matt Nadareski
5465abe1ac Add RSDS debug data type 2022-12-14 20:47:18 -08:00
Matt Nadareski
b0df7a8f3b Add debug data to PE wrapper 2022-12-14 20:46:24 -08:00
Matt Nadareski
0d4fab100d Add PE table data/string caching 2022-12-14 17:24:14 -08:00
Matt Nadareski
8c5e10fd88 First attempt at MoPaQ hash table parsing 2022-12-14 17:03:34 -08:00
Matt Nadareski
e8aef1596b Merge branch 'master' of https://github.com/mnadareski/BurnOutSharp 2022-12-14 16:48:52 -08:00
Matt Nadareski
386c86f04f Add placeholders for parsing in MoPaQ 2022-12-14 16:47:45 -08:00
TheRogueArchivist
3f5a66f170 Add support for Gefest Protection System (#185)
* Add detection and notes for Gefest Protection System.

* Add Gefest Protection System to README.
2022-12-14 16:37:06 -08:00
TheRogueArchivist
a961d9534c Add SafeDisc "SD0XXX.dll" checks (#184)
* Add detection and notes for SafeDisc "SD0XXX.dll" files.
2022-12-14 16:35:54 -08:00
TheRogueArchivist
d1919c18f5 Add nProtect DRM (#183)
* Add nProtect DRM

* Add detection and notes for nProtect GameGuard and nProtect KeyCrypt.

* Add nProtect DRM to README.

* Fix missing "|" character

* Fix missing "|" character.
2022-12-14 16:35:01 -08:00
Matt Nadareski
afa8b24ba9 Add MoPaQ printing skeleton 2022-12-14 16:33:26 -08:00
Matt Nadareski
b793b74b32 Add MoPaQ builder (nw) 2022-12-14 16:29:07 -08:00
Matt Nadareski
65499d1f46 Add MoPaQ models 2022-12-14 15:43:13 -08:00
Matt Nadareski
5f387cdb74 Migrate WIP MS-CAB to model/builder/wrapper 2022-12-14 15:23:54 -08:00
Matt Nadareski
ed2e88c781 Start filling out LZX 2022-12-14 13:57:54 -08:00
Matt Nadareski
1cb3157110 Add more notes, including Quantum 2022-12-14 13:17:29 -08:00
TheRogueArchivist
a480b53787 CD-Cops is a mess why don't we have more samples (#182)
* Confirm CD-Cops check.

* Add a few notes for CD-Cops.
2022-12-14 12:12:12 -08:00
TheRogueArchivist
405c895352 Add support for CD-Guard (#181)
* why is there so much DRM I am going to cry

* Add support for detecting CD-Guard, as well as notes.

* Add CD-Guard to README.

* Address PR comments

* Ignore case in import/export checks.
2022-12-14 12:11:23 -08:00
Matt Nadareski
53dc251a0c Continue with deflate 2022-12-14 12:10:59 -08:00
Matt Nadareski
d715072cbc Start writing Inflate implementation 2022-12-14 10:55:56 -08:00
Matt Nadareski
aaee56f44e Start adding MSZIP notes 2022-12-14 00:05:49 -08:00
Matt Nadareski
27ceb4ed48 Streamline MS-CAB reading, add to info printing 2022-12-13 23:06:49 -08:00
Matt Nadareski
2d51bd8f37 Implement MS-CAB checksum routine 2022-12-13 22:11:19 -08:00
Matt Nadareski
645a366dc5 Split MS-CAB into subfiles, LibMSPackSharp for .NET 6 2022-12-13 21:51:24 -08:00
Matt Nadareski
756a74eda6 Disable printing by default again 2022-12-13 21:13:20 -08:00
Matt Nadareski
8052ee2afb Some PE resource handling cleanup 2022-12-13 21:05:52 -08:00
Matt Nadareski
6171c0defd Fix SFFS not being scanned 2022-12-13 11:55:12 -08:00
TheRogueArchivist
56c27d0b8f Why is there so much StarForce, geez (#180)
* Begin work on overhauling StarForce detection, and to add notes.

* Attempt to add SFFS file detection.

* Fix minor TAGES issue.
2022-12-13 11:42:55 -08:00
Matt Nadareski
9c173fd3a1 Update extension method for AddD section 2022-12-12 21:40:29 -08:00
Matt Nadareski
fa3ccf9953 Add more SecuROM AddD section notes 2022-12-12 21:25:00 -08:00
456 changed files with 32308 additions and 10091 deletions

.gitmodules (9 lines changed)

@@ -1,12 +1,3 @@
 [submodule "BurnOutSharp/External/stormlibsharp"]
 path = BurnOutSharp/External/stormlibsharp
 url = https://github.com/robpaveza/stormlibsharp.git
-[submodule "HLLibSharp"]
-path = HLLibSharp
-url = https://github.com/mnadareski/HLLibSharp
-[submodule "LibMSPackSharp"]
-path = LibMSPackSharp
-url = https://github.com/mnadareski/LibMSPackSharp.git
-[submodule "Dtf"]
-path = Dtf
-url = https://github.com/wixtoolset/Dtf.git

.vscode/launch.json (13 lines changed)

@@ -17,19 +17,6 @@
"console": "internalConsole",
"stopAtEntry": false
},
{
"name": ".NET Core Launch (ExecutableTest)",
"type": "coreclr",
"request": "launch",
"preLaunchTask": "build",
// If you have changed target frameworks, make sure to update the program path.
"program": "${workspaceFolder}/ExecutableTest/bin/Debug/net6.0/ExecutableTest.dll",
"args": [],
"cwd": "${workspaceFolder}/Test",
// For more information about the 'console' field, see https://aka.ms/VSCode-CS-LaunchJson-Console
"console": "internalConsole",
"stopAtEntry": false
},
{
"name": ".NET Core Attach",
"type": "coreclr",

New file: ASN.1 parser (31 lines)

@@ -0,0 +1,31 @@
using System.Collections.Generic;
namespace BurnOutSharp.ASN1
{
/// <summary>
/// ASN.1 Parser
/// </summary>
public static class AbstractSyntaxNotationOne
{
/// <summary>
/// Parse a byte array into a DER-encoded ASN.1 structure
/// </summary>
/// <param name="data">Byte array representing the data</param>
/// <param name="pointer">Current pointer into the data</param>
/// <returns>List of top-level TypeLengthValue objects parsed from the data</returns>
public static List<TypeLengthValue> Parse(byte[] data, int pointer)
{
// Create the output list to return
var topLevelValues = new List<TypeLengthValue>();
// Loop through the data and return all top-level values
while (pointer < data.Length)
{
var topLevelValue = new TypeLengthValue(data, ref pointer);
topLevelValues.Add(topLevelValue);
}
return topLevelValues;
}
}
}
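A minimal usage sketch for the new parser (not part of the diff; the input file and the printed Type and Length members are assumptions based on the TypeLengthValue class shown later in this compare):

using System;
using System.IO;
using BurnOutSharp.ASN1;

class ParseExample
{
    static void Main(string[] args)
    {
        // Hypothetical input: any DER-encoded blob, e.g. an extracted certificate table
        byte[] data = File.ReadAllBytes(args[0]);

        // Each returned entry is one top-level type/length/value triplet
        var topLevel = AbstractSyntaxNotationOne.Parse(data, pointer: 0);
        foreach (var tlv in topLevel)
            Console.WriteLine($"{tlv.Type}, length {tlv.Length}");
    }
}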

New file: BurnOutSharp.ASN1 project file (27 lines)

@@ -0,0 +1,27 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFrameworks>net48;net6.0</TargetFrameworks>
<RuntimeIdentifiers>win-x86;win-x64;linux-x64;osx-x64</RuntimeIdentifiers>
<Title>BurnOutSharp.ASN1</Title>
<AssemblyName>BurnOutSharp.ASN1</AssemblyName>
<Authors>Matt Nadareski</Authors>
<Product>BurnOutSharp</Product>
<Copyright>Copyright (c)2022 Matt Nadareski</Copyright>
<RepositoryUrl>https://github.com/mnadareski/BurnOutSharp</RepositoryUrl>
<Version>2.6</Version>
<AssemblyVersion>2.6</AssemblyVersion>
<FileVersion>2.6</FileVersion>
<IncludeSource>true</IncludeSource>
<IncludeSymbols>true</IncludeSymbols>
</PropertyGroup>
<PropertyGroup>
<AllowUnsafeBlocks>true</AllowUnsafeBlocks>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\BurnOutSharp.Utilities\BurnOutSharp.Utilities.csproj" />
</ItemGroup>
</Project>

New file: ASN1Type enum (55 lines)

@@ -0,0 +1,55 @@
using System;
namespace BurnOutSharp.ASN1
{
/// <summary>
/// ASN.1 type indicators
/// </summary>
[Flags]
public enum ASN1Type : byte
{
#region Modifiers
V_ASN1_UNIVERSAL = 0x00,
V_ASN1_PRIMITIVE_TAG = 0x1F,
V_ASN1_CONSTRUCTED = 0x20,
V_ASN1_APPLICATION = 0x40,
V_ASN1_CONTEXT_SPECIFIC = 0x80,
V_ASN1_PRIVATE = 0xC0,
#endregion
#region Types
V_ASN1_EOC = 0x00,
V_ASN1_BOOLEAN = 0x01,
V_ASN1_INTEGER = 0x02,
V_ASN1_BIT_STRING = 0x03,
V_ASN1_OCTET_STRING = 0x04,
V_ASN1_NULL = 0x05,
V_ASN1_OBJECT = 0x06,
V_ASN1_OBJECT_DESCRIPTOR = 0x07,
V_ASN1_EXTERNAL = 0x08,
V_ASN1_REAL = 0x09,
V_ASN1_ENUMERATED = 0x0A,
V_ASN1_UTF8STRING = 0x0C,
V_ASN1_SEQUENCE = 0x10,
V_ASN1_SET = 0x11,
V_ASN1_NUMERICSTRING = 0x12,
V_ASN1_PRINTABLESTRING = 0x13,
V_ASN1_T61STRING = 0x14,
V_ASN1_TELETEXSTRING = 0x14,
V_ASN1_VIDEOTEXSTRING = 0x15,
V_ASN1_IA5STRING = 0x16,
V_ASN1_UTCTIME = 0x17,
V_ASN1_GENERALIZEDTIME = 0x18,
V_ASN1_GRAPHICSTRING = 0x19,
V_ASN1_ISO64STRING = 0x1A,
V_ASN1_VISIBLESTRING = 0x1A,
V_ASN1_GENERALSTRING = 0x1B,
V_ASN1_UNIVERSALSTRING = 0x1C,
V_ASN1_BMPSTRING = 0x1E,
#endregion
}
}
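Because the modifier bits share the identifier byte with the tag number, here is a short sketch of how these constants are meant to combine (the 0x30 input byte is just an illustrative DER SEQUENCE):

using System;
using BurnOutSharp.ASN1;

class TagExample
{
    static void Main()
    {
        // 0x30 = constructed SEQUENCE in DER, used here as sample input
        var identifier = (ASN1Type)0x30;

        // The constructed bit is one of the modifier flags...
        bool constructed = identifier.HasFlag(ASN1Type.V_ASN1_CONSTRUCTED);

        // ...and V_ASN1_PRIMITIVE_TAG masks off the low tag-number bits
        var tag = (ASN1Type)((byte)identifier & (byte)ASN1Type.V_ASN1_PRIMITIVE_TAG);

        Console.WriteLine($"{constructed}, {tag}"); // True, V_ASN1_SEQUENCE
    }
}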

Changed file: namespace moved to BurnOutSharp.ASN1

@@ -1,4 +1,4 @@
-namespace BurnOutSharp.Builder
+namespace BurnOutSharp.ASN1
 {
 #pragma warning disable IDE0011

Changed file: namespace moved to BurnOutSharp.ASN1

@@ -1,7 +1,7 @@
 using System.Linq;
 using System.Text;
-namespace BurnOutSharp.Builder
+namespace BurnOutSharp.ASN1
 {
 #pragma warning disable IDE0011

Changed file: namespace moved to BurnOutSharp.ASN1

@@ -1,7 +1,7 @@
 using System.Linq;
 using System.Text;
-namespace BurnOutSharp.Builder
+namespace BurnOutSharp.ASN1
 {
 #pragma warning disable IDE0011

Changed file: OID helpers, namespace moved to BurnOutSharp.ASN1

@@ -1,7 +1,7 @@
 using System;
 using System.Collections.Generic;
-namespace BurnOutSharp.Builder
+namespace BurnOutSharp.ASN1
 {
 /// <summary>
 /// Methods related to Object Identifiers (OID)

Changed file: namespace moved to BurnOutSharp.ASN1

@@ -1,4 +1,4 @@
-namespace BurnOutSharp.Builder
+namespace BurnOutSharp.ASN1
 {
 #pragma warning disable IDE0011

Changed file: ASN.1 enum and parser split into the new files above; ASN1TypeLengthValue renamed to TypeLengthValue

@@ -3,91 +3,14 @@ using System.Collections.Generic;
 using System.Linq;
 using System.Numerics;
 using System.Text;
 using BurnOutSharp.Utilities;
-namespace BurnOutSharp.Builder
+namespace BurnOutSharp.ASN1
 {
-/// <summary>
-/// ASN.1 type indicators
-/// </summary>
-[Flags]
-public enum ASN1Type : byte
-{
-#region Modifiers
-V_ASN1_UNIVERSAL = 0x00,
-V_ASN1_PRIMITIVE_TAG = 0x1F,
-V_ASN1_CONSTRUCTED = 0x20,
-V_ASN1_APPLICATION = 0x40,
-V_ASN1_CONTEXT_SPECIFIC = 0x80,
-V_ASN1_PRIVATE = 0xC0,
-#endregion
-#region Types
-V_ASN1_EOC = 0x00,
-V_ASN1_BOOLEAN = 0x01,
-V_ASN1_INTEGER = 0x02,
-V_ASN1_BIT_STRING = 0x03,
-V_ASN1_OCTET_STRING = 0x04,
-V_ASN1_NULL = 0x05,
-V_ASN1_OBJECT = 0x06,
-V_ASN1_OBJECT_DESCRIPTOR = 0x07,
-V_ASN1_EXTERNAL = 0x08,
-V_ASN1_REAL = 0x09,
-V_ASN1_ENUMERATED = 0x0A,
-V_ASN1_UTF8STRING = 0x0C,
-V_ASN1_SEQUENCE = 0x10,
-V_ASN1_SET = 0x11,
-V_ASN1_NUMERICSTRING = 0x12,
-V_ASN1_PRINTABLESTRING = 0x13,
-V_ASN1_T61STRING = 0x14,
-V_ASN1_TELETEXSTRING = 0x14,
-V_ASN1_VIDEOTEXSTRING = 0x15,
-V_ASN1_IA5STRING = 0x16,
-V_ASN1_UTCTIME = 0x17,
-V_ASN1_GENERALIZEDTIME = 0x18,
-V_ASN1_GRAPHICSTRING = 0x19,
-V_ASN1_ISO64STRING = 0x1A,
-V_ASN1_VISIBLESTRING = 0x1A,
-V_ASN1_GENERALSTRING = 0x1B,
-V_ASN1_UNIVERSALSTRING = 0x1C,
-V_ASN1_BMPSTRING = 0x1E,
-#endregion
-}
-/// <summary>
-/// ASN.1 Parser
-/// </summary>
-public class AbstractSyntaxNotationOne
-{
-/// <summary>
-/// Parse a byte array into a DER-encoded ASN.1 structure
-/// </summary>
-/// <param name="data">Byte array representing the data</param>
-/// <param name="pointer">Current pointer into the data</param>
-/// <returns></returns>
-public static List<ASN1TypeLengthValue> Parse(byte[] data, int pointer)
-{
-// Create the output list to return
-var topLevelValues = new List<ASN1TypeLengthValue>();
-// Loop through the data and return all top-level values
-while (pointer < data.Length)
-{
-var topLevelValue = new ASN1TypeLengthValue(data, ref pointer);
-topLevelValues.Add(topLevelValue);
-}
-return topLevelValues;
-}
-}
 /// <summary>
 /// ASN.1 type/length/value class that all types are based on
 /// </summary>
-public class ASN1TypeLengthValue
+public class TypeLengthValue
 {
 /// <summary>
 /// The ASN.1 type
@@ -109,7 +32,7 @@ namespace BurnOutSharp.Builder
 /// </summary>
 /// <param name="data">Byte array representing data to read</param>
 /// <param name="index">Index within the array to read at</param>
-public ASN1TypeLengthValue(byte[] data, ref int index)
+public TypeLengthValue(byte[] data, ref int index)
 {
 // Get the type and modifiers
 this.Type = (ASN1Type)data[index++];
@@ -124,12 +47,12 @@ namespace BurnOutSharp.Builder
 // Read the value
 if (this.Type.HasFlag(ASN1Type.V_ASN1_CONSTRUCTED))
 {
-var valueList = new List<ASN1TypeLengthValue>();
+var valueList = new List<TypeLengthValue>();
 int currentIndex = index;
 while (index < currentIndex + (int)this.Length)
 {
-valueList.Add(new ASN1TypeLengthValue(data, ref index));
+valueList.Add(new TypeLengthValue(data, ref index));
 }
 this.Value = valueList.ToArray();
@@ -171,7 +94,7 @@ namespace BurnOutSharp.Builder
 // If we have a constructed type
 if (this.Type.HasFlag(ASN1Type.V_ASN1_CONSTRUCTED))
 {
-var valueAsObjectArray = this.Value as ASN1TypeLengthValue[];
+var valueAsObjectArray = this.Value as TypeLengthValue[];
 if (valueAsObjectArray == null)
 {
 formatBuilder.Append(", Value: [INVALID DATA TYPE]");

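After the rename, constructed values carry their children as a nested TypeLengthValue[] in Value, so a consumer can walk the tree recursively. A sketch under that assumption (the Walk helper is hypothetical; Type, Length, and Value are the members shown above):

using System;
using BurnOutSharp.ASN1;

static class TlvWalker
{
    // Hypothetical helper: print one node, then recurse into constructed children
    public static void Walk(TypeLengthValue tlv, int depth = 0)
    {
        Console.WriteLine($"{new string(' ', depth * 2)}{tlv.Type}, length {tlv.Length}");

        // Constructed types store a TypeLengthValue[] in Value
        if (tlv.Type.HasFlag(ASN1Type.V_ASN1_CONSTRUCTED)
            && tlv.Value is TypeLengthValue[] children)
        {
            foreach (var child in children)
                Walk(child, depth + 1);
        }
    }
}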
Deleted file: the BurnOutSharp.Builder New Executable builder (958 lines)

@@ -1,958 +0,0 @@
using System.Collections.Generic;
using System.IO;
using System.Linq;
using BurnOutSharp.Models.NewExecutable;
namespace BurnOutSharp.Builder
{
// TODO: Make Stream Data rely on Byte Data
public static class NewExecutable
{
#region Byte Data
/// <summary>
/// Parse a byte array into a New Executable
/// </summary>
/// <param name="data">Byte array to parse</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>Filled executable on success, null on error</returns>
public static Executable ParseExecutable(byte[] data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Cache the current offset
int initialOffset = offset;
// Create a new executable to fill
var executable = new Executable();
#region MS-DOS Stub
// Parse the MS-DOS stub
var stub = MSDOS.ParseExecutable(data, offset);
if (stub?.Header == null || stub.Header.NewExeHeaderAddr == 0)
return null;
// Set the MS-DOS stub
executable.Stub = stub;
#endregion
#region Executable Header
// Try to parse the executable header
offset = (int)(initialOffset + stub.Header.NewExeHeaderAddr);
var executableHeader = ParseExecutableHeader(data, offset);
if (executableHeader == null)
return null;
// Set the executable header
executable.Header = executableHeader;
#endregion
#region Segment Table
// If the offset for the segment table doesn't exist
int tableAddress = initialOffset
+ (int)stub.Header.NewExeHeaderAddr
+ executableHeader.SegmentTableOffset;
if (tableAddress >= data.Length)
return executable;
// Try to parse the segment table
var segmentTable = ParseSegmentTable(data, tableAddress, executableHeader.FileSegmentCount);
if (segmentTable == null)
return null;
// Set the segment table
executable.SegmentTable = segmentTable;
#endregion
#region Resource Table
// If the offset for the resource table doesn't exist
tableAddress = initialOffset
+ (int)stub.Header.NewExeHeaderAddr
+ executableHeader.ResourceTableOffset;
if (tableAddress >= data.Length)
return executable;
// Try to parse the resource table
var resourceTable = ParseResourceTable(data, tableAddress, executableHeader.ResourceEntriesCount);
if (resourceTable == null)
return null;
// Set the resource table
executable.ResourceTable = resourceTable;
#endregion
#region Resident-Name Table
// If the offset for the resident-name table doesn't exist
tableAddress = initialOffset
+ (int)stub.Header.NewExeHeaderAddr
+ executableHeader.ResidentNameTableOffset;
int endOffset = initialOffset
+ (int)stub.Header.NewExeHeaderAddr
+ executableHeader.ModuleReferenceTableOffset;
if (tableAddress >= data.Length)
return executable;
// Try to parse the resident-name table
var residentNameTable = ParseResidentNameTable(data, tableAddress, endOffset);
if (residentNameTable == null)
return null;
// Set the resident-name table
executable.ResidentNameTable = residentNameTable;
#endregion
#region Module-Reference Table
// If the offset for the module-reference table doesn't exist
tableAddress = initialOffset
+ (int)stub.Header.NewExeHeaderAddr
+ executableHeader.ModuleReferenceTableOffset;
if (tableAddress >= data.Length)
return executable;
// Try to parse the module-reference table
var moduleReferenceTable = ParseModuleReferenceTable(data, tableAddress, executableHeader.ModuleReferenceTableSize);
if (moduleReferenceTable == null)
return null;
// Set the module-reference table
executable.ModuleReferenceTable = moduleReferenceTable;
#endregion
#region Imported-Name Table
// If the offset for the imported-name table doesn't exist
tableAddress = initialOffset
+ (int)stub.Header.NewExeHeaderAddr
+ executableHeader.ImportedNamesTableOffset;
endOffset = initialOffset
+ (int)stub.Header.NewExeHeaderAddr
+ executableHeader.EntryTableOffset;
if (tableAddress >= data.Length)
return executable;
// Try to parse the imported-name table
var importedNameTable = ParseImportedNameTable(data, tableAddress, endOffset);
if (importedNameTable == null)
return null;
// Set the imported-name table
executable.ImportedNameTable = importedNameTable;
#endregion
#region Entry Table
// If the offset for the entry table doesn't exist
tableAddress = initialOffset
+ (int)stub.Header.NewExeHeaderAddr
+ executableHeader.EntryTableOffset;
endOffset = initialOffset
+ (int)stub.Header.NewExeHeaderAddr
+ executableHeader.EntryTableOffset
+ executableHeader.EntryTableSize;
if (tableAddress >= data.Length)
return executable;
// Try to parse the entry table
var entryTable = ParseEntryTable(data, tableAddress, endOffset);
if (entryTable == null)
return null;
// Set the entry table
executable.EntryTable = entryTable;
#endregion
#region Nonresident-Name Table
// If the offset for the nonresident-name table doesn't exist
tableAddress = initialOffset
+ (int)executableHeader.NonResidentNamesTableOffset;
endOffset = initialOffset
+ (int)executableHeader.NonResidentNamesTableOffset
+ executableHeader.NonResidentNameTableSize;
if (tableAddress >= data.Length)
return executable;
// Try to parse the nonresident-name table
var nonResidentNameTable = ParseNonResidentNameTable(data, tableAddress, endOffset);
if (nonResidentNameTable == null)
return null;
// Set the nonresident-name table
executable.NonResidentNameTable = nonResidentNameTable;
#endregion
return executable;
}
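// Usage sketch (hypothetical caller, not part of this file): parse raw bytes
// and treat a null result as "not a valid New Executable":
//
//     byte[] fileData = System.IO.File.ReadAllBytes(path); // 'path' is assumed
//     var exe = NewExecutable.ParseExecutable(fileData, 0);
//     if (exe == null) { /* parse failed */ }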
/// <summary>
/// Parse a byte array into a New Executable header
/// </summary>
/// <param name="data">Byte array to parse</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>Filled executable header on success, null on error</returns>
private static ExecutableHeader ParseExecutableHeader(byte[] data, int offset)
{
// TODO: Use marshalling here instead of building
var header = new ExecutableHeader();
header.Magic = new byte[2];
for (int i = 0; i < header.Magic.Length; i++)
{
header.Magic[i] = data.ReadByte(ref offset);
}
if (header.Magic[0] != 'N' || header.Magic[1] != 'E')
return null;
header.LinkerVersion = data.ReadByte(ref offset);
header.LinkerRevision = data.ReadByte(ref offset);
header.EntryTableOffset = data.ReadUInt16(ref offset);
header.EntryTableSize = data.ReadUInt16(ref offset);
header.CrcChecksum = data.ReadUInt32(ref offset);
header.FlagWord = (HeaderFlag)data.ReadUInt16(ref offset);
header.AutomaticDataSegmentNumber = data.ReadUInt16(ref offset);
header.InitialHeapAlloc = data.ReadUInt16(ref offset);
header.InitialStackAlloc = data.ReadUInt16(ref offset);
header.InitialCSIPSetting = data.ReadUInt32(ref offset);
header.InitialSSSPSetting = data.ReadUInt32(ref offset);
header.FileSegmentCount = data.ReadUInt16(ref offset);
header.ModuleReferenceTableSize = data.ReadUInt16(ref offset);
header.NonResidentNameTableSize = data.ReadUInt16(ref offset);
header.SegmentTableOffset = data.ReadUInt16(ref offset);
header.ResourceTableOffset = data.ReadUInt16(ref offset);
header.ResidentNameTableOffset = data.ReadUInt16(ref offset);
header.ModuleReferenceTableOffset = data.ReadUInt16(ref offset);
header.ImportedNamesTableOffset = data.ReadUInt16(ref offset);
header.NonResidentNamesTableOffset = data.ReadUInt32(ref offset);
header.MovableEntriesCount = data.ReadUInt16(ref offset);
header.SegmentAlignmentShiftCount = data.ReadUInt16(ref offset);
header.ResourceEntriesCount = data.ReadUInt16(ref offset);
header.TargetOperatingSystem = (OperatingSystem)data.ReadByte(ref offset);
header.AdditionalFlags = (OS2Flag)data.ReadByte(ref offset);
header.ReturnThunkOffset = data.ReadUInt16(ref offset);
header.SegmentReferenceThunkOffset = data.ReadUInt16(ref offset);
header.MinCodeSwapAreaSize = data.ReadUInt16(ref offset);
header.WindowsSDKRevision = data.ReadByte(ref offset);
header.WindowsSDKVersion = data.ReadByte(ref offset);
return header;
}
/// <summary>
/// Parse a byte array into a segment table
/// </summary>
/// <param name="data">Byte array to parse</param>
/// <param name="offset">Offset into the byte array</param>
/// <param name="count">Number of segment table entries to read</param>
/// <returns>Filled segment table on success, null on error</returns>
private static SegmentTableEntry[] ParseSegmentTable(byte[] data, int offset, int count)
{
// TODO: Use marshalling here instead of building
var segmentTable = new SegmentTableEntry[count];
for (int i = 0; i < count; i++)
{
var entry = new SegmentTableEntry();
entry.Offset = data.ReadUInt16(ref offset);
entry.Length = data.ReadUInt16(ref offset);
entry.FlagWord = (SegmentTableEntryFlag)data.ReadUInt16(ref offset);
entry.MinimumAllocationSize = data.ReadUInt16(ref offset);
segmentTable[i] = entry;
}
return segmentTable;
}
/// <summary>
/// Parse a byte array into a resource table
/// </summary>
/// <param name="data">Byte array to parse</param>
/// <param name="offset">Offset into the byte array</param>
/// <param name="count">Number of resource table entries to read</param>
/// <returns>Filled resource table on success, null on error</returns>
private static ResourceTable ParseResourceTable(byte[] data, int offset, int count)
{
int initialOffset = offset;
// TODO: Use marshalling here instead of building
var resourceTable = new ResourceTable();
resourceTable.AlignmentShiftCount = data.ReadUInt16(ref offset);
resourceTable.ResourceTypes = new ResourceTypeInformationEntry[count];
for (int i = 0; i < resourceTable.ResourceTypes.Length; i++)
{
var entry = new ResourceTypeInformationEntry();
entry.TypeID = data.ReadUInt16(ref offset);
entry.ResourceCount = data.ReadUInt16(ref offset);
entry.Reserved = data.ReadUInt32(ref offset);
entry.Resources = new ResourceTypeResourceEntry[entry.ResourceCount];
for (int j = 0; j < entry.ResourceCount; j++)
{
// TODO: Should we read and store the resource data?
var resource = new ResourceTypeResourceEntry();
resource.Offset = data.ReadUInt16(ref offset);
resource.Length = data.ReadUInt16(ref offset);
resource.FlagWord = (ResourceTypeResourceFlag)data.ReadUInt16(ref offset);
resource.ResourceID = data.ReadUInt16(ref offset);
resource.Reserved = data.ReadUInt32(ref offset);
entry.Resources[j] = resource;
}
resourceTable.ResourceTypes[i] = entry;
}
// Get the full list of unique string offsets
var stringOffsets = resourceTable.ResourceTypes
.Where(rt => rt.IsIntegerType() == false)
.Select(rt => rt.TypeID)
.Union(resourceTable.ResourceTypes
.SelectMany(rt => rt.Resources)
.Where(r => r.IsIntegerType() == false)
.Select(r => r.ResourceID))
.Distinct()
.OrderBy(o => o)
.ToList();
// Populate the type and name string dictionary
resourceTable.TypeAndNameStrings = new Dictionary<ushort, ResourceTypeAndNameString>();
for (int i = 0; i < stringOffsets.Count; i++)
{
int stringOffset = stringOffsets[i] + initialOffset;
var str = new ResourceTypeAndNameString();
str.Length = data.ReadByte(ref stringOffset);
str.Text = data.ReadBytes(ref stringOffset, str.Length);
resourceTable.TypeAndNameStrings[stringOffsets[i]] = str;
}
return resourceTable;
}
/// <summary>
/// Parse a byte array into a resident-name table
/// </summary>
/// <param name="data">Byte array to parse</param>
/// <param name="offset">Offset into the byte array</param>
/// <param name="endOffset">First address not part of the resident-name table</param>
/// <returns>Filled resident-name table on success, null on error</returns>
private static ResidentNameTableEntry[] ParseResidentNameTable(byte[] data, int offset, int endOffset)
{
// TODO: Use marshalling here instead of building
var residentNameTable = new List<ResidentNameTableEntry>();
while (offset < endOffset)
{
var entry = new ResidentNameTableEntry();
entry.Length = data.ReadByte(ref offset);
entry.NameString = data.ReadBytes(ref offset, entry.Length);
entry.OrdinalNumber = data.ReadUInt16(ref offset);
residentNameTable.Add(entry);
}
return residentNameTable.ToArray();
}
/// <summary>
/// Parse a byte array into a module-reference table
/// </summary>
/// <param name="data">Byte array to parse</param>
/// <param name="offset">Offset into the byte array</param>
/// <param name="count">Number of module-reference table entries to read</param>
/// <returns>Filled module-reference table on success, null on error</returns>
private static ModuleReferenceTableEntry[] ParseModuleReferenceTable(byte[] data, int offset, int count)
{
// TODO: Use marshalling here instead of building
var moduleReferenceTable = new ModuleReferenceTableEntry[count];
for (int i = 0; i < count; i++)
{
var entry = new ModuleReferenceTableEntry();
entry.Offset = data.ReadUInt16(ref offset);
moduleReferenceTable[i] = entry;
}
return moduleReferenceTable;
}
/// <summary>
/// Parse a byte array into an imported-name table
/// </summary>
/// <param name="data">Byte array to parse</param>
/// <param name="offset">Offset into the byte array</param>
/// <param name="endOffset">First address not part of the imported-name table</param>
/// <returns>Filled imported-name table on success, null on error</returns>
private static Dictionary<ushort, ImportedNameTableEntry> ParseImportedNameTable(byte[] data, int offset, int endOffset)
{
// TODO: Use marshalling here instead of building
var importedNameTable = new Dictionary<ushort, ImportedNameTableEntry>();
while (offset < endOffset)
{
ushort currentOffset = (ushort)offset;
var entry = new ImportedNameTableEntry();
entry.Length = data.ReadByte(ref offset);
entry.NameString = data.ReadBytes(ref offset, entry.Length);
importedNameTable[currentOffset] = entry;
}
return importedNameTable;
}
/// <summary>
/// Parse a byte array into an entry table
/// </summary>
/// <param name="data">Byte array to parse</param>
/// <param name="offset">Offset into the byte array</param>
/// <param name="endOffset">First address not part of the entry table</param>
/// <returns>Filled entry table on success, null on error</returns>
private static EntryTableBundle[] ParseEntryTable(byte[] data, int offset, int endOffset)
{
// TODO: Use marshalling here instead of building
var entryTable = new List<EntryTableBundle>();
while (offset < endOffset)
{
var entry = new EntryTableBundle();
entry.EntryCount = data.ReadByte(ref offset);
entry.SegmentIndicator = data.ReadByte(ref offset);
switch (entry.GetEntryType())
{
case SegmentEntryType.Unused:
break;
case SegmentEntryType.FixedSegment:
entry.FixedFlagWord = (FixedSegmentEntryFlag)data.ReadByte(ref offset);
entry.FixedOffset = data.ReadUInt16(ref offset);
break;
case SegmentEntryType.MoveableSegment:
entry.MoveableFlagWord = (MoveableSegmentEntryFlag)data.ReadByte(ref offset);
entry.MoveableReserved = data.ReadUInt16(ref offset);
entry.MoveableSegmentNumber = data.ReadByte(ref offset);
entry.MoveableOffset = data.ReadUInt16(ref offset);
break;
}
entryTable.Add(entry);
}
return entryTable.ToArray();
}
/// <summary>
/// Parse a byte array into a nonresident-name table
/// </summary>
/// <param name="data">Byte array to parse</param>
/// <param name="offset">Offset into the byte array</param>
/// <param name="endOffset">First address not part of the nonresident-name table</param>
/// <returns>Filled nonresident-name table on success, null on error</returns>
private static NonResidentNameTableEntry[] ParseNonResidentNameTable(byte[] data, int offset, int endOffset)
{
// TODO: Use marshalling here instead of building
var nonResidentNameTable = new List<NonResidentNameTableEntry>();
while (offset < endOffset)
{
var entry = new NonResidentNameTableEntry();
entry.Length = data.ReadByte(ref offset);
entry.NameString = data.ReadBytes(ref offset, entry.Length);
entry.OrdinalNumber = data.ReadUInt16(ref offset);
nonResidentNameTable.Add(entry);
}
return nonResidentNameTable.ToArray();
}
#endregion
#region Stream Data
/// <summary>
/// Parse a Stream into a New Executable
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled executable on success, null on error</returns>
public static Executable ParseExecutable(Stream data)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (data.Position < 0 || data.Position >= data.Length)
return null;
// Cache the current offset
int initialOffset = (int)data.Position;
// Create a new executable to fill
var executable = new Executable();
#region MS-DOS Stub
// Parse the MS-DOS stub
var stub = MSDOS.ParseExecutable(data);
if (stub?.Header == null || stub.Header.NewExeHeaderAddr == 0)
return null;
// Set the MS-DOS stub
executable.Stub = stub;
#endregion
#region Executable Header
// Try to parse the executable header
data.Seek(initialOffset + stub.Header.NewExeHeaderAddr, SeekOrigin.Begin);
var executableHeader = ParseExecutableHeader(data);
if (executableHeader == null)
return null;
// Set the executable header
executable.Header = executableHeader;
#endregion
#region Segment Table
// If the offset for the segment table doesn't exist
int tableAddress = initialOffset
+ (int)stub.Header.NewExeHeaderAddr
+ executableHeader.SegmentTableOffset;
if (tableAddress >= data.Length)
return executable;
// Try to parse the segment table
data.Seek(tableAddress, SeekOrigin.Begin);
var segmentTable = ParseSegmentTable(data, executableHeader.FileSegmentCount);
if (segmentTable == null)
return null;
// Set the segment table
executable.SegmentTable = segmentTable;
#endregion
#region Resource Table
// If the offset for the resource table doesn't exist
tableAddress = initialOffset
+ (int)stub.Header.NewExeHeaderAddr
+ executableHeader.ResourceTableOffset;
if (tableAddress >= data.Length)
return executable;
// Try to parse the resource table
data.Seek(tableAddress, SeekOrigin.Begin);
var resourceTable = ParseResourceTable(data, executableHeader.ResourceEntriesCount);
if (resourceTable == null)
return null;
// Set the resource table
executable.ResourceTable = resourceTable;
#endregion
#region Resident-Name Table
// If the offset for the resident-name table doesn't exist
tableAddress = initialOffset
+ (int)stub.Header.NewExeHeaderAddr
+ executableHeader.ResidentNameTableOffset;
int endOffset = initialOffset
+ (int)stub.Header.NewExeHeaderAddr
+ executableHeader.ModuleReferenceTableOffset;
if (tableAddress >= data.Length)
return executable;
// Try to parse the resident-name table
data.Seek(tableAddress, SeekOrigin.Begin);
var residentNameTable = ParseResidentNameTable(data, endOffset);
if (residentNameTable == null)
return null;
// Set the resident-name table
executable.ResidentNameTable = residentNameTable;
#endregion
#region Module-Reference Table
// If the offset for the module-reference table doesn't exist
tableAddress = initialOffset
+ (int)stub.Header.NewExeHeaderAddr
+ executableHeader.ModuleReferenceTableOffset;
if (tableAddress >= data.Length)
return executable;
// Try to parse the module-reference table
data.Seek(tableAddress, SeekOrigin.Begin);
var moduleReferenceTable = ParseModuleReferenceTable(data, executableHeader.ModuleReferenceTableSize);
if (moduleReferenceTable == null)
return null;
// Set the module-reference table
executable.ModuleReferenceTable = moduleReferenceTable;
#endregion
#region Imported-Name Table
// If the offset for the imported-name table doesn't exist
tableAddress = initialOffset
+ (int)stub.Header.NewExeHeaderAddr
+ executableHeader.ImportedNamesTableOffset;
endOffset = initialOffset
+ (int)stub.Header.NewExeHeaderAddr
+ executableHeader.EntryTableOffset;
if (tableAddress >= data.Length)
return executable;
// Try to parse the imported-name table
data.Seek(tableAddress, SeekOrigin.Begin);
var importedNameTable = ParseImportedNameTable(data, endOffset);
if (importedNameTable == null)
return null;
// Set the imported-name table
executable.ImportedNameTable = importedNameTable;
#endregion
#region Entry Table
// If the offset for the entry table doesn't exist
tableAddress = initialOffset
+ (int)stub.Header.NewExeHeaderAddr
+ executableHeader.EntryTableOffset;
endOffset = initialOffset
+ (int)stub.Header.NewExeHeaderAddr
+ executableHeader.EntryTableOffset
+ executableHeader.EntryTableSize;
if (tableAddress >= data.Length)
return executable;
// Try to parse the entry table
data.Seek(tableAddress, SeekOrigin.Begin);
var entryTable = ParseEntryTable(data, endOffset);
if (entryTable == null)
return null;
// Set the entry table
executable.EntryTable = entryTable;
#endregion
#region Nonresident-Name Table
// If the offset for the nonresident-name table doesn't exist
tableAddress = initialOffset
+ (int)executableHeader.NonResidentNamesTableOffset;
endOffset = initialOffset
+ (int)executableHeader.NonResidentNamesTableOffset
+ executableHeader.NonResidentNameTableSize;
if (tableAddress >= data.Length)
return executable;
// Try to parse the nonresident-name table
data.Seek(tableAddress, SeekOrigin.Begin);
var nonResidentNameTable = ParseNonResidentNameTable(data, endOffset);
if (nonResidentNameTable == null)
return null;
// Set the nonresident-name table
executable.NonResidentNameTable = nonResidentNameTable;
#endregion
return executable;
}
/// <summary>
/// Parse a Stream into a New Executable header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled executable header on success, null on error</returns>
private static ExecutableHeader ParseExecutableHeader(Stream data)
{
// TODO: Use marshalling here instead of building
var header = new ExecutableHeader();
header.Magic = new byte[2];
for (int i = 0; i < header.Magic.Length; i++)
{
header.Magic[i] = data.ReadByteValue();
}
if (header.Magic[0] != 'N' || header.Magic[1] != 'E')
return null;
header.LinkerVersion = data.ReadByteValue();
header.LinkerRevision = data.ReadByteValue();
header.EntryTableOffset = data.ReadUInt16();
header.EntryTableSize = data.ReadUInt16();
header.CrcChecksum = data.ReadUInt32();
header.FlagWord = (HeaderFlag)data.ReadUInt16();
header.AutomaticDataSegmentNumber = data.ReadUInt16();
header.InitialHeapAlloc = data.ReadUInt16();
header.InitialStackAlloc = data.ReadUInt16();
header.InitialCSIPSetting = data.ReadUInt32();
header.InitialSSSPSetting = data.ReadUInt32();
header.FileSegmentCount = data.ReadUInt16();
header.ModuleReferenceTableSize = data.ReadUInt16();
header.NonResidentNameTableSize = data.ReadUInt16();
header.SegmentTableOffset = data.ReadUInt16();
header.ResourceTableOffset = data.ReadUInt16();
header.ResidentNameTableOffset = data.ReadUInt16();
header.ModuleReferenceTableOffset = data.ReadUInt16();
header.ImportedNamesTableOffset = data.ReadUInt16();
header.NonResidentNamesTableOffset = data.ReadUInt32();
header.MovableEntriesCount = data.ReadUInt16();
header.SegmentAlignmentShiftCount = data.ReadUInt16();
header.ResourceEntriesCount = data.ReadUInt16();
header.TargetOperatingSystem = (OperatingSystem)data.ReadByteValue();
header.AdditionalFlags = (OS2Flag)data.ReadByteValue();
header.ReturnThunkOffset = data.ReadUInt16();
header.SegmentReferenceThunkOffset = data.ReadUInt16();
header.MinCodeSwapAreaSize = data.ReadUInt16();
header.WindowsSDKRevision = data.ReadByteValue();
header.WindowsSDKVersion = data.ReadByteValue();
return header;
}
/// <summary>
/// Parse a Stream into a segment table
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="count">Number of segment table entries to read</param>
/// <returns>Filled segment table on success, null on error</returns>
private static SegmentTableEntry[] ParseSegmentTable(Stream data, int count)
{
// TODO: Use marshalling here instead of building
var segmentTable = new SegmentTableEntry[count];
for (int i = 0; i < count; i++)
{
var entry = new SegmentTableEntry();
entry.Offset = data.ReadUInt16();
entry.Length = data.ReadUInt16();
entry.FlagWord = (SegmentTableEntryFlag)data.ReadUInt16();
entry.MinimumAllocationSize = data.ReadUInt16();
segmentTable[i] = entry;
}
return segmentTable;
}
/// <summary>
/// Parse a Stream into a resource table
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="count">Number of resource table entries to read</param>
/// <returns>Filled resource table on success, null on error</returns>
private static ResourceTable ParseResourceTable(Stream data, int count)
{
long initialOffset = data.Position;
// TODO: Use marshalling here instead of building
var resourceTable = new ResourceTable();
resourceTable.AlignmentShiftCount = data.ReadUInt16();
resourceTable.ResourceTypes = new ResourceTypeInformationEntry[count];
for (int i = 0; i < resourceTable.ResourceTypes.Length; i++)
{
var entry = new ResourceTypeInformationEntry();
entry.TypeID = data.ReadUInt16();
entry.ResourceCount = data.ReadUInt16();
entry.Reserved = data.ReadUInt32();
entry.Resources = new ResourceTypeResourceEntry[entry.ResourceCount];
for (int j = 0; j < entry.ResourceCount; j++)
{
// TODO: Should we read and store the resource data?
var resource = new ResourceTypeResourceEntry();
resource.Offset = data.ReadUInt16();
resource.Length = data.ReadUInt16();
resource.FlagWord = (ResourceTypeResourceFlag)data.ReadUInt16();
resource.ResourceID = data.ReadUInt16();
resource.Reserved = data.ReadUInt32();
entry.Resources[j] = resource;
}
resourceTable.ResourceTypes[i] = entry;
}
// Get the full list of unique string offsets
var stringOffsets = resourceTable.ResourceTypes
.Where(rt => rt.IsIntegerType() == false)
.Select(rt => rt.TypeID)
.Union(resourceTable.ResourceTypes
.SelectMany(rt => rt.Resources)
.Where(r => r.IsIntegerType() == false)
.Select(r => r.ResourceID))
.Distinct()
.OrderBy(o => o)
.ToList();
// Populate the type and name string dictionary
resourceTable.TypeAndNameStrings = new Dictionary<ushort, ResourceTypeAndNameString>();
for (int i = 0; i < stringOffsets.Count; i++)
{
int stringOffset = (int)(stringOffsets[i] + initialOffset);
data.Seek(stringOffset, SeekOrigin.Begin);
var str = new ResourceTypeAndNameString();
str.Length = data.ReadByteValue();
str.Text = data.ReadBytes(str.Length);
resourceTable.TypeAndNameStrings[stringOffsets[i]] = str;
}
return resourceTable;
}
/// <summary>
/// Parse a Stream into a resident-name table
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="endOffset">First address not part of the resident-name table</param>
/// <returns>Filled resident-name table on success, null on error</returns>
private static ResidentNameTableEntry[] ParseResidentNameTable(Stream data, int endOffset)
{
// TODO: Use marshalling here instead of building
var residentNameTable = new List<ResidentNameTableEntry>();
while (data.Position < endOffset)
{
var entry = new ResidentNameTableEntry();
entry.Length = data.ReadByteValue();
entry.NameString = data.ReadBytes(entry.Length);
entry.OrdinalNumber = data.ReadUInt16();
residentNameTable.Add(entry);
}
return residentNameTable.ToArray();
}
/// <summary>
/// Parse a Stream into a module-reference table
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="count">Number of module-reference table entries to read</param>
/// <returns>Filled module-reference table on success, null on error</returns>
private static ModuleReferenceTableEntry[] ParseModuleReferenceTable(Stream data, int count)
{
// TODO: Use marshalling here instead of building
var moduleReferenceTable = new ModuleReferenceTableEntry[count];
for (int i = 0; i < count; i++)
{
var entry = new ModuleReferenceTableEntry();
entry.Offset = data.ReadUInt16();
moduleReferenceTable[i] = entry;
}
return moduleReferenceTable;
}
/// <summary>
/// Parse a Stream into an imported-name table
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="endOffset">First address not part of the imported-name table</param>
/// <returns>Filled imported-name table on success, null on error</returns>
private static Dictionary<ushort, ImportedNameTableEntry> ParseImportedNameTable(Stream data, int endOffset)
{
// TODO: Use marshalling here instead of building
var importedNameTable = new Dictionary<ushort, ImportedNameTableEntry>();
while (data.Position < endOffset)
{
ushort currentOffset = (ushort)data.Position;
var entry = new ImportedNameTableEntry();
entry.Length = data.ReadByteValue();
entry.NameString = data.ReadBytes(entry.Length);
importedNameTable[currentOffset] = entry;
}
return importedNameTable;
}
/// <summary>
/// Parse a Stream into an entry table
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="endOffset">First address not part of the entry table</param>
/// <returns>Filled entry table on success, null on error</returns>
private static EntryTableBundle[] ParseEntryTable(Stream data, int endOffset)
{
// TODO: Use marshalling here instead of building
var entryTable = new List<EntryTableBundle>();
while (data.Position < endOffset)
{
var entry = new EntryTableBundle();
entry.EntryCount = data.ReadByteValue();
entry.SegmentIndicator = data.ReadByteValue();
switch (entry.GetEntryType())
{
case SegmentEntryType.Unused:
break;
case SegmentEntryType.FixedSegment:
entry.FixedFlagWord = (FixedSegmentEntryFlag)data.ReadByteValue();
entry.FixedOffset = data.ReadUInt16();
break;
case SegmentEntryType.MoveableSegment:
entry.MoveableFlagWord = (MoveableSegmentEntryFlag)data.ReadByteValue();
entry.MoveableReserved = data.ReadUInt16();
entry.MoveableSegmentNumber = data.ReadByteValue();
entry.MoveableOffset = data.ReadUInt16();
break;
}
entryTable.Add(entry);
}
return entryTable.ToArray();
}
/// <summary>
/// Parse a Stream into a nonresident-name table
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="endOffset">First address not part of the nonresident-name table</param>
/// <returns>Filled nonresident-name table on success, null on error</returns>
private static NonResidentNameTableEntry[] ParseNonResidentNameTable(Stream data, int endOffset)
{
// TODO: Use marshalling here instead of building
var nonResidentNameTable = new List<NonResidentNameTableEntry>();
while (data.Position < endOffset)
{
var entry = new NonResidentNameTableEntry();
entry.Length = data.ReadByteValue();
entry.NameString = data.ReadBytes(entry.Length);
entry.OrdinalNumber = data.ReadUInt16();
nonResidentNameTable.Add(entry);
}
return nonResidentNameTable.ToArray();
}
#endregion
}
}
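
For orientation, a minimal usage sketch for the New Executable builder above. The public entry point and the model's property names live outside this hunk, so NewExecutableDemo, the ParseExecutable call, and the Header property below are assumptions based on the fields this diff populates; treat it as illustrative, not as the library's confirmed API.

using System;
using System.IO;
using BurnOutSharp.Builders;

class NewExecutableDemo
{
    static void Main()
    {
        // Hypothetical input path; any 16-bit NE-format binary would do
        using (Stream stream = File.OpenRead("SETUP.EXE"))
        {
            var ne = NewExecutable.ParseExecutable(stream); // assumed entry point name
            if (ne == null)
            {
                Console.WriteLine("Not a parseable New Executable");
                return;
            }

            // Header fields shown being read in the hunk above (Header property assumed)
            Console.WriteLine($"Linker {ne.Header.LinkerVersion}.{ne.Header.LinkerRevision}");
            Console.WriteLine($"Segments: {ne.Header.FileSegmentCount}");
        }
    }
}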

File diff suppressed because it is too large


@@ -0,0 +1,151 @@
using System.IO;
using System.Linq;
using System.Text;
using BurnOutSharp.Models.BFPK;
using BurnOutSharp.Utilities;
using static BurnOutSharp.Models.BFPK.Constants;
namespace BurnOutSharp.Builders
{
public class BFPK
{
#region Byte Data
/// <summary>
/// Parse a byte array into a BFPK archive
/// </summary>
/// <param name="data">Byte array to parse</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>Filled archive on success, null on error</returns>
public static Archive ParseArchive(byte[] data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return ParseArchive(dataStream);
}
#endregion
#region Stream Data
/// <summary>
/// Parse a Stream into a BFPK archive
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled archive on success, null on error</returns>
public static Archive ParseArchive(Stream data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
return null;
// If the offset is out of bounds
if (data.Position < 0 || data.Position >= data.Length)
return null;
// Cache the current offset
int initialOffset = (int)data.Position;
// Create a new archive to fill
var archive = new Archive();
#region Header
// Try to parse the header
var header = ParseHeader(data);
if (header == null)
return null;
// Set the archive header
archive.Header = header;
#endregion
#region Files
// If we have any files
if (header.Files > 0)
{
var files = new FileEntry[header.Files];
// Read all entries in turn
for (int i = 0; i < header.Files; i++)
{
var file = ParseFileEntry(data);
if (file == null)
return null;
files[i] = file;
}
// Set the files
archive.Files = files;
}
#endregion
return archive;
}
/// <summary>
/// Parse a Stream into a header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled header on success, null on error</returns>
private static Header ParseHeader(Stream data)
{
// TODO: Use marshalling here instead of building
Header header = new Header();
byte[] magic = data.ReadBytes(4);
header.Magic = Encoding.ASCII.GetString(magic);
if (header.Magic != SignatureString)
return null;
header.Version = data.ReadInt32();
header.Files = data.ReadInt32();
return header;
}
/// <summary>
/// Parse a Stream into a file entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled file entry on success, null on error</returns>
private static FileEntry ParseFileEntry(Stream data)
{
// TODO: Use marshalling here instead of building
FileEntry fileEntry = new FileEntry();
fileEntry.NameSize = data.ReadInt32();
if (fileEntry.NameSize > 0)
{
byte[] name = data.ReadBytes(fileEntry.NameSize);
fileEntry.Name = Encoding.ASCII.GetString(name);
}
fileEntry.UncompressedSize = data.ReadInt32();
fileEntry.Offset = data.ReadInt32();
if (fileEntry.Offset > 0)
{
long currentOffset = data.Position;
data.Seek(fileEntry.Offset, SeekOrigin.Begin);
fileEntry.CompressedSize = data.ReadInt32();
data.Seek(currentOffset, SeekOrigin.Begin);
}
return fileEntry;
}
#endregion
}
}
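
A hedged usage sketch for the BFPK builder above. ParseArchive and the Header and Files fields all appear in this diff; the sample path and the null guard are illustrative assumptions only.

using System;
using BurnOutSharp.Builders;

class BFPKDemo
{
    static void Main()
    {
        // Hypothetical archive path
        byte[] data = System.IO.File.ReadAllBytes("sample.bfpk");
        var archive = BFPK.ParseArchive(data, 0);
        if (archive == null)
        {
            Console.WriteLine("Not a BFPK archive");
            return;
        }

        // Header and file entry fields populated in the hunk above
        Console.WriteLine($"Version {archive.Header.Version}, {archive.Header.Files} file(s)");
        if (archive.Files != null)
        {
            foreach (var entry in archive.Files)
                Console.WriteLine($"{entry.Name}: {entry.UncompressedSize} bytes at offset {entry.Offset}");
        }
    }
}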


@@ -0,0 +1,250 @@
using System.IO;
using System.Linq;
using System.Text;
using BurnOutSharp.Models.BSP;
using BurnOutSharp.Utilities;
using static BurnOutSharp.Models.BSP.Constants;
namespace BurnOutSharp.Builders
{
public static class BSP
{
#region Byte Data
/// <summary>
/// Parse a byte array into a Half-Life Level
/// </summary>
/// <param name="data">Byte array to parse</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>Filled Half-Life Level on success, null on error</returns>
public static Models.BSP.File ParseFile(byte[] data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return ParseFile(dataStream);
}
#endregion
#region Stream Data
/// <summary>
/// Parse a Stream into a Half-Life Level
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Level on success, null on error</returns>
public static Models.BSP.File ParseFile(Stream data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
return null;
// If the offset is out of bounds
if (data.Position < 0 || data.Position >= data.Length)
return null;
// Cache the current offset
int initialOffset = (int)data.Position;
// Create a new Half-Life Level to fill
var file = new Models.BSP.File();
#region Header
// Try to parse the header
var header = ParseHeader(data);
if (header == null)
return null;
// Set the level header
file.Header = header;
#endregion
#region Lumps
// Create the lump array
file.Lumps = new Lump[HL_BSP_LUMP_COUNT];
// Try to parse the lumps
for (int i = 0; i < HL_BSP_LUMP_COUNT; i++)
{
var lump = ParseLump(data);
file.Lumps[i] = lump;
}
#endregion
#region Texture header
// Try to get the texture header lump
var textureDataLump = file.Lumps[HL_BSP_LUMP_TEXTUREDATA];
if (textureDataLump.Offset == 0 || textureDataLump.Length == 0)
return null;
// Seek to the texture header
data.Seek(textureDataLump.Offset, SeekOrigin.Begin);
// Try to parse the texture header
var textureHeader = ParseTextureHeader(data);
if (textureHeader == null)
return null;
// Set the texture header
file.TextureHeader = textureHeader;
#endregion
#region Textures
// Create the texture array
file.Textures = new Texture[textureHeader.TextureCount];
// Try to parse the textures
for (int i = 0; i < textureHeader.TextureCount; i++)
{
// Get the texture offset
int offset = (int)(textureHeader.Offsets[i] + file.Lumps[HL_BSP_LUMP_TEXTUREDATA].Offset);
if (offset < 0 || offset >= data.Length)
continue;
// Seek to the texture
data.Seek(offset, SeekOrigin.Begin);
var texture = ParseTexture(data);
file.Textures[i] = texture;
}
#endregion
return file;
}
/// <summary>
/// Parse a Stream into a Half-Life Level header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Level header on success, null on error</returns>
private static Header ParseHeader(Stream data)
{
// TODO: Use marshalling here instead of building
Header header = new Header();
// Only recognized versions are 29 and 30
header.Version = data.ReadUInt32();
if (header.Version != 29 && header.Version != 30)
return null;
return header;
}
/// <summary>
/// Parse a Stream into a lump
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled lump on success, null on error</returns>
private static Lump ParseLump(Stream data)
{
// TODO: Use marshalling here instead of building
Lump lump = new Lump();
lump.Offset = data.ReadUInt32();
lump.Length = data.ReadUInt32();
return lump;
}
/// <summary>
/// Parse a Stream into a Half-Life Level texture header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Level texture header on success, null on error</returns>
private static TextureHeader ParseTextureHeader(Stream data)
{
// TODO: Use marshalling here instead of building
TextureHeader textureHeader = new TextureHeader();
textureHeader.TextureCount = data.ReadUInt32();
var offsets = new uint[textureHeader.TextureCount];
for (int i = 0; i < textureHeader.TextureCount; i++)
{
offsets[i] = data.ReadUInt32();
}
textureHeader.Offsets = offsets;
return textureHeader;
}
/// <summary>
/// Parse a Stream into a texture
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="mipmap">Mipmap level</param>
/// <returns>Filled texture on success, null on error</returns>
private static Texture ParseTexture(Stream data, uint mipmap = 0)
{
// TODO: Use marshalling here instead of building
Texture texture = new Texture();
byte[] name = data.ReadBytes(16).TakeWhile(c => c != '\0').ToArray();
texture.Name = Encoding.ASCII.GetString(name);
texture.Width = data.ReadUInt32();
texture.Height = data.ReadUInt32();
texture.Offsets = new uint[4];
for (int i = 0; i < 4; i++)
{
texture.Offsets[i] = data.ReadUInt32();
}
// Get the size of the pixel data
uint pixelSize = 0;
for (int i = 0; i < HL_BSP_MIPMAP_COUNT; i++)
{
if (texture.Offsets[i] != 0)
{
pixelSize += (texture.Width >> i) * (texture.Height >> i);
}
}
// If we have no pixel data
if (pixelSize == 0)
return texture;
texture.TextureData = data.ReadBytes((int)pixelSize);
texture.PaletteSize = data.ReadUInt16();
texture.PaletteData = data.ReadBytes((int)(texture.PaletteSize * 3));
// Adjust the dimensions based on mipmap level
switch (mipmap)
{
case 1:
texture.Width /= 2;
texture.Height /= 2;
break;
case 2:
texture.Width /= 4;
texture.Height /= 4;
break;
case 3:
texture.Width /= 8;
texture.Height /= 8;
break;
}
return texture;
}
#endregion
}
}
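
A similar hedged sketch for the BSP builder. ParseFile, Header.Version, and the Texture fields are all populated in the hunk above; the map path is hypothetical. Note that ParseTexture halves the stored dimensions once per mipmap level, so a 256x128 texture read at mipmap level 2 reports 64x32.

using System;
using BurnOutSharp.Builders;

class BSPDemo
{
    static void Main()
    {
        byte[] data = System.IO.File.ReadAllBytes("c1a0.bsp"); // hypothetical GoldSrc map
        var level = BSP.ParseFile(data, 0);
        if (level == null)
        {
            Console.WriteLine("Not a version 29/30 BSP");
            return;
        }

        // Texture entries parsed from the texture data lump above
        Console.WriteLine($"BSP version {level.Header.Version}");
        foreach (var texture in level.Textures)
        {
            if (texture != null)
                Console.WriteLine($"{texture.Name}: {texture.Width}x{texture.Height}");
        }
    }
}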


@@ -1,16 +1,17 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFrameworks>netstandard2.0;net6.0</TargetFrameworks>
<Title>BurnOutSharp.Builder</Title>
<AssemblyName>BurnOutSharp.Builder</AssemblyName>
<TargetFrameworks>net48;net6.0</TargetFrameworks>
<RuntimeIdentifiers>win-x86;win-x64;linux-x64;osx-x64</RuntimeIdentifiers>
<Title>BurnOutSharp.Builders</Title>
<AssemblyName>BurnOutSharp.Builders</AssemblyName>
<Authors>Matt Nadareski</Authors>
<Product>BurnOutSharp</Product>
<Copyright>Copyright (c)2022 Matt Nadareski</Copyright>
<RepositoryUrl>https://github.com/mnadareski/BurnOutSharp</RepositoryUrl>
<Version>2.5</Version>
<AssemblyVersion>2.5</AssemblyVersion>
<FileVersion>2.5</FileVersion>
<Version>2.6</Version>
<AssemblyVersion>2.6</AssemblyVersion>
<FileVersion>2.6</FileVersion>
<IncludeSource>true</IncludeSource>
<IncludeSymbols>true</IncludeSymbols>
</PropertyGroup>
@@ -21,6 +22,7 @@
<ItemGroup>
<ProjectReference Include="..\BurnOutSharp.Models\BurnOutSharp.Models.csproj" />
<ProjectReference Include="..\BurnOutSharp.Utilities\BurnOutSharp.Utilities.csproj" />
</ItemGroup>
</Project>


@@ -4,290 +4,12 @@ using System.IO;
using System.Linq;
using System.Text;
using System.Xml.Serialization;
using BurnOutSharp.Utilities;
namespace BurnOutSharp.Builder
namespace BurnOutSharp.Builders
{
public static class Extensions
{
#region Byte Arrays
/// <summary>
/// Read a byte and increment the pointer to an array
/// </summary>
public static byte ReadByte(this byte[] content, ref int offset)
{
return content[offset++];
}
/// <summary>
/// Read a byte array and increment the pointer to an array
/// </summary>
public static byte[] ReadBytes(this byte[] content, ref int offset, int count)
{
// If there's an invalid byte count, don't do anything
if (count <= 0)
return null;
byte[] buffer = new byte[count];
Array.Copy(content, offset, buffer, 0, Math.Min(count, content.Length - offset));
offset += count;
return buffer;
}
/// <summary>
/// Read a char and increment the pointer to an array
/// </summary>
public static char ReadChar(this byte[] content, ref int offset)
{
return (char)content[offset++];
}
/// <summary>
/// Read a character array and increment the pointer to an array
/// </summary>
public static char[] ReadChars(this byte[] content, ref int offset, int count) => content.ReadChars(ref offset, count, Encoding.Default);
/// <summary>
/// Read a character array and increment the pointer to an array
/// </summary>
public static char[] ReadChars(this byte[] content, ref int offset, int count, Encoding encoding)
{
// TODO: Fix the code below to make it work with byte arrays and not streams
return null;
// byte[] buffer = new byte[count];
// stream.Read(buffer, 0, count);
// return encoding.GetString(buffer).ToCharArray();
}
/// <summary>
/// Read a short and increment the pointer to an array
/// </summary>
public static short ReadInt16(this byte[] content, ref int offset)
{
short value = BitConverter.ToInt16(content, offset);
offset += 2;
return value;
}
/// <summary>
/// Read a ushort and increment the pointer to an array
/// </summary>
public static ushort ReadUInt16(this byte[] content, ref int offset)
{
ushort value = BitConverter.ToUInt16(content, offset);
offset += 2;
return value;
}
/// <summary>
/// Read a int and increment the pointer to an array
/// </summary>
public static int ReadInt32(this byte[] content, ref int offset)
{
int value = BitConverter.ToInt32(content, offset);
offset += 4;
return value;
}
/// <summary>
/// Read a uint and increment the pointer to an array
/// </summary>
public static uint ReadUInt32(this byte[] content, ref int offset)
{
uint value = BitConverter.ToUInt32(content, offset);
offset += 4;
return value;
}
/// <summary>
/// Read a long and increment the pointer to an array
/// </summary>
public static long ReadInt64(this byte[] content, ref int offset)
{
long value = BitConverter.ToInt64(content, offset);
offset += 8;
return value;
}
/// <summary>
/// Read a ulong and increment the pointer to an array
/// </summary>
public static ulong ReadUInt64(this byte[] content, ref int offset)
{
ulong value = BitConverter.ToUInt64(content, offset);
offset += 8;
return value;
}
/// <summary>
/// Read a null-terminated string from the stream
/// </summary>
public static string ReadString(this byte[] content, ref int offset) => content.ReadString(ref offset, Encoding.Default);
/// <summary>
/// Read a null-terminated string from the stream
/// </summary>
public static string ReadString(this byte[] content, ref int offset, Encoding encoding)
{
if (offset >= content.Length)
return null;
byte[] nullTerminator = encoding.GetBytes(new char[] { '\0' });
int charWidth = nullTerminator.Length;
List<char> keyChars = new List<char>();
while (offset < content.Length && BitConverter.ToUInt16(content, offset) != 0x0000)
{
keyChars.Add(encoding.GetChars(content, offset, charWidth)[0]); offset += charWidth;
}
offset += 2;
return new string(keyChars.ToArray());
}
#endregion
#region Streams
/// <summary>
/// Read a byte from the stream
/// </summary>
public static byte ReadByteValue(this Stream stream)
{
byte[] buffer = new byte[1];
stream.Read(buffer, 0, 1);
return buffer[0];
}
/// <summary>
/// Read a byte array from the stream
/// </summary>
public static byte[] ReadBytes(this Stream stream, int count)
{
// If there's an invalid byte count, don't do anything
if (count <= 0)
return null;
byte[] buffer = new byte[count];
stream.Read(buffer, 0, count);
return buffer;
}
/// <summary>
/// Read a character from the stream
/// </summary>
public static char ReadChar(this Stream stream)
{
byte[] buffer = new byte[1];
stream.Read(buffer, 0, 1);
return (char)buffer[0];
}
/// <summary>
/// Read a character array from the stream
/// </summary>
public static char[] ReadChars(this Stream stream, int count) => stream.ReadChars(count, Encoding.Default);
/// <summary>
/// Read a character array from the stream
/// </summary>
public static char[] ReadChars(this Stream stream, int count, Encoding encoding)
{
byte[] buffer = new byte[count];
stream.Read(buffer, 0, count);
return encoding.GetString(buffer).ToCharArray();
}
/// <summary>
/// Read a short from the stream
/// </summary>
public static short ReadInt16(this Stream stream)
{
byte[] buffer = new byte[2];
stream.Read(buffer, 0, 2);
return BitConverter.ToInt16(buffer, 0);
}
/// <summary>
/// Read a ushort from the stream
/// </summary>
public static ushort ReadUInt16(this Stream stream)
{
byte[] buffer = new byte[2];
stream.Read(buffer, 0, 2);
return BitConverter.ToUInt16(buffer, 0);
}
/// <summary>
/// Read an int from the stream
/// </summary>
public static int ReadInt32(this Stream stream)
{
byte[] buffer = new byte[4];
stream.Read(buffer, 0, 4);
return BitConverter.ToInt32(buffer, 0);
}
/// <summary>
/// Read a uint from the stream
/// </summary>
public static uint ReadUInt32(this Stream stream)
{
byte[] buffer = new byte[4];
stream.Read(buffer, 0, 4);
return BitConverter.ToUInt32(buffer, 0);
}
/// <summary>
/// Read a long from the stream
/// </summary>
public static long ReadInt64(this Stream stream)
{
byte[] buffer = new byte[8];
stream.Read(buffer, 0, 8);
return BitConverter.ToInt64(buffer, 0);
}
/// <summary>
/// Read a ulong from the stream
/// </summary>
public static ulong ReadUInt64(this Stream stream)
{
byte[] buffer = new byte[8];
stream.Read(buffer, 0, 8);
return BitConverter.ToUInt64(buffer, 0);
}
/// <summary>
/// Read a null-terminated string from the stream
/// </summary>
public static string ReadString(this Stream stream) => stream.ReadString(Encoding.Default);
/// <summary>
/// Read a null-terminated string from the stream
/// </summary>
public static string ReadString(this Stream stream, Encoding encoding)
{
if (stream.Position >= stream.Length)
return null;
byte[] nullTerminator = encoding.GetBytes(new char[] { '\0' });
int charWidth = nullTerminator.Length;
List<byte> tempBuffer = new List<byte>();
byte[] buffer = new byte[charWidth];
while (stream.Position < stream.Length && stream.Read(buffer, 0, charWidth) != 0 && !buffer.SequenceEqual(nullTerminator))
{
tempBuffer.AddRange(buffer);
}
return encoding.GetString(tempBuffer.ToArray());
}
#endregion
#region New Executable
/// <summary>
@@ -345,7 +67,6 @@ namespace BurnOutSharp.Builder
#endregion
// TODO: Implement other resource types from https://learn.microsoft.com/en-us/windows/win32/menurc/resource-file-formats
#region Portable Executable
/// <summary>
@@ -392,9 +113,47 @@ namespace BurnOutSharp.Builder
}
/// <summary>
/// Read resource data as a SecuROM AddD overlay data
/// Find the section a relative virtual address lives in
/// </summary>
/// <param name="data">Data to parse into a resource header</param>
/// <param name="rva">Relative virtual address to convert</param>
/// <param name="sections">Array of sections to check against</param>
/// <returns>Index of the containing section on success, -1 on error</returns>
public static int ContainingSectionIndex(this uint rva, Models.PortableExecutable.SectionHeader[] sections)
{
// If we have an invalid section table, we can't do anything
if (sections == null || sections.Length == 0)
return -1;
// If the RVA is 0, we just return -1 because it's invalid
if (rva == 0)
return -1;
// Loop through all of the sections
for (int i = 0; i < sections.Length; i++)
{
// If the section is invalid, just skip it
if (sections[i] == null)
continue;
// If the section "starts" at 0, just skip it
if (sections[i].PointerToRawData == 0)
continue;
// Attempt to derive the physical address from the current section
var section = sections[i];
if (rva >= section.VirtualAddress && section.VirtualSize != 0 && rva <= section.VirtualAddress + section.VirtualSize)
return i;
else if (rva >= section.VirtualAddress && section.SizeOfRawData != 0 && rva <= section.VirtualAddress + section.SizeOfRawData)
return i;
}
return -1;
}
/// <summary>
/// Read overlay data as SecuROM AddD overlay data
/// </summary>
/// <param name="data">Data to parse into overlay data</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>A filled SecuROM AddD overlay data on success, null on error</returns>
public static Models.PortableExecutable.SecuROMAddD AsSecuROMAddD(this byte[] data, ref int offset)
@@ -410,20 +169,25 @@ namespace BurnOutSharp.Builder
if (addD.Signature != 0x44646441)
return null;
int originalOffset = offset;
addD.EntryCount = data.ReadUInt32(ref offset);
addD.Version = data.ReadString(ref offset, Encoding.ASCII);
if (string.IsNullOrWhiteSpace(addD.Version))
offset = originalOffset + 0x10;
addD.Build = data.ReadBytes(ref offset, 4).Select(b => (char)b).ToArray();
addD.Unknown14h = data.ReadUInt32(ref offset);
addD.Unknown18h = data.ReadUInt32(ref offset);
addD.Unknown1Ch = data.ReadUInt32(ref offset);
addD.Unknown20h = data.ReadUInt32(ref offset);
addD.Unknown24h = data.ReadUInt32(ref offset);
addD.Unknown28h = data.ReadUInt32(ref offset);
addD.Unknown2Ch = data.ReadUInt32(ref offset);
addD.Unknown30h = data.ReadUInt32(ref offset);
addD.Unknown34h = data.ReadUInt32(ref offset);
addD.Unknown38h = data.ReadUInt32(ref offset);
addD.Unknown3Ch = data.ReadUInt32(ref offset);
// Distinguish between v1 and v2
int bytesToRead = 112; // v2
if (string.IsNullOrWhiteSpace(addD.Version)
|| addD.Version.StartsWith("3")
|| addD.Version.StartsWith("4.47"))
{
bytesToRead = 44;
}
addD.Unknown14h = data.ReadBytes(ref offset, bytesToRead);
addD.Entries = new Models.PortableExecutable.SecuROMAddDEntry[addD.EntryCount];
for (int i = 0; i < addD.EntryCount; i++)
@@ -447,6 +211,64 @@ namespace BurnOutSharp.Builder
return addD;
}
#region Debug
/// <summary>
/// Read debug data as an NB10 Program Database
/// </summary>
/// <param name="data">Data to parse into a database</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>A filled NB10 Program Database on success, null on error</returns>
public static Models.PortableExecutable.NB10ProgramDatabase AsNB10ProgramDatabase(this byte[] data, ref int offset)
{
// If we have data that's invalid, we can't do anything
if (data == null)
return null;
var nb10ProgramDatabase = new Models.PortableExecutable.NB10ProgramDatabase();
nb10ProgramDatabase.Signature = data.ReadUInt32(ref offset);
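// 0x3031424E is "NB10" stored little-endian: file bytes 4E 42 31 30 read as a uint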
if (nb10ProgramDatabase.Signature != 0x3031424E)
return null;
nb10ProgramDatabase.Offset = data.ReadUInt32(ref offset);
nb10ProgramDatabase.Timestamp = data.ReadUInt32(ref offset);
nb10ProgramDatabase.Age = data.ReadUInt32(ref offset);
nb10ProgramDatabase.PdbFileName = data.ReadString(ref offset, Encoding.ASCII); // TODO: Actually null-terminated UTF-8?
return nb10ProgramDatabase;
}
/// <summary>
/// Read debug data as an RSDS Program Database
/// </summary>
/// <param name="data">Data to parse into a database</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>A filled RSDS Program Database on success, null on error</returns>
public static Models.PortableExecutable.RSDSProgramDatabase AsRSDSProgramDatabase(this byte[] data, ref int offset)
{
// If we have data that's invalid, we can't do anything
if (data == null)
return null;
var rsdsProgramDatabase = new Models.PortableExecutable.RSDSProgramDatabase();
rsdsProgramDatabase.Signature = data.ReadUInt32(ref offset);
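// 0x53445352 is "RSDS" stored little-endian: file bytes 52 53 44 53 read as a uint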
if (rsdsProgramDatabase.Signature != 0x53445352)
return null;
rsdsProgramDatabase.GUID = new Guid(data.ReadBytes(ref offset, 0x10));
rsdsProgramDatabase.Age = data.ReadUInt32(ref offset);
rsdsProgramDatabase.PathAndFileName = data.ReadString(ref offset, Encoding.ASCII); // TODO: Actually null-terminated UTF-8
return rsdsProgramDatabase;
}
#endregion
// TODO: Implement other resource types from https://learn.microsoft.com/en-us/windows/win32/menurc/resource-file-formats
#region Resources
/// <summary>
/// Read resource data as a resource header
/// </summary>
@@ -1411,108 +1233,12 @@ namespace BurnOutSharp.Builder
if (nextKey == "StringFileInfo")
{
var stringFileInfo = new Models.PortableExecutable.StringFileInfo();
stringFileInfo.Length = entry.Data.ReadUInt16(ref offset);
stringFileInfo.ValueLength = entry.Data.ReadUInt16(ref offset);
stringFileInfo.ResourceType = (Models.PortableExecutable.VersionResourceType)entry.Data.ReadUInt16(ref offset);
stringFileInfo.Key = entry.Data.ReadString(ref offset, Encoding.Unicode);
if (stringFileInfo.Key != "StringFileInfo")
return null;
while ((offset % 4) != 0)
stringFileInfo.Padding = entry.Data.ReadUInt16(ref offset);
var stringFileInfoChildren = new List<Models.PortableExecutable.StringTable>();
while (offset < stringFileInfo.Length)
{
var stringTable = new Models.PortableExecutable.StringTable();
stringTable.Length = entry.Data.ReadUInt16(ref offset);
stringTable.ValueLength = entry.Data.ReadUInt16(ref offset);
stringTable.ResourceType = (Models.PortableExecutable.VersionResourceType)entry.Data.ReadUInt16(ref offset);
stringTable.Key = entry.Data.ReadString(ref offset, Encoding.Unicode);
while ((offset % 4) != 0)
stringTable.Padding = entry.Data.ReadUInt16(ref offset);
var stringTableChildren = new List<Models.PortableExecutable.StringData>();
while (offset < stringTable.Length)
{
var stringData = new Models.PortableExecutable.StringData();
stringData.Length = entry.Data.ReadUInt16(ref offset);
stringData.ValueLength = entry.Data.ReadUInt16(ref offset);
stringData.ResourceType = (Models.PortableExecutable.VersionResourceType)entry.Data.ReadUInt16(ref offset);
stringData.Key = entry.Data.ReadString(ref offset, Encoding.Unicode);
while ((offset % 4) != 0)
stringData.Padding = entry.Data.ReadUInt16(ref offset);
if (stringData.ValueLength > 0)
{
byte[] valueBytes = entry.Data.ReadBytes(ref offset, stringData.ValueLength * sizeof(ushort));
stringData.Value = Encoding.Unicode.GetString(valueBytes);
}
while ((offset % 4) != 0)
_ = entry.Data.ReadUInt16(ref offset);
stringTableChildren.Add(stringData);
}
stringTable.Children = stringTableChildren.ToArray();
stringFileInfoChildren.Add(stringTable);
}
stringFileInfo.Children = stringFileInfoChildren.ToArray();
var stringFileInfo = AsStringFileInfo(entry.Data, ref offset);
versionInfo.StringFileInfo = stringFileInfo;
}
else if (nextKey == "VarFileInfo")
{
var varFileInfo = new Models.PortableExecutable.VarFileInfo();
varFileInfo.Length = entry.Data.ReadUInt16(ref offset);
varFileInfo.ValueLength = entry.Data.ReadUInt16(ref offset);
varFileInfo.ResourceType = (Models.PortableExecutable.VersionResourceType)entry.Data.ReadUInt16(ref offset);
varFileInfo.Key = entry.Data.ReadString(ref offset, Encoding.Unicode);
if (varFileInfo.Key != "VarFileInfo")
return null;
while ((offset % 4) != 0)
varFileInfo.Padding = entry.Data.ReadUInt16(ref offset);
var varFileInfoChildren = new List<Models.PortableExecutable.VarData>();
while (offset < varFileInfo.Length)
{
var varData = new Models.PortableExecutable.VarData();
varData.Length = entry.Data.ReadUInt16(ref offset);
varData.ValueLength = entry.Data.ReadUInt16(ref offset);
varData.ResourceType = (Models.PortableExecutable.VersionResourceType)entry.Data.ReadUInt16(ref offset);
varData.Key = entry.Data.ReadString(ref offset, Encoding.Unicode);
if (varData.Key != "Translation")
return null;
while ((offset % 4) != 0)
varData.Padding = entry.Data.ReadUInt16(ref offset);
var varDataValue = new List<uint>();
while (offset < (varData.ValueLength * sizeof(ushort)))
{
uint languageAndCodeIdentifierPair = entry.Data.ReadUInt32(ref offset);
varDataValue.Add(languageAndCodeIdentifierPair);
}
varData.Value = varDataValue.ToArray();
varFileInfoChildren.Add(varData);
}
varFileInfo.Children = varFileInfoChildren.ToArray();
var varFileInfo = AsVarFileInfo(entry.Data, ref offset);
versionInfo.VarFileInfo = varFileInfo;
}
}
@@ -1529,108 +1255,12 @@ namespace BurnOutSharp.Builder
if (nextKey == "StringFileInfo")
{
var stringFileInfo = new Models.PortableExecutable.StringFileInfo();
stringFileInfo.Length = entry.Data.ReadUInt16(ref offset);
stringFileInfo.ValueLength = entry.Data.ReadUInt16(ref offset);
stringFileInfo.ResourceType = (Models.PortableExecutable.VersionResourceType)entry.Data.ReadUInt16(ref offset);
stringFileInfo.Key = entry.Data.ReadString(ref offset, Encoding.Unicode);
if (stringFileInfo.Key != "StringFileInfo")
return null;
while ((offset % 4) != 0)
stringFileInfo.Padding = entry.Data.ReadUInt16(ref offset);
var stringFileInfoChildren = new List<Models.PortableExecutable.StringTable>();
while (offset < stringFileInfo.Length)
{
var stringTable = new Models.PortableExecutable.StringTable();
stringTable.Length = entry.Data.ReadUInt16(ref offset);
stringTable.ValueLength = entry.Data.ReadUInt16(ref offset);
stringTable.ResourceType = (Models.PortableExecutable.VersionResourceType)entry.Data.ReadUInt16(ref offset);
stringTable.Key = entry.Data.ReadString(ref offset, Encoding.Unicode);
while ((offset % 4) != 0)
stringTable.Padding = entry.Data.ReadUInt16(ref offset);
var stringTableChildren = new List<Models.PortableExecutable.StringData>();
while (offset < stringTable.Length)
{
var stringData = new Models.PortableExecutable.StringData();
stringData.Length = entry.Data.ReadUInt16(ref offset);
stringData.ValueLength = entry.Data.ReadUInt16(ref offset);
stringData.ResourceType = (Models.PortableExecutable.VersionResourceType)entry.Data.ReadUInt16(ref offset);
stringData.Key = entry.Data.ReadString(ref offset, Encoding.Unicode);
while ((offset % 4) != 0)
stringData.Padding = entry.Data.ReadUInt16(ref offset);
if (stringData.ValueLength > 0)
{
byte[] valueBytes = entry.Data.ReadBytes(ref offset, stringData.ValueLength * sizeof(ushort));
stringData.Value = Encoding.Unicode.GetString(valueBytes);
}
while ((offset % 4) != 0)
_ = entry.Data.ReadUInt16(ref offset);
stringTableChildren.Add(stringData);
}
stringTable.Children = stringTableChildren.ToArray();
stringFileInfoChildren.Add(stringTable);
}
stringFileInfo.Children = stringFileInfoChildren.ToArray();
var stringFileInfo = AsStringFileInfo(entry.Data, ref offset);
versionInfo.StringFileInfo = stringFileInfo;
}
else if (nextKey == "VarFileInfo")
{
var varFileInfo = new Models.PortableExecutable.VarFileInfo();
varFileInfo.Length = entry.Data.ReadUInt16(ref offset);
varFileInfo.ValueLength = entry.Data.ReadUInt16(ref offset);
varFileInfo.ResourceType = (Models.PortableExecutable.VersionResourceType)entry.Data.ReadUInt16(ref offset);
varFileInfo.Key = entry.Data.ReadString(ref offset, Encoding.Unicode);
if (varFileInfo.Key != "VarFileInfo")
return null;
while ((offset % 4) != 0)
varFileInfo.Padding = entry.Data.ReadUInt16(ref offset);
var varFileInfoChildren = new List<Models.PortableExecutable.VarData>();
while (offset < varFileInfo.Length)
{
var varData = new Models.PortableExecutable.VarData();
varData.Length = entry.Data.ReadUInt16(ref offset);
varData.ValueLength = entry.Data.ReadUInt16(ref offset);
varData.ResourceType = (Models.PortableExecutable.VersionResourceType)entry.Data.ReadUInt16(ref offset);
varData.Key = entry.Data.ReadString(ref offset, Encoding.Unicode);
if (varData.Key != "Translation")
return null;
while ((offset % 4) != 0)
varData.Padding = entry.Data.ReadUInt16(ref offset);
var varDataValue = new List<uint>();
while (offset < (varData.ValueLength * sizeof(ushort)))
{
uint languageAndCodeIdentifierPair = entry.Data.ReadUInt32(ref offset);
varDataValue.Add(languageAndCodeIdentifierPair);
}
varData.Value = varDataValue.ToArray();
varFileInfoChildren.Add(varData);
}
varFileInfo.Children = varFileInfoChildren.ToArray();
var varFileInfo = AsVarFileInfo(entry.Data, ref offset);
versionInfo.VarFileInfo = varFileInfo;
}
}
@@ -1638,6 +1268,152 @@ namespace BurnOutSharp.Builder
return versionInfo;
}
/// <summary>
/// Read byte data as a string file info resource
/// </summary>
/// <param name="data">Data to parse into a string file info</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>A filled string file info resource on success, null on error</returns>
private static Models.PortableExecutable.StringFileInfo AsStringFileInfo(byte[] data, ref int offset)
{
var stringFileInfo = new Models.PortableExecutable.StringFileInfo();
stringFileInfo.Length = data.ReadUInt16(ref offset);
stringFileInfo.ValueLength = data.ReadUInt16(ref offset);
stringFileInfo.ResourceType = (Models.PortableExecutable.VersionResourceType)data.ReadUInt16(ref offset);
stringFileInfo.Key = data.ReadString(ref offset, Encoding.Unicode);
if (stringFileInfo.Key != "StringFileInfo")
return null;
// Align to the DWORD boundary if we're not at the end
if (offset != data.Length)
{
while ((offset % 4) != 0)
stringFileInfo.Padding = data.ReadByte(ref offset);
}
var stringFileInfoChildren = new List<Models.PortableExecutable.StringTable>();
while (offset < stringFileInfo.Length)
{
var stringTable = new Models.PortableExecutable.StringTable();
stringTable.Length = data.ReadUInt16(ref offset);
stringTable.ValueLength = data.ReadUInt16(ref offset);
stringTable.ResourceType = (Models.PortableExecutable.VersionResourceType)data.ReadUInt16(ref offset);
stringTable.Key = data.ReadString(ref offset, Encoding.Unicode);
// Align to the DWORD boundary if we're not at the end
if (offset != data.Length)
{
while ((offset % 4) != 0)
stringTable.Padding = data.ReadByte(ref offset);
}
var stringTableChildren = new List<Models.PortableExecutable.StringData>();
while (offset < stringTable.Length)
{
var stringData = new Models.PortableExecutable.StringData();
stringData.Length = data.ReadUInt16(ref offset);
stringData.ValueLength = data.ReadUInt16(ref offset);
stringData.ResourceType = (Models.PortableExecutable.VersionResourceType)data.ReadUInt16(ref offset);
stringData.Key = data.ReadString(ref offset, Encoding.Unicode);
// Align to the DWORD boundary if we're not at the end
if (offset != data.Length)
{
while ((offset % 4) != 0)
stringData.Padding = data.ReadByte(ref offset);
}
if (stringData.ValueLength > 0)
{
byte[] valueBytes = data.ReadBytes(ref offset, stringData.ValueLength * sizeof(ushort));
stringData.Value = Encoding.Unicode.GetString(valueBytes);
}
// Align to the DWORD boundary if we're not at the end
if (offset != data.Length)
{
while ((offset % 4) != 0)
_ = data.ReadByte(ref offset);
}
stringTableChildren.Add(stringData);
}
stringTable.Children = stringTableChildren.ToArray();
stringFileInfoChildren.Add(stringTable);
}
stringFileInfo.Children = stringFileInfoChildren.ToArray();
return stringFileInfo;
}
/// <summary>
/// Read byte data as a var file info resource
/// </summary>
/// <param name="data">Data to parse into a var file info</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>A filled var file info resource on success, null on error</returns>
private static Models.PortableExecutable.VarFileInfo AsVarFileInfo(byte[] data, ref int offset)
{
var varFileInfo = new Models.PortableExecutable.VarFileInfo();
varFileInfo.Length = data.ReadUInt16(ref offset);
varFileInfo.ValueLength = data.ReadUInt16(ref offset);
varFileInfo.ResourceType = (Models.PortableExecutable.VersionResourceType)data.ReadUInt16(ref offset);
varFileInfo.Key = data.ReadString(ref offset, Encoding.Unicode);
if (varFileInfo.Key != "VarFileInfo")
return null;
// Align to the DWORD boundary if we're not at the end
if (offset != data.Length)
{
while ((offset % 4) != 0)
varFileInfo.Padding = data.ReadByte(ref offset);
}
var varFileInfoChildren = new List<Models.PortableExecutable.VarData>();
while (offset < varFileInfo.Length)
{
var varData = new Models.PortableExecutable.VarData();
varData.Length = data.ReadUInt16(ref offset);
varData.ValueLength = data.ReadUInt16(ref offset);
varData.ResourceType = (Models.PortableExecutable.VersionResourceType)data.ReadUInt16(ref offset);
varData.Key = data.ReadString(ref offset, Encoding.Unicode);
if (varData.Key != "Translation")
return null;
// Align to the DWORD boundary if we're not at the end
if (offset != data.Length)
{
while ((offset % 4) != 0)
varData.Padding = data.ReadByte(ref offset);
}
var varDataValue = new List<uint>();
while (offset < (varData.ValueLength * sizeof(ushort)))
{
uint languageAndCodeIdentifierPair = data.ReadUInt32(ref offset);
varDataValue.Add(languageAndCodeIdentifierPair);
}
varData.Value = varDataValue.ToArray();
varFileInfoChildren.Add(varData);
}
varFileInfo.Children = varFileInfoChildren.ToArray();
return varFileInfo;
}
#endregion
#endregion
}
}
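
To close out the Extensions changes, a hedged sketch of the new ContainingSectionIndex extension in use. The SectionHeader array would normally come from a parsed Portable Executable; everything here besides the extension method itself is a hypothetical stand-in.

using BurnOutSharp.Builders;
using BurnOutSharp.Models.PortableExecutable;

static class RvaDemo
{
    // Returns the index of the section containing the RVA, or -1 when no
    // section claims it (matching the extension's error convention above)
    static int FindSection(SectionHeader[] sections, uint rva)
    {
        return rva.ContainingSectionIndex(sections);
    }
}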


@@ -0,0 +1,775 @@
using System.Collections.Generic;
using System.IO;
using System.Text;
using BurnOutSharp.Models.GCF;
using BurnOutSharp.Utilities;
namespace BurnOutSharp.Builders
{
public static class GCF
{
#region Byte Data
/// <summary>
/// Parse a byte array into a Half-Life Game Cache
/// </summary>
/// <param name="data">Byte array to parse</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>Filled Half-Life Game Cache on success, null on error</returns>
public static Models.GCF.File ParseFile(byte[] data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return ParseFile(dataStream);
}
#endregion
#region Stream Data
/// <summary>
/// Parse a Stream into a Half-Life Game Cache
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache on success, null on error</returns>
public static Models.GCF.File ParseFile(Stream data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
return null;
// If the offset is out of bounds
if (data.Position < 0 || data.Position >= data.Length)
return null;
// Cache the current offset
long initialOffset = data.Position;
// Create a new Half-Life Game Cache to fill
var file = new Models.GCF.File();
#region Header
// Try to parse the header
var header = ParseHeader(data);
if (header == null)
return null;
// Set the game cache header
file.Header = header;
#endregion
#region Block Entry Header
// Try to parse the block entry header
var blockEntryHeader = ParseBlockEntryHeader(data);
if (blockEntryHeader == null)
return null;
// Set the game cache block entry header
file.BlockEntryHeader = blockEntryHeader;
#endregion
#region Block Entries
// Create the block entry array
file.BlockEntries = new BlockEntry[blockEntryHeader.BlockCount];
// Try to parse the block entries
for (int i = 0; i < blockEntryHeader.BlockCount; i++)
{
var blockEntry = ParseBlockEntry(data);
file.BlockEntries[i] = blockEntry;
}
#endregion
#region Fragmentation Map Header
// Try to parse the fragmentation map header
var fragmentationMapHeader = ParseFragmentationMapHeader(data);
if (fragmentationMapHeader == null)
return null;
// Set the game cache fragmentation map header
file.FragmentationMapHeader = fragmentationMapHeader;
#endregion
#region Fragmentation Maps
// Create the fragmentation map array
file.FragmentationMaps = new FragmentationMap[fragmentationMapHeader.BlockCount];
// Try to parse the fragmentation maps
for (int i = 0; i < fragmentationMapHeader.BlockCount; i++)
{
var fragmentationMap = ParseFragmentationMap(data);
file.FragmentationMaps[i] = fragmentationMap;
}
#endregion
#region Block Entry Map Header
if (header.MinorVersion < 6)
{
// Try to parse the block entry map header
var blockEntryMapHeader = ParseBlockEntryMapHeader(data);
if (blockEntryMapHeader == null)
return null;
// Set the game cache block entry map header
file.BlockEntryMapHeader = blockEntryMapHeader;
}
#endregion
#region Block Entry Maps
if (header.MinorVersion < 6)
{
// Create the block entry map array
file.BlockEntryMaps = new BlockEntryMap[file.BlockEntryMapHeader.BlockCount];
// Try to parse the block entry maps
for (int i = 0; i < file.BlockEntryMapHeader.BlockCount; i++)
{
var blockEntryMap = ParseBlockEntryMap(data);
file.BlockEntryMaps[i] = blockEntryMap;
}
}
#endregion
// Cache the current offset
initialOffset = data.Position;
#region Directory Header
// Try to parse the directory header
var directoryHeader = ParseDirectoryHeader(data);
if (directoryHeader == null)
return null;
// Set the game cache directory header
file.DirectoryHeader = directoryHeader;
#endregion
#region Directory Entries
// Create the directory entry array
file.DirectoryEntries = new DirectoryEntry[directoryHeader.ItemCount];
// Try to parse the directory entries
for (int i = 0; i < directoryHeader.ItemCount; i++)
{
var directoryEntry = ParseDirectoryEntry(data);
file.DirectoryEntries[i] = directoryEntry;
}
#endregion
#region Directory Names
if (directoryHeader.NameSize > 0)
{
// Get the current offset for adjustment
long directoryNamesStart = data.Position;
// Get the ending offset
long directoryNamesEnd = data.Position + directoryHeader.NameSize;
// Create the string dictionary
file.DirectoryNames = new Dictionary<long, string>();
// Loop and read the null-terminated strings
while (data.Position < directoryNamesEnd)
{
long nameOffset = data.Position - directoryNamesStart;
string directoryName = data.ReadString(Encoding.ASCII);
if (data.Position > directoryNamesEnd)
{
data.Seek(-directoryName.Length, SeekOrigin.Current);
byte[] endingData = data.ReadBytes((int)(directoryNamesEnd - data.Position));
if (endingData != null)
directoryName = Encoding.ASCII.GetString(endingData);
else
directoryName = null;
}
file.DirectoryNames[nameOffset] = directoryName;
}
// Loop and assign to entries
foreach (var directoryEntry in file.DirectoryEntries)
{
directoryEntry.Name = file.DirectoryNames[directoryEntry.NameOffset];
}
}
#endregion
#region Directory Info 1 Entries
// Create the directory info 1 entry array
file.DirectoryInfo1Entries = new DirectoryInfo1Entry[directoryHeader.Info1Count];
// Try to parse the directory info 1 entries
for (int i = 0; i < directoryHeader.Info1Count; i++)
{
var directoryInfo1Entry = ParseDirectoryInfo1Entry(data);
file.DirectoryInfo1Entries[i] = directoryInfo1Entry;
}
#endregion
#region Directory Info 2 Entries
// Create the directory info 2 entry array
file.DirectoryInfo2Entries = new DirectoryInfo2Entry[directoryHeader.ItemCount];
// Try to parse the directory info 2 entries
for (int i = 0; i < directoryHeader.ItemCount; i++)
{
var directoryInfo2Entry = ParseDirectoryInfo2Entry(data);
file.DirectoryInfo2Entries[i] = directoryInfo2Entry;
}
#endregion
#region Directory Copy Entries
// Create the directory copy entry array
file.DirectoryCopyEntries = new DirectoryCopyEntry[directoryHeader.CopyCount];
// Try to parse the directory copy entries
for (int i = 0; i < directoryHeader.CopyCount; i++)
{
var directoryCopyEntry = ParseDirectoryCopyEntry(data);
file.DirectoryCopyEntries[i] = directoryCopyEntry;
}
#endregion
#region Directory Local Entries
// Create the directory local entry array
file.DirectoryLocalEntries = new DirectoryLocalEntry[directoryHeader.LocalCount];
// Try to parse the directory local entries
for (int i = 0; i < directoryHeader.LocalCount; i++)
{
var directoryLocalEntry = ParseDirectoryLocalEntry(data);
file.DirectoryLocalEntries[i] = directoryLocalEntry;
}
#endregion
// Seek to end of directory section, just in case
data.Seek(initialOffset + directoryHeader.DirectorySize, SeekOrigin.Begin);
#region Directory Map Header
if (header.MinorVersion >= 5)
{
// Try to parse the directory map header
var directoryMapHeader = ParseDirectoryMapHeader(data);
if (directoryMapHeader == null)
return null;
// Set the game cache directory map header
file.DirectoryMapHeader = directoryMapHeader;
}
#endregion
#region Directory Map Entries
// Create the directory map entry array
file.DirectoryMapEntries = new DirectoryMapEntry[directoryHeader.ItemCount];
// Try to parse the directory map entries
for (int i = 0; i < directoryHeader.ItemCount; i++)
{
var directoryMapEntry = ParseDirectoryMapEntry(data);
file.DirectoryMapEntries[i] = directoryMapEntry;
}
#endregion
#region Checksum Header
// Try to parse the checksum header
var checksumHeader = ParseChecksumHeader(data);
if (checksumHeader == null)
return null;
// Set the game cache checksum header
file.ChecksumHeader = checksumHeader;
#endregion
// Cache the current offset
initialOffset = data.Position;
#region Checksum Map Header
// Try to parse the checksum map header
var checksumMapHeader = ParseChecksumMapHeader(data);
if (checksumMapHeader == null)
return null;
// Set the game cache checksum map header
file.ChecksumMapHeader = checksumMapHeader;
#endregion
#region Checksum Map Entries
// Create the checksum map entry array
file.ChecksumMapEntries = new ChecksumMapEntry[checksumMapHeader.ItemCount];
// Try to parse the checksum map entries
for (int i = 0; i < checksumMapHeader.ItemCount; i++)
{
var checksumMapEntry = ParseChecksumMapEntry(data);
file.ChecksumMapEntries[i] = checksumMapEntry;
}
#endregion
#region Checksum Entries
// Create the checksum entry array
file.ChecksumEntries = new ChecksumEntry[checksumMapHeader.ChecksumCount];
// Try to parse the checksum entries
for (int i = 0; i < checksumMapHeader.ChecksumCount; i++)
{
var checksumEntry = ParseChecksumEntry(data);
file.ChecksumEntries[i] = checksumEntry;
}
#endregion
// Seek to end of checksum section, just in case
data.Seek(initialOffset + checksumHeader.ChecksumSize, SeekOrigin.Begin);
#region Data Block Header
// Try to parse the data block header
var dataBlockHeader = ParseDataBlockHeader(data, header.MinorVersion);
if (dataBlockHeader == null)
return null;
// Set the game cache data block header
file.DataBlockHeader = dataBlockHeader;
#endregion
return file;
}
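// Illustrative helper, not part of the original diff: the MinorVersion gates
// above mean block entry maps exist only for pre-v6 caches (ParseHeader accepts
// minor versions 3, 5, and 6), while the directory map header appears only in
// v5 and later.
private static bool HasBlockEntryMaps(Header header) => header.MinorVersion < 6;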
/// <summary>
/// Parse a Stream into a Half-Life Game Cache header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache header on success, null on error</returns>
private static Header ParseHeader(Stream data)
{
// TODO: Use marshalling here instead of building
Header header = new Header();
header.Dummy0 = data.ReadUInt32();
if (header.Dummy0 != 0x00000001)
return null;
header.MajorVersion = data.ReadUInt32();
if (header.MajorVersion != 0x00000001)
return null;
header.MinorVersion = data.ReadUInt32();
if (header.MinorVersion != 3 && header.MinorVersion != 5 && header.MinorVersion != 6)
return null;
header.CacheID = data.ReadUInt32();
header.LastVersionPlayed = data.ReadUInt32();
header.Dummy1 = data.ReadUInt32();
header.Dummy2 = data.ReadUInt32();
header.FileSize = data.ReadUInt32();
header.BlockSize = data.ReadUInt32();
header.BlockCount = data.ReadUInt32();
header.Dummy3 = data.ReadUInt32();
return header;
}
/// <summary>
/// Parse a Stream into a Half-Life Game Cache block entry header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache block entry header on success, null on error</returns>
private static BlockEntryHeader ParseBlockEntryHeader(Stream data)
{
// TODO: Use marshalling here instead of building
BlockEntryHeader blockEntryHeader = new BlockEntryHeader();
blockEntryHeader.BlockCount = data.ReadUInt32();
blockEntryHeader.BlocksUsed = data.ReadUInt32();
blockEntryHeader.Dummy0 = data.ReadUInt32();
blockEntryHeader.Dummy1 = data.ReadUInt32();
blockEntryHeader.Dummy2 = data.ReadUInt32();
blockEntryHeader.Dummy3 = data.ReadUInt32();
blockEntryHeader.Dummy4 = data.ReadUInt32();
blockEntryHeader.Checksum = data.ReadUInt32();
return blockEntryHeader;
}
/// <summary>
/// Parse a Stream into a Half-Life Game Cache block entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache block entry on success, null on error</returns>
private static BlockEntry ParseBlockEntry(Stream data)
{
// TODO: Use marshalling here instead of building
BlockEntry blockEntry = new BlockEntry();
blockEntry.EntryFlags = data.ReadUInt32();
blockEntry.FileDataOffset = data.ReadUInt32();
blockEntry.FileDataSize = data.ReadUInt32();
blockEntry.FirstDataBlockIndex = data.ReadUInt32();
blockEntry.NextBlockEntryIndex = data.ReadUInt32();
blockEntry.PreviousBlockEntryIndex = data.ReadUInt32();
blockEntry.DirectoryIndex = data.ReadUInt32();
return blockEntry;
}
/// <summary>
/// Parse a Stream into a Half-Life Game Cache fragmentation map header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache fragmentation map header on success, null on error</returns>
private static FragmentationMapHeader ParseFragmentationMapHeader(Stream data)
{
// TODO: Use marshalling here instead of building
FragmentationMapHeader fragmentationMapHeader = new FragmentationMapHeader();
fragmentationMapHeader.BlockCount = data.ReadUInt32();
fragmentationMapHeader.FirstUnusedEntry = data.ReadUInt32();
fragmentationMapHeader.Terminator = data.ReadUInt32();
fragmentationMapHeader.Checksum = data.ReadUInt32();
return fragmentationMapHeader;
}
/// <summary>
/// Parse a Stream into a Half-Life Game Cache fragmentation map
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache fragmentation map on success, null on error</returns>
private static FragmentationMap ParseFragmentationMap(Stream data)
{
// TODO: Use marshalling here instead of building
FragmentationMap fragmentationMap = new FragmentationMap();
fragmentationMap.NextDataBlockIndex = data.ReadUInt32();
return fragmentationMap;
}
/// <summary>
/// Parse a Stream into a Half-Life Game Cache block entry map header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache block entry map header on success, null on error</returns>
private static BlockEntryMapHeader ParseBlockEntryMapHeader(Stream data)
{
// TODO: Use marshalling here instead of building
BlockEntryMapHeader blockEntryMapHeader = new BlockEntryMapHeader();
blockEntryMapHeader.BlockCount = data.ReadUInt32();
blockEntryMapHeader.FirstBlockEntryIndex = data.ReadUInt32();
blockEntryMapHeader.LastBlockEntryIndex = data.ReadUInt32();
blockEntryMapHeader.Dummy0 = data.ReadUInt32();
blockEntryMapHeader.Checksum = data.ReadUInt32();
return blockEntryMapHeader;
}
/// <summary>
/// Parse a Stream into a Half-Life Game Cache block entry map
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache block entry map on success, null on error</returns>
private static BlockEntryMap ParseBlockEntryMap(Stream data)
{
// TODO: Use marshalling here instead of building
BlockEntryMap blockEntryMap = new BlockEntryMap();
blockEntryMap.PreviousBlockEntryIndex = data.ReadUInt32();
blockEntryMap.NextBlockEntryIndex = data.ReadUInt32();
return blockEntryMap;
}
/// <summary>
/// Parse a Stream into a Half-Life Game Cache directory header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache directory header on success, null on error</returns>
private static DirectoryHeader ParseDirectoryHeader(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryHeader directoryHeader = new DirectoryHeader();
directoryHeader.Dummy0 = data.ReadUInt32();
directoryHeader.CacheID = data.ReadUInt32();
directoryHeader.LastVersionPlayed = data.ReadUInt32();
directoryHeader.ItemCount = data.ReadUInt32();
directoryHeader.FileCount = data.ReadUInt32();
directoryHeader.Dummy1 = data.ReadUInt32();
directoryHeader.DirectorySize = data.ReadUInt32();
directoryHeader.NameSize = data.ReadUInt32();
directoryHeader.Info1Count = data.ReadUInt32();
directoryHeader.CopyCount = data.ReadUInt32();
directoryHeader.LocalCount = data.ReadUInt32();
directoryHeader.Dummy2 = data.ReadUInt32();
directoryHeader.Dummy3 = data.ReadUInt32();
directoryHeader.Checksum = data.ReadUInt32();
return directoryHeader;
}
/// <summary>
/// Parse a Stream into a Half-Life Game Cache directory entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache directory entry on success, null on error</returns>
private static DirectoryEntry ParseDirectoryEntry(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryEntry directoryEntry = new DirectoryEntry();
directoryEntry.NameOffset = data.ReadUInt32();
directoryEntry.ItemSize = data.ReadUInt32();
directoryEntry.ChecksumIndex = data.ReadUInt32();
directoryEntry.DirectoryFlags = (HL_GCF_FLAG)data.ReadUInt32();
directoryEntry.ParentIndex = data.ReadUInt32();
directoryEntry.NextIndex = data.ReadUInt32();
directoryEntry.FirstIndex = data.ReadUInt32();
return directoryEntry;
}
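The ParentIndex, NextIndex, and FirstIndex fields read above link the flat directory entry array into a tree: FirstIndex points at an entry's first child and NextIndex chains that child's siblings. A minimal traversal sketch, assuming a fully parsed entry array and treating index 0 as the end of a sibling chain (an assumption borrowed from common GCF tooling, not confirmed by this file):
    // Hypothetical depth-first walk over parsed directory entries.
    // Assumes entries[0] is the root and that an index of 0 terminates a chain.
    private static void WalkDirectory(DirectoryEntry[] entries, uint index, int depth)
    {
        var entry = entries[index];
        // Print the name offset; the resolved Name could be used once names are read
        System.Console.WriteLine($"{new string(' ', depth * 2)}{entry.NameOffset}");
        for (uint child = entry.FirstIndex; child != 0; child = entries[child].NextIndex)
            WalkDirectory(entries, child, depth + 1);
    }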
/// <summary>
/// Parse a Stream into a Half-Life Game Cache directory info 1 entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache directory info 1 entry on success, null on error</returns>
private static DirectoryInfo1Entry ParseDirectoryInfo1Entry(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryInfo1Entry directoryInfo1Entry = new DirectoryInfo1Entry();
directoryInfo1Entry.Dummy0 = data.ReadUInt32();
return directoryInfo1Entry;
}
/// <summary>
/// Parse a Stream into a Half-Life Game Cache directory info 2 entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache directory info 2 entry on success, null on error</returns>
private static DirectoryInfo2Entry ParseDirectoryInfo2Entry(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryInfo2Entry directoryInfo2Entry = new DirectoryInfo2Entry();
directoryInfo2Entry.Dummy0 = data.ReadUInt32();
return directoryInfo2Entry;
}
/// <summary>
/// Parse a Stream into a Half-Life Game Cache directory copy entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache directory copy entry on success, null on error</returns>
private static DirectoryCopyEntry ParseDirectoryCopyEntry(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryCopyEntry directoryCopyEntry = new DirectoryCopyEntry();
directoryCopyEntry.DirectoryIndex = data.ReadUInt32();
return directoryCopyEntry;
}
/// <summary>
/// Parse a Stream into a Half-Life Game Cache directory local entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache directory local entry on success, null on error</returns>
private static DirectoryLocalEntry ParseDirectoryLocalEntry(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryLocalEntry directoryLocalEntry = new DirectoryLocalEntry();
directoryLocalEntry.DirectoryIndex = data.ReadUInt32();
return directoryLocalEntry;
}
/// <summary>
/// Parse a Stream into a Half-Life Game Cache directory map header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache directory map header on success, null on error</returns>
private static DirectoryMapHeader ParseDirectoryMapHeader(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryMapHeader directoryMapHeader = new DirectoryMapHeader();
directoryMapHeader.Dummy0 = data.ReadUInt32();
if (directoryMapHeader.Dummy0 != 0x00000001)
return null;
directoryMapHeader.Dummy1 = data.ReadUInt32();
if (directoryMapHeader.Dummy1 != 0x00000000)
return null;
return directoryMapHeader;
}
/// <summary>
/// Parse a Stream into a Half-Life Game Cache directory map entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache directory map entry on success, null on error</returns>
private static DirectoryMapEntry ParseDirectoryMapEntry(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryMapEntry directoryMapEntry = new DirectoryMapEntry();
directoryMapEntry.FirstBlockIndex = data.ReadUInt32();
return directoryMapEntry;
}
/// <summary>
/// Parse a Stream into a Half-Life Game Cache checksum header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache checksum header on success, null on error</returns>
private static ChecksumHeader ParseChecksumHeader(Stream data)
{
// TODO: Use marshalling here instead of building
ChecksumHeader checksumHeader = new ChecksumHeader();
checksumHeader.Dummy0 = data.ReadUInt32();
if (checksumHeader.Dummy0 != 0x00000001)
return null;
checksumHeader.ChecksumSize = data.ReadUInt32();
return checksumHeader;
}
/// <summary>
/// Parse a Stream into a Half-Life Game Cache checksum map header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache checksum map header on success, null on error</returns>
private static ChecksumMapHeader ParseChecksumMapHeader(Stream data)
{
// TODO: Use marshalling here instead of building
ChecksumMapHeader checksumMapHeader = new ChecksumMapHeader();
checksumMapHeader.Dummy0 = data.ReadUInt32();
if (checksumMapHeader.Dummy0 != 0x14893721)
return null;
checksumMapHeader.Dummy1 = data.ReadUInt32();
if (checksumMapHeader.Dummy1 != 0x00000001)
return null;
checksumMapHeader.ItemCount = data.ReadUInt32();
checksumMapHeader.ChecksumCount = data.ReadUInt32();
return checksumMapHeader;
}
/// <summary>
/// Parse a Stream into a Half-Life Game Cache checksum map entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache checksum map entry on success, null on error</returns>
private static ChecksumMapEntry ParseChecksumMapEntry(Stream data)
{
// TODO: Use marshalling here instead of building
ChecksumMapEntry checksumMapEntry = new ChecksumMapEntry();
checksumMapEntry.ChecksumCount = data.ReadUInt32();
checksumMapEntry.FirstChecksumIndex = data.ReadUInt32();
return checksumMapEntry;
}
/// <summary>
/// Parse a Stream into a Half-Life Game Cache checksum entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache checksum entry on success, null on error</returns>
private static ChecksumEntry ParseChecksumEntry(Stream data)
{
// TODO: Use marshalling here instead of building
ChecksumEntry checksumEntry = new ChecksumEntry();
checksumEntry.Checksum = data.ReadUInt32();
return checksumEntry;
}
/// <summary>
/// Parse a Stream into a Half-Life Game Cache data block header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="minorVersion">Minor version field from the header</param>
/// <returns>Filled Half-Life Game Cache data block header on success, null on error</returns>
private static DataBlockHeader ParseDataBlockHeader(Stream data, uint minorVersion)
{
// TODO: Use marshalling here instead of building
DataBlockHeader dataBlockHeader = new DataBlockHeader();
// Versions prior to 5 (such as version 3) lack the LastVersionPlayed field.
if (minorVersion >= 5)
dataBlockHeader.LastVersionPlayed = data.ReadUInt32();
dataBlockHeader.BlockCount = data.ReadUInt32();
dataBlockHeader.BlockSize = data.ReadUInt32();
dataBlockHeader.FirstBlockOffset = data.ReadUInt32();
dataBlockHeader.BlocksUsed = data.ReadUInt32();
dataBlockHeader.Checksum = data.ReadUInt32();
return dataBlockHeader;
}
#endregion
}
}
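The checksum structures above form a two-level lookup: ParseChecksumMapEntry yields a count and a first index into the flat array produced by ParseChecksumEntry. A short sketch of resolving all checksums for one item, assuming both tables have already been parsed into arrays (the array and method names are illustrative):
    // Hypothetical lookup over already-parsed checksum tables.
    private static uint[] GetItemChecksums(ChecksumMapEntry[] mapEntries, ChecksumEntry[] checksums, int itemIndex)
    {
        var mapEntry = mapEntries[itemIndex];
        var result = new uint[mapEntry.ChecksumCount];
        for (uint i = 0; i < mapEntry.ChecksumCount; i++)
            result[i] = checksums[mapEntry.FirstChecksumIndex + i].Checksum;
        return result;
    }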

View File

@@ -0,0 +1,651 @@
using System.Collections.Generic;
using System.IO;
using System.Text;
using BurnOutSharp.Models.InstallShieldCabinet;
using BurnOutSharp.Utilities;
using static BurnOutSharp.Models.InstallShieldCabinet.Constants;
namespace BurnOutSharp.Builders
{
// TODO: Add multi-cabinet reading
public class InstallShieldCabinet
{
#region Byte Data
/// <summary>
/// Parse a byte array into an InstallShield Cabinet file
/// </summary>
/// <param name="data">Byte array to parse</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>Filled cabinet header on success, null on error</returns>
public static Header ParseCabinet(byte[] data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return ParseCabinet(dataStream);
}
#endregion
#region Stream Data
/// <summary>
/// Parse a Stream into an InstallShield Cabinet file
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled cabinet header on success, null on error</returns>
public static Header ParseCabinet(Stream data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
return null;
// If the offset is out of bounds
if (data.Position < 0 || data.Position >= data.Length)
return null;
// Cache the current offset
int initialOffset = (int)data.Position;
// Create a new cabinet to fill
var header = new Header();
#region Common Header
// Try to parse the cabinet header
var commonHeader = ParseCommonHeader(data);
if (commonHeader == null)
return null;
// Set the cabinet header
header.CommonHeader = commonHeader;
#endregion
#region Cabinet Descriptor
// Get the cabinet descriptor offset
uint cabinetDescriptorOffset = commonHeader.CabDescriptorOffset;
if (cabinetDescriptorOffset < 0 || cabinetDescriptorOffset >= data.Length)
return null;
// Seek to the cabinet descriptor
data.Seek(cabinetDescriptorOffset, SeekOrigin.Begin);
// Try to parse the cabinet descriptor
var cabinetDescriptor = ParseCabinetDescriptor(data);
if (cabinetDescriptor == null)
return null;
// Set the cabinet descriptor
header.CabinetDescriptor = cabinetDescriptor;
#endregion
#region File Descriptor Offsets
// Get the file table offset
uint fileTableOffset = commonHeader.CabDescriptorOffset + cabinetDescriptor.FileTableOffset;
if (fileTableOffset < 0 || fileTableOffset >= data.Length)
return null;
// Seek to the file table
data.Seek(fileTableOffset, SeekOrigin.Begin);
// Get the number of file table items
uint fileTableItems;
if (header.MajorVersion <= 5)
fileTableItems = cabinetDescriptor.DirectoryCount + cabinetDescriptor.FileCount;
else
fileTableItems = cabinetDescriptor.DirectoryCount;
// Create and fill the file table
header.FileDescriptorOffsets = new uint[fileTableItems];
for (int i = 0; i < header.FileDescriptorOffsets.Length; i++)
{
header.FileDescriptorOffsets[i] = data.ReadUInt32();
}
#endregion
#region Directory Descriptors
// Create and fill the directory descriptors
header.DirectoryDescriptors = new FileDescriptor[cabinetDescriptor.DirectoryCount];
for (int i = 0; i < cabinetDescriptor.DirectoryCount; i++)
{
// Get the directory descriptor offset
uint offset = cabinetDescriptorOffset
+ cabinetDescriptor.FileTableOffset
+ header.FileDescriptorOffsets[i];
// If we have an invalid offset
if (offset < 0 || offset >= data.Length)
continue;
// Seek to the file descriptor offset
data.Seek(offset, SeekOrigin.Begin);
// Create and add the file descriptor
FileDescriptor directoryDescriptor = ParseDirectoryDescriptor(data, header.MajorVersion);
header.DirectoryDescriptors[i] = directoryDescriptor;
}
#endregion
#region File Descriptors
// Create and fill the file descriptors
header.FileDescriptors = new FileDescriptor[cabinetDescriptor.FileCount];
for (int i = 0; i < cabinetDescriptor.FileCount; i++)
{
// Get the file descriptor offset
uint offset;
if (header.MajorVersion <= 5)
{
offset = cabinetDescriptorOffset
+ cabinetDescriptor.FileTableOffset
+ header.FileDescriptorOffsets[cabinetDescriptor.DirectoryCount + i];
}
else
{
offset = cabinetDescriptorOffset
+ cabinetDescriptor.FileTableOffset
+ cabinetDescriptor.FileTableOffset2
+ (uint)(i * 0x57);
}
// If we have an invalid offset
if (offset < 0 || offset >= data.Length)
continue;
// Seek to the file descriptor offset
data.Seek(offset, SeekOrigin.Begin);
// Create and add the file descriptor
FileDescriptor fileDescriptor = ParseFileDescriptor(data, header.MajorVersion, cabinetDescriptorOffset + cabinetDescriptor.FileTableOffset);
header.FileDescriptors[i] = fileDescriptor;
}
#endregion
#region File Group Offsets
// Create and fill the file group offsets
header.FileGroupOffsets = new Dictionary<long, OffsetList>();
for (int i = 0; i < cabinetDescriptor.FileGroupOffsets.Length; i++)
{
// Get the file group offset
uint offset = cabinetDescriptor.FileGroupOffsets[i];
if (offset == 0)
continue;
// Adjust the file group offset
offset += commonHeader.CabDescriptorOffset;
if (offset < 0 || offset >= data.Length)
continue;
// Seek to the file group offset
data.Seek(offset, SeekOrigin.Begin);
// Create and add the offset
OffsetList offsetList = ParseOffsetList(data, header.MajorVersion, cabinetDescriptorOffset);
header.FileGroupOffsets[cabinetDescriptor.FileGroupOffsets[i]] = offsetList;
// Walk the chained offset lists while the next offset is nonzero
uint nextOffset = offsetList.NextOffset;
while (nextOffset != 0)
{
// Get the next offset to read
uint internalOffset = nextOffset + commonHeader.CabDescriptorOffset;
// Seek to the file group offset
data.Seek(internalOffset, SeekOrigin.Begin);
// Create and add the offset
offsetList = ParseOffsetList(data, header.MajorVersion, cabinetDescriptorOffset);
header.FileGroupOffsets[nextOffset] = offsetList;
// Set the next offset
nextOffset = offsetList.NextOffset;
}
}
#endregion
#region File Groups
// Create and fill the file groups
List<FileGroup> fileGroups = new List<FileGroup>();
foreach (var kvp in header.FileGroupOffsets)
{
// Get the offset
OffsetList list = kvp.Value;
if (list == null)
continue;
// Seek to the file group
data.Seek(list.DescriptorOffset + cabinetDescriptorOffset, SeekOrigin.Begin);
// Try to parse the file group
FileGroup fileGroup = ParseFileGroup(data, header.MajorVersion, cabinetDescriptorOffset);
// Add the file group
fileGroups.Add(fileGroup);
}
// Set the file groups
header.FileGroups = fileGroups.ToArray();
#endregion
#region Component Offsets
// Create and fill the component offsets
header.ComponentOffsets = new Dictionary<long, OffsetList>();
for (int i = 0; i < cabinetDescriptor.ComponentOffsets.Length; i++)
{
// Get the component offset
uint offset = cabinetDescriptor.ComponentOffsets[i];
if (offset == 0)
continue;
// Adjust the component offset
offset += commonHeader.CabDescriptorOffset;
if (offset < 0 || offset >= data.Length)
continue;
// Seek to the component offset
data.Seek(offset, SeekOrigin.Begin);
// Create and add the offset
OffsetList offsetList = ParseOffsetList(data, header.MajorVersion, cabinetDescriptorOffset);
header.ComponentOffsets[cabinetDescriptor.ComponentOffsets[i]] = offsetList;
// Walk the chained offset lists while the next offset is nonzero
uint nextOffset = offsetList.NextOffset;
while (nextOffset != 0)
{
// Get the next offset to read
uint internalOffset = nextOffset + commonHeader.CabDescriptorOffset;
// Seek to the file group offset
data.Seek(internalOffset, SeekOrigin.Begin);
// Create and add the offset
offsetList = ParseOffsetList(data, header.MajorVersion, cabinetDescriptorOffset);
header.ComponentOffsets[nextOffset] = offsetList;
// Set the next offset
nextOffset = offsetList.NextOffset;
}
}
#endregion
#region Components
// Create and fill the components
List<Component> components = new List<Component>();
foreach (KeyValuePair<long, OffsetList> kvp in header.ComponentOffsets)
{
// Get the offset
OffsetList list = kvp.Value;
if (list == null)
continue;
// Seek to the component
data.Seek(list.DescriptorOffset + cabinetDescriptorOffset, SeekOrigin.Begin);
// Try to parse the component
Component component = ParseComponent(data, header.MajorVersion, cabinetDescriptorOffset);
// Add the component
components.Add(component);
}
// Set the components
header.Components = components.ToArray();
#endregion
return header;
}
/// <summary>
/// Parse a Stream into a common header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled common header on success, null on error</returns>
private static CommonHeader ParseCommonHeader(Stream data)
{
CommonHeader commonHeader = new CommonHeader();
byte[] signature = data.ReadBytes(4);
commonHeader.Signature = Encoding.ASCII.GetString(signature);
if (commonHeader.Signature != SignatureString)
return null;
commonHeader.Version = data.ReadUInt32();
commonHeader.VolumeInfo = data.ReadUInt32();
commonHeader.CabDescriptorOffset = data.ReadUInt32();
commonHeader.CabDescriptorSize = data.ReadUInt32();
return commonHeader;
}
/// <summary>
/// Parse a Stream into a cabinet descriptor
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled cabinet descriptor on success, null on error</returns>
private static CabDescriptor ParseCabinetDescriptor(Stream data)
{
CabDescriptor cabDescriptor = new CabDescriptor();
cabDescriptor.Reserved0 = data.ReadBytes(0x0C);
cabDescriptor.FileTableOffset = data.ReadUInt32();
cabDescriptor.Reserved1 = data.ReadBytes(0x04);
cabDescriptor.FileTableSize = data.ReadUInt32();
cabDescriptor.FileTableSize2 = data.ReadUInt32();
cabDescriptor.DirectoryCount = data.ReadUInt32();
cabDescriptor.Reserved2 = data.ReadBytes(0x08);
cabDescriptor.FileCount = data.ReadUInt32();
cabDescriptor.FileTableOffset2 = data.ReadUInt32();
cabDescriptor.Reserved3 = data.ReadBytes(0x0E);
cabDescriptor.FileGroupOffsets = new uint[MAX_FILE_GROUP_COUNT];
for (int i = 0; i < cabDescriptor.FileGroupOffsets.Length; i++)
{
cabDescriptor.FileGroupOffsets[i] = data.ReadUInt32();
}
cabDescriptor.ComponentOffsets = new uint[MAX_COMPONENT_COUNT];
for (int i = 0; i < cabDescriptor.ComponentOffsets.Length; i++)
{
cabDescriptor.ComponentOffsets[i] = data.ReadUInt32();
}
return cabDescriptor;
}
/// <summary>
/// Parse a Stream into an offset list
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="majorVersion">Major version of the cabinet</param>
/// <param name="descriptorOffset">Offset of the cabinet descriptor</param>
/// <returns>Filled offset list on success, null on error</returns>
private static OffsetList ParseOffsetList(Stream data, int majorVersion, uint descriptorOffset)
{
OffsetList offsetList = new OffsetList();
offsetList.NameOffset = data.ReadUInt32();
offsetList.DescriptorOffset = data.ReadUInt32();
offsetList.NextOffset = data.ReadUInt32();
// Cache the current offset
long currentOffset = data.Position;
// Seek to the name offset
data.Seek(offsetList.NameOffset + descriptorOffset, SeekOrigin.Begin);
// Read the string
if (majorVersion >= 17)
offsetList.Name = data.ReadString(Encoding.Unicode);
else
offsetList.Name = data.ReadString(Encoding.ASCII);
// Seek back to the correct offset
data.Seek(currentOffset, SeekOrigin.Begin);
return offsetList;
}
/// <summary>
/// Parse a Stream into a file group
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="majorVersion">Major version of the cabinet</param>
/// <param name="descriptorOffset">Offset of the cabinet descriptor</param>
/// <returns>Filled file group on success, null on error</returns>
private static FileGroup ParseFileGroup(Stream data, int majorVersion, uint descriptorOffset)
{
FileGroup fileGroup = new FileGroup();
fileGroup.NameOffset = data.ReadUInt32();
// Skip bytes based on the version
if (majorVersion <= 5)
_ = data.ReadBytes(0x48);
else
_ = data.ReadBytes(0x12);
fileGroup.FirstFile = data.ReadUInt16();
fileGroup.LastFile = data.ReadUInt32();
// Cache the current position
long currentPosition = data.Position;
// Read the name, if possible
if (fileGroup.NameOffset != 0)
{
// Seek to the name
data.Seek(fileGroup.NameOffset + descriptorOffset, SeekOrigin.Begin);
// Read the string
if (majorVersion >= 17)
fileGroup.Name = data.ReadString(Encoding.Unicode);
else
fileGroup.Name = data.ReadString(Encoding.ASCII);
}
// Seek back to the correct offset
data.Seek(currentPosition, SeekOrigin.Begin);
return fileGroup;
}
/// <summary>
/// Parse a Stream into a component
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="majorVersion">Major version of the cabinet</param>
/// <param name="descriptorOffset">Offset of the cabinet descriptor</param>
/// <returns>Filled component on success, null on error</returns>
private static Component ParseComponent(Stream data, int majorVersion, uint descriptorOffset)
{
Component component = new Component();
component.NameOffset = data.ReadUInt32();
// Skip bytes based on the version
if (majorVersion <= 5)
_ = data.ReadBytes(0x6C);
else
_ = data.ReadBytes(0x6B);
component.FileGroupCount = data.ReadUInt16();
component.FileGroupTableOffset = data.ReadUInt32();
// Cache the current position
long currentPosition = data.Position;
// Read the name, if possible
if (component.NameOffset != 0)
{
// Seek to the name
data.Seek(component.NameOffset + descriptorOffset, SeekOrigin.Begin);
// Read the string
if (majorVersion >= 17)
component.Name = data.ReadString(Encoding.Unicode);
else
component.Name = data.ReadString(Encoding.ASCII);
}
// Read the file group table, if possible
if (component.FileGroupCount != 0 && component.FileGroupTableOffset != 0)
{
// Seek to the file group table offset
data.Seek(component.FileGroupTableOffset + descriptorOffset, SeekOrigin.Begin);
// Read the file group table
component.FileGroupNames = new string[component.FileGroupCount];
for (int j = 0; j < component.FileGroupCount; j++)
{
if (majorVersion >= 17)
component.FileGroupNames[j] = data.ReadString(Encoding.Unicode);
else
component.FileGroupNames[j] = data.ReadString(Encoding.ASCII);
}
}
// Seek back to the correct offset
data.Seek(currentPosition, SeekOrigin.Begin);
return component;
}
/// <summary>
/// Parse a Stream into a directory descriptor
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="majorVersion">Major version of the cabinet</param>
/// <returns>Filled directory descriptor on success, null on error</returns>
private static FileDescriptor ParseDirectoryDescriptor(Stream data, int majorVersion)
{
FileDescriptor fileDescriptor = new FileDescriptor();
// Read the string
if (majorVersion >= 17)
fileDescriptor.Name = data.ReadString(Encoding.Unicode);
else
fileDescriptor.Name = data.ReadString(Encoding.ASCII);
return fileDescriptor;
}
/// <summary>
/// Parse a Stream into a file descriptor
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="majorVersion">Major version of the cabinet</param>
/// <param name="descriptorOffset">Offset of the cabinet descriptor</param>
/// <returns>Filled file descriptor on success, null on error</returns>
private static FileDescriptor ParseFileDescriptor(Stream data, int majorVersion, uint descriptorOffset)
{
FileDescriptor fileDescriptor = new FileDescriptor();
// Read the descriptor based on version
if (majorVersion <= 5)
{
fileDescriptor.Volume = 0xFFFF; // Set by the header index
fileDescriptor.NameOffset = data.ReadUInt32();
fileDescriptor.DirectoryIndex = data.ReadUInt32();
fileDescriptor.Flags = (FileFlags)data.ReadUInt16();
fileDescriptor.ExpandedSize = data.ReadUInt32();
fileDescriptor.CompressedSize = data.ReadUInt32();
_ = data.ReadBytes(0x14); // Skip 0x14 bytes, unknown data?
fileDescriptor.DataOffset = data.ReadUInt32();
if (majorVersion == 5)
fileDescriptor.MD5 = data.ReadBytes(0x10);
}
else
{
fileDescriptor.Flags = (FileFlags)data.ReadUInt16();
fileDescriptor.ExpandedSize = data.ReadUInt64();
fileDescriptor.CompressedSize = data.ReadUInt64();
fileDescriptor.DataOffset = data.ReadUInt64();
fileDescriptor.MD5 = data.ReadBytes(0x10);
_ = data.ReadBytes(0x10); // Skip 0x10 bytes, unknown data?
fileDescriptor.NameOffset = data.ReadUInt32();
fileDescriptor.DirectoryIndex = data.ReadUInt16();
_ = data.ReadBytes(0x0C); // Skip 0x0C bytes, unknown data?
fileDescriptor.LinkPrevious = data.ReadUInt32();
fileDescriptor.LinkNext = data.ReadUInt32();
fileDescriptor.LinkFlags = (LinkFlags)data.ReadByteValue();
fileDescriptor.Volume = data.ReadUInt16();
}
// Cache the current position
long currentPosition = data.Position;
// Read the name, if possible
if (fileDescriptor.NameOffset != 0)
{
// Seek to the name
data.Seek(fileDescriptor.NameOffset + descriptorOffset, SeekOrigin.Begin);
// Read the string
if (majorVersion >= 17)
fileDescriptor.Name = data.ReadString(Encoding.Unicode);
else
fileDescriptor.Name = data.ReadString(Encoding.ASCII);
}
// Seek back to the correct offset
data.Seek(currentPosition, SeekOrigin.Begin);
return fileDescriptor;
}
/// <summary>
/// Parse a Stream into a volume header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="majorVersion">Major version of the cabinet</param>
/// <returns>Filled volume header on success, null on error</returns>
private static VolumeHeader ParseVolumeHeader(Stream data, int majorVersion)
{
VolumeHeader volumeHeader = new VolumeHeader();
// Read the descriptor based on version
if (majorVersion <= 5)
{
volumeHeader.DataOffset = data.ReadUInt32();
_ = data.ReadBytes(0x04); // Skip 0x04 bytes, unknown data?
volumeHeader.FirstFileIndex = data.ReadUInt32();
volumeHeader.LastFileIndex = data.ReadUInt32();
volumeHeader.FirstFileOffset = data.ReadUInt32();
volumeHeader.FirstFileSizeExpanded = data.ReadUInt32();
volumeHeader.FirstFileSizeCompressed = data.ReadUInt32();
volumeHeader.LastFileOffset = data.ReadUInt32();
volumeHeader.LastFileSizeExpanded = data.ReadUInt32();
volumeHeader.LastFileSizeCompressed = data.ReadUInt32();
}
else
{
volumeHeader.DataOffset = data.ReadUInt32();
volumeHeader.DataOffsetHigh = data.ReadUInt32();
volumeHeader.FirstFileIndex = data.ReadUInt32();
volumeHeader.LastFileIndex = data.ReadUInt32();
volumeHeader.FirstFileOffset = data.ReadUInt32();
volumeHeader.FirstFileOffsetHigh = data.ReadUInt32();
volumeHeader.FirstFileSizeExpanded = data.ReadUInt32();
volumeHeader.FirstFileSizeExpandedHigh = data.ReadUInt32();
volumeHeader.FirstFileSizeCompressed = data.ReadUInt32();
volumeHeader.FirstFileSizeCompressedHigh = data.ReadUInt32();
volumeHeader.LastFileOffset = data.ReadUInt32();
volumeHeader.LastFileOffsetHigh = data.ReadUInt32();
volumeHeader.LastFileSizeExpanded = data.ReadUInt32();
volumeHeader.LastFileSizeExpandedHigh = data.ReadUInt32();
volumeHeader.LastFileSizeCompressed = data.ReadUInt32();
volumeHeader.LastFileSizeCompressedHigh = data.ReadUInt32();
}
return volumeHeader;
}
#endregion
}
}
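Since ParseCabinet resolves the descriptor tables into flat arrays, a caller only needs the returned header. A usage sketch, with the input path and null handling purely illustrative:
    // Illustrative usage; "data1.hdr" is a placeholder path.
    byte[] input = System.IO.File.ReadAllBytes("data1.hdr");
    var header = BurnOutSharp.Builders.InstallShieldCabinet.ParseCabinet(input, 0);
    if (header?.FileDescriptors != null)
    {
        foreach (var descriptor in header.FileDescriptors)
            System.Console.WriteLine($"{descriptor?.Name} ({descriptor?.CompressedSize} bytes compressed)");
    }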

View File

@@ -1,9 +1,8 @@
using System.IO;
using BurnOutSharp.Models.LinearExecutable;
namespace BurnOutSharp.Builder
namespace BurnOutSharp.Builders
{
// TODO: Make Stream Data rely on Byte Data
public static class LinearExecutable
{
#region Byte Data
@@ -24,34 +23,9 @@ namespace BurnOutSharp.Builder
if (offset < 0 || offset >= data.Length)
return null;
// Cache the current offset
int initialOffset = offset;
// Create a new executable to fill
var executable = new Executable();
// Parse the MS-DOS stub
var stub = MSDOS.ParseExecutable(data, offset);
if (stub?.Header == null || stub.Header.NewExeHeaderAddr == 0)
return null;
// Set the MS-DOS stub
executable.Stub = stub;
// TODO: Implement LE/LX parsing
return null;
}
/// <summary>
/// Parse a byte array into a Linear Executable information block
/// </summary>
/// <param name="data">Byte array to parse</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>Filled information block on success, null on error</returns>
private static InformationBlock ParseInformationBlock(byte[] data, int offset)
{
// TODO: Implement LE/LX information block parsing
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return ParseExecutable(dataStream);
}
#endregion
@@ -66,7 +40,7 @@ namespace BurnOutSharp.Builder
public static Executable ParseExecutable(Stream data)
{
// If the data is invalid
if (data == null)
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
return null;
// If the offset is out of bounds

View File

@@ -1,9 +1,11 @@
using System.IO;
using System.Text;
using BurnOutSharp.Models.MSDOS;
using BurnOutSharp.Utilities;
using static BurnOutSharp.Models.MSDOS.Constants;
namespace BurnOutSharp.Builder
namespace BurnOutSharp.Builders
{
// TODO: Make Stream Data rely on Byte Data
public static class MSDOS
{
#region Byte Data
@@ -24,128 +26,9 @@ namespace BurnOutSharp.Builder
if (offset < 0 || offset >= data.Length)
return null;
// Cache the current offset
int initialOffset = offset;
// Create a new executable to fill
var executable = new Executable();
#region Executable Header
// Try to parse the executable header
var executableHeader = ParseExecutableHeader(data, offset);
if (executableHeader == null)
return null;
// Set the executable header
executable.Header = executableHeader;
#endregion
#region Relocation Table
// If the offset for the relocation table doesn't exist
int tableAddress = initialOffset + executableHeader.RelocationTableAddr;
if (tableAddress >= data.Length)
return executable;
// Try to parse the relocation table
var relocationTable = ParseRelocationTable(data, tableAddress, executableHeader.RelocationItems);
if (relocationTable == null)
return null;
// Set the relocation table
executable.RelocationTable = relocationTable;
#endregion
// Return the executable
return executable;
}
/// <summary>
/// Parse a byte array into an MS-DOS executable header
/// </summary>
/// <param name="data">Byte array to parse</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>Filled executable header on success, null on error</returns>
private static ExecutableHeader ParseExecutableHeader(byte[] data, int offset)
{
// TODO: Use marshalling here instead of building
var header = new ExecutableHeader();
#region Standard Fields
header.Magic = new byte[2];
for (int i = 0; i < header.Magic.Length; i++)
{
header.Magic[i] = data.ReadByte(ref offset);
}
if (header.Magic[0] != 'M' || header.Magic[1] != 'Z')
return null;
header.LastPageBytes = data.ReadUInt16(ref offset);
header.Pages = data.ReadUInt16(ref offset);
header.RelocationItems = data.ReadUInt16(ref offset);
header.HeaderParagraphSize = data.ReadUInt16(ref offset);
header.MinimumExtraParagraphs = data.ReadUInt16(ref offset);
header.MaximumExtraParagraphs = data.ReadUInt16(ref offset);
header.InitialSSValue = data.ReadUInt16(ref offset);
header.InitialSPValue = data.ReadUInt16(ref offset);
header.Checksum = data.ReadUInt16(ref offset);
header.InitialIPValue = data.ReadUInt16(ref offset);
header.InitialCSValue = data.ReadUInt16(ref offset);
header.RelocationTableAddr = data.ReadUInt16(ref offset);
header.OverlayNumber = data.ReadUInt16(ref offset);
#endregion
// If we don't have enough data for PE extensions
if (offset >= data.Length || data.Length - offset < 36)
return header;
#region PE Extensions
header.Reserved1 = new ushort[4];
for (int i = 0; i < header.Reserved1.Length; i++)
{
header.Reserved1[i] = data.ReadUInt16(ref offset);
}
header.OEMIdentifier = data.ReadUInt16(ref offset);
header.OEMInformation = data.ReadUInt16(ref offset);
header.Reserved2 = new ushort[10];
for (int i = 0; i < header.Reserved2.Length; i++)
{
header.Reserved2[i] = data.ReadUInt16(ref offset);
}
header.NewExeHeaderAddr = data.ReadUInt32(ref offset);
#endregion
return header;
}
/// <summary>
/// Parse a byte array into a relocation table
/// </summary>
/// <param name="data">Byte array to parse</param>
/// <param name="offset">Offset into the byte array</param>
/// <param name="count">Number of relocation table entries to read</param>
/// <returns>Filled relocation table on success, null on error</returns>
private static RelocationEntry[] ParseRelocationTable(byte[] data, int offset, int count)
{
// TODO: Use marshalling here instead of building
var relocationTable = new RelocationEntry[count];
for (int i = 0; i < count; i++)
{
var entry = new RelocationEntry();
entry.Offset = data.ReadUInt16(ref offset);
entry.Segment = data.ReadUInt16(ref offset);
relocationTable[i] = entry;
}
return relocationTable;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return ParseExecutable(dataStream);
}
#endregion
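With the byte-array overload now deferring to the Stream overload through a MemoryStream, both entry points share one parser. A usage sketch (the path is a placeholder):
    // Illustrative call into the byte-array entry point.
    byte[] input = System.IO.File.ReadAllBytes("GAME.EXE");
    var executable = BurnOutSharp.Builders.MSDOS.ParseExecutable(input, 0);
    if (executable?.Header != null)
        System.Console.WriteLine($"Relocation items: {executable.Header.RelocationItems}");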
@@ -160,7 +43,7 @@ namespace BurnOutSharp.Builder
public static Executable ParseExecutable(Stream data)
{
// If the data is invalid
if (data == null)
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
return null;
// If the offset is out of bounds
@@ -219,12 +102,9 @@ namespace BurnOutSharp.Builder
#region Standard Fields
header.Magic = new byte[2];
for (int i = 0; i < header.Magic.Length; i++)
{
header.Magic[i] = data.ReadByteValue();
}
if (header.Magic[0] != 'M' || header.Magic[1] != 'Z')
byte[] magic = data.ReadBytes(2);
header.Magic = Encoding.ASCII.GetString(magic);
if (header.Magic != SignatureString)
return null;
header.LastPageBytes = data.ReadUInt16();

View File

@@ -0,0 +1,258 @@
using System.IO;
using System.Text;
using BurnOutSharp.Models.MicrosoftCabinet;
using BurnOutSharp.Utilities;
using static BurnOutSharp.Models.MicrosoftCabinet.Constants;
namespace BurnOutSharp.Builders
{
// TODO: Add multi-cabinet reading
public class MicrosoftCabinet
{
#region Byte Data
/// <summary>
/// Parse a byte array into a Microsoft Cabinet file
/// </summary>
/// <param name="data">Byte array to parse</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>Filled cabinet on success, null on error</returns>
public static Cabinet ParseCabinet(byte[] data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return ParseCabinet(dataStream);
}
#endregion
#region Stream Data
/// <summary>
/// Parse a Stream into a Microsoft Cabinet file
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled cabinet on success, null on error</returns>
public static Cabinet ParseCabinet(Stream data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
return null;
// If the offset is out of bounds
if (data.Position < 0 || data.Position >= data.Length)
return null;
// Cache the current offset
int initialOffset = (int)data.Position;
// Create a new cabinet to fill
var cabinet = new Cabinet();
#region Cabinet Header
// Try to parse the cabinet header
var cabinetHeader = ParseCabinetHeader(data);
if (cabinetHeader == null)
return null;
// Set the cabinet header
cabinet.Header = cabinetHeader;
#endregion
#region Folders
// Set the folder array
cabinet.Folders = new CFFOLDER[cabinetHeader.FolderCount];
// Try to parse each folder, if we have any
for (int i = 0; i < cabinetHeader.FolderCount; i++)
{
var folder = ParseFolder(data, cabinetHeader);
if (folder == null)
return null;
// Set the folder
cabinet.Folders[i] = folder;
}
#endregion
#region Files
// Get the files offset
int filesOffset = (int)cabinetHeader.FilesOffset + initialOffset;
if (filesOffset > data.Length)
return null;
// Seek to the offset
data.Seek(filesOffset, SeekOrigin.Begin);
// Set the file array
cabinet.Files = new CFFILE[cabinetHeader.FileCount];
// Try to parse each file, if we have any
for (int i = 0; i < cabinetHeader.FileCount; i++)
{
var file = ParseFile(data);
if (file == null)
return null;
// Set the file
cabinet.Files[i] = file;
}
#endregion
return cabinet;
}
/// <summary>
/// Parse a Stream into a cabinet header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled cabinet header on success, null on error</returns>
private static CFHEADER ParseCabinetHeader(Stream data)
{
CFHEADER header = new CFHEADER();
byte[] signature = data.ReadBytes(4);
header.Signature = Encoding.ASCII.GetString(signature);
if (header.Signature != SignatureString)
return null;
header.Reserved1 = data.ReadUInt32();
header.CabinetSize = data.ReadUInt32();
header.Reserved2 = data.ReadUInt32();
header.FilesOffset = data.ReadUInt32();
header.Reserved3 = data.ReadUInt32();
header.VersionMinor = data.ReadByteValue();
header.VersionMajor = data.ReadByteValue();
header.FolderCount = data.ReadUInt16();
header.FileCount = data.ReadUInt16();
header.Flags = (HeaderFlags)data.ReadUInt16();
header.SetID = data.ReadUInt16();
header.CabinetIndex = data.ReadUInt16();
if (header.Flags.HasFlag(HeaderFlags.RESERVE_PRESENT))
{
header.HeaderReservedSize = data.ReadUInt16();
if (header.HeaderReservedSize > 60_000)
return null;
header.FolderReservedSize = data.ReadByteValue();
header.DataReservedSize = data.ReadByteValue();
if (header.HeaderReservedSize > 0)
header.ReservedData = data.ReadBytes(header.HeaderReservedSize);
}
if (header.Flags.HasFlag(HeaderFlags.PREV_CABINET))
{
header.CabinetPrev = data.ReadString(Encoding.ASCII);
header.DiskPrev = data.ReadString(Encoding.ASCII);
}
if (header.Flags.HasFlag(HeaderFlags.NEXT_CABINET))
{
header.CabinetNext = data.ReadString(Encoding.ASCII);
header.DiskNext = data.ReadString(Encoding.ASCII);
}
return header;
}
/// <summary>
/// Parse a Stream into a folder
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="header">Cabinet header to get flags and sizes from</param>
/// <returns>Filled folder on success, null on error</returns>
private static CFFOLDER ParseFolder(Stream data, CFHEADER header)
{
CFFOLDER folder = new CFFOLDER();
folder.CabStartOffset = data.ReadUInt32();
folder.DataCount = data.ReadUInt16();
folder.CompressionType = (CompressionType)data.ReadUInt16();
if (header.FolderReservedSize > 0)
folder.ReservedData = data.ReadBytes(header.FolderReservedSize);
if (folder.CabStartOffset > 0)
{
long currentPosition = data.Position;
data.Seek(folder.CabStartOffset, SeekOrigin.Begin);
folder.DataBlocks = new CFDATA[folder.DataCount];
for (int i = 0; i < folder.DataCount; i++)
{
CFDATA dataBlock = ParseDataBlock(data, header.DataReservedSize);
folder.DataBlocks[i] = dataBlock;
}
data.Seek(currentPosition, SeekOrigin.Begin);
}
return folder;
}
/// <summary>
/// Parse a Stream into a data block
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="dataReservedSize">Reserved byte size for data blocks</param>
/// <returns>Filled data block on success, null on error</returns>
private static CFDATA ParseDataBlock(Stream data, byte dataReservedSize)
{
CFDATA dataBlock = new CFDATA();
dataBlock.Checksum = data.ReadUInt32();
dataBlock.CompressedSize = data.ReadUInt16();
dataBlock.UncompressedSize = data.ReadUInt16();
if (dataReservedSize > 0)
dataBlock.ReservedData = data.ReadBytes(dataReservedSize);
if (dataBlock.CompressedSize > 0)
dataBlock.CompressedData = data.ReadBytes(dataBlock.CompressedSize);
return dataBlock;
}
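ParseDataBlock reads the Checksum field but does not verify it. In the published cabinet format specification, the stored value is a running XOR checksum over the compressed payload, seeded with a checksum of the two 16-bit size fields; the sketch below is a reading of that spec, not code from this changeset, and ignores any reserved block data:
    // CFDATA checksum per the public MS-CAB specification (sketch).
    private static uint ComputeDataBlockChecksum(CFDATA dataBlock)
    {
        // Seed with a checksum of the cbData/cbUncomp fields, little-endian
        byte[] sizes =
        {
            (byte)(dataBlock.CompressedSize & 0xFF), (byte)(dataBlock.CompressedSize >> 8),
            (byte)(dataBlock.UncompressedSize & 0xFF), (byte)(dataBlock.UncompressedSize >> 8),
        };
        uint seed = Checksum(sizes, 0);
        return Checksum(dataBlock.CompressedData ?? new byte[0], seed);
    }
    private static uint Checksum(byte[] bytes, uint seed)
    {
        uint csum = seed;
        int i = 0;
        for (int full = bytes.Length / 4; full > 0; full--, i += 4)
            csum ^= (uint)(bytes[i] | bytes[i + 1] << 8 | bytes[i + 2] << 16 | bytes[i + 3] << 24);
        // Remaining 1-3 bytes fold in high-to-low, per the spec's pseudocode
        uint tail = 0;
        switch (bytes.Length % 4)
        {
            case 3: tail |= (uint)bytes[i++] << 16; goto case 2;
            case 2: tail |= (uint)bytes[i++] << 8; goto case 1;
            case 1: tail |= bytes[i]; break;
        }
        return csum ^ tail;
    }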
/// <summary>
/// Parse a Stream into a file
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled file on success, null on error</returns>
private static CFFILE ParseFile(Stream data)
{
CFFILE file = new CFFILE();
file.FileSize = data.ReadUInt32();
file.FolderStartOffset = data.ReadUInt32();
file.FolderIndex = (FolderIndex)data.ReadUInt16();
file.Date = data.ReadUInt16();
file.Time = data.ReadUInt16();
file.Attributes = (Models.MicrosoftCabinet.FileAttributes)data.ReadUInt16();
if (file.Attributes.HasFlag(Models.MicrosoftCabinet.FileAttributes.NAME_IS_UTF))
file.Name = data.ReadString(Encoding.Unicode);
else
file.Name = data.ReadString(Encoding.ASCII);
return file;
}
#endregion
}
}
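ParseFile leaves Date and Time in their packed DOS representations. A decoding sketch using the standard FAT/cabinet bit layout (no validation of malformed values):
    // Decode packed DOS date/time fields from a CFFILE entry.
    private static System.DateTime DecodeDosDateTime(ushort date, ushort time)
    {
        int year = 1980 + (date >> 9);
        int month = (date >> 5) & 0x0F;
        int day = date & 0x1F;
        int hour = time >> 11;
        int minute = (time >> 5) & 0x3F;
        int second = (time & 0x1F) * 2; // Two-second resolution
        return new System.DateTime(year, month, day, hour, minute, second);
    }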

View File

@@ -0,0 +1,651 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
using BurnOutSharp.Models.MoPaQ;
using BurnOutSharp.Utilities;
using static BurnOutSharp.Models.MoPaQ.Constants;
namespace BurnOutSharp.Builders
{
public class MoPaQ
{
#region Byte Data
/// <summary>
/// Parse a byte array into a MoPaQ archive
/// </summary>
/// <param name="data">Byte array to parse</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>Filled archive on success, null on error</returns>
public static Archive ParseArchive(byte[] data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return ParseArchive(dataStream);
}
#endregion
#region Stream Data
/// <summary>
/// Parse a Stream into a MoPaQ archive
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled archive on success, null on error</returns>
public static Archive ParseArchive(Stream data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
return null;
// If the offset is out of bounds
if (data.Position < 0 || data.Position >= data.Length)
return null;
// Cache the current offset
int initialOffset = (int)data.Position;
// Create a new archive to fill
var archive = new Archive();
#region User Data
// Check for User Data
uint possibleSignature = data.ReadUInt32();
data.Seek(-4, SeekOrigin.Current);
if (possibleSignature == 0x1B51504D)
{
// Save the current position for offset correction
long basePtr = data.Position;
// Deserialize the user data, returning null if invalid
var userData = ParseUserData(data);
if (userData == null)
return null;
// Set the user data
archive.UserData = userData;
// Set the starting position according to the header offset
data.Seek(basePtr + (int)archive.UserData.HeaderOffset, SeekOrigin.Begin);
}
#endregion
#region Archive Header
// Check for the Header
possibleSignature = data.ReadUInt32();
data.Seek(-4, SeekOrigin.Current);
if (possibleSignature == 0x1A51504D)
{
// Try to parse the archive header
var archiveHeader = ParseArchiveHeader(data);
if (archiveHeader == null)
return null;
// Set the archive header
archive.ArchiveHeader = archiveHeader;
}
else
{
return null;
}
#endregion
#region Hash Table
// TODO: The hash table has to be decrypted before reading
// Version 1
if (archive.ArchiveHeader.FormatVersion == FormatVersion.Format1)
{
// If we have a hash table
long hashTableOffset = archive.ArchiveHeader.HashTablePosition;
if (hashTableOffset != 0)
{
// Seek to the offset
data.Seek(hashTableOffset, SeekOrigin.Begin);
// Find the ending offset based on size
long hashTableEnd = hashTableOffset + archive.ArchiveHeader.HashTableSize;
// Read in the hash table
var hashTable = new List<HashEntry>();
while (data.Position < hashTableEnd)
{
var hashEntry = ParseHashEntry(data);
if (hashEntry == null)
return null;
hashTable.Add(hashEntry);
}
archive.HashTable = hashTable.ToArray();
}
}
// Version 2 and 3
else if (archive.ArchiveHeader.FormatVersion == FormatVersion.Format2 || archive.ArchiveHeader.FormatVersion == FormatVersion.Format3)
{
// If we have a hash table
long hashTableOffset = ((long)archive.ArchiveHeader.HashTablePositionHi << 32) | archive.ArchiveHeader.HashTablePosition; // High 16 bits extend the 32-bit table position in v2+
if (hashTableOffset != 0)
{
// Seek to the offset
data.Seek(hashTableOffset, SeekOrigin.Begin);
// Find the ending offset based on size
long hashTableEnd = hashTableOffset + archive.ArchiveHeader.HashTableSize;
// Read in the hash table
var hashTable = new List<HashEntry>();
while (data.Position < hashTableEnd)
{
var hashEntry = ParseHashEntry(data);
if (hashEntry == null)
return null;
hashTable.Add(hashEntry);
}
archive.HashTable = hashTable.ToArray();
}
}
// Version 4
else if (archive.ArchiveHeader.FormatVersion == FormatVersion.Format4)
{
// If we have a hash table
long hashTableOffset = ((long)archive.ArchiveHeader.HashTablePositionHi << 32) | archive.ArchiveHeader.HashTablePosition; // High 16 bits extend the 32-bit table position in v2+
if (hashTableOffset != 0)
{
// Seek to the offset
data.Seek(hashTableOffset, SeekOrigin.Begin);
// Find the ending offset based on size
long hashTableEnd = hashTableOffset + (long)archive.ArchiveHeader.HashTableSizeLong;
// Read in the hash table
var hashTable = new List<HashEntry>();
while (data.Position < hashTableEnd)
{
var hashEntry = ParseHashEntry(data);
if (hashEntry == null)
return null;
hashTable.Add(hashEntry);
}
archive.HashTable = hashTable.ToArray();
}
}
#endregion
#region Block Table
// Version 1
if (archive.ArchiveHeader.FormatVersion == FormatVersion.Format1)
{
// If we have a block table
long blockTableOffset = archive.ArchiveHeader.BlockTablePosition;
if (blockTableOffset != 0)
{
// Seek to the offset
data.Seek(blockTableOffset, SeekOrigin.Begin);
// Find the ending offset based on size
long blockTableEnd = blockTableOffset + archive.ArchiveHeader.BlockTableSize;
// Read in the block table
var blockTable = new List<BlockEntry>();
while (data.Position < blockTableEnd)
{
var blockEntry = ParseBlockEntry(data);
if (blockEntry == null)
return null;
blockTable.Add(blockEntry);
}
archive.BlockTable = blockTable.ToArray();
}
}
// Version 2 and 3
else if (archive.ArchiveHeader.FormatVersion == FormatVersion.Format2 || archive.ArchiveHeader.FormatVersion == FormatVersion.Format3)
{
// If we have a block table
long blockTableOffset = ((long)archive.ArchiveHeader.BlockTablePositionHi << 32) | archive.ArchiveHeader.BlockTablePosition; // High 16 bits extend the 32-bit table position in v2+
if (blockTableOffset != 0)
{
// Seek to the offset
data.Seek(blockTableOffset, SeekOrigin.Begin);
// Find the ending offset based on size
long blockTableEnd = blockTableOffset + archive.ArchiveHeader.BlockTableSize;
// Read in the block table
var blockTable = new List<BlockEntry>();
while (data.Position < blockTableEnd)
{
var blockEntry = ParseBlockEntry(data);
if (blockEntry == null)
return null;
blockTable.Add(blockEntry);
}
archive.BlockTable = blockTable.ToArray();
}
}
// Version 4
else if (archive.ArchiveHeader.FormatVersion == FormatVersion.Format4)
{
// If we have a block table
long blockTableOffset = ((long)archive.ArchiveHeader.BlockTablePositionHi << 32) | archive.ArchiveHeader.BlockTablePosition; // High 16 bits extend the 32-bit table position in v2+
if (blockTableOffset != 0)
{
// Seek to the offset
data.Seek(blockTableOffset, SeekOrigin.Begin);
// Find the ending offset based on size
long blockTableEnd = blockTableOffset + (long)archive.ArchiveHeader.BlockTableSizeLong;
// Read in the block table
var blockTable = new List<BlockEntry>();
while (data.Position < blockTableEnd)
{
var blockEntry = ParseBlockEntry(data);
if (blockEntry == null)
return null;
blockTable.Add(blockEntry);
}
archive.BlockTable = blockTable.ToArray();
}
}
#endregion
#region Hi-Block Table
// Version 2, 3, and 4
if (archive.ArchiveHeader.FormatVersion >= FormatVersion.Format2)
{
// If we have a hi-block table
long hiBlockTableOffset = (long)archive.ArchiveHeader.HiBlockTablePosition;
if (hiBlockTableOffset != 0)
{
// Seek to the offset
data.Seek(hiBlockTableOffset, SeekOrigin.Begin);
// Read in the hi-block table
var hiBlockTable = new List<short>();
for (int i = 0; i < archive.BlockTable.Length; i++)
{
short hiBlockEntry = data.ReadInt16();
hiBlockTable.Add(hiBlockEntry);
}
archive.HiBlockTable = hiBlockTable.ToArray();
}
}
#endregion
#region BET Table
// Version 3 and 4
if (archive.ArchiveHeader.FormatVersion >= FormatVersion.Format3)
{
// If we have a BET table
long betTableOffset = (long)archive.ArchiveHeader.BetTablePosition;
if (betTableOffset != 0)
{
// Seek to the offset
data.Seek(betTableOffset, SeekOrigin.Begin);
// Read in the BET table
var betTable = ParseBetTable(data);
if (betTable == null)
return null;
archive.BetTable = betTable;
}
}
#endregion
#region HET Table
// Version 3 and 4
if (archive.ArchiveHeader.FormatVersion >= FormatVersion.Format3)
{
// If we have a HET table
long hetTableOffset = (long)archive.ArchiveHeader.HetTablePosition;
if (hetTableOffset != 0)
{
// Seek to the offset
data.Seek(hetTableOffset, SeekOrigin.Begin);
// Read in the HET table
var hetTable = ParseHetTable(data);
if (hetTable == null)
return null;
archive.HetTable = hetTable;
}
}
#endregion
return archive;
}
/// <summary>
/// Parse a Stream into an archive header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled archive header on success, null on error</returns>
private static ArchiveHeader ParseArchiveHeader(Stream data)
{
ArchiveHeader archiveHeader = new ArchiveHeader();
// V1 - Common
byte[] signature = data.ReadBytes(4);
archiveHeader.Signature = Encoding.ASCII.GetString(signature);
if (archiveHeader.Signature != ArchiveHeaderSignatureString)
return null;
archiveHeader.HeaderSize = data.ReadUInt32();
archiveHeader.ArchiveSize = data.ReadUInt32();
archiveHeader.FormatVersion = (FormatVersion)data.ReadUInt16();
archiveHeader.BlockSize = data.ReadUInt16();
archiveHeader.HashTablePosition = data.ReadUInt32();
archiveHeader.BlockTablePosition = data.ReadUInt32();
archiveHeader.HashTableSize = data.ReadUInt32();
archiveHeader.BlockTableSize = data.ReadUInt32();
// V2
if (archiveHeader.FormatVersion >= FormatVersion.Format2)
{
archiveHeader.HiBlockTablePosition = data.ReadUInt64();
archiveHeader.HashTablePositionHi = data.ReadUInt16();
archiveHeader.BlockTablePositionHi = data.ReadUInt16();
}
// V3
if (archiveHeader.FormatVersion >= FormatVersion.Format3)
{
archiveHeader.ArchiveSizeLong = data.ReadUInt64();
archiveHeader.BetTablePosition = data.ReadUInt64();
archiveHeader.HetTablePosition = data.ReadUInt64();
}
// V4
if (archiveHeader.FormatVersion >= FormatVersion.Format4)
{
archiveHeader.HashTableSizeLong = data.ReadUInt64();
archiveHeader.BlockTableSizeLong = data.ReadUInt64();
archiveHeader.HiBlockTableSize = data.ReadUInt64();
archiveHeader.HetTableSize = data.ReadUInt64();
archiveHeader.BetTablesize = data.ReadUInt64();
archiveHeader.RawChunkSize = data.ReadUInt32();
archiveHeader.BlockTableMD5 = data.ReadBytes(0x10);
archiveHeader.HashTableMD5 = data.ReadBytes(0x10);
archiveHeader.HiBlockTableMD5 = data.ReadBytes(0x10);
archiveHeader.BetTableMD5 = data.ReadBytes(0x10);
archiveHeader.HetTableMD5 = data.ReadBytes(0x10);
_ = data.ReadBytes(0x10); // Final MD5 in the v4 layout covers the MPQ header itself; discarded here (assumption: the model has no dedicated field) instead of overwriting HetTableMD5
}
return archiveHeader;
}
/// <summary>
/// Parse a Stream into a user data object
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled user data on success, null on error</returns>
private static UserData ParseUserData(Stream data)
{
UserData userData = new UserData();
byte[] signature = data.ReadBytes(4);
userData.Signature = Encoding.ASCII.GetString(signature);
if (userData.Signature != UserDataSignatureString)
return null;
userData.UserDataSize = data.ReadUInt32();
userData.HeaderOffset = data.ReadUInt32();
userData.UserDataHeaderSize = data.ReadUInt32();
return userData;
}
/// <summary>
/// Parse a Stream into a HET table
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled HET table on success, null on error</returns>
private static HetTable ParseHetTable(Stream data)
{
HetTable hetTable = new HetTable();
// Common Headers
byte[] signature = data.ReadBytes(4);
hetTable.Signature = Encoding.ASCII.GetString(signature);
if (hetTable.Signature != HetTableSignatureString)
return null;
hetTable.Version = data.ReadUInt32();
hetTable.DataSize = data.ReadUInt32();
// HET-Specific
hetTable.TableSize = data.ReadUInt32();
hetTable.MaxFileCount = data.ReadUInt32();
hetTable.HashTableSize = data.ReadUInt32();
hetTable.TotalIndexSize = data.ReadUInt32();
hetTable.IndexSizeExtra = data.ReadUInt32();
hetTable.IndexSize = data.ReadUInt32();
hetTable.BlockTableSize = data.ReadUInt32();
hetTable.HashTable = data.ReadBytes((int)hetTable.HashTableSize);
// TODO: Populate the file indexes array
hetTable.FileIndexes = new byte[(int)hetTable.HashTableSize][];
return hetTable;
}
/// <summary>
/// Parse a Stream into a BET table
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled BET table on success, null on error</returns>
private static BetTable ParseBetTable(Stream data)
{
BetTable betTable = new BetTable();
// Common Headers
byte[] signature = data.ReadBytes(4);
betTable.Signature = Encoding.ASCII.GetString(signature);
if (betTable.Signature != BetTableSignatureString)
return null;
betTable.Version = data.ReadUInt32();
betTable.DataSize = data.ReadUInt32();
// BET-Specific
betTable.TableSize = data.ReadUInt32();
betTable.FileCount = data.ReadUInt32();
betTable.Unknown = data.ReadUInt32();
betTable.TableEntrySize = data.ReadUInt32();
betTable.FilePositionBitIndex = data.ReadUInt32();
betTable.FileSizeBitIndex = data.ReadUInt32();
betTable.CompressedSizeBitIndex = data.ReadUInt32();
betTable.FlagIndexBitIndex = data.ReadUInt32();
betTable.UnknownBitIndex = data.ReadUInt32();
betTable.FilePositionBitCount = data.ReadUInt32();
betTable.FileSizeBitCount = data.ReadUInt32();
betTable.CompressedSizeBitCount = data.ReadUInt32();
betTable.FlagIndexBitCount = data.ReadUInt32();
betTable.UnknownBitCount = data.ReadUInt32();
betTable.TotalBetHashSize = data.ReadUInt32();
betTable.BetHashSizeExtra = data.ReadUInt32();
betTable.BetHashSize = data.ReadUInt32();
betTable.BetHashArraySize = data.ReadUInt32();
betTable.FlagCount = data.ReadUInt32();
betTable.FlagsArray = new uint[betTable.FlagCount];
byte[] flagsArray = data.ReadBytes((int)betTable.FlagCount * 4);
Buffer.BlockCopy(flagsArray, 0, betTable.FlagsArray, 0, (int)betTable.FlagCount * 4);
// TODO: Populate the file table
// TODO: Populate the hash table
return betTable;
}
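The BET table stores one bit-packed record per file; the *BitIndex and *BitCount pairs read above describe where each field sits inside a record of TableEntrySize bits. A sketch of extracting one field from the packed table bytes (which are left as a TODO above), assuming little-endian bit order within the buffer:
    // Hypothetical bit-field reader for the packed BET file table.
    private static ulong ReadBits(byte[] table, ulong bitOffset, uint bitCount)
    {
        ulong result = 0;
        for (uint i = 0; i < bitCount; i++)
        {
            ulong bit = bitOffset + i;
            if ((table[(int)(bit >> 3)] & (1 << (int)(bit & 7))) != 0)
                result |= 1UL << (int)i;
        }
        return result;
    }
    // Example: the file position of entry i would then be
    // ReadBits(fileTable, (ulong)i * betTable.TableEntrySize + betTable.FilePositionBitIndex, betTable.FilePositionBitCount);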
/// <summary>
/// Parse a Stream into a hash entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled hash entry on success, null on error</returns>
private static HashEntry ParseHashEntry(Stream data)
{
// TODO: Use marshalling here instead of building
HashEntry hashEntry = new HashEntry();
hashEntry.NameHashPartA = data.ReadUInt32();
hashEntry.NameHashPartB = data.ReadUInt32();
hashEntry.Locale = (Locale)data.ReadUInt16();
hashEntry.Platform = data.ReadUInt16();
hashEntry.BlockIndex = data.ReadUInt32();
return hashEntry;
}
/// <summary>
/// Parse a Stream into a block entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled block entry on success, null on error</returns>
private static BlockEntry ParseBlockEntry(Stream data)
{
BlockEntry blockEntry = new BlockEntry();
blockEntry.FilePosition = data.ReadUInt32();
blockEntry.CompressedSize = data.ReadUInt32();
blockEntry.UncompressedSize = data.ReadUInt32();
blockEntry.Flags = (FileFlags)data.ReadUInt32();
return blockEntry;
}
/// <summary>
/// Parse a Stream into a patch info
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled patch info on success, null on error</returns>
private static PatchInfo ParsePatchInfo(Stream data)
{
// TODO: Use marshalling here instead of building
PatchInfo patchInfo = new PatchInfo();
patchInfo.Length = data.ReadUInt32();
patchInfo.Flags = data.ReadUInt32();
patchInfo.DataSize = data.ReadUInt32();
patchInfo.MD5 = data.ReadBytes(0x10);
// TODO: Fill the sector offset table
return patchInfo;
}
#endregion
#region Helpers
/// <summary>
/// Buffer for encryption and decryption
/// </summary>
private uint[] _stormBuffer = new uint[STORM_BUFFER_SIZE];
/// <summary>
/// Prepare the encryption table
/// </summary>
private void PrepareCryptTable()
{
uint seed = 0x00100001;
for (uint index1 = 0; index1 < 0x100; index1++)
{
for (uint index2 = index1, i = 0; i < 5; i++, index2 += 0x100)
{
seed = (seed * 125 + 3) % 0x2AAAAB;
uint temp1 = (seed & 0xFFFF) << 0x10;
seed = (seed * 125 + 3) % 0x2AAAAB;
uint temp2 = (seed & 0xFFFF);
_stormBuffer[index2] = (temp1 | temp2);
}
}
}
/// <summary>
/// Decrypt a single block of data
/// </summary>
private byte[] DecryptBlock(byte[] block, uint length, uint key)
{
uint seed = 0xEEEEEEEE;
uint[] castBlock = new uint[length / 4];
Buffer.BlockCopy(block, 0, castBlock, 0, (int)length);
int castBlockPtr = 0;
// Round to uints; keep the original length intact for the final copy back
uint rounds = length >> 2;
while (rounds-- > 0)
{
seed += _stormBuffer[MPQ_HASH_KEY2_MIX + (key & 0xFF)];
uint ch = castBlock[castBlockPtr] ^ (key + seed);
key = ((~key << 0x15) + 0x11111111) | (key >> 0x0B);
seed = ch + seed + (seed << 5) + 3;
castBlock[castBlockPtr++] = ch;
}
Buffer.BlockCopy(castBlock, 0, block, 0, castBlock.Length * 4); // Only whole uints are encrypted; any trailing bytes stay as-is
return block;
}
#endregion
}
}
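PrepareCryptTable and DecryptBlock are the pieces needed for the table decryption flagged in the TODO above: the hash and block tables are encrypted with keys derived from the literal strings "(hash table)" and "(block table)". The string hash below mirrors the widely documented Storm scheme and assumes the conventional slice offsets (0x300 selects the file-key slice, matching the MPQ_HASH_KEY2_MIX-style indexing already used); it is a sketch, not code from this changeset:
    // Classic Storm string hash; hashType is a 0x100-aligned slice offset
    // into the crypt table (0x300 = file key). Assumed, not repository code.
    private uint HashString(string input, uint hashType)
    {
        uint seed1 = 0x7FED7FED, seed2 = 0xEEEEEEEE;
        foreach (char ch in input.ToUpperInvariant())
        {
            seed1 = _stormBuffer[hashType + ch] ^ (seed1 + seed2);
            seed2 = ch + seed1 + seed2 + (seed2 << 5) + 3;
        }
        return seed1;
    }
    // Usage: decrypt the raw table bytes before parsing entries, e.g.
    // DecryptBlock(rawHashTable, (uint)rawHashTable.Length, HashString("(hash table)", 0x300));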

View File

@@ -0,0 +1,544 @@
using System.Collections.Generic;
using System.IO;
using System.Text;
using BurnOutSharp.Models.NCF;
using BurnOutSharp.Utilities;
namespace BurnOutSharp.Builders
{
public static class NCF
{
#region Byte Data
/// <summary>
/// Parse a byte array into a Half-Life No Cache
/// </summary>
/// <param name="data">Byte array to parse</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>Filled Half-Life No Cache on success, null on error</returns>
public static Models.NCF.File ParseFile(byte[] data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return ParseFile(dataStream);
}
#endregion
#region Stream Data
/// <summary>
/// Parse a Stream into a Half-Life No Cache
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life No Cache on success, null on error</returns>
public static Models.NCF.File ParseFile(Stream data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
return null;
// If the offset is out of bounds
if (data.Position < 0 || data.Position >= data.Length)
return null;
// Cache the current offset
long initialOffset = data.Position;
// Create a new Half-Life No Cache to fill
var file = new Models.NCF.File();
#region Header
// Try to parse the header
var header = ParseHeader(data);
if (header == null)
return null;
// Set the no cache header
file.Header = header;
#endregion
// Cache the current offset
initialOffset = data.Position;
#region Directory Header
// Try to parse the directory header
var directoryHeader = ParseDirectoryHeader(data);
if (directoryHeader == null)
return null;
// Set the game cache directory header
file.DirectoryHeader = directoryHeader;
#endregion
#region Directory Entries
// Create the directory entry array
file.DirectoryEntries = new DirectoryEntry[directoryHeader.ItemCount];
// Try to parse the directory entries
for (int i = 0; i < directoryHeader.ItemCount; i++)
{
var directoryEntry = ParseDirectoryEntry(data);
file.DirectoryEntries[i] = directoryEntry;
}
#endregion
#region Directory Names
if (directoryHeader.NameSize > 0)
{
// Get the current offset for adjustment
long directoryNamesStart = data.Position;
// Get the ending offset
long directoryNamesEnd = data.Position + directoryHeader.NameSize;
// Create the string dictionary
file.DirectoryNames = new Dictionary<long, string>();
// Loop and read the null-terminated strings
while (data.Position < directoryNamesEnd)
{
long nameOffset = data.Position - directoryNamesStart;
string directoryName = data.ReadString(Encoding.ASCII);
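// If the read overran the name block, back up and keep only the portion inside the block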
if (data.Position > directoryNamesEnd)
{
data.Seek(-directoryName.Length, SeekOrigin.Current);
byte[] endingData = data.ReadBytes((int)(directoryNamesEnd - data.Position));
if (endingData != null)
directoryName = Encoding.ASCII.GetString(endingData);
else
directoryName = null;
}
file.DirectoryNames[nameOffset] = directoryName;
}
// Loop and assign to entries
foreach (var directoryEntry in file.DirectoryEntries)
{
directoryEntry.Name = file.DirectoryNames[directoryEntry.NameOffset];
}
}
#endregion
#region Directory Info 1 Entries
// Create the directory info 1 entry array
file.DirectoryInfo1Entries = new DirectoryInfo1Entry[directoryHeader.Info1Count];
// Try to parse the directory info 1 entries
for (int i = 0; i < directoryHeader.Info1Count; i++)
{
var directoryInfo1Entry = ParseDirectoryInfo1Entry(data);
file.DirectoryInfo1Entries[i] = directoryInfo1Entry;
}
#endregion
#region Directory Info 2 Entries
// Create the directory info 2 entry array
file.DirectoryInfo2Entries = new DirectoryInfo2Entry[directoryHeader.ItemCount];
// Try to parse the directory info 2 entries
for (int i = 0; i < directoryHeader.ItemCount; i++)
{
var directoryInfo2Entry = ParseDirectoryInfo2Entry(data);
file.DirectoryInfo2Entries[i] = directoryInfo2Entry;
}
#endregion
#region Directory Copy Entries
// Create the directory copy entry array
file.DirectoryCopyEntries = new DirectoryCopyEntry[directoryHeader.CopyCount];
// Try to parse the directory copy entries
for (int i = 0; i < directoryHeader.CopyCount; i++)
{
var directoryCopyEntry = ParseDirectoryCopyEntry(data);
file.DirectoryCopyEntries[i] = directoryCopyEntry;
}
#endregion
#region Directory Local Entries
// Create the directory local entry array
file.DirectoryLocalEntries = new DirectoryLocalEntry[directoryHeader.LocalCount];
// Try to parse the directory local entries
for (int i = 0; i < directoryHeader.LocalCount; i++)
{
var directoryLocalEntry = ParseDirectoryLocalEntry(data);
file.DirectoryLocalEntries[i] = directoryLocalEntry;
}
#endregion
// Seek to end of directory section, just in case
data.Seek(initialOffset + directoryHeader.DirectorySize, SeekOrigin.Begin);
#region Unknown Header
// Try to parse the unknown header
var unknownHeader = ParseUnknownHeader(data);
if (unknownHeader == null)
return null;
// Set the game cache unknown header
file.UnknownHeader = unknownHeader;
#endregion
#region Unknown Entries
// Create the unknown entry array
file.UnknownEntries = new UnknownEntry[directoryHeader.ItemCount];
// Try to parse the unknown entries
for (int i = 0; i < directoryHeader.ItemCount; i++)
{
var unknownEntry = ParseUnknownEntry(data);
file.UnknownEntries[i] = unknownEntry;
}
#endregion
#region Checksum Header
// Try to parse the checksum header
var checksumHeader = ParseChecksumHeader(data);
if (checksumHeader == null)
return null;
// Set the game cache checksum header
file.ChecksumHeader = checksumHeader;
#endregion
// Cache the current offset
initialOffset = data.Position;
#region Checksum Map Header
// Try to parse the checksum map header
var checksumMapHeader = ParseChecksumMapHeader(data);
if (checksumMapHeader == null)
return null;
// Set the game cache checksum map header
file.ChecksumMapHeader = checksumMapHeader;
#endregion
#region Checksum Map Entries
// Create the checksum map entry array
file.ChecksumMapEntries = new ChecksumMapEntry[checksumMapHeader.ItemCount];
// Try to parse the checksum map entries
for (int i = 0; i < checksumMapHeader.ItemCount; i++)
{
var checksumMapEntry = ParseChecksumMapEntry(data);
file.ChecksumMapEntries[i] = checksumMapEntry;
}
#endregion
#region Checksum Entries
// Create the checksum entry array
file.ChecksumEntries = new ChecksumEntry[checksumMapHeader.ChecksumCount];
// Try to parse the checksum entries
for (int i = 0; i < checksumMapHeader.ChecksumCount; i++)
{
var checksumEntry = ParseChecksumEntry(data);
file.ChecksumEntries[i] = checksumEntry;
}
#endregion
// Seek to end of checksum section, just in case
data.Seek(initialOffset + checksumHeader.ChecksumSize, SeekOrigin.Begin);
return file;
}
/// <summary>
/// Parse a Stream into a Half-Life No Cache header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life No Cache header on success, null on error</returns>
private static Header ParseHeader(Stream data)
{
// TODO: Use marshalling here instead of building
Header header = new Header();
header.Dummy0 = data.ReadUInt32();
if (header.Dummy0 != 0x00000001)
return null;
header.MajorVersion = data.ReadUInt32();
if (header.MajorVersion != 0x00000002)
return null;
header.MinorVersion = data.ReadUInt32();
if (header.MinorVersion != 1)
return null;
header.CacheID = data.ReadUInt32();
header.LastVersionPlayed = data.ReadUInt32();
header.Dummy1 = data.ReadUInt32();
header.Dummy2 = data.ReadUInt32();
header.FileSize = data.ReadUInt32();
header.BlockSize = data.ReadUInt32();
header.BlockCount = data.ReadUInt32();
header.Dummy3 = data.ReadUInt32();
return header;
}
/// <summary>
/// Parse a Stream into a Half-Life No Cache directory header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life No Cache directory header on success, null on error</returns>
private static DirectoryHeader ParseDirectoryHeader(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryHeader directoryHeader = new DirectoryHeader();
directoryHeader.Dummy0 = data.ReadUInt32();
if (directoryHeader.Dummy0 != 0x00000004)
return null;
directoryHeader.CacheID = data.ReadUInt32();
directoryHeader.LastVersionPlayed = data.ReadUInt32();
directoryHeader.ItemCount = data.ReadUInt32();
directoryHeader.FileCount = data.ReadUInt32();
directoryHeader.ChecksumDataLength = data.ReadUInt32();
directoryHeader.DirectorySize = data.ReadUInt32();
directoryHeader.NameSize = data.ReadUInt32();
directoryHeader.Info1Count = data.ReadUInt32();
directoryHeader.CopyCount = data.ReadUInt32();
directoryHeader.LocalCount = data.ReadUInt32();
directoryHeader.Dummy1 = data.ReadUInt32();
directoryHeader.Dummy2 = data.ReadUInt32();
directoryHeader.Checksum = data.ReadUInt32();
return directoryHeader;
}
/// <summary>
/// Parse a Stream into a Half-Life No Cache directory entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life No Cache directory entry on success, null on error</returns>
private static DirectoryEntry ParseDirectoryEntry(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryEntry directoryEntry = new DirectoryEntry();
directoryEntry.NameOffset = data.ReadUInt32();
directoryEntry.ItemSize = data.ReadUInt32();
directoryEntry.ChecksumIndex = data.ReadUInt32();
directoryEntry.DirectoryFlags = (HL_NCF_FLAG)data.ReadUInt32();
directoryEntry.ParentIndex = data.ReadUInt32();
directoryEntry.NextIndex = data.ReadUInt32();
directoryEntry.FirstIndex = data.ReadUInt32();
return directoryEntry;
}
/// <summary>
/// Parse a Stream into a Half-Life No Cache directory info 1 entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life No Cache directory info 1 entry on success, null on error</returns>
private static DirectoryInfo1Entry ParseDirectoryInfo1Entry(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryInfo1Entry directoryInfo1Entry = new DirectoryInfo1Entry();
directoryInfo1Entry.Dummy0 = data.ReadUInt32();
return directoryInfo1Entry;
}
/// <summary>
/// Parse a Stream into a Half-Life No Cache directory info 2 entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life No Cache directory info 2 entry on success, null on error</returns>
private static DirectoryInfo2Entry ParseDirectoryInfo2Entry(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryInfo2Entry directoryInfo2Entry = new DirectoryInfo2Entry();
directoryInfo2Entry.Dummy0 = data.ReadUInt32();
return directoryInfo2Entry;
}
/// <summary>
/// Parse a Stream into a Half-Life No Cache directory copy entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life No Cache directory copy entry on success, null on error</returns>
private static DirectoryCopyEntry ParseDirectoryCopyEntry(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryCopyEntry directoryCopyEntry = new DirectoryCopyEntry();
directoryCopyEntry.DirectoryIndex = data.ReadUInt32();
return directoryCopyEntry;
}
/// <summary>
/// Parse a Stream into a Half-Life No Cache directory local entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life No Cache directory local entry on success, null on error</returns>
private static DirectoryLocalEntry ParseDirectoryLocalEntry(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryLocalEntry directoryLocalEntry = new DirectoryLocalEntry();
directoryLocalEntry.DirectoryIndex = data.ReadUInt32();
return directoryLocalEntry;
}
/// <summary>
/// Parse a Stream into a Half-Life No Cache unknown header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life No Cache unknown header on success, null on error</returns>
private static UnknownHeader ParseUnknownHeader(Stream data)
{
// TODO: Use marshalling here instead of building
UnknownHeader unknownHeader = new UnknownHeader();
unknownHeader.Dummy0 = data.ReadUInt32();
if (unknownHeader.Dummy0 != 0x00000001)
return null;
unknownHeader.Dummy1 = data.ReadUInt32();
if (unknownHeader.Dummy1 != 0x00000000)
return null;
return unknownHeader;
}
/// <summary>
/// Parse a Stream into a Half-Life No Cache unknown entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life No Cache unknown entry on success, null on error</returns>
private static UnknownEntry ParseUnknownEntry(Stream data)
{
// TODO: Use marshalling here instead of building
UnknownEntry unknownEntry = new UnknownEntry();
unknownEntry.Dummy0 = data.ReadUInt32();
return unknownEntry;
}
/// <summary>
/// Parse a Stream into a Half-Life No Cache checksum header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life No Cache checksum header on success, null on error</returns>
private static ChecksumHeader ParseChecksumHeader(Stream data)
{
// TODO: Use marshalling here instead of building
ChecksumHeader checksumHeader = new ChecksumHeader();
checksumHeader.Dummy0 = data.ReadUInt32();
if (checksumHeader.Dummy0 != 0x00000001)
return null;
checksumHeader.ChecksumSize = data.ReadUInt32();
return checksumHeader;
}
/// <summary>
/// Parse a Stream into a Half-Life No Cache checksum map header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life No Cache checksum map header on success, null on error</returns>
private static ChecksumMapHeader ParseChecksumMapHeader(Stream data)
{
// TODO: Use marshalling here instead of building
ChecksumMapHeader checksumMapHeader = new ChecksumMapHeader();
checksumMapHeader.Dummy0 = data.ReadUInt32();
if (checksumMapHeader.Dummy0 != 0x14893721)
return null;
checksumMapHeader.Dummy1 = data.ReadUInt32();
if (checksumMapHeader.Dummy1 != 0x00000001)
return null;
checksumMapHeader.ItemCount = data.ReadUInt32();
checksumMapHeader.ChecksumCount = data.ReadUInt32();
return checksumMapHeader;
}
/// <summary>
/// Parse a Stream into a Half-Life No Cache checksum map entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life No Cache checksum map entry on success, null on error</returns>
private static ChecksumMapEntry ParseChecksumMapEntry(Stream data)
{
// TODO: Use marshalling here instead of building
ChecksumMapEntry checksumMapEntry = new ChecksumMapEntry();
checksumMapEntry.ChecksumCount = data.ReadUInt32();
checksumMapEntry.FirstChecksumIndex = data.ReadUInt32();
return checksumMapEntry;
}
/// <summary>
/// Parse a Stream into a Half-Life No Cache checksum entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life No Cache checksum entry on success, null on error</returns>
private static ChecksumEntry ParseChecksumEntry(Stream data)
{
// TODO: Use marshalling here instead of building
ChecksumEntry checksumEntry = new ChecksumEntry();
checksumEntry.Checksum = data.ReadUInt32();
return checksumEntry;
}
#endregion
}
}


@@ -0,0 +1,508 @@
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using BurnOutSharp.Models.NewExecutable;
using BurnOutSharp.Utilities;
using static BurnOutSharp.Models.NewExecutable.Constants;
namespace BurnOutSharp.Builders
{
public static class NewExecutable
{
#region Byte Data
/// <summary>
/// Parse a byte array into a New Executable
/// </summary>
/// <param name="data">Byte array to parse</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>Filled executable on success, null on error</returns>
public static Executable ParseExecutable(byte[] data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return ParseExecutable(dataStream);
}
#endregion
#region Stream Data
/// <summary>
/// Parse a Stream into a New Executable
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled executable on success, null on error</returns>
public static Executable ParseExecutable(Stream data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
return null;
// If the offset is out of bounds
if (data.Position < 0 || data.Position >= data.Length)
return null;
// Cache the current offset
int initialOffset = (int)data.Position;
// Create a new executable to fill
var executable = new Executable();
#region MS-DOS Stub
// Parse the MS-DOS stub
var stub = MSDOS.ParseExecutable(data);
if (stub?.Header == null || stub.Header.NewExeHeaderAddr == 0)
return null;
// Set the MS-DOS stub
executable.Stub = stub;
#endregion
#region Executable Header
// Try to parse the executable header
data.Seek(initialOffset + stub.Header.NewExeHeaderAddr, SeekOrigin.Begin);
var executableHeader = ParseExecutableHeader(data);
if (executableHeader == null)
return null;
// Set the executable header
executable.Header = executableHeader;
#endregion
#region Segment Table
// If the offset for the segment table doesn't exist
int tableAddress = initialOffset
+ (int)stub.Header.NewExeHeaderAddr
+ executableHeader.SegmentTableOffset;
if (tableAddress >= data.Length)
return executable;
// Try to parse the segment table
data.Seek(tableAddress, SeekOrigin.Begin);
var segmentTable = ParseSegmentTable(data, executableHeader.FileSegmentCount);
if (segmentTable == null)
return null;
// Set the segment table
executable.SegmentTable = segmentTable;
#endregion
#region Resource Table
// If the offset for the resource table doesn't exist
tableAddress = initialOffset
+ (int)stub.Header.NewExeHeaderAddr
+ executableHeader.ResourceTableOffset;
if (tableAddress >= data.Length)
return executable;
// Try to parse the resource table
data.Seek(tableAddress, SeekOrigin.Begin);
var resourceTable = ParseResourceTable(data, executableHeader.ResourceEntriesCount);
if (resourceTable == null)
return null;
// Set the resource table
executable.ResourceTable = resourceTable;
#endregion
#region Resident-Name Table
// If the offset for the resident-name table doesn't exist
tableAddress = initialOffset
+ (int)stub.Header.NewExeHeaderAddr
+ executableHeader.ResidentNameTableOffset;
int endOffset = initialOffset
+ (int)stub.Header.NewExeHeaderAddr
+ executableHeader.ModuleReferenceTableOffset;
if (tableAddress >= data.Length)
return executable;
// Try to parse the resident-name table
data.Seek(tableAddress, SeekOrigin.Begin);
var residentNameTable = ParseResidentNameTable(data, endOffset);
if (residentNameTable == null)
return null;
// Set the resident-name table
executable.ResidentNameTable = residentNameTable;
#endregion
#region Module-Reference Table
// If the offset for the module-reference table doesn't exist
tableAddress = initialOffset
+ (int)stub.Header.NewExeHeaderAddr
+ executableHeader.ModuleReferenceTableOffset;
if (tableAddress >= data.Length)
return executable;
// Try to parse the module-reference table
data.Seek(tableAddress, SeekOrigin.Begin);
var moduleReferenceTable = ParseModuleReferenceTable(data, executableHeader.ModuleReferenceTableSize);
if (moduleReferenceTable == null)
return null;
// Set the module-reference table
executable.ModuleReferenceTable = moduleReferenceTable;
#endregion
#region Imported-Name Table
// If the offset for the imported-name table doesn't exist
tableAddress = initialOffset
+ (int)stub.Header.NewExeHeaderAddr
+ executableHeader.ImportedNamesTableOffset;
endOffset = initialOffset
+ (int)stub.Header.NewExeHeaderAddr
+ executableHeader.EntryTableOffset;
if (tableAddress >= data.Length)
return executable;
// Try to parse the imported-name table
data.Seek(tableAddress, SeekOrigin.Begin);
var importedNameTable = ParseImportedNameTable(data, endOffset);
if (importedNameTable == null)
return null;
// Set the imported-name table
executable.ImportedNameTable = importedNameTable;
#endregion
#region Entry Table
// If the offset for the entry table doesn't exist
tableAddress = initialOffset
+ (int)stub.Header.NewExeHeaderAddr
+ executableHeader.EntryTableOffset;
endOffset = initialOffset
+ (int)stub.Header.NewExeHeaderAddr
+ executableHeader.EntryTableOffset
+ executableHeader.EntryTableSize;
if (tableAddress >= data.Length)
return executable;
// Try to parse the entry table
data.Seek(tableAddress, SeekOrigin.Begin);
var entryTable = ParseEntryTable(data, endOffset);
if (entryTable == null)
return null;
// Set the entry table
executable.EntryTable = entryTable;
#endregion
#region Nonresident-Name Table
// If the offset for the nonresident-name table doesn't exist
tableAddress = initialOffset
+ (int)executableHeader.NonResidentNamesTableOffset;
endOffset = initialOffset
+ (int)executableHeader.NonResidentNamesTableOffset
+ executableHeader.NonResidentNameTableSize;
if (tableAddress >= data.Length)
return executable;
// Try to parse the nonresident-name table
data.Seek(tableAddress, SeekOrigin.Begin);
var nonResidentNameTable = ParseNonResidentNameTable(data, endOffset);
if (nonResidentNameTable == null)
return null;
// Set the nonresident-name table
executable.NonResidentNameTable = nonResidentNameTable;
#endregion
return executable;
}
/// <summary>
/// Parse a Stream into a New Executable header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled executable header on success, null on error</returns>
private static ExecutableHeader ParseExecutableHeader(Stream data)
{
// TODO: Use marshalling here instead of building
var header = new ExecutableHeader();
byte[] magic = data.ReadBytes(2);
header.Magic = Encoding.ASCII.GetString(magic);
if (header.Magic != SignatureString)
return null;
header.LinkerVersion = data.ReadByteValue();
header.LinkerRevision = data.ReadByteValue();
header.EntryTableOffset = data.ReadUInt16();
header.EntryTableSize = data.ReadUInt16();
header.CrcChecksum = data.ReadUInt32();
header.FlagWord = (HeaderFlag)data.ReadUInt16();
header.AutomaticDataSegmentNumber = data.ReadUInt16();
header.InitialHeapAlloc = data.ReadUInt16();
header.InitialStackAlloc = data.ReadUInt16();
header.InitialCSIPSetting = data.ReadUInt32();
header.InitialSSSPSetting = data.ReadUInt32();
header.FileSegmentCount = data.ReadUInt16();
header.ModuleReferenceTableSize = data.ReadUInt16();
header.NonResidentNameTableSize = data.ReadUInt16();
header.SegmentTableOffset = data.ReadUInt16();
header.ResourceTableOffset = data.ReadUInt16();
header.ResidentNameTableOffset = data.ReadUInt16();
header.ModuleReferenceTableOffset = data.ReadUInt16();
header.ImportedNamesTableOffset = data.ReadUInt16();
header.NonResidentNamesTableOffset = data.ReadUInt32();
header.MovableEntriesCount = data.ReadUInt16();
header.SegmentAlignmentShiftCount = data.ReadUInt16();
header.ResourceEntriesCount = data.ReadUInt16();
header.TargetOperatingSystem = (OperatingSystem)data.ReadByteValue();
header.AdditionalFlags = (OS2Flag)data.ReadByteValue();
header.ReturnThunkOffset = data.ReadUInt16();
header.SegmentReferenceThunkOffset = data.ReadUInt16();
header.MinCodeSwapAreaSize = data.ReadUInt16();
header.WindowsSDKRevision = data.ReadByteValue();
header.WindowsSDKVersion = data.ReadByteValue();
return header;
}
/// <summary>
/// Parse a Stream into a segment table
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="count">Number of segment table entries to read</param>
/// <returns>Filled segment table on success, null on error</returns>
private static SegmentTableEntry[] ParseSegmentTable(Stream data, int count)
{
// TODO: Use marshalling here instead of building
var segmentTable = new SegmentTableEntry[count];
for (int i = 0; i < count; i++)
{
var entry = new SegmentTableEntry();
entry.Offset = data.ReadUInt16();
entry.Length = data.ReadUInt16();
entry.FlagWord = (SegmentTableEntryFlag)data.ReadUInt16();
entry.MinimumAllocationSize = data.ReadUInt16();
segmentTable[i] = entry;
}
return segmentTable;
}
/// <summary>
/// Parse a Stream into a resource table
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="count">Number of resource table entries to read</param>
/// <returns>Filled resource table on success, null on error</returns>
private static ResourceTable ParseResourceTable(Stream data, int count)
{
long initialOffset = data.Position;
// TODO: Use marshalling here instead of building
var resourceTable = new ResourceTable();
resourceTable.AlignmentShiftCount = data.ReadUInt16();
resourceTable.ResourceTypes = new ResourceTypeInformationEntry[count];
for (int i = 0; i < resourceTable.ResourceTypes.Length; i++)
{
var entry = new ResourceTypeInformationEntry();
entry.TypeID = data.ReadUInt16();
entry.ResourceCount = data.ReadUInt16();
entry.Reserved = data.ReadUInt32();
entry.Resources = new ResourceTypeResourceEntry[entry.ResourceCount];
for (int j = 0; j < entry.ResourceCount; j++)
{
// TODO: Should we read and store the resource data?
var resource = new ResourceTypeResourceEntry();
resource.Offset = data.ReadUInt16();
resource.Length = data.ReadUInt16();
resource.FlagWord = (ResourceTypeResourceFlag)data.ReadUInt16();
resource.ResourceID = data.ReadUInt16();
resource.Reserved = data.ReadUInt32();
entry.Resources[j] = resource;
}
resourceTable.ResourceTypes[i] = entry;
}
// Get the full list of unique string offsets
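// (an ID with the high bit clear is an offset to a length-prefixed string,
// relative to the start of the resource table)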
var stringOffsets = resourceTable.ResourceTypes
.Where(rt => rt.IsIntegerType() == false)
.Select(rt => rt.TypeID)
.Union(resourceTable.ResourceTypes
.SelectMany(rt => rt.Resources)
.Where(r => r.IsIntegerType() == false)
.Select(r => r.ResourceID))
.Distinct()
.OrderBy(o => o)
.ToList();
// Populate the type and name string dictionary
resourceTable.TypeAndNameStrings = new Dictionary<ushort, ResourceTypeAndNameString>();
for (int i = 0; i < stringOffsets.Count; i++)
{
int stringOffset = (int)(stringOffsets[i] + initialOffset);
data.Seek(stringOffset, SeekOrigin.Begin);
var str = new ResourceTypeAndNameString();
str.Length = data.ReadByteValue();
str.Text = data.ReadBytes(str.Length);
resourceTable.TypeAndNameStrings[stringOffsets[i]] = str;
}
return resourceTable;
}
/// <summary>
/// Parse a Stream into a resident-name table
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="endOffset">First address not part of the resident-name table</param>
/// <returns>Filled resident-name table on success, null on error</returns>
private static ResidentNameTableEntry[] ParseResidentNameTable(Stream data, int endOffset)
{
// TODO: Use marshalling here instead of building
var residentNameTable = new List<ResidentNameTableEntry>();
while (data.Position < endOffset)
{
var entry = new ResidentNameTableEntry();
entry.Length = data.ReadByteValue();
entry.NameString = data.ReadBytes(entry.Length);
entry.OrdinalNumber = data.ReadUInt16();
residentNameTable.Add(entry);
}
return residentNameTable.ToArray();
}
/// <summary>
/// Parse a Stream into a module-reference table
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="count">Number of module-reference table entries to read</param>
/// <returns>Filled module-reference table on success, null on error</returns>
private static ModuleReferenceTableEntry[] ParseModuleReferenceTable(Stream data, int count)
{
// TODO: Use marshalling here instead of building
var moduleReferenceTable = new ModuleReferenceTableEntry[count];
for (int i = 0; i < count; i++)
{
var entry = new ModuleReferenceTableEntry();
entry.Offset = data.ReadUInt16();
moduleReferenceTable[i] = entry;
}
return moduleReferenceTable;
}
/// <summary>
/// Parse a Stream into an imported-name table
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="endOffset">First address not part of the imported-name table</param>
/// <returns>Filled imported-name table on success, null on error</returns>
private static Dictionary<ushort, ImportedNameTableEntry> ParseImportedNameTable(Stream data, int endOffset)
{
// TODO: Use marshalling here instead of building
var importedNameTable = new Dictionary<ushort, ImportedNameTableEntry>();
while (data.Position < endOffset)
{
ushort currentOffset = (ushort)data.Position;
var entry = new ImportedNameTableEntry();
entry.Length = data.ReadByteValue();
entry.NameString = data.ReadBytes(entry.Length);
importedNameTable[currentOffset] = entry;
}
return importedNameTable;
}
/// <summary>
/// Parse a Stream into an entry table
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="endOffset">First address not part of the entry table</param>
/// <returns>Filled entry table on success, null on error</returns>
private static EntryTableBundle[] ParseEntryTable(Stream data, int endOffset)
{
// TODO: Use marshalling here instead of building
var entryTable = new List<EntryTableBundle>();
while (data.Position < endOffset)
{
var entry = new EntryTableBundle();
entry.EntryCount = data.ReadByteValue();
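// Segment indicator: 0x00 = unused bundle, 0xFF = moveable segment entries,
// any other value = the fixed segment number the entries belong to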
entry.SegmentIndicator = data.ReadByteValue();
switch (entry.GetEntryType())
{
case SegmentEntryType.Unused:
break;
case SegmentEntryType.FixedSegment:
entry.FixedFlagWord = (FixedSegmentEntryFlag)data.ReadByteValue();
entry.FixedOffset = data.ReadUInt16();
break;
case SegmentEntryType.MoveableSegment:
entry.MoveableFlagWord = (MoveableSegmentEntryFlag)data.ReadByteValue();
entry.MoveableReserved = data.ReadUInt16();
entry.MoveableSegmentNumber = data.ReadByteValue();
entry.MoveableOffset = data.ReadUInt16();
break;
}
entryTable.Add(entry);
}
return entryTable.ToArray();
}
/// <summary>
/// Parse a Stream into a nonresident-name table
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="endOffset">First address not part of the nonresident-name table</param>
/// <returns>Filled nonresident-name table on success, null on error</returns>
private static NonResidentNameTableEntry[] ParseNonResidentNameTable(Stream data, int endOffset)
{
// TODO: Use marshalling here instead of building
var residentNameTable = new List<NonResidentNameTableEntry>();
while (data.Position < endOffset)
{
var entry = new NonResidentNameTableEntry();
entry.Length = data.ReadByteValue();
entry.NameString = data.ReadBytes(entry.Length);
entry.OrdinalNumber = data.ReadUInt16();
residentNameTable.Add(entry);
}
return residentNameTable.ToArray();
}
#endregion
}
}


@@ -0,0 +1,137 @@
using System.IO;
using System.Text;
using BurnOutSharp.Models.PAK;
using BurnOutSharp.Utilities;
using static BurnOutSharp.Models.PAK.Constants;
namespace BurnOutSharp.Builders
{
public static class PAK
{
#region Byte Data
/// <summary>
/// Parse a byte array into a Half-Life Package
/// </summary>
/// <param name="data">Byte array to parse</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>Filled Half-Life Package on success, null on error</returns>
public static Models.PAK.File ParseFile(byte[] data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return ParseFile(dataStream);
}
#endregion
#region Stream Data
/// <summary>
/// Parse a Stream into a Half-Life Package
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Package on success, null on error</returns>
public static Models.PAK.File ParseFile(Stream data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
return null;
// If the offset is out of bounds
if (data.Position < 0 || data.Position >= data.Length)
return null;
// Cache the current offset
long initialOffset = data.Position;
// Create a new Half-Life Package to fill
var file = new Models.PAK.File();
#region Header
// Try to parse the header
var header = ParseHeader(data);
if (header == null)
return null;
// Set the package header
file.Header = header;
#endregion
#region Directory Items
// Get the directory items offset
uint directoryItemsOffset = header.DirectoryOffset;
if (directoryItemsOffset >= data.Length)
return null;
// Seek to the directory items
data.Seek(directoryItemsOffset, SeekOrigin.Begin);
// Create the directory item array
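// Each directory item is 64 bytes: a 56-byte name plus two 32-bit fields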
file.DirectoryItems = new DirectoryItem[header.DirectoryLength / 64];
// Try to parse the directory items
for (int i = 0; i < file.DirectoryItems.Length; i++)
{
var directoryItem = ParseDirectoryItem(data);
file.DirectoryItems[i] = directoryItem;
}
#endregion
return file;
}
/// <summary>
/// Parse a Stream into a Half-Life Package header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Package header on success, null on error</returns>
private static Header ParseHeader(Stream data)
{
// TODO: Use marshalling here instead of building
Header header = new Header();
byte[] signature = data.ReadBytes(4);
header.Signature = Encoding.ASCII.GetString(signature);
if (header.Signature != SignatureString)
return null;
header.DirectoryOffset = data.ReadUInt32();
header.DirectoryLength = data.ReadUInt32();
return header;
}
/// <summary>
/// Parse a Stream into a Half-Life Package directory item
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Package directory item on success, null on error</returns>
private static DirectoryItem ParseDirectoryItem(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryItem directoryItem = new DirectoryItem();
byte[] itemName = data.ReadBytes(56);
directoryItem.ItemName = Encoding.ASCII.GetString(itemName).TrimEnd('\0');
directoryItem.ItemOffset = data.ReadUInt32();
directoryItem.ItemLength = data.ReadUInt32();
return directoryItem;
}
#endregion
}
}

File diff suppressed because it is too large


@@ -0,0 +1,184 @@
using System.IO;
using System.Text;
using BurnOutSharp.Models.Quantum;
using BurnOutSharp.Utilities;
using static BurnOutSharp.Models.Quantum.Constants;
namespace BurnOutSharp.Builders
{
public static class Quantum
{
#region Byte Data
/// <summary>
/// Parse a byte array into a Quantum archive
/// </summary>
/// <param name="data">Byte array to parse</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>Filled archive on success, null on error</returns>
public static Archive ParseArchive(byte[] data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return ParseArchive(dataStream);
}
#endregion
#region Stream Data
/// <summary>
/// Parse a Stream into a Quantum archive
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled archive on success, null on error</returns>
public static Archive ParseArchive(Stream data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
return null;
// If the offset is out of bounds
if (data.Position < 0 || data.Position >= data.Length)
return null;
// Cache the current offset
int initialOffset = (int)data.Position;
// Create a new archive to fill
var archive = new Archive();
#region Header
// Try to parse the header
var header = ParseHeader(data);
if (header == null)
return null;
// Set the archive header
archive.Header = header;
#endregion
#region File List
// If we have any files
if (header.FileCount > 0)
{
var fileDescriptors = new FileDescriptor[header.FileCount];
// Read all entries in turn
for (int i = 0; i < header.FileCount; i++)
{
var file = ParseFileDescriptor(data, header.MinorVersion);
if (file == null)
return null;
fileDescriptors[i] = file;
}
// Set the file list
archive.FileList = fileDescriptors;
}
#endregion
// Cache the compressed data offset
archive.CompressedDataOffset = data.Position;
return archive;
}
/// <summary>
/// Parse a Stream into a header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled header on success, null on error</returns>
private static Header ParseHeader(Stream data)
{
// TODO: Use marshalling here instead of building
Header header = new Header();
byte[] signature = data.ReadBytes(2);
header.Signature = Encoding.ASCII.GetString(signature);
if (header.Signature != SignatureString)
return null;
header.MajorVersion = data.ReadByteValue();
header.MinorVersion = data.ReadByteValue();
header.FileCount = data.ReadUInt16();
header.TableSize = data.ReadByteValue();
header.CompressionFlags = data.ReadByteValue();
return header;
}
/// <summary>
/// Parse a Stream into a file descriptor
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="minorVersion">Minor version of the archive</param>
/// <returns>Filled file descriptor on success, null on error</returns>
private static FileDescriptor ParseFileDescriptor(Stream data, byte minorVersion)
{
// TODO: Use marshalling here instead of building
FileDescriptor fileDescriptor = new FileDescriptor();
fileDescriptor.FileNameSize = ReadVariableLength(data);
if (fileDescriptor.FileNameSize > 0)
{
byte[] fileName = data.ReadBytes(fileDescriptor.FileNameSize);
fileDescriptor.FileName = Encoding.ASCII.GetString(fileName);
}
fileDescriptor.CommentFieldSize = ReadVariableLength(data);
if (fileDescriptor.CommentFieldSize > 0)
{
byte[] commentField = data.ReadBytes(fileDescriptor.CommentFieldSize);
fileDescriptor.CommentField = Encoding.ASCII.GetString(commentField);
}
fileDescriptor.ExpandedFileSize = data.ReadUInt32();
fileDescriptor.FileTime = data.ReadUInt16();
fileDescriptor.FileDate = data.ReadUInt16();
// Hack for unknown format data
if (minorVersion == 22)
fileDescriptor.Unknown = data.ReadUInt16();
return fileDescriptor;
}
/// <summary>
/// Parse a Stream into a variable-length size prefix
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Variable-length size prefix</returns>
/// <remarks>
/// Strings are prefixed with their length. If the length is less than 128
/// then it is stored directly in one byte. If it is greater than 127 then
/// the high bit of the first byte is set to 1 and the remaining fifteen bits
/// contain the actual length in big-endian format.
/// </remarks>
private static int ReadVariableLength(Stream data)
{
byte b0 = data.ReadByteValue();
if (b0 < 0x80)
return b0;
b0 &= 0x7F;
byte b1 = data.ReadByteValue();
return (b0 << 8) | b1;
}
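// Worked example: a length of 300 (0x012C) is stored as the two bytes
// 0x81 0x2C -- the first byte is 0x80 | (300 >> 8), the second is 300 & 0xFF;
// decoding yields ((0x81 & 0x7F) << 8) | 0x2C = 300.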
#endregion
}
}


@@ -0,0 +1,732 @@
using System.Collections.Generic;
using System.IO;
using System.Text;
using BurnOutSharp.Models.SGA;
using BurnOutSharp.Utilities;
using static BurnOutSharp.Models.SGA.Constants;
namespace BurnOutSharp.Builders
{
public static class SGA
{
#region Byte Data
/// <summary>
/// Parse a byte array into an SGA
/// </summary>
/// <param name="data">Byte array to parse</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>Filled SGA on success, null on error</returns>
public static Models.SGA.File ParseFile(byte[] data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return ParseFile(dataStream);
}
#endregion
#region Stream Data
/// <summary>
/// Parse a Stream into an SGA
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled SGA on success, null on error</returns>
public static Models.SGA.File ParseFile(Stream data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
return null;
// If the offset is out of bounds
if (data.Position < 0 || data.Position >= data.Length)
return null;
// Cache the current offset
long initialOffset = data.Position;
// Create a new SGA to fill
var file = new Models.SGA.File();
#region Header
// Try to parse the header
var header = ParseHeader(data);
if (header == null)
return null;
// Set the SGA header
file.Header = header;
#endregion
#region Directory
// Try to parse the directory
var directory = ParseDirectory(data, header.MajorVersion);
if (directory == null)
return null;
// Set the SGA directory
file.Directory = directory;
#endregion
return file;
}
/// <summary>
/// Parse a Stream into an SGA header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled SGA header on success, null on error</returns>
private static Header ParseHeader(Stream data)
{
// TODO: Use marshalling here instead of building
byte[] signatureBytes = data.ReadBytes(8);
string signature = Encoding.ASCII.GetString(signatureBytes);
if (signature != SignatureString)
return null;
ushort majorVersion = data.ReadUInt16();
ushort minorVersion = data.ReadUInt16();
if (minorVersion != 0)
return null;
switch (majorVersion)
{
// Versions 4 and 5 share the same header
case 4:
case 5:
Header4 header4 = new Header4();
header4.Signature = signature;
header4.MajorVersion = majorVersion;
header4.MinorVersion = minorVersion;
header4.FileMD5 = data.ReadBytes(0x10);
byte[] header4Name = data.ReadBytes(count: 128);
header4.Name = Encoding.Unicode.GetString(header4Name).TrimEnd('\0');
header4.HeaderMD5 = data.ReadBytes(0x10);
header4.HeaderLength = data.ReadUInt32();
header4.FileDataOffset = data.ReadUInt32();
header4.Dummy0 = data.ReadUInt32();
return header4;
// Versions 6 and 7 share the same header
case 6:
case 7:
Header6 header6 = new Header6();
header6.Signature = signature;
header6.MajorVersion = majorVersion;
header6.MinorVersion = minorVersion;
byte[] header6Name = data.ReadBytes(count: 128);
header6.Name = Encoding.Unicode.GetString(header6Name).TrimEnd('\0');
header6.HeaderLength = data.ReadUInt32();
header6.FileDataOffset = data.ReadUInt32();
header6.Dummy0 = data.ReadUInt32();
return header6;
// No other major versions are recognized
default:
return null;
}
}
/// <summary>
/// Parse a Stream into an SGA directory
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="majorVersion">SGA major version</param>
/// <returns>Filled SGA directory on success, null on error</returns>
private static Models.SGA.Directory ParseDirectory(Stream data, ushort majorVersion)
{
#region Directory
// Create the appropriate type of directory
Models.SGA.Directory directory;
switch (majorVersion)
{
case 4: directory = new Directory4(); break;
case 5: directory = new Directory5(); break;
case 6: directory = new Directory6(); break;
case 7: directory = new Directory7(); break;
default: return null;
}
#endregion
// Cache the current offset
long currentOffset = data.Position;
#region Directory Header
// Try to parse the directory header
var directoryHeader = ParseDirectoryHeader(data, majorVersion);
if (directoryHeader == null)
return null;
// Set the directory header
switch (majorVersion)
{
case 4: (directory as Directory4).DirectoryHeader = directoryHeader as DirectoryHeader4; break;
case 5: (directory as Directory5).DirectoryHeader = directoryHeader as DirectoryHeader5; break;
case 6: (directory as Directory6).DirectoryHeader = directoryHeader as DirectoryHeader5; break;
case 7: (directory as Directory7).DirectoryHeader = directoryHeader as DirectoryHeader7; break;
default: return null;
}
#endregion
#region Sections
// Get the sections offset
long sectionOffset;
switch (majorVersion)
{
case 4: sectionOffset = (directoryHeader as DirectoryHeader4).SectionOffset; break;
case 5:
case 6: sectionOffset = (directoryHeader as DirectoryHeader5).SectionOffset; break;
case 7: sectionOffset = (directoryHeader as DirectoryHeader7).SectionOffset; break;
default: return null;
}
// Adjust the sections offset based on the directory
sectionOffset += currentOffset;
// Validate the offset
if (sectionOffset < 0 || sectionOffset >= data.Length)
return null;
// Seek to the sections
data.Seek(sectionOffset, SeekOrigin.Begin);
// Get the section count
uint sectionCount;
switch (majorVersion)
{
case 4: sectionCount = (directoryHeader as DirectoryHeader4).SectionCount; break;
case 5:
case 6: sectionCount = (directoryHeader as DirectoryHeader5).SectionCount; break;
case 7: sectionCount = (directoryHeader as DirectoryHeader7).SectionCount; break;
default: return null;
}
// Create the sections array
object[] sections;
switch (majorVersion)
{
case 4: sections = new Section4[sectionCount]; break;
case 5:
case 6:
case 7: sections = new Section5[sectionCount]; break;
default: return null;
}
// Try to parse the sections
for (int i = 0; i < sections.Length; i++)
{
switch (majorVersion)
{
case 4: sections[i] = ParseSection4(data); break;
case 5:
case 6:
case 7: sections[i] = ParseSection5(data); break;
default: return null;
}
}
// Assign the sections
switch (majorVersion)
{
case 4: (directory as Directory4).Sections = sections as Section4[]; break;
case 5: (directory as Directory5).Sections = sections as Section5[]; break;
case 6: (directory as Directory6).Sections = sections as Section5[]; break;
case 7: (directory as Directory7).Sections = sections as Section5[]; break;
default: return null;
}
#endregion
#region Folders
// Get the folders offset
long folderOffset;
switch (majorVersion)
{
case 4: folderOffset = (directoryHeader as DirectoryHeader4).FolderOffset; break;
case 5: folderOffset = (directoryHeader as DirectoryHeader5).FolderOffset; break;
case 6: folderOffset = (directoryHeader as DirectoryHeader5).FolderOffset; break;
case 7: folderOffset = (directoryHeader as DirectoryHeader7).FolderOffset; break;
default: return null;
}
// Adjust the folders offset based on the directory
folderOffset += currentOffset;
// Validate the offset
if (folderOffset < 0 || folderOffset >= data.Length)
return null;
// Seek to the folders
data.Seek(folderOffset, SeekOrigin.Begin);
// Get the folder count
uint folderCount;
switch (majorVersion)
{
case 4: folderCount = (directoryHeader as DirectoryHeader4).FolderCount; break;
case 5: folderCount = (directoryHeader as DirectoryHeader5).FolderCount; break;
case 6: folderCount = (directoryHeader as DirectoryHeader5).FolderCount; break;
case 7: folderCount = (directoryHeader as DirectoryHeader7).FolderCount; break;
default: return null;
}
// Create the folders array
object[] folders;
switch (majorVersion)
{
case 4: folders = new Folder4[folderCount]; break;
case 5: folders = new Folder5[folderCount]; break;
case 6: folders = new Folder5[folderCount]; break;
case 7: folders = new Folder5[folderCount]; break;
default: return null;
}
// Try to parse the folders
for (int i = 0; i < folders.Length; i++)
{
switch (majorVersion)
{
case 4: folders[i] = ParseFolder4(data); break;
case 5: folders[i] = ParseFolder5(data); break;
case 6: folders[i] = ParseFolder5(data); break;
case 7: folders[i] = ParseFolder5(data); break;
default: return null;
}
}
// Assign the folders
switch (majorVersion)
{
case 4: (directory as Directory4).Folders = folders as Folder4[]; break;
case 5: (directory as Directory5).Folders = folders as Folder5[]; break;
case 6: (directory as Directory6).Folders = folders as Folder5[]; break;
case 7: (directory as Directory7).Folders = folders as Folder5[]; break;
default: return null;
}
#endregion
#region Files
// Get the files offset
long fileOffset;
switch (majorVersion)
{
case 4: fileOffset = (directoryHeader as DirectoryHeader4).FileOffset; break;
case 5: fileOffset = (directoryHeader as DirectoryHeader5).FileOffset; break;
case 6: fileOffset = (directoryHeader as DirectoryHeader5).FileOffset; break;
case 7: fileOffset = (directoryHeader as DirectoryHeader7).FileOffset; break;
default: return null;
}
// Adjust the files offset based on the directory
fileOffset += currentOffset;
// Validate the offset
if (fileOffset < 0 || fileOffset >= data.Length)
return null;
// Seek to the files
data.Seek(fileOffset, SeekOrigin.Begin);
// Get the file count
uint fileCount;
switch (majorVersion)
{
case 4: fileCount = (directoryHeader as DirectoryHeader4).FileCount; break;
case 5: fileCount = (directoryHeader as DirectoryHeader5).FileCount; break;
case 6: fileCount = (directoryHeader as DirectoryHeader5).FileCount; break;
case 7: fileCount = (directoryHeader as DirectoryHeader7).FileCount; break;
default: return null;
}
// Create the files array
object[] files;
switch (majorVersion)
{
case 4: files = new File4[fileCount]; break;
case 5: files = new File4[fileCount]; break;
case 6: files = new File6[fileCount]; break;
case 7: files = new File7[fileCount]; break;
default: return null;
}
// Try to parse the files
for (int i = 0; i < files.Length; i++)
{
switch (majorVersion)
{
case 4: files[i] = ParseFile4(data); break;
case 5: files[i] = ParseFile4(data); break;
case 6: files[i] = ParseFile6(data); break;
case 7: files[i] = ParseFile7(data); break;
default: return null;
}
}
// Assign the files
switch (majorVersion)
{
case 4: (directory as Directory4).Files = files as File4[]; break;
case 5: (directory as Directory5).Files = files as File4[]; break;
case 6: (directory as Directory6).Files = files as File6[]; break;
case 7: (directory as Directory7).Files = files as File7[]; break;
default: return null;
}
#endregion
#region String Table
// Get the string table offset
long stringTableOffset;
switch (majorVersion)
{
case 4: stringTableOffset = (directoryHeader as DirectoryHeader4).StringTableOffset; break;
case 5: stringTableOffset = (directoryHeader as DirectoryHeader5).StringTableOffset; break;
case 6: stringTableOffset = (directoryHeader as DirectoryHeader5).StringTableOffset; break;
case 7: stringTableOffset = (directoryHeader as DirectoryHeader7).StringTableOffset; break;
default: return null;
}
// Adjust the string table offset based on the directory
stringTableOffset += currentOffset;
// Validate the offset
if (stringTableOffset < 0 || stringTableOffset >= data.Length)
return null;
// Seek to the string table
data.Seek(stringTableOffset, SeekOrigin.Begin);
// Get the string table count
uint stringCount;
switch (majorVersion)
{
case 4: stringCount = (directoryHeader as DirectoryHeader4).StringTableCount; break;
case 5: stringCount = (directoryHeader as DirectoryHeader5).StringTableCount; break;
case 6: stringCount = (directoryHeader as DirectoryHeader5).StringTableCount; break;
case 7: stringCount = (directoryHeader as DirectoryHeader7).StringTableCount; break;
default: return null;
}
// TODO: Are these strings actually indexed by number and not position?
// TODO: If indexed by position, I think it needs to be adjusted by start of table
// Create the strings dictionary
Dictionary<long, string> strings = new Dictionary<long, string>((int)stringCount);
// Get the current position to adjust the offsets
long stringTableStart = data.Position;
// Try to parse the strings
for (int i = 0; i < stringCount; i++)
{
long currentPosition = data.Position - stringTableStart;
strings[currentPosition] = data.ReadString(Encoding.ASCII);
}
// Assign the files
switch (majorVersion)
{
case 4: (directory as Directory4).StringTable = strings; break;
case 5: (directory as Directory5).StringTable = strings; break;
case 6: (directory as Directory6).StringTable = strings; break;
case 7: (directory as Directory7).StringTable = strings; break;
default: return null;
}
// Loop through all folders to assign names
for (int i = 0; i < folderCount; i++)
{
switch (majorVersion)
{
case 4: (directory as Directory4).Folders[i].Name = strings[(directory as Directory4).Folders[i].NameOffset]; break;
case 5: (directory as Directory5).Folders[i].Name = strings[(directory as Directory5).Folders[i].NameOffset]; break;
case 6: (directory as Directory6).Folders[i].Name = strings[(directory as Directory6).Folders[i].NameOffset]; break;
case 7: (directory as Directory7).Folders[i].Name = strings[(directory as Directory7).Folders[i].NameOffset]; break;
default: return null;
}
}
// Loop through all files to assign names
for (int i = 0; i < fileCount; i++)
{
switch (majorVersion)
{
case 4: (directory as Directory4).Files[i].Name = strings[(directory as Directory4).Files[i].NameOffset]; break;
case 5: (directory as Directory5).Files[i].Name = strings[(directory as Directory5).Files[i].NameOffset]; break;
case 6: (directory as Directory6).Files[i].Name = strings[(directory as Directory6).Files[i].NameOffset]; break;
case 7: (directory as Directory7).Files[i].Name = strings[(directory as Directory7).Files[i].NameOffset]; break;
default: return null;
}
}
#endregion
return directory;
}
/// <summary>
/// Parse a Stream into an SGA directory header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="majorVersion">SGA major version</param>
/// <returns>Filled SGA directory header on success, null on error</returns>
private static object ParseDirectoryHeader(Stream data, ushort majorVersion)
{
switch (majorVersion)
{
case 4: return ParseDirectory4Header(data);
case 5: return ParseDirectory5Header(data);
case 6: return ParseDirectory5Header(data);
case 7: return ParseDirectory7Header(data);
default: return null;
}
}
/// <summary>
/// Parse a Stream into an SGA directory header version 4
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled SGA directory header version 4 on success, null on error</returns>
private static DirectoryHeader4 ParseDirectory4Header(Stream data)
{
DirectoryHeader4 directoryHeader4 = new DirectoryHeader4();
directoryHeader4.SectionOffset = data.ReadUInt32();
directoryHeader4.SectionCount = data.ReadUInt16();
directoryHeader4.FolderOffset = data.ReadUInt32();
directoryHeader4.FolderCount = data.ReadUInt16();
directoryHeader4.FileOffset = data.ReadUInt32();
directoryHeader4.FileCount = data.ReadUInt16();
directoryHeader4.StringTableOffset = data.ReadUInt32();
directoryHeader4.StringTableCount = data.ReadUInt16();
return directoryHeader4;
}
/// <summary>
/// Parse a Stream into an SGA directory header version 5
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled SGA directory header version 5 on success, null on error</returns>
private static DirectoryHeader5 ParseDirectory5Header(Stream data)
{
DirectoryHeader5 directoryHeader5 = new DirectoryHeader5();
directoryHeader5.SectionOffset = data.ReadUInt32();
directoryHeader5.SectionCount = data.ReadUInt32();
directoryHeader5.FolderOffset = data.ReadUInt32();
directoryHeader5.FolderCount = data.ReadUInt32();
directoryHeader5.FileOffset = data.ReadUInt32();
directoryHeader5.FileCount = data.ReadUInt32();
directoryHeader5.StringTableOffset = data.ReadUInt32();
directoryHeader5.StringTableCount = data.ReadUInt32();
return directoryHeader5;
}
/// <summary>
/// Parse a Stream into an SGA directory header version 7
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled SGA directory header version 7 on success, null on error</returns>
private static DirectoryHeader7 ParseDirectory7Header(Stream data)
{
DirectoryHeader7 directoryHeader7 = new DirectoryHeader7();
directoryHeader7.SectionOffset = data.ReadUInt32();
directoryHeader7.SectionCount = data.ReadUInt32();
directoryHeader7.FolderOffset = data.ReadUInt32();
directoryHeader7.FolderCount = data.ReadUInt32();
directoryHeader7.FileOffset = data.ReadUInt32();
directoryHeader7.FileCount = data.ReadUInt32();
directoryHeader7.StringTableOffset = data.ReadUInt32();
directoryHeader7.StringTableCount = data.ReadUInt32();
directoryHeader7.HashTableOffset = data.ReadUInt32();
directoryHeader7.BlockSize = data.ReadUInt32();
return directoryHeader7;
}
/// <summary>
/// Parse a Stream into an SGA section version 4
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="majorVersion">SGA major version</param>
/// <returns>Filled SGA section version 4 on success, null on error</returns>
private static Section4 ParseSection4(Stream data)
{
Section4 section4 = new Section4();
byte[] section4Alias = data.ReadBytes(count: 64);
section4.Alias = Encoding.ASCII.GetString(section4Alias).TrimEnd('\0');
byte[] section4Name = data.ReadBytes(64);
section4.Name = Encoding.ASCII.GetString(section4Name).TrimEnd('\0');
section4.FolderStartIndex = data.ReadUInt16();
section4.FolderEndIndex = data.ReadUInt16();
section4.FileStartIndex = data.ReadUInt16();
section4.FileEndIndex = data.ReadUInt16();
section4.FolderRootIndex = data.ReadUInt16();
return section4;
}
/// <summary>
/// Parse a Stream into an SGA section version 5
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="majorVersion">SGA major version</param>
/// <returns>Filled SGA section version 5 on success, null on error</returns>
private static Section5 ParseSection5(Stream data)
{
Section5 section5 = new Section5();
byte[] section5Alias = data.ReadBytes(count: 64);
section5.Alias = Encoding.ASCII.GetString(section5Alias).TrimEnd('\0');
byte[] section5Name = data.ReadBytes(64);
section5.Name = Encoding.ASCII.GetString(section5Name).TrimEnd('\0');
section5.FolderStartIndex = data.ReadUInt32();
section5.FolderEndIndex = data.ReadUInt32();
section5.FileStartIndex = data.ReadUInt32();
section5.FileEndIndex = data.ReadUInt32();
section5.FolderRootIndex = data.ReadUInt32();
return section5;
}
/// <summary>
/// Parse a Stream into an SGA folder version 4
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="majorVersion">SGA major version</param>
/// <returns>Filled SGA folder version 4 on success, null on error</returns>
private static Folder4 ParseFolder4(Stream data)
{
Folder4 folder4 = new Folder4();
folder4.NameOffset = data.ReadUInt32();
folder4.Name = null; // Read from string table
folder4.FolderStartIndex = data.ReadUInt16();
folder4.FolderEndIndex = data.ReadUInt16();
folder4.FileStartIndex = data.ReadUInt16();
folder4.FileEndIndex = data.ReadUInt16();
return folder4;
}
/// <summary>
/// Parse a Stream into an SGA folder version 5
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="majorVersion">SGA major version</param>
/// <returns>Filled SGA folder version 5 on success, null on error</returns>
private static Folder5 ParseFolder5(Stream data)
{
Folder5 folder5 = new Folder5();
folder5.NameOffset = data.ReadUInt32();
folder5.Name = null; // Read from string table
folder5.FolderStartIndex = data.ReadUInt32();
folder5.FolderEndIndex = data.ReadUInt32();
folder5.FileStartIndex = data.ReadUInt32();
folder5.FileEndIndex = data.ReadUInt32();
return folder5;
}
/// <summary>
/// Parse a Stream into an SGA file version 4
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="majorVersion">SGA major version</param>
/// <returns>Filled SGA file version 4 on success, null on error</returns>
private static File4 ParseFile4(Stream data)
{
File4 file4 = new File4();
file4.NameOffset = data.ReadUInt32();
file4.Name = null; // Read from string table
file4.Offset = data.ReadUInt32();
file4.SizeOnDisk = data.ReadUInt32();
file4.Size = data.ReadUInt32();
file4.TimeModified = data.ReadUInt32();
file4.Dummy0 = data.ReadByteValue();
file4.Type = data.ReadByteValue();
return file4;
}
/// <summary>
/// Parse a Stream into an SGA file version 6
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="majorVersion">SGA major version</param>
/// <returns>Filled SGA file version 6 on success, null on error</returns>
private static File6 ParseFile6(Stream data)
{
File6 file6 = new File6();
file6.NameOffset = data.ReadUInt32();
file6.Name = null; // Read from string table
file6.Offset = data.ReadUInt32();
file6.SizeOnDisk = data.ReadUInt32();
file6.Size = data.ReadUInt32();
file6.TimeModified = data.ReadUInt32();
file6.Dummy0 = data.ReadByteValue();
file6.Type = data.ReadByteValue();
file6.CRC32 = data.ReadUInt32();
return file6;
}
/// <summary>
/// Parse a Stream into an SGA file version 7
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="majorVersion">SGA major version</param>
/// <returns>Filled SGA file version 7 on success, null on error</returns>
private static File7 ParseFile7(Stream data)
{
File7 file7 = new File7();
file7.NameOffset = data.ReadUInt32();
file7.Name = null; // Read from string table
file7.Offset = data.ReadUInt32();
file7.SizeOnDisk = data.ReadUInt32();
file7.Size = data.ReadUInt32();
file7.TimeModified = data.ReadUInt32();
file7.Dummy0 = data.ReadByteValue();
file7.Type = data.ReadByteValue();
file7.CRC32 = data.ReadUInt32();
file7.HashOffset = data.ReadUInt32();
return file7;
}
#endregion
}
}


@@ -0,0 +1,141 @@
using System.IO;
using System.Text;
using BurnOutSharp.Models.VBSP;
using BurnOutSharp.Utilities;
using static BurnOutSharp.Models.VBSP.Constants;
namespace BurnOutSharp.Builders
{
public static class VBSP
{
#region Byte Data
/// <summary>
/// Parse a byte array into a Half-Life 2 Level
/// </summary>
/// <param name="data">Byte array to parse</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>Filled Half-Life 2 Level on success, null on error</returns>
public static Models.VBSP.File ParseFile(byte[] data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return ParseFile(dataStream);
}
#endregion
#region Stream Data
/// <summary>
/// Parse a Stream into a Half-Life 2 Level
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life 2 Level on success, null on error</returns>
public static Models.VBSP.File ParseFile(Stream data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
return null;
// If the offset is out of bounds
if (data.Position < 0 || data.Position >= data.Length)
return null;
// Cache the current offset
long initialOffset = data.Position;
// Create a new Half-Life 2 Level to fill
var file = new Models.VBSP.File();
#region Header
// Try to parse the header
var header = ParseHeader(data);
if (header == null)
return null;
// Set the package header
file.Header = header;
#endregion
return file;
}
/// <summary>
/// Parse a Stream into a Half-Life 2 Level header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life 2 Level header on success, null on error</returns>
private static Header ParseHeader(Stream data)
{
// TODO: Use marshalling here instead of building
Header header = new Header();
byte[] signature = data.ReadBytes(4);
header.Signature = Encoding.ASCII.GetString(signature);
if (header.Signature != SignatureString)
return null;
header.Version = data.ReadInt32();
if ((header.Version < 19 || header.Version > 22) && header.Version != 0x00040014)
return null;
header.Lumps = new Lump[HL_VBSP_LUMP_COUNT];
for (int i = 0; i < HL_VBSP_LUMP_COUNT; i++)
{
header.Lumps[i] = ParseLump(data, header.Version);
}
header.MapRevision = data.ReadInt32();
return header;
}
/// <summary>
/// Parse a Stream into a Half-Life 2 Level lump
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="version">VBSP version</param>
/// <returns>Filled Half-Life 2 Level lump on success, null on error</returns>
private static Lump ParseLump(Stream data, int version)
{
// TODO: Use marshalling here instead of building
Lump lump = new Lump();
lump.Offset = data.ReadUInt32();
lump.Length = data.ReadUInt32();
lump.Version = data.ReadUInt32();
lump.FourCC = new char[4];
for (int i = 0; i < 4; i++)
{
lump.FourCC[i] = (char)data.ReadByte();
}
// This block was commented out because test VBSPs with header
// version 21 had the values in the "right" order already and
// were causing decompression issues
//if (version >= 21 && version != 0x00040014)
//{
// uint temp = lump.Version;
// lump.Version = lump.Offset;
// lump.Offset = lump.Length;
// lump.Length = temp;
//}
return lump;
}
#endregion
}
}
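A minimal sketch of consuming the VBSP builder above; the file path is illustrative and error handling is omitted:
using System.IO;
using BurnOutSharp.Builders;

class VBSPExample
{
    static void Main()
    {
        // Parse a level and report the header fields filled in by ParseHeader
        using (var stream = File.OpenRead("example.bsp")) // hypothetical path
        {
            var level = VBSP.ParseFile(stream);
            if (level?.Header != null)
                System.Console.WriteLine($"VBSP version {level.Header.Version}, map revision {level.Header.MapRevision}");
        }
    }
}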


@@ -0,0 +1,318 @@
using System.Collections.Generic;
using System.IO;
using System.Text;
using BurnOutSharp.Models.VPK;
using BurnOutSharp.Utilities;
using static BurnOutSharp.Models.VPK.Constants;
namespace BurnOutSharp.Builders
{
public static class VPK
{
#region Byte Data
/// <summary>
/// Parse a byte array into a Valve Package
/// </summary>
/// <param name="data">Byte array to parse</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>Filled Valve Package on success, null on error</returns>
public static Models.VPK.File ParseFile(byte[] data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return ParseFile(dataStream);
}
#endregion
#region Stream Data
/// <summary>
/// Parse a Stream into a Valve Package
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Valve Package on success, null on error</returns>
public static Models.VPK.File ParseFile(Stream data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
return null;
// If the offset is out of bounds
if (data.Position < 0 || data.Position >= data.Length)
return null;
// Cache the current offset
long initialOffset = data.Position;
// Create a new Valve Package to fill
var file = new Models.VPK.File();
#region Header
// Try to parse the header
// The original version had no signature.
var header = ParseHeader(data);
// Set the package header
file.Header = header;
#endregion
#region Extended Header
if (header?.Version == 2)
{
// Try to parse the extended header
var extendedHeader = ParseExtendedHeader(data);
if (extendedHeader == null)
return null;
// Set the package extended header
file.ExtendedHeader = extendedHeader;
}
#endregion
#region Directory Items
// Create the directory items tree
var directoryItems = ParseDirectoryItemTree(data);
// Set the directory items
file.DirectoryItems = directoryItems;
#endregion
#region Archive Hashes
if (header?.Version == 2 && file.ExtendedHeader != null && file.ExtendedHeader.ArchiveHashLength > 0)
{
// Create the archive hashes list
var archiveHashes = new List<ArchiveHash>();
// Cache the current offset
initialOffset = data.Position;
// Try to parse the directory items
while (data.Position < initialOffset + file.ExtendedHeader.ArchiveHashLength)
{
var archiveHash = ParseArchiveHash(data);
archiveHashes.Add(archiveHash);
}
file.ArchiveHashes = archiveHashes.ToArray();
}
#endregion
return file;
}
/// <summary>
/// Parse a Stream into a Valve Package header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Valve Package header on success, null on error</returns>
private static Header ParseHeader(Stream data)
{
// TODO: Use marshalling here instead of building
Header header = new Header();
header.Signature = data.ReadUInt32();
if (header.Signature != SignatureUInt32)
return null;
header.Version = data.ReadUInt32();
if (header.Version > 2)
return null;
header.DirectoryLength = data.ReadUInt32();
return header;
}
/// <summary>
/// Parse a Stream into a Valve Package extended header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Valve Package extended header on success, null on error</returns>
private static ExtendedHeader ParseExtendedHeader(Stream data)
{
// TODO: Use marshalling here instead of building
ExtendedHeader extendedHeader = new ExtendedHeader();
extendedHeader.Dummy0 = data.ReadUInt32();
extendedHeader.ArchiveHashLength = data.ReadUInt32();
extendedHeader.ExtraLength = data.ReadUInt32();
extendedHeader.Dummy1 = data.ReadUInt32();
return extendedHeader;
}
/// <summary>
/// Parse a Stream into a Valve Package archive hash
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Valve Package archive hash on success, null on error</returns>
private static ArchiveHash ParseArchiveHash(Stream data)
{
// TODO: Use marshalling here instead of building
ArchiveHash archiveHash = new ArchiveHash();
archiveHash.ArchiveIndex = data.ReadUInt32();
archiveHash.ArchiveOffset = data.ReadUInt32();
archiveHash.Length = data.ReadUInt32();
archiveHash.Hash = data.ReadBytes(0x10);
return archiveHash;
}
/// <summary>
/// Parse a Stream into a Valve Package directory item tree
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Valve Package directory item tree on success, null on error</returns>
private static DirectoryItem[] ParseDirectoryItemTree(Stream data)
{
// Create the directory items list
var directoryItems = new List<DirectoryItem>();
while (true)
{
// Get the extension
string extensionString = data.ReadString(Encoding.ASCII);
if (string.IsNullOrEmpty(extensionString))
break;
// Sanitize the extension
for (int i = 0; i < 0x20; i++)
{
extensionString = extensionString.Replace($"{(char)i}", string.Empty);
}
while (true)
{
// Get the path
string pathString = data.ReadString(Encoding.ASCII);
if (string.IsNullOrEmpty(pathString))
break;
// Sanitize the path
for (int i = 0; i < 0x20; i++)
{
pathString = pathString.Replace($"{(char)i}", string.Empty);
}
while (true)
{
// Get the name
string nameString = data.ReadString(Encoding.ASCII);
if (string.IsNullOrEmpty(nameString))
break;
// Sanitize the name
for (int i = 0; i < 0x20; i++)
{
nameString = nameString.Replace($"{(char)i}", string.Empty);
}
// Get the directory item
var directoryItem = ParseDirectoryItem(data, extensionString, pathString, nameString);
// Add the directory item
directoryItems.Add(directoryItem);
}
}
}
return directoryItems.ToArray();
}
/// <summary>
/// Parse a Stream into a Valve Package directory item
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Valve Package directory item on success, null on error</returns>
private static DirectoryItem ParseDirectoryItem(Stream data, string extension, string path, string name)
{
DirectoryItem directoryItem = new DirectoryItem();
directoryItem.Extension = extension;
directoryItem.Path = path;
directoryItem.Name = name;
// Get the directory entry
var directoryEntry = ParseDirectoryEntry(data);
// Set the directory entry
directoryItem.DirectoryEntry = directoryEntry;
// Get the preload data pointer
long preloadDataPointer = -1;
int preloadDataLength = -1;
if (directoryEntry.ArchiveIndex == HL_VPK_NO_ARCHIVE && directoryEntry.EntryLength > 0)
{
preloadDataPointer = directoryEntry.EntryOffset;
preloadDataLength = (int)directoryEntry.EntryLength;
}
else if (directoryEntry.PreloadBytes > 0)
{
preloadDataPointer = data.Position;
preloadDataLength = directoryEntry.PreloadBytes;
}
// If we had a valid preload data pointer
byte[] preloadData = null;
if (preloadDataPointer >= 0 && preloadDataLength > 0)
{
// Cache the current offset
long initialOffset = data.Position;
// Seek to the preload data offset
data.Seek(preloadDataPointer, SeekOrigin.Begin);
// Read the preload data
preloadData = data.ReadBytes(preloadDataLength);
// Seek back to the original offset
data.Seek(initialOffset, SeekOrigin.Begin);
}
// Set the preload data
directoryItem.PreloadData = preloadData;
return directoryItem;
}
/// <summary>
/// Parse a Stream into a Valve Package directory entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Valve Package directory entry on success, null on error</returns>
private static DirectoryEntry ParseDirectoryEntry(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryEntry directoryEntry = new DirectoryEntry();
directoryEntry.CRC = data.ReadUInt32();
directoryEntry.PreloadBytes = data.ReadUInt16();
directoryEntry.ArchiveIndex = data.ReadUInt16();
directoryEntry.EntryOffset = data.ReadUInt32();
directoryEntry.EntryLength = data.ReadUInt32();
directoryEntry.Dummy0 = data.ReadUInt16();
return directoryEntry;
}
#endregion
}
}
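A minimal sketch of walking the directory tree produced by the VPK builder above; the path is illustrative:
using System.IO;
using BurnOutSharp.Builders;

class VPKExample
{
    static void Main()
    {
        using (var stream = File.OpenRead("pak01_dir.vpk")) // hypothetical path
        {
            var package = VPK.ParseFile(stream);
            if (package?.DirectoryItems == null)
                return;

            // Each item carries the extension/path/name triple recovered by ParseDirectoryItemTree
            foreach (var item in package.DirectoryItems)
                System.Console.WriteLine($"{item.Path}/{item.Name}.{item.Extension} ({item.DirectoryEntry.EntryLength} bytes)");
        }
    }
}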


@@ -0,0 +1,266 @@
using System.IO;
using System.Text;
using BurnOutSharp.Models.WAD;
using BurnOutSharp.Utilities;
using static BurnOutSharp.Models.WAD.Constants;
namespace BurnOutSharp.Builders
{
public static class WAD
{
#region Byte Data
/// <summary>
/// Parse a byte array into a Half-Life Texture Package
/// </summary>
/// <param name="data">Byte array to parse</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>Filled Half-Life Texture Package on success, null on error</returns>
public static Models.WAD.File ParseFile(byte[] data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return ParseFile(dataStream);
}
#endregion
#region Stream Data
/// <summary>
/// Parse a Stream into a Half-Life Texture Package
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Texture Package on success, null on error</returns>
public static Models.WAD.File ParseFile(Stream data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
return null;
// If the offset is out of bounds
if (data.Position < 0 || data.Position >= data.Length)
return null;
// Cache the current offset
long initialOffset = data.Position;
// Create a new Half-Life Texture Package to fill
var file = new Models.WAD.File();
#region Header
// Try to parse the header
var header = ParseHeader(data);
if (header == null)
return null;
// Set the package header
file.Header = header;
#endregion
#region Lumps
// Get the lump offset
uint lumpOffset = header.LumpOffset;
if (lumpOffset >= data.Length)
return null;
// Seek to the lump offset
data.Seek(lumpOffset, SeekOrigin.Begin);
// Create the lump array
file.Lumps = new Lump[header.LumpCount];
for (int i = 0; i < header.LumpCount; i++)
{
var lump = ParseLump(data);
file.Lumps[i] = lump;
}
#endregion
#region Lump Infos
// Create the lump info array
file.LumpInfos = new LumpInfo[header.LumpCount];
for (int i = 0; i < header.LumpCount; i++)
{
var lump = file.Lumps[i];
if (lump.Compression != 0)
{
file.LumpInfos[i] = null;
continue;
}
// Get the lump info offset
uint lumpInfoOffset = lump.Offset;
if (lumpInfoOffset >= data.Length)
{
file.LumpInfos[i] = null;
continue;
}
// Seek to the lump info offset
data.Seek(lumpInfoOffset, SeekOrigin.Begin);
// Try to parse the lump info -- TODO: Do we ever set the mipmap level?
var lumpInfo = ParseLumpInfo(data, lump.Type);
file.LumpInfos[i] = lumpInfo;
}
#endregion
return file;
}
/// <summary>
/// Parse a Stream into a Half-Life Texture Package header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Texture Package header on success, null on error</returns>
private static Header ParseHeader(Stream data)
{
// TODO: Use marshalling here instead of building
Header header = new Header();
byte[] signature = data.ReadBytes(4);
header.Signature = Encoding.ASCII.GetString(signature);
if (header.Signature != SignatureString)
return null;
header.LumpCount = data.ReadUInt32();
header.LumpOffset = data.ReadUInt32();
return header;
}
/// <summary>
/// Parse a Stream into a Half-Life Texture Package lump
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Texture Package lump on success, null on error</returns>
private static Lump ParseLump(Stream data)
{
// TODO: Use marshalling here instead of building
Lump lump = new Lump();
lump.Offset = data.ReadUInt32();
lump.DiskLength = data.ReadUInt32();
lump.Length = data.ReadUInt32();
lump.Type = data.ReadByteValue();
lump.Compression = data.ReadByteValue();
lump.Padding0 = data.ReadByteValue();
lump.Padding1 = data.ReadByteValue();
byte[] name = data.ReadBytes(16);
lump.Name = Encoding.ASCII.GetString(name).TrimEnd('\0');
return lump;
}
/// <summary>
/// Parse a Stream into a Half-Life Texture Package lump info
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="type">Lump type</param>
/// <param name="mipmap">Mipmap level</param>
/// <returns>Filled Half-Life Texture Package lump info on success, null on error</returns>
private static LumpInfo ParseLumpInfo(Stream data, byte type, uint mipmap = 0)
{
// TODO: Use marshalling here instead of building
LumpInfo lumpInfo = new LumpInfo();
// Cache the initial offset
long initialOffset = data.Position;
// Type 0x42 has no name, type 0x43 does. Are these flags?
if (type == 0x42)
{
if (mipmap > 0)
return null;
lumpInfo.Width = data.ReadUInt32();
lumpInfo.Height = data.ReadUInt32();
lumpInfo.PixelData = data.ReadBytes((int)(lumpInfo.Width * lumpInfo.Height));
lumpInfo.PaletteSize = data.ReadUInt16();
}
else if (type == 0x43)
{
if (mipmap > 3)
return null;
byte[] name = data.ReadBytes(16);
lumpInfo.Name = Encoding.ASCII.GetString(name).TrimEnd('\0');
lumpInfo.Width = data.ReadUInt32();
lumpInfo.Height = data.ReadUInt32();
lumpInfo.PixelOffset = data.ReadUInt32();
_ = data.ReadBytes(12); // Unknown data
// Cache the current offset
long currentOffset = data.Position;
// Seek to the pixel data
data.Seek(initialOffset + lumpInfo.PixelOffset, SeekOrigin.Begin);
// Read the pixel data
lumpInfo.PixelData = data.ReadBytes((int)(lumpInfo.Width * lumpInfo.Height));
// Seek back to the offset
data.Seek(currentOffset, SeekOrigin.Begin);
uint pixelSize = lumpInfo.Width * lumpInfo.Height;
// Mipmap data -- TODO: How do we determine this during initial parsing?
switch (mipmap)
{
case 0: break; // Full-size image; no earlier mipmap data to skip
case 1: _ = data.ReadBytes((int)pixelSize); break;
case 2: _ = data.ReadBytes((int)(pixelSize + (pixelSize / 4))); break;
case 3: _ = data.ReadBytes((int)(pixelSize + (pixelSize / 4) + (pixelSize / 16))); break;
default: return null;
}
_ = data.ReadBytes((int)(pixelSize + (pixelSize / 4) + (pixelSize / 16) + (pixelSize / 64))); // Skip the full mipmap pixel chain
lumpInfo.PaletteSize = data.ReadUInt16();
lumpInfo.PaletteData = data.ReadBytes((int)lumpInfo.PaletteSize * 3);
}
else
{
return null;
}
// Adjust based on mipmap level
switch (mipmap)
{
case 0:
// Full-size image; no dimension adjustment needed
break;
case 1:
lumpInfo.Width /= 2;
lumpInfo.Height /= 2;
break;
case 2:
lumpInfo.Width /= 4;
lumpInfo.Height /= 4;
break;
case 3:
lumpInfo.Width /= 8;
lumpInfo.Height /= 8;
break;
default:
return null;
}
return lumpInfo;
}
#endregion
}
}
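A minimal sketch of listing the lumps parsed by the WAD builder above; the path is illustrative:
using System.IO;
using BurnOutSharp.Builders;

class WADExample
{
    static void Main()
    {
        using (var stream = File.OpenRead("halflife.wad")) // hypothetical path
        {
            var package = WAD.ParseFile(stream);
            if (package?.Lumps == null)
                return;

            // Type 0x42 and 0x43 lumps also get a parsed LumpInfo entry above
            foreach (var lump in package.Lumps)
                System.Console.WriteLine($"{lump.Name} (type 0x{lump.Type:X2}, {lump.Length} bytes)");
        }
    }
}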


@@ -0,0 +1,274 @@
using System.IO;
using System.Text;
using BurnOutSharp.Models.XZP;
using BurnOutSharp.Utilities;
using static BurnOutSharp.Models.XZP.Constants;
namespace BurnOutSharp.Builders
{
public static class XZP
{
#region Byte Data
/// <summary>
/// Parse a byte array into an XBox Package File
/// </summary>
/// <param name="data">Byte array to parse</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>Filled XBox Package File on success, null on error</returns>
public static Models.XZP.File ParseFile(byte[] data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return ParseFile(dataStream);
}
#endregion
#region Stream Data
/// <summary>
/// Parse a Stream into an XBox Package File
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled XBox Package File on success, null on error</returns>
public static Models.XZP.File ParseFile(Stream data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
return null;
// If the offset is out of bounds
if (data.Position < 0 || data.Position >= data.Length)
return null;
// Cache the current offset
long initialOffset = data.Position;
// Create a new XBox Package File to fill
var file = new Models.XZP.File();
#region Header
// Try to parse the header
var header = ParseHeader(data);
if (header == null)
return null;
// Set the package header
file.Header = header;
#endregion
#region Directory Entries
// Create the directory entry array
file.DirectoryEntries = new DirectoryEntry[header.DirectoryEntryCount];
// Try to parse the directory entries
for (int i = 0; i < header.DirectoryEntryCount; i++)
{
var directoryEntry = ParseDirectoryEntry(data);
file.DirectoryEntries[i] = directoryEntry;
}
#endregion
#region Preload Directory Entries
if (header.PreloadBytes > 0)
{
// Create the preload directory entry array
file.PreloadDirectoryEntries = new DirectoryEntry[header.PreloadDirectoryEntryCount];
// Try to parse the preload directory entries
for (int i = 0; i < header.PreloadDirectoryEntryCount; i++)
{
var directoryEntry = ParseDirectoryEntry(data);
file.PreloadDirectoryEntries[i] = directoryEntry;
}
}
#endregion
#region Preload Directory Mappings
if (header.PreloadBytes > 0)
{
// Create the preload directory mapping array
file.PreloadDirectoryMappings = new DirectoryMapping[header.PreloadDirectoryEntryCount];
// Try to parse the preload directory mappings
for (int i = 0; i < header.PreloadDirectoryEntryCount; i++)
{
var directoryMapping = ParseDirectoryMapping(data);
file.PreloadDirectoryMappings[i] = directoryMapping;
}
}
#endregion
#region Directory Items
if (header.DirectoryItemCount > 0)
{
// Get the directory item offset
uint directoryItemOffset = header.DirectoryItemOffset;
if (directoryItemOffset >= data.Length)
return null;
// Seek to the directory items
data.Seek(directoryItemOffset, SeekOrigin.Begin);
// Create the directory item array
file.DirectoryItems = new DirectoryItem[header.DirectoryItemCount];
// Try to parse the directory items
for (int i = 0; i < header.DirectoryItemCount; i++)
{
var directoryItem = ParseDirectoryItem(data);
file.DirectoryItems[i] = directoryItem;
}
}
#endregion
#region Footer
// Seek to the footer
data.Seek(-8, SeekOrigin.End);
// Try to parse the footer
var footer = ParseFooter(data);
if (footer == null)
return null;
// Set the package footer
file.Footer = footer;
#endregion
return file;
}
/// <summary>
/// Parse a Stream into an XBox Package File header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled XBox Package File header on success, null on error</returns>
private static Header ParseHeader(Stream data)
{
// TODO: Use marshalling here instead of building
Header header = new Header();
byte[] signature = data.ReadBytes(4);
header.Signature = Encoding.ASCII.GetString(signature);
if (header.Signature != HeaderSignatureString)
return null;
header.Version = data.ReadUInt32();
if (header.Version != 6)
return null;
header.PreloadDirectoryEntryCount = data.ReadUInt32();
header.DirectoryEntryCount = data.ReadUInt32();
header.PreloadBytes = data.ReadUInt32();
header.HeaderLength = data.ReadUInt32();
header.DirectoryItemCount = data.ReadUInt32();
header.DirectoryItemOffset = data.ReadUInt32();
header.DirectoryItemLength = data.ReadUInt32();
return header;
}
/// <summary>
/// Parse a Stream into an XBox Package File directory entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled XBox Package File directory entry on success, null on error</returns>
private static DirectoryEntry ParseDirectoryEntry(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryEntry directoryEntry = new DirectoryEntry();
directoryEntry.FileNameCRC = data.ReadUInt32();
directoryEntry.EntryLength = data.ReadUInt32();
directoryEntry.EntryOffset = data.ReadUInt32();
return directoryEntry;
}
/// <summary>
/// Parse a Stream into an XBox Package File directory mapping
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled XBox Package File directory mapping on success, null on error</returns>
private static DirectoryMapping ParseDirectoryMapping(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryMapping directoryMapping = new DirectoryMapping();
directoryMapping.PreloadDirectoryEntryIndex = data.ReadUInt16();
return directoryMapping;
}
/// <summary>
/// Parse a Stream into an XBox Package File directory item
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled XBox Package File directory item on success, null on error</returns>
private static DirectoryItem ParseDirectoryItem(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryItem directoryItem = new DirectoryItem();
directoryItem.FileNameCRC = data.ReadUInt32();
directoryItem.NameOffset = data.ReadUInt32();
directoryItem.TimeCreated = data.ReadUInt32();
// Cache the current offset
long currentPosition = data.Position;
// Seek to the name offset
data.Seek(directoryItem.NameOffset, SeekOrigin.Begin);
// Read the name
directoryItem.Name = data.ReadString(Encoding.ASCII);
// Seek back to the right position
data.Seek(currentPosition, SeekOrigin.Begin);
return directoryItem;
}
/// <summary>
/// Parse a Stream into an XBox Package File footer
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled XBox Package File footer on success, null on error</returns>
private static Footer ParseFooter(Stream data)
{
// TODO: Use marshalling here instead of building
Footer footer = new Footer();
footer.FileLength = data.ReadUInt32();
byte[] signature = data.ReadBytes(4);
footer.Signature = Encoding.ASCII.GetString(signature);
if (footer.Signature != FooterSignatureString)
return null;
return footer;
}
#endregion
}
}
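A minimal sketch of enumerating the directory items parsed by the XZP builder above; the path is illustrative:
using System.IO;
using BurnOutSharp.Builders;

class XZPExample
{
    static void Main()
    {
        using (var stream = File.OpenRead("example.xzp")) // hypothetical path
        {
            var package = XZP.ParseFile(stream);
            if (package?.DirectoryItems == null)
                return;

            // Names were resolved from the string data via each item's NameOffset
            foreach (var item in package.DirectoryItems)
                System.Console.WriteLine($"{item.Name} (CRC 0x{item.FileNameCRC:X8})");
        }
    }
}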


@@ -0,0 +1,12 @@
namespace BurnOutSharp.Compression.ADPCM
{
/// <see href="https://github.com/ladislav-zezula/StormLib/blob/master/src/adpcm/adpcm.cpp"/>
public unsafe struct ADPCM_DATA
{
public uint[] pValues;
public int BitCount;
public int field_8;
public int field_C;
public int field_10;
}
}


@@ -0,0 +1,131 @@
using static BurnOutSharp.Compression.ADPCM.Constants;
using static BurnOutSharp.Compression.ADPCM.Helper;
namespace BurnOutSharp.Compression.ADPCM
{
public unsafe class Compressor
{
/// <summary>
/// Compression routine
/// </summary>
/// <see href="https://github.com/ladislav-zezula/StormLib/blob/master/src/adpcm/adpcm.cpp"/>
public int CompressADPCM(void* pvOutBuffer, int cbOutBuffer, void* pvInBuffer, int cbInBuffer, int ChannelCount, int CompressionLevel)
{
TADPCMStream os = new TADPCMStream(pvOutBuffer, cbOutBuffer); // The output stream
TADPCMStream @is = new TADPCMStream(pvInBuffer, cbInBuffer); // The input stream
byte BitShift = (byte)(CompressionLevel - 1);
short[] PredictedSamples = new short[MAX_ADPCM_CHANNEL_COUNT];// Predicted samples for each channel
short[] StepIndexes = new short[MAX_ADPCM_CHANNEL_COUNT]; // Step indexes for each channel
short InputSample = 0; // Input sample for the current channel
int TotalStepSize;
int ChannelIndex;
int AbsDifference;
int Difference;
int MaxBitMask;
int StepSize;
// First byte in the output stream contains zero. The second one contains the bit shift (CompressionLevel - 1)
os.WriteByteSample(0);
if (!os.WriteByteSample(BitShift))
return 2;
// Set the initial step index for each channel
PredictedSamples[0] = PredictedSamples[1] = 0;
StepIndexes[0] = StepIndexes[1] = INITIAL_ADPCM_STEP_INDEX;
// Next, InitialSample value for each channel follows
for (int i = 0; i < ChannelCount; i++)
{
// Get the initial sample from the input stream
if (!@is.ReadWordSample(ref InputSample))
return os.LengthProcessed(pvOutBuffer);
// Store the initial sample to our sample array
PredictedSamples[i] = InputSample;
// Also store the loaded sample to the output stream
if (!os.WriteWordSample(InputSample))
return os.LengthProcessed(pvOutBuffer);
}
// Get the initial index
ChannelIndex = ChannelCount - 1;
// Now keep reading the input data as long as there is something in the input buffer
while (@is.ReadWordSample(ref InputSample))
{
int EncodedSample = 0;
// If we have two channels, we need to flip the channel index
ChannelIndex = (ChannelIndex + 1) % ChannelCount;
// Get the difference from the previous sample.
// If the difference is negative, set the sign bit to the encoded sample
AbsDifference = InputSample - PredictedSamples[ChannelIndex];
if (AbsDifference < 0)
{
AbsDifference = -AbsDifference;
EncodedSample |= 0x40;
}
// If the difference is too low (below the difference threshold),
// write a step index modifier marker
StepSize = StepSizeTable[StepIndexes[ChannelIndex]];
if (AbsDifference < (StepSize >> CompressionLevel))
{
if (StepIndexes[ChannelIndex] != 0)
StepIndexes[ChannelIndex]--;
os.WriteByteSample(0x80);
}
else
{
// If the difference is too high, write marker that
// indicates increase in step size
while (AbsDifference > (StepSize << 1))
{
if (StepIndexes[ChannelIndex] >= 0x58)
break;
// Modify the step index
StepIndexes[ChannelIndex] += 8;
if (StepIndexes[ChannelIndex] > 0x58)
StepIndexes[ChannelIndex] = 0x58;
// Write the "modify step index" marker
StepSize = StepSizeTable[StepIndexes[ChannelIndex]];
os.WriteByteSample(0x81);
}
// Get the limit bit value
MaxBitMask = (1 << (BitShift - 1));
MaxBitMask = (MaxBitMask > 0x20) ? 0x20 : MaxBitMask;
Difference = StepSize >> BitShift;
TotalStepSize = 0;
for (int BitVal = 0x01; BitVal <= MaxBitMask; BitVal <<= 1)
{
if ((TotalStepSize + StepSize) <= AbsDifference)
{
TotalStepSize += StepSize;
EncodedSample |= BitVal;
}
StepSize >>= 1;
}
PredictedSamples[ChannelIndex] = (short)UpdatePredictedSample(PredictedSamples[ChannelIndex],
EncodedSample,
Difference + TotalStepSize);
// Write the encoded sample to the output stream
if (!os.WriteByteSample((byte)EncodedSample))
break;
// Calculate the step index to use for the next encode
StepIndexes[ChannelIndex] = GetNextStepIndex(StepIndexes[ChannelIndex], (uint)EncodedSample);
}
}
return os.LengthProcessed(pvOutBuffer);
}
}
}
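A rough sketch of driving the compressor above with pinned buffers; the placeholder samples, mono channel count, and compression level 5 are illustrative assumptions, and the output buffer sizing is a rough convenience for this placeholder input:
using BurnOutSharp.Compression.ADPCM;

class ADPCMCompressExample
{
    static unsafe void Main()
    {
        short[] samples = new short[1024]; // placeholder 16-bit mono PCM
        byte[] output = new byte[samples.Length * 2 + 2]; // enough for this placeholder input

        fixed (short* pIn = samples)
        fixed (byte* pOut = output)
        {
            var compressor = new Compressor();
            int written = compressor.CompressADPCM(pOut, output.Length, pIn, samples.Length * 2, 1, 5);
            System.Console.WriteLine($"Compressed to {written} bytes");
        }
    }
}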


@@ -0,0 +1,51 @@
namespace BurnOutSharp.Compression.ADPCM
{
/// <see href="https://github.com/ladislav-zezula/StormLib/blob/master/src/adpcm/adpcm.h"/>
public static class Constants
{
public const int MAX_ADPCM_CHANNEL_COUNT = 2;
public const byte INITIAL_ADPCM_STEP_INDEX = 0x2C;
#region Tables necessary for decompression
public static readonly int[] NextStepTable =
{
-1, 0, -1, 4, -1, 2, -1, 6,
-1, 1, -1, 5, -1, 3, -1, 7,
-1, 1, -1, 5, -1, 3, -1, 7,
-1, 2, -1, 4, -1, 6, -1, 8
};
public static readonly int[] StepSizeTable =
{
7, 8, 9, 10, 11, 12, 13, 14,
16, 17, 19, 21, 23, 25, 28, 31,
34, 37, 41, 45, 50, 55, 60, 66,
73, 80, 88, 97, 107, 118, 130, 143,
157, 173, 190, 209, 230, 253, 279, 307,
337, 371, 408, 449, 494, 544, 598, 658,
724, 796, 876, 963, 1060, 1166, 1282, 1411,
1552, 1707, 1878, 2066, 2272, 2499, 2749, 3024,
3327, 3660, 4026, 4428, 4871, 5358, 5894, 6484,
7132, 7845, 8630, 9493, 10442, 11487, 12635, 13899,
15289, 16818, 18500, 20350, 22385, 24623, 27086, 29794,
32767
};
#endregion
#region ADPCM decompression present in Starcraft I BETA
public static readonly uint[] adpcm_values_2 = { 0x33, 0x66 };
public static readonly uint[] adpcm_values_3 = { 0x3A, 0x3A, 0x50, 0x70 };
public static readonly uint[] adpcm_values_4 = { 0x3A, 0x3A, 0x3A, 0x3A, 0x4D, 0x66, 0x80, 0x9A };
public static readonly uint[] adpcm_values_6 =
{
0x3A, 0x3A, 0x3A, 0x3A, 0x3A, 0x3A, 0x3A, 0x3A, 0x3A, 0x3A, 0x3A, 0x3A, 0x3A, 0x3A, 0x3A, 0x3A,
0x46, 0x53, 0x60, 0x6D, 0x7A, 0x86, 0x93, 0xA0, 0xAD, 0xBA, 0xC6, 0xD3, 0xE0, 0xED, 0xFA, 0x106
};
#endregion
}
}


@@ -0,0 +1,205 @@
using static BurnOutSharp.Compression.ADPCM.Constants;
using static BurnOutSharp.Compression.ADPCM.Helper;
namespace BurnOutSharp.Compression.ADPCM
{
public unsafe class Decompressor
{
/// <summary>
/// Decompression routine
/// </summary>
/// <see href="https://github.com/ladislav-zezula/StormLib/blob/master/src/adpcm/adpcm.cpp"/>
public int DecompressADPCM(void* pvOutBuffer, int cbOutBuffer, void* pvInBuffer, int cbInBuffer, int ChannelCount)
{
TADPCMStream os = new TADPCMStream(pvOutBuffer, cbOutBuffer); // Output stream
TADPCMStream @is = new TADPCMStream(pvInBuffer, cbInBuffer); // Input stream
byte EncodedSample = 0;
byte BitShift = 0;
short[] PredictedSamples = new short[MAX_ADPCM_CHANNEL_COUNT]; // Predicted sample for each channel
short[] StepIndexes = new short[MAX_ADPCM_CHANNEL_COUNT]; // Predicted step index for each channel
int ChannelIndex; // Current channel index
// Initialize the StepIndex for each channel
PredictedSamples[0] = PredictedSamples[1] = 0;
StepIndexes[0] = StepIndexes[1] = INITIAL_ADPCM_STEP_INDEX;
// The first byte is always zero, the second one contains bit shift (compression level - 1)
@is.ReadByteSample(ref BitShift);
@is.ReadByteSample(ref BitShift);
// Next, InitialSample value for each channel follows
for (int i = 0; i < ChannelCount; i++)
{
// Get the initial sample from the input stream
short InitialSample = 0;
// Attempt to read the initial sample
if (!@is.ReadWordSample(ref InitialSample))
return os.LengthProcessed(pvOutBuffer);
// Store the initial sample to our sample array
PredictedSamples[i] = InitialSample;
// Also store the loaded sample to the output stream
if (!os.WriteWordSample(InitialSample))
return os.LengthProcessed(pvOutBuffer);
}
// Get the initial index
ChannelIndex = ChannelCount - 1;
// Keep reading as long as there is something in the input buffer
while (@is.ReadByteSample(ref EncodedSample))
{
// If we have two channels, we need to flip the channel index
ChannelIndex = (ChannelIndex + 1) % ChannelCount;
if (EncodedSample == 0x80)
{
if (StepIndexes[ChannelIndex] != 0)
StepIndexes[ChannelIndex]--;
if (!os.WriteWordSample(PredictedSamples[ChannelIndex]))
return os.LengthProcessed(pvOutBuffer);
}
else if (EncodedSample == 0x81)
{
// Modify the step index
StepIndexes[ChannelIndex] += 8;
if (StepIndexes[ChannelIndex] > 0x58)
StepIndexes[ChannelIndex] = 0x58;
// Next pass, keep going on the same channel
ChannelIndex = (ChannelIndex + 1) % ChannelCount;
}
else
{
int StepIndex = StepIndexes[ChannelIndex];
int StepSize = StepSizeTable[StepIndex];
// Encode one sample
PredictedSamples[ChannelIndex] = (short)DecodeSample(PredictedSamples[ChannelIndex],
EncodedSample,
StepSize,
StepSize >> BitShift);
// Write the decoded sample to the output stream
if (!os.WriteWordSample(PredictedSamples[ChannelIndex]))
break;
// Calculate the step index to use for the next sample
StepIndexes[ChannelIndex] = GetNextStepIndex(StepIndex, EncodedSample);
}
}
// Return total bytes written since beginning of the output buffer
return os.LengthProcessed(pvOutBuffer);
}
/// <summary>
/// ADPCM decompression present in Starcraft I BETA
/// </summary>
/// <see href="https://github.com/ladislav-zezula/StormLib/blob/master/src/adpcm/adpcm.cpp"/>
public int DecompressADPCM_SC1B(void* pvOutBuffer, int cbOutBuffer, void* pvInBuffer, int cbInBuffer, int ChannelCount)
{
TADPCMStream os = new TADPCMStream(pvOutBuffer, cbOutBuffer); // Output stream
TADPCMStream @is = new TADPCMStream(pvInBuffer, cbInBuffer); // Input stream
ADPCM_DATA AdpcmData = new ADPCM_DATA();
int[] LowBitValues = new int[MAX_ADPCM_CHANNEL_COUNT];
int[] UpperBits = new int[MAX_ADPCM_CHANNEL_COUNT];
int[] BitMasks = new int[MAX_ADPCM_CHANNEL_COUNT];
int[] PredictedSamples = new int[MAX_ADPCM_CHANNEL_COUNT];
int ChannelIndex;
int ChannelIndexMax;
int OutputSample;
byte BitCount = 0;
byte EncodedSample = 0;
short InputValue16 = 0;
int reg_eax;
int Difference;
// The first byte contains number of bits
if (!@is.ReadByteSample(ref BitCount))
return os.LengthProcessed(pvOutBuffer);
if (InitAdpcmData(ref AdpcmData, BitCount) == null)
return os.LengthProcessed(pvOutBuffer);
//assert(AdpcmData.pValues != NULL);
// Init bit values
for (int i = 0; i < ChannelCount; i++)
{
byte OneByte = 0;
if (!@is.ReadByteSample(ref OneByte))
return os.LengthProcessed(pvOutBuffer);
LowBitValues[i] = OneByte & 0x01;
UpperBits[i] = OneByte >> 1;
}
// Next, the initial bit mask for each channel follows
for (int i = 0; i < ChannelCount; i++)
{
if (!@is.ReadWordSample(ref InputValue16))
return os.LengthProcessed(pvOutBuffer);
BitMasks[i] = InputValue16 << AdpcmData.BitCount;
}
// Next, InitialSample value for each channel follows
for (int i = 0; i < ChannelCount; i++)
{
if (!@is.ReadWordSample(ref InputValue16))
return os.LengthProcessed(pvOutBuffer);
PredictedSamples[i] = InputValue16;
os.WriteWordSample(InputValue16);
}
// Get the initial index
ChannelIndexMax = ChannelCount - 1;
ChannelIndex = 0;
// Keep reading as long as there is something in the input buffer
while (@is.ReadByteSample(ref EncodedSample))
{
reg_eax = ((PredictedSamples[ChannelIndex] * 3) << 3) - PredictedSamples[ChannelIndex];
PredictedSamples[ChannelIndex] = ((reg_eax * 10) + 0x80) >> 8;
Difference = (((EncodedSample >> 1) + 1) * BitMasks[ChannelIndex] + AdpcmData.field_10) >> AdpcmData.BitCount;
PredictedSamples[ChannelIndex] = UpdatePredictedSample(PredictedSamples[ChannelIndex], EncodedSample, Difference, 0x01);
BitMasks[ChannelIndex] = (int)((AdpcmData.pValues[EncodedSample >> 1] * BitMasks[ChannelIndex] + 0x80) >> 6);
if (BitMasks[ChannelIndex] < AdpcmData.field_8)
BitMasks[ChannelIndex] = AdpcmData.field_8;
if (BitMasks[ChannelIndex] > AdpcmData.field_C)
BitMasks[ChannelIndex] = AdpcmData.field_C;
reg_eax = (cbInBuffer - @is.LengthProcessed(pvInBuffer)) >> ChannelIndexMax;
OutputSample = PredictedSamples[ChannelIndex];
if (reg_eax < UpperBits[ChannelIndex])
{
if (LowBitValues[ChannelIndex] != 0)
{
OutputSample += (UpperBits[ChannelIndex] - reg_eax);
if (OutputSample > 32767)
OutputSample = 32767;
}
else
{
OutputSample += (reg_eax - UpperBits[ChannelIndex]);
if (OutputSample < -32768)
OutputSample = -32768;
}
}
// Write the word sample and swap channel
os.WriteWordSample((short)(OutputSample));
ChannelIndex = (ChannelIndex + 1) % ChannelCount;
}
return os.LengthProcessed(pvOutBuffer);
}
}
}
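A matching sketch for the decompressor above, again with pinned buffers; the input source and the output size estimate are illustrative:
using BurnOutSharp.Compression.ADPCM;

class ADPCMDecompressExample
{
    static unsafe void Main()
    {
        byte[] compressed = GetCompressedData(); // hypothetical source of ADPCM data
        byte[] decompressed = new byte[compressed.Length * 8]; // rough upper-bound estimate

        fixed (byte* pIn = compressed)
        fixed (byte* pOut = decompressed)
        {
            var decompressor = new Decompressor();
            int written = decompressor.DecompressADPCM(pOut, decompressed.Length, pIn, compressed.Length, 1);
            System.Console.WriteLine($"Decompressed {written} bytes of PCM");
        }
    }

    // Placeholder; a real caller would supply actual ADPCM data here
    static byte[] GetCompressedData() => new byte[0];
}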


@@ -0,0 +1,104 @@
using static BurnOutSharp.Compression.ADPCM.Constants;
namespace BurnOutSharp.Compression.ADPCM
{
/// <see href="https://github.com/ladislav-zezula/StormLib/blob/master/src/adpcm/adpcm.cpp"/>
internal static unsafe class Helper
{
#region Local functions
public static short GetNextStepIndex(int StepIndex, uint EncodedSample)
{
// Get the next step index
StepIndex = StepIndex + NextStepTable[EncodedSample & 0x1F];
// Don't make the step index overflow
if (StepIndex < 0)
StepIndex = 0;
else if (StepIndex > 88)
StepIndex = 88;
return (short)StepIndex;
}
public static int UpdatePredictedSample(int PredictedSample, int EncodedSample, int Difference, int BitMask = 0x40)
{
// Is the sign bit set?
if ((EncodedSample & BitMask) != 0)
{
PredictedSample -= Difference;
if (PredictedSample <= -32768)
PredictedSample = -32768;
}
else
{
PredictedSample += Difference;
if (PredictedSample >= 32767)
PredictedSample = 32767;
}
return PredictedSample;
}
public static int DecodeSample(int PredictedSample, int EncodedSample, int StepSize, int Difference)
{
if ((EncodedSample & 0x01) != 0)
Difference += (StepSize >> 0);
if ((EncodedSample & 0x02) != 0)
Difference += (StepSize >> 1);
if ((EncodedSample & 0x04) != 0)
Difference += (StepSize >> 2);
if ((EncodedSample & 0x08) != 0)
Difference += (StepSize >> 3);
if ((EncodedSample & 0x10) != 0)
Difference += (StepSize >> 4);
if ((EncodedSample & 0x20) != 0)
Difference += (StepSize >> 5);
return UpdatePredictedSample(PredictedSample, EncodedSample, Difference);
}
#endregion
#region ADPCM decompression present in Starcraft I BETA
public static uint[] InitAdpcmData(ref ADPCM_DATA pData, byte BitCount)
{
// ADPCM_DATA is a struct, so it is taken by reference to make the
// field assignments below visible to the caller
switch (BitCount)
{
case 2:
pData.pValues = adpcm_values_2;
break;
case 3:
pData.pValues = adpcm_values_3;
break;
case 4:
pData.pValues = adpcm_values_4;
break;
case 6:
pData.pValues = adpcm_values_6;
break;
default:
pData.pValues = null;
break;
}
pData.BitCount = BitCount;
pData.field_C = 0x20000;
pData.field_8 = 1 << BitCount;
pData.field_10 = (1 << BitCount) / 2;
return pData.pValues;
}
#endregion
}
}


@@ -0,0 +1,67 @@
namespace BurnOutSharp.Compression.ADPCM
{
/// <summary>
/// Helper class for reading and writing ADPCM sample data
/// </summary>
/// <see href="https://github.com/ladislav-zezula/StormLib/blob/master/src/adpcm/adpcm.cpp"/>
public unsafe class TADPCMStream
{
private byte* pbBufferEnd;
private byte* pbBuffer;
public TADPCMStream(void* pvBuffer, int cbBuffer)
{
pbBufferEnd = (byte*)pvBuffer + cbBuffer;
pbBuffer = (byte*)pvBuffer;
}
public bool ReadByteSample(ref byte ByteSample)
{
// Check if there is any data left in the buffer
if (pbBuffer >= pbBufferEnd)
return false;
ByteSample = *pbBuffer++;
return true;
}
public bool WriteByteSample(byte ByteSample)
{
// Check if there is enough space in the buffer
if (pbBuffer >= pbBufferEnd)
return false;
*pbBuffer++ = ByteSample;
return true;
}
public bool ReadWordSample(ref short OneSample)
{
// Check if there is enough data left in the buffer
if ((int)(pbBufferEnd - pbBuffer) < sizeof(short))
return false;
// Read the sample (little-endian)
OneSample = (short)(pbBuffer[0] + ((pbBuffer[1]) << 0x08));
pbBuffer += sizeof(short);
return true;
}
public bool WriteWordSample(short OneSample)
{
// Check if we have enough space in the output buffer
if ((int)(pbBufferEnd - pbBuffer) < sizeof(short))
return false;
// Write the sample
*pbBuffer++ = (byte)(OneSample & 0xFF);
*pbBuffer++ = (byte)(OneSample >> 0x08);
return true;
}
public int LengthProcessed(void* pvOutBuffer)
{
return (int)((byte*)pbBuffer - (byte*)pvOutBuffer);
}
}
}
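A tiny sketch of the stream wrapper above, writing two little-endian word samples into a pinned buffer and reading them back:
using BurnOutSharp.Compression.ADPCM;

class TADPCMStreamExample
{
    static unsafe void Main()
    {
        byte[] buffer = new byte[4];
        fixed (byte* pBuffer = buffer)
        {
            var writer = new TADPCMStream(pBuffer, buffer.Length);
            writer.WriteWordSample(0x1234);
            writer.WriteWordSample(-1);

            var reader = new TADPCMStream(pBuffer, buffer.Length);
            short sample = 0;
            while (reader.ReadWordSample(ref sample))
                System.Console.WriteLine($"0x{sample:X4}"); // prints 0x1234, then 0xFFFF
        }
    }
}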


@@ -0,0 +1,33 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFrameworks>net48;net6.0</TargetFrameworks>
<RuntimeIdentifiers>win-x86;win-x64;linux-x64;osx-x64</RuntimeIdentifiers>
<Title>BurnOutSharp.Compression</Title>
<AssemblyName>BurnOutSharp.Compression</AssemblyName>
<Authors>Matt Nadareski</Authors>
<Product>BurnOutSharp</Product>
<Copyright>Copyright (c)2022 Matt Nadareski</Copyright>
<RepositoryUrl>https://github.com/mnadareski/BurnOutSharp</RepositoryUrl>
<Version>2.6</Version>
<AssemblyVersion>2.6</AssemblyVersion>
<FileVersion>2.6</FileVersion>
<IncludeSource>true</IncludeSource>
<IncludeSymbols>true</IncludeSymbols>
</PropertyGroup>
<PropertyGroup>
<AllowUnsafeBlocks>true</AllowUnsafeBlocks>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="SharpCompress" Version="0.32.2" />
<PackageReference Include="SharpZipLib" Version="1.4.1" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\BurnOutSharp.Models\BurnOutSharp.Models.csproj" />
<ProjectReference Include="..\BurnOutSharp.Utilities\BurnOutSharp.Utilities.csproj" />
</ItemGroup>
</Project>


@@ -0,0 +1,525 @@
using System.IO;
using System.Linq;
using System.Text;
using BurnOutSharp.Models.Compression.LZ;
using BurnOutSharp.Utilities;
using static BurnOutSharp.Models.Compression.LZ.Constants;
namespace BurnOutSharp.Compression
{
/// <see href="https://github.com/wine-mirror/wine/blob/master/dlls/kernel32/lzexpand.c"/>
public class LZ
{
#region Constructors
/// <summary>
/// Constructor
/// </summary>
public LZ() { }
#endregion
#region Static Methods
/// <summary>
/// Decompress LZ-compressed data
/// </summary>
/// <param name="compressed">Byte array representing the compressed data</param>
/// <returns>Decompressed data as a byte array, null on error</returns>
public static byte[] Decompress(byte[] compressed)
{
// If we have an invalid input
if (compressed == null || compressed.Length == 0)
return null;
// Create a memory stream for the input and decompress that
var compressedStream = new MemoryStream(compressed);
return Decompress(compressedStream);
}
/// <summary>
/// Decompress LZ-compressed data
/// </summary>
/// <param name="compressed">Stream representing the compressed data</param>
/// <returns>Decompressed data as a byte array, null on error</returns>
public static byte[] Decompress(Stream compressed)
{
// If we have an invalid input
if (compressed == null || compressed.Length == 0)
return null;
// Create a new LZ for decompression
var lz = new LZ();
// Open the input data
var sourceState = lz.Open(compressed, out _);
if (sourceState?.Window == null)
return null;
// Create the output data and open it
var decompressedStream = new MemoryStream();
var destState = lz.Open(decompressedStream, out _);
if (destState == null)
return null;
// Decompress the data by copying
long read = lz.CopyTo(sourceState, destState, out LZERROR error);
// Copy the data to the buffer
byte[] decompressed;
if (read == 0 || (error != LZERROR.LZERROR_OK && error != LZERROR.LZERROR_NOT_LZ))
{
decompressed = null;
}
else
{
int dataEnd = (int)decompressedStream.Position;
decompressedStream.Seek(0, SeekOrigin.Begin);
decompressed = decompressedStream.ReadBytes(dataEnd);
}
// Close the streams
lz.Close(sourceState);
lz.Close(destState);
return decompressed;
}
/// <summary>
/// Reconstructs the full filename of the compressed file
/// </summary>
public static string GetExpandedName(string input, out LZERROR error)
{
// Try to open the file as a compressed stream
var fileStream = File.Open(input, FileMode.Open, FileAccess.Read, FileShare.ReadWrite);
var state = new LZ().Open(fileStream, out error);
if (state?.Window == null)
return null;
// Get the extension for modification
string inputExtension = Path.GetExtension(input).TrimStart('.');
// If we have no extension
if (string.IsNullOrWhiteSpace(inputExtension))
return Path.GetFileNameWithoutExtension(input);
// If we have an extension of length 1
if (inputExtension.Length == 1)
{
if (inputExtension == "_")
return $"{Path.GetFileNameWithoutExtension(input)}.{char.ToLower(state.LastChar)}";
else
return Path.GetFileNameWithoutExtension(input);
}
// If we have an extension that doesn't end in an underscore
if (!inputExtension.EndsWith("_"))
return Path.GetFileNameWithoutExtension(input);
// Build the new filename
bool isLowerCase = char.IsLower(input[0]);
char replacementChar = isLowerCase ? char.ToLower(state.LastChar) : char.ToUpper(state.LastChar);
string outputExtension = inputExtension.Substring(0, inputExtension.Length - 1) + replacementChar;
return $"{Path.GetFileNameWithoutExtension(input)}.{outputExtension}";
}
#endregion
#region State Management
/// <summary>
/// Opens a stream and creates a state from it
/// </summary>
/// <param name="stream">Source stream to create a state from</stream>
/// <param name="error">Output representing the last error</param>
/// <returns>An initialized State, null on error</returns>
/// <remarks>Uncompressed streams are represented by a State with no buffer</remarks>
public State Open(Stream stream, out LZERROR error)
{
State lzs = Init(stream, out error);
if (error == LZERROR.LZERROR_OK || error == LZERROR.LZERROR_NOT_LZ)
return lzs;
return null;
}
/// <summary>
/// Closes a state by invalidating the source
/// </summary>
/// <param name="stream">State object to close</stream>
public void Close(State state)
{
try
{
state?.Source?.Close();
}
catch { }
}
/// <summary>
/// Initializes internal decompression buffers
/// </summary>
/// <param name="source">Input stream to create a state from</param>
/// <param name="error">Output representing the last error</param>
/// <returns>An initialized State, null on error</returns>
/// <remarks>Uncompressed streams are represented by a State with no buffer</remarks>
public State Init(Stream source, out LZERROR error)
{
// If we have an invalid source
if (source == null)
{
error = LZERROR.LZERROR_BADVALUE;
return null;
}
// Attempt to read the header
var fileHeader = ParseFileHeader(source, out error);
// If we had a valid but uncompressed stream
if (error == LZERROR.LZERROR_NOT_LZ)
{
source.Seek(0, SeekOrigin.Begin);
return new State { Source = source };
}
// If we had any error
else if (error != LZERROR.LZERROR_OK)
{
source.Seek(0, SeekOrigin.Begin);
return null;
}
// Initialize the table with all spaces
byte[] table = Enumerable.Repeat((byte)' ', LZ_TABLE_SIZE).ToArray();
// Build the state
var state = new State
{
Source = source,
LastChar = fileHeader.LastChar,
RealLength = fileHeader.RealLength,
Window = new byte[GETLEN],
WindowLength = 0,
WindowCurrent = 0,
Table = table,
CurrentTableEntry = 0xff0,
};
// Return the state
return state;
}
#endregion
#region Stream Functionality
/// <summary>
/// Attempt to read the specified number of bytes from the State
/// </summary>
/// <param name="source">Source State to read from</param>
/// <param name="buffer">Byte buffer to read into</param>
/// <param name="offset">Offset within the buffer to read</param>
/// <param name="count">Number of bytes to read</param>
/// <param name="error">Output representing the last error</param>
/// <returns>The number of bytes read, if possible</returns>
/// <remarks>
/// If the source data is compressed, this will decompress the data.
/// If the source data is uncompressed, it is copied directly
/// </remarks>
public int Read(State source, byte[] buffer, int offset, int count, out LZERROR error)
{
// If we have an uncompressed input
if (source.Window == null)
{
error = LZERROR.LZERROR_NOT_LZ;
return source.Source.Read(buffer, offset, count);
}
// If seeking has occurred, we need to perform the seek
if (source.RealCurrent != source.RealWanted)
{
// If the requested position is before the current, we need to reset
if (source.RealCurrent > source.RealWanted)
{
// Reset the decompressor state
source.Source.Seek(LZ_HEADER_LEN, SeekOrigin.Begin);
FlushWindow(source);
source.RealCurrent = 0;
source.ByteType = 0;
source.StringLength = 0;
source.Table = Enumerable.Repeat((byte)' ', LZ_TABLE_SIZE).ToArray();
source.CurrentTableEntry = 0xFF0;
}
// While we are not at the right offset
while (source.RealCurrent < source.RealWanted)
{
_ = DecompressByte(source, out error);
if (error != LZERROR.LZERROR_OK)
return 0;
}
}
int bytesRemaining = count;
while (bytesRemaining > 0)
{
byte b = DecompressByte(source, out error);
if (error != LZERROR.LZERROR_OK)
return count - bytesRemaining;
source.RealWanted++;
buffer[offset++] = b;
bytesRemaining--;
}
error = LZERROR.LZERROR_OK;
return count;
}
/// <summary>
/// Perform a seek on the source data
/// </summary>
/// <param name="state">State to seek within</param>
/// <param name="offset">Data offset to seek to</state>
/// <param name="seekOrigin">SeekOrigin representing how to seek</state>
/// <param name="error">Output representing the last error</param>
/// <returns>The position that was seeked to, -1 on error</returns>
public long Seek(State state, long offset, SeekOrigin seekOrigin, out LZERROR error)
{
// If we have an invalid state
if (state == null)
{
error = LZERROR.LZERROR_BADVALUE;
return -1;
}
// If we have an uncompressed input
if (state.Window == null)
{
error = LZERROR.LZERROR_NOT_LZ;
return state.Source.Seek(offset, seekOrigin);
}
// Otherwise, generate the new offset
long newWanted = state.RealWanted;
switch (seekOrigin)
{
case SeekOrigin.Current:
newWanted += offset;
break;
case SeekOrigin.End:
newWanted = state.RealLength - offset;
break;
default:
newWanted = offset;
break;
}
// If we have an invalid new offset
if (newWanted < 0 || newWanted > state.RealLength)
{
error = LZERROR.LZERROR_BADVALUE;
return -1;
}
error = LZERROR.LZERROR_OK;
state.RealWanted = (uint)newWanted;
return newWanted;
}
/// <summary>
/// Copies all data from the source to the destination
/// </summary>
/// <param name="source">Source State to read from</param>
/// <param name="dest">Destination state to write to</param>
/// <param name="error">Output representing the last error</param>
/// <returns>The number of bytes written, 0 on error</returns>
/// <remarks>
/// If the source data is compressed, this will decompress the data.
/// If the source data is uncompressed, it is copied directly
/// </remarks>
public long CopyTo(State source, State dest, out LZERROR error)
{
error = LZERROR.LZERROR_OK;
// If we have an uncompressed input
if (source.Window == null)
{
source.Source.CopyTo(dest.Source);
return source.Source.Length;
}
// Loop until we have read everything
long length = 0;
while (true)
{
// Read at most 1000 bytes
byte[] buf = new byte[1000];
int read = Read(source, buf, 0, buf.Length, out error);
// If we had an error
if (read == 0)
{
if (error == LZERROR.LZERROR_NOT_LZ)
{
error = LZERROR.LZERROR_OK;
break;
}
else if (error != LZERROR.LZERROR_OK)
{
error = LZERROR.LZERROR_READ;
return 0;
}
}
// Otherwise, append the length read and write the data
length += read;
dest.Source.Write(buf, 0, read);
}
return length;
}
/// <summary>
/// Decompress a single byte of data from the source State
/// </summary>
/// <param name="source">Source State to read from</param>
/// <param name="error">Output representing the last error</param>
/// <returns>The read byte, if possible</returns>
private byte DecompressByte(State source, out LZERROR error)
{
byte b;
if (source.StringLength != 0)
{
b = source.Table[source.StringPosition];
source.StringPosition = (source.StringPosition + 1) & 0xFFF;
source.StringLength--;
}
else
{
if ((source.ByteType & 0x100) == 0)
{
b = ReadByte(source, out error);
if (error != LZERROR.LZERROR_OK)
return 0;
source.ByteType = (ushort)(b | 0xFF00);
}
if ((source.ByteType & 1) != 0)
{
b = ReadByte(source, out error);
if (error != LZERROR.LZERROR_OK)
return 0;
}
else
{
byte b1 = ReadByte(source, out error);
if (error != LZERROR.LZERROR_OK)
return 0;
byte b2 = ReadByte(source, out error);
if (error != LZERROR.LZERROR_OK)
return 0;
// Format:
// b1 b2
// AB CD
// where CAB is the string offset in the table
// and D+3 is the length of the string
source.StringPosition = (uint)(b1 | ((b2 & 0xf0) << 4));
// Store D+2 rather than D+3: one byte is consumed immediately below
source.StringLength = (byte)((b2 & 0xf) + 2);
b = source.Table[source.StringPosition];
source.StringPosition = (source.StringPosition + 1) & 0xFFF;
}
source.ByteType >>= 1;
}
// Store b in table
source.Table[source.CurrentTableEntry++] = b;
source.CurrentTableEntry &= 0xFFF;
source.RealCurrent++;
error = LZERROR.LZERROR_OK;
return b;
}
/// <summary>
/// Reads one compressed byte, including buffering
/// </summary>
/// <param name="state">State to read using</param>
/// <param name="error">Output representing the last error</param>
/// <returns>Byte value that was read, if possible</returns>
private byte ReadByte(State state, out LZERROR error)
{
// If we have enough data in the buffer
if (state.WindowCurrent < state.WindowLength)
{
error = LZERROR.LZERROR_OK;
return state.Window[state.WindowCurrent++];
}
// Otherwise, read from the source
int ret = state.Source.Read(state.Window, 0, GETLEN);
if (ret == 0)
{
error = LZERROR.LZERROR_NOT_LZ;
return 0;
}
// Reset the window state
state.WindowLength = (uint)ret;
state.WindowCurrent = 1;
error = LZERROR.LZERROR_OK;
return state.Window[0];
}
/// <summary>
/// Reset the current window position to the length
/// </summary>
/// <param name="state">State to flush</param>
private void FlushWindow(State state)
{
state.WindowCurrent = state.WindowLength;
}
/// <summary>
/// Parse a Stream into a file header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="error">Output representing the last error</param>
/// <returns>Filled file header on success, null on error</returns>
private FileHeaader ParseFileHeader(Stream data, out LZERROR error)
{
error = LZERROR.LZERROR_OK;
FileHeaader fileHeader = new FileHeaader();
byte[] magic = data.ReadBytes(LZ_MAGIC_LEN);
fileHeader.Magic = Encoding.ASCII.GetString(magic);
if (fileHeader.Magic != MagicString)
{
error = LZERROR.LZERROR_NOT_LZ;
return null;
}
fileHeader.CompressionType = data.ReadByteValue();
if (fileHeader.CompressionType != (byte)'A')
{
error = LZERROR.LZERROR_UNKNOWNALG;
return null;
}
fileHeader.LastChar = (char)data.ReadByteValue();
fileHeader.RealLength = data.ReadUInt32();
return fileHeader;
}
#endregion
}
}

View File

@@ -0,0 +1,12 @@
namespace BurnOutSharp.Compression.LZX
{
/// <see href="https://github.com/wine-mirror/wine/blob/master/dlls/cabinet/cabinet.h"/>
public class Bits
{
public uint BitBuffer;
public int BitsLeft;
public int InputPosition; //byte*
}
}

View File

@@ -0,0 +1,759 @@
using System;
using BurnOutSharp.Models.Compression.LZX;
using static BurnOutSharp.Models.Compression.LZX.Constants;
using static BurnOutSharp.Models.MicrosoftCabinet.Constants;
namespace BurnOutSharp.Compression.LZX
{
/// <see href="https://github.com/wine-mirror/wine/blob/master/dlls/cabinet/fdi.c"/>
public class Decompressor
{
/// <summary>
/// Initialize an LZX decompressor state
/// </summary>
public static bool Init(int window, State state)
{
uint wndsize = (uint)(1 << window);
int posn_slots;
/* LZX supports window sizes of 2^15 (32Kb) through 2^21 (2Mb) */
/* if a previously allocated window is big enough, keep it */
if (window < 15 || window > 21)
return false;
if (state.actual_size < wndsize)
state.window = null;
if (state.window == null)
{
state.window = new byte[wndsize];
state.actual_size = wndsize;
}
state.window_size = wndsize;
/* calculate required position slots */
if (window == 20) posn_slots = 42;
else if (window == 21) posn_slots = 50;
else posn_slots = window << 1;
/*posn_slots=i=0; while (i < wndsize) i += 1 << CAB(extra_bits)[posn_slots++]; */
state.R0 = state.R1 = state.R2 = 1;
state.main_elements = (ushort)(LZX_NUM_CHARS + (posn_slots << 3));
state.header_read = 0;
state.frames_read = 0;
state.block_remaining = 0;
state.block_type = LZX_BLOCKTYPE_INVALID;
state.intel_curpos = 0;
state.intel_started = 0;
state.window_posn = 0;
/* initialize tables to 0 (because deltas will be applied to them) */
Array.Clear(state.tblMAINTREE_len, 0, state.tblMAINTREE_len.Length);
Array.Clear(state.tblLENGTH_len, 0, state.tblLENGTH_len.Length);
return true;
}
/// <summary>
/// Decompress a byte array using a given State
/// </summary>
public static bool Decompress(State state, int inlen, byte[] inbuf, int outlen, byte[] outbuf)
{
int inpos = 0; // inbuf[0];
int endinp = inpos + inlen;
int window = 0; // state.window[0];
int runsrc, rundest; // byte*
uint window_posn = state.window_posn;
uint window_size = state.window_size;
uint R0 = state.R0;
uint R1 = state.R1;
uint R2 = state.R2;
uint match_offset, i, j, k; /* ijk used in READ_HUFFSYM macro */
Bits lb = new Bits(); /* used in READ_LENGTHS macro */
int togo = outlen, this_run, main_element, aligned_bits;
int match_length, copy_length, length_footer, extra, verbatim_bits;
INIT_BITSTREAM(out int bitsleft, out uint bitbuf);
/* read header if necessary */
if (state.header_read == 0)
{
i = j = 0;
k = READ_BITS(1, inbuf, ref inpos, ref bitsleft, ref bitbuf);
if (k != 0)
{
i = READ_BITS(16, inbuf, ref inpos, ref bitsleft, ref bitbuf);
j = READ_BITS(16, inbuf, ref inpos, ref bitsleft, ref bitbuf);
}
state.intel_filesize = (int)((i << 16) | j); /* or 0 if not encoded */
state.header_read = 1;
}
/* main decoding loop */
while (togo > 0)
{
/* last block finished, new block expected */
if (state.block_remaining == 0)
{
if (state.block_type == LZX_BLOCKTYPE_UNCOMPRESSED)
{
if ((state.block_length & 1) != 0)
inpos++; /* realign bitstream to word */
INIT_BITSTREAM(out bitsleft, out bitbuf);
}
state.block_type = (ushort)READ_BITS(3, inbuf, ref inpos, ref bitsleft, ref bitbuf);
i = READ_BITS(16, inbuf, ref inpos, ref bitsleft, ref bitbuf);
j = READ_BITS(8, inbuf, ref inpos, ref bitsleft, ref bitbuf);
state.block_remaining = state.block_length = (i << 8) | j;
switch (state.block_type)
{
case LZX_BLOCKTYPE_ALIGNED:
for (i = 0; i < 8; i++)
{
j = READ_BITS(3, inbuf, ref inpos, ref bitsleft, ref bitbuf);
state.tblALIGNED_len[i] = (byte)j;
}
make_decode_table(LZX_ALIGNED_MAXSYMBOLS, LZX_ALIGNED_TABLEBITS, state.tblALIGNED_len, state.tblALIGNED_table);
/* rest of aligned header is same as verbatim */
goto case LZX_BLOCKTYPE_VERBATIM;
case LZX_BLOCKTYPE_VERBATIM:
READ_LENGTHS(state.tblMAINTREE_len, 0, 256, lb, state, inbuf, ref inpos, ref bitsleft, ref bitbuf);
READ_LENGTHS(state.tblMAINTREE_len, 256, state.main_elements, lb, state, inbuf, ref inpos, ref bitsleft, ref bitbuf);
make_decode_table(LZX_MAINTREE_MAXSYMBOLS, LZX_MAINTREE_TABLEBITS, state.tblMAINTREE_len, state.tblMAINTREE_table);
if (state.tblMAINTREE_len[0xE8] != 0)
state.intel_started = 1;
READ_LENGTHS(state.tblLENGTH_len, 0, LZX_NUM_SECONDARY_LENGTHS, lb, state, inbuf, ref inpos, ref bitsleft, ref bitbuf);
make_decode_table(LZX_LENGTH_MAXSYMBOLS, LZX_LENGTH_TABLEBITS, state.tblLENGTH_len, state.tblLENGTH_table);
break;
case LZX_BLOCKTYPE_UNCOMPRESSED:
state.intel_started = 1; /* because we can't assume otherwise */
ENSURE_BITS(16, inbuf, ref inpos, ref bitsleft, ref bitbuf); /* get up to 16 pad bits into the buffer */
/* and align the bitstream! */
if (bitsleft > 16)
inpos -= 2;
R0 = (uint)(inbuf[inpos + 0] | (inbuf[inpos + 1] << 8) | (inbuf[inpos + 2] << 16) | (inbuf[inpos + 3] << 24)); inpos += 4;
R1 = (uint)(inbuf[inpos + 0] | (inbuf[inpos + 1] << 8) | (inbuf[inpos + 2] << 16) | (inbuf[inpos + 3] << 24)); inpos += 4;
R2 = (uint)(inbuf[inpos + 0] | (inbuf[inpos + 1] << 8) | (inbuf[inpos + 2] << 16) | (inbuf[inpos + 3] << 24)); inpos += 4;
break;
default:
return false;
}
}
/* buffer exhaustion check */
if (inpos > endinp)
{
/* it's possible to have a file where the next run is less than
* 16 bits in size. In this case, the READ_HUFFSYM() macro used
* in building the tables will exhaust the buffer, so we should
* allow for this, but not allow those accidentally read bits to
* be used (so we check that there are at least 16 bits
* remaining - in this boundary case they aren't really part of
* the compressed data)
*/
if (inpos > (endinp + 2) || bitsleft < 16)
return false;
}
while ((this_run = (int)state.block_remaining) > 0 && togo > 0)
{
if (this_run > togo) this_run = togo;
togo -= this_run;
state.block_remaining -= (uint)this_run;
/* apply 2^x-1 mask */
window_posn &= window_size - 1;
/* runs can't straddle the window wraparound */
if ((window_posn + this_run) > window_size)
return false;
switch (state.block_type)
{
case LZX_BLOCKTYPE_VERBATIM:
while (this_run > 0)
{
main_element = READ_HUFFSYM(state.tblMAINTREE_table, state.tblMAINTREE_len, LZX_MAINTREE_TABLEBITS, LZX_MAINTREE_MAXSYMBOLS, inbuf, ref inpos, ref bitsleft, ref bitbuf);
if (main_element < LZX_NUM_CHARS)
{
/* literal: 0 to LZX_NUM_CHARS-1 */
state.window[window + window_posn++] = (byte)main_element;
this_run--;
}
else
{
/* match: LZX_NUM_CHARS + ((slot<<3) | length_header (3 bits)) */
main_element -= LZX_NUM_CHARS;
match_length = main_element & LZX_NUM_PRIMARY_LENGTHS;
if (match_length == LZX_NUM_PRIMARY_LENGTHS)
{
length_footer = READ_HUFFSYM(state.tblLENGTH_table, state.tblLENGTH_len, LZX_LENGTH_TABLEBITS, LZX_LENGTH_MAXSYMBOLS, inbuf, ref inpos, ref bitsleft, ref bitbuf);
match_length += length_footer;
}
match_length += LZX_MIN_MATCH;
match_offset = (uint)(main_element >> 3);
if (match_offset > 2)
{
/* not repeated offset */
if (match_offset != 3)
{
extra = state.ExtraBits[match_offset];
verbatim_bits = (int)READ_BITS(extra, inbuf, ref inpos, ref bitsleft, ref bitbuf);
match_offset = (uint)(state.PositionSlotBases[match_offset] - 2 + verbatim_bits);
}
else
{
match_offset = 1;
}
/* update repeated offset LRU queue */
R2 = R1; R1 = R0; R0 = match_offset;
}
else if (match_offset == 0)
{
match_offset = R0;
}
else if (match_offset == 1)
{
match_offset = R1;
R1 = R0; R0 = match_offset;
}
else /* match_offset == 2 */
{
match_offset = R2;
R2 = R0; R0 = match_offset;
}
rundest = (int)(window + window_posn);
this_run -= match_length;
/* copy any wrapped around source data */
if (window_posn >= match_offset)
{
/* no wrap */
runsrc = (int)(rundest - match_offset);
}
else
{
runsrc = (int)(rundest + (window_size - match_offset));
copy_length = (int)(match_offset - window_posn);
if (copy_length < match_length)
{
match_length -= copy_length;
window_posn += (uint)copy_length;
while (copy_length-- > 0)
{
state.window[rundest++] = state.window[runsrc++];
}
runsrc = window;
}
}
window_posn += (uint)match_length;
/* copy match data - no worries about destination wraps */
while (match_length-- > 0)
{
state.window[rundest++] = state.window[runsrc++];
}
}
}
break;
case LZX_BLOCKTYPE_ALIGNED:
while (this_run > 0)
{
main_element = READ_HUFFSYM(state.tblMAINTREE_table, state.tblMAINTREE_len, LZX_MAINTREE_TABLEBITS, LZX_MAINTREE_MAXSYMBOLS, inbuf, ref inpos, ref bitsleft, ref bitbuf);
if (main_element < LZX_NUM_CHARS)
{
/* literal: 0 to LZX_NUM_CHARS-1 */
state.window[window + window_posn++] = (byte)main_element;
this_run--;
}
else
{
/* match: LZX_NUM_CHARS + ((slot<<3) | length_header (3 bits)) */
main_element -= LZX_NUM_CHARS;
match_length = main_element & LZX_NUM_PRIMARY_LENGTHS;
if (match_length == LZX_NUM_PRIMARY_LENGTHS)
{
length_footer = READ_HUFFSYM(state.tblLENGTH_table, state.tblLENGTH_len, LZX_LENGTH_TABLEBITS, LZX_LENGTH_MAXSYMBOLS, inbuf, ref inpos, ref bitsleft, ref bitbuf);
match_length += length_footer;
}
match_length += LZX_MIN_MATCH;
match_offset = (uint)(main_element >> 3);
if (match_offset > 2)
{
/* not repeated offset */
extra = state.ExtraBits[match_offset];
match_offset = state.PositionSlotBases[match_offset] - 2;
if (extra > 3)
{
/* verbatim and aligned bits */
extra -= 3;
verbatim_bits = (int)READ_BITS(extra, inbuf, ref inpos, ref bitsleft, ref bitbuf);
match_offset += (uint)(verbatim_bits << 3);
aligned_bits = READ_HUFFSYM(state.tblALIGNED_table, state.tblALIGNED_len, LZX_ALIGNED_TABLEBITS, LZX_ALIGNED_MAXSYMBOLS, inbuf, ref inpos, ref bitsleft, ref bitbuf);
match_offset += (uint)aligned_bits;
}
else if (extra == 3)
{
/* aligned bits only */
aligned_bits = READ_HUFFSYM(state.tblALIGNED_table, state.tblALIGNED_len, LZX_ALIGNED_TABLEBITS, LZX_ALIGNED_MAXSYMBOLS, inbuf, ref inpos, ref bitsleft, ref bitbuf);
match_offset += (uint)aligned_bits;
}
else if (extra > 0)
{
/* extra==1, extra==2 */
/* verbatim bits only */
verbatim_bits = (int)READ_BITS(extra, inbuf, ref inpos, ref bitsleft, ref bitbuf);
match_offset += (uint)verbatim_bits;
}
else /* extra == 0 */
{
/* ??? */
match_offset = 1;
}
/* update repeated offset LRU queue */
R2 = R1; R1 = R0; R0 = match_offset;
}
else if (match_offset == 0)
{
match_offset = R0;
}
else if (match_offset == 1)
{
match_offset = R1;
R1 = R0; R0 = match_offset;
}
else /* match_offset == 2 */
{
match_offset = R2;
R2 = R0; R0 = match_offset;
}
rundest = (int)(window + window_posn);
this_run -= match_length;
/* copy any wrapped around source data */
if (window_posn >= match_offset)
{
/* no wrap */
runsrc = (int)(rundest - match_offset);
}
else
{
runsrc = (int)(rundest + (window_size - match_offset));
copy_length = (int)(match_offset - window_posn);
if (copy_length < match_length)
{
match_length -= copy_length;
window_posn += (uint)copy_length;
while (copy_length-- > 0)
{
state.window[rundest++] = state.window[runsrc++];
}
runsrc = window;
}
}
window_posn += (uint)match_length;
/* copy match data - no worries about destination wraps */
while (match_length-- > 0)
{
state.window[rundest++] = state.window[runsrc++];
}
}
}
break;
case LZX_BLOCKTYPE_UNCOMPRESSED:
if ((inpos + this_run) > endinp)
return false;
Array.Copy(inbuf, inpos, state.window, window + window_posn, this_run);
inpos += this_run;
window_posn += (uint)this_run;
break;
default:
return false; /* might as well */
}
}
}
if (togo != 0)
return false;
Array.Copy(state.window, window + ((window_posn == 0) ? window_size : window_posn) - outlen, outbuf, 0, outlen);
state.window_posn = window_posn;
state.R0 = R0;
state.R1 = R1;
state.R2 = R2;
/* intel E8 decoding */
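// Background note: LZX's optional x86 translation rewrites the 32-bit
// operands of CALL (0xE8) instructions. The encoder converts relative
// call targets to absolute offsets so repeated calls to one target
// compress better; this pass converts them back to relative form.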
if ((state.frames_read++ < 32768) && state.intel_filesize != 0)
{
if (outlen <= 6 || state.intel_started == 0)
{
state.intel_curpos += outlen;
}
else
{
int data = 0; // outbuf[0];
int dataend = data + outlen - 10;
int curpos = state.intel_curpos;
int filesize = state.intel_filesize;
int abs_off, rel_off;
state.intel_curpos = curpos + outlen;
while (data < dataend)
{
if (outbuf[data++] != 0xE8)
{
curpos++;
continue;
}
abs_off = outbuf[data + 0] | (outbuf[data + 1] << 8) | (outbuf[data + 2] << 16) | (outbuf[data + 3] << 24);
if ((abs_off >= -curpos) && (abs_off < filesize))
{
rel_off = (abs_off >= 0) ? abs_off - curpos : abs_off + filesize;
outbuf[data + 0] = (byte)rel_off;
outbuf[data + 1] = (byte)(rel_off >> 8);
outbuf[data + 2] = (byte)(rel_off >> 16);
outbuf[data + 3] = (byte)(rel_off >> 24);
}
data += 4;
curpos += 5;
}
}
}
return true;
}
/// <summary>
/// Read and build the Huffman tree from the lengths
/// </summary>
private static int ReadLengths(byte[] lengths, uint first, uint last, Bits lb, State state, byte[] inbuf)
{
uint x, y;
uint bitbuf = lb.BitBuffer;
int bitsleft = lb.BitsLeft;
int inpos = lb.InputPosition;
for (x = 0; x < 20; x++)
{
y = READ_BITS(4, inbuf, ref inpos, ref bitsleft, ref bitbuf);
state.tblPRETREE_len[x] = (byte)y;
}
make_decode_table(LZX_PRETREE_MAXSYMBOLS, LZX_PRETREE_TABLEBITS, state.tblPRETREE_len, state.tblPRETREE_table);
for (x = first; x < last;)
{
int z = READ_HUFFSYM(state.tblPRETREE_table, state.tblPRETREE_len, LZX_PRETREE_TABLEBITS, LZX_PRETREE_MAXSYMBOLS, inbuf, ref inpos, ref bitsleft, ref bitbuf);
if (z == 17)
{
y = READ_BITS(4, inbuf, ref inpos, ref bitsleft, ref bitbuf);
y += 4;
while (y-- > 0)
{
lengths[x++] = 0;
}
}
else if (z == 18)
{
y = READ_BITS(5, inbuf, ref inpos, ref bitsleft, ref bitbuf);
y += 20;
while (y-- > 0)
{
lengths[x++] = 0;
}
}
else if (z == 19)
{
y = READ_BITS(1, inbuf, ref inpos, ref bitsleft, ref bitbuf);
y += 4;
z = READ_HUFFSYM(state.tblPRETREE_table, state.tblPRETREE_len, LZX_PRETREE_TABLEBITS, LZX_PRETREE_MAXSYMBOLS, inbuf, ref inpos, ref bitsleft, ref bitbuf);
z = lengths[x] - z;
if (z < 0)
z += 17;
while (y-- > 0)
{
lengths[x++] = (byte)z;
}
}
else
{
z = lengths[x] - z;
if (z < 0)
z += 17;
lengths[x++] = (byte)z;
}
}
lb.BitBuffer = bitbuf;
lb.BitsLeft = bitsleft;
lb.InputPosition = inpos;
return 0;
}
// Bitstream reading macros (LZX / intel little-endian byte order)
#region Bitstream Reading Macros
/*
* These bit access routines work by using the area beyond the MSB and the
* LSB as a free source of zeroes. This avoids having to mask any bits.
* So we have to know the bit width of the bitbuffer variable.
*/
/// <summary>
/// Should be used first to set up the system
/// </summary>
private static void INIT_BITSTREAM(out int bitsleft, out uint bitbuf)
{
bitsleft = 0;
bitbuf = 0;
}
/// <summary>
/// Ensures there are at least N bits in the bit buffer. It can guarantee
/// up to 17 bits (i.e. it can read in 16 new bits when there is down to
/// 1 bit in the buffer, and it can read 32 bits when there are 0 bits in
/// the buffer).
/// </summary>
/// <remarks>Quantum reads bytes in normal order; LZX is little-endian order</remarks>
private static void ENSURE_BITS(int n, byte[] inbuf, ref int inpos, ref int bitsleft, ref uint bitbuf)
{
while (bitsleft < n)
{
byte b0 = inpos + 0 < inbuf.Length ? inbuf[inpos + 0] : (byte)0;
byte b1 = inpos + 1 < inbuf.Length ? inbuf[inpos + 1] : (byte)0;
bitbuf |= (uint)(((b1 << 8) | b0) << (16 - bitsleft));
bitsleft += 16;
inpos += 2;
}
}
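// Illustrative trace: with bitsleft = 0 and next input bytes 0x12 0x34,
// the pair is read little-endian as 0x3412 and shifted to the top of
// the 32-bit buffer: bitbuf = 0x34120000, bitsleft = 16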
/// <summary>
/// Extracts (without removing) N bits from the bit buffer
/// </summary>
private static uint PEEK_BITS(int n, uint bitbuf)
{
return bitbuf >> (32 - n);
}
/// <summary>
/// Removes N bits from the bit buffer
/// </summary>
private static void REMOVE_BITS(int n, ref int bitsleft, ref uint bitbuf)
{
bitbuf <<= n;
bitsleft -= n;
}
/// <summary>
/// Takes N bits from the buffer and puts them in v.
/// </summary>
private static uint READ_BITS(int n, byte[] inbuf, ref int inpos, ref int bitsleft, ref uint bitbuf)
{
uint v = 0;
if (n > 0)
{
ENSURE_BITS(n, inbuf, ref inpos, ref bitsleft, ref bitbuf);
v = PEEK_BITS(n, bitbuf);
REMOVE_BITS(n, ref bitsleft, ref bitbuf);
}
return v;
}
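// Example (illustrative): with bitbuf = 0xA0000000 and bitsleft >= 3,
// READ_BITS(3, ...) peeks the top three bits (0b101 = 5), then shifts
// them out of the buffer and reduces bitsleft by 3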
#endregion
#region Huffman Methods
/// <summary>
/// This function was coded by David Tritscher. It builds a fast huffman
/// decoding table out of just a canonical huffman code lengths table.
/// </summary>
/// <param name="nsyms">Total number of symbols in this huffman tree.</param>
/// <param name="nbits">
/// Any symbols with a code length of nbits or less can be decoded
/// in one lookup of the table.
/// </param>
/// <param name="length">A table to get code lengths from [0 to syms-1]</param>
/// <param name="table">The table to fill up with decoded symbols and pointers.</param>
/// <returns>
/// OK: 0
/// error: 1
/// </returns>
private static int make_decode_table(uint nsyms, uint nbits, byte[] length, ushort[] table)
{
ushort sym;
uint leaf;
byte bit_num = 1;
uint fill;
uint pos = 0; /* the current position in the decode table */
uint table_mask = (uint)(1 << (int)nbits);
uint bit_mask = table_mask >> 1; /* don't do 0 length codes */
uint next_symbol = bit_mask; /* base of allocation for long codes */
/* fill entries for codes short enough for a direct mapping */
while (bit_num <= nbits)
{
for (sym = 0; sym < nsyms; sym++)
{
if (length[sym] == bit_num)
{
leaf = pos;
if ((pos += bit_mask) > table_mask) return 1; /* table overrun */
/* fill all possible lookups of this symbol with the symbol itself */
fill = bit_mask;
while (fill-- > 0) table[leaf++] = sym;
}
}
bit_mask >>= 1;
bit_num++;
}
/* if there are any codes longer than nbits */
if (pos != table_mask)
{
/* clear the remainder of the table */
for (sym = (ushort)pos; sym < table_mask; sym++) table[sym] = 0;
/* give ourselves room for codes to grow by up to 16 more bits */
pos <<= 16;
table_mask <<= 16;
bit_mask = 1 << 15;
while (bit_num <= 16)
{
for (sym = 0; sym < nsyms; sym++)
{
if (length[sym] == bit_num)
{
leaf = pos >> 16;
for (fill = 0; fill < bit_num - nbits; fill++)
{
/* if this path hasn't been taken yet, 'allocate' two entries */
if (table[leaf] == 0)
{
table[(next_symbol << 1)] = 0;
table[(next_symbol << 1) + 1] = 0;
table[leaf] = (ushort)next_symbol++;
}
/* follow the path and select either left or right for next bit */
leaf = (uint)(table[leaf] << 1);
if (((pos >> (int)(15 - fill)) & 1) != 0) leaf++;
}
table[leaf] = sym;
if ((pos += bit_mask) > table_mask) return 1; /* table overflow */
}
}
bit_mask >>= 1;
bit_num++;
}
}
/* full table? */
if (pos == table_mask) return 0;
/* either erroneous table, or all elements are 0 - let's find out. */
for (sym = 0; sym < nsyms; sym++) if (length[sym] != 0) return 1;
return 0;
}
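// Illustrative example: lengths {1, 2, 2} with nbits = 2 produce the
// table [sym0, sym0, sym1, sym2]: the 1-bit code fills half of the
// 4-entry table and each 2-bit code fills a single slot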
#endregion
// Huffman macros
#region Huffman Macros
/// <summary>
/// Decodes one huffman symbol from the bitstream using the stated table and
/// puts it in v.
/// </summary>
private static int READ_HUFFSYM(ushort[] hufftbl, byte[] lentable, int tablebits, int maxsymbols, byte[] inbuf, ref int inpos, ref int bitsleft, ref uint bitbuf)
{
int v = 0, i, j = 0;
ENSURE_BITS(16, inbuf, ref inpos, ref bitsleft, ref bitbuf);
if ((i = hufftbl[PEEK_BITS(tablebits, bitbuf)]) >= maxsymbols)
{
j = 1 << (32 - tablebits);
do
{
j >>= 1;
i <<= 1;
i |= (bitbuf & j) != 0 ? 1 : 0;
if (j == 0)
throw new Exception("Corrupt Huffman data in LZX bitstream");
} while ((i = hufftbl[i]) >= maxsymbols);
}
j = lentable[v = i];
REMOVE_BITS(j, ref bitsleft, ref bitbuf);
return v;
}
/// <summary>
/// Reads in code lengths for symbols first to last in the given table. The
/// code lengths are stored in their own special LZX way.
/// </summary>
private static bool READ_LENGTHS(byte[] lentable, uint first, uint last, Bits lb, State state, byte[] inbuf, ref int inpos, ref int bitsleft, ref uint bitbuf)
{
lb.BitBuffer = bitbuf;
lb.BitsLeft = bitsleft;
lb.InputPosition = inpos;
if (ReadLengths(lentable, first, last, lb, state, inbuf) != 0)
return false;
bitbuf = lb.BitBuffer;
bitsleft = lb.BitsLeft;
inpos = lb.InputPosition;
return true;
}
#endregion
}
}

View File

@@ -0,0 +1,119 @@
using static BurnOutSharp.Models.Compression.LZX.Constants;
namespace BurnOutSharp.Compression.LZX
{
/// <see href="https://github.com/wine-mirror/wine/blob/master/dlls/cabinet/cabinet.h"/>
public class State
{
/// <summary>
/// the actual decoding window
/// </summary>
public byte[] window;
/// <summary>
/// window size (32Kb through 2Mb)
/// </summary>
public uint window_size;
/// <summary>
/// window size when it was first allocated
/// </summary>
public uint actual_size;
/// <summary>
/// current offset within the window
/// </summary>
public uint window_posn;
/// <summary>
/// for the LRU offset system
/// </summary>
public uint R0, R1, R2;
/// <summary>
/// number of main tree elements
/// </summary>
public ushort main_elements;
/// <summary>
/// have we started decoding at all yet?
/// </summary>
public int header_read;
/// <summary>
/// type of this block
/// </summary>
public ushort block_type;
/// <summary>
/// uncompressed length of this block
/// </summary>
public uint block_length;
/// <summary>
/// uncompressed bytes still left to decode
/// </summary>
public uint block_remaining;
/// <summary>
/// the number of CFDATA blocks processed
/// </summary>
public uint frames_read;
/// <summary>
/// magic header value used for transform
/// </summary>
public int intel_filesize;
/// <summary>
/// current offset in transform space
/// </summary>
public int intel_curpos;
/// <summary>
/// have we seen any translatable data yet?
/// </summary>
public int intel_started;
public ushort[] tblPRETREE_table = new ushort[(1 << LZX_PRETREE_TABLEBITS) + (LZX_PRETREE_MAXSYMBOLS << 1)];
public byte[] tblPRETREE_len = new byte[LZX_PRETREE_MAXSYMBOLS + LZX_LENTABLE_SAFETY];
public ushort[] tblMAINTREE_table = new ushort[(1 << LZX_MAINTREE_TABLEBITS) + (LZX_MAINTREE_MAXSYMBOLS << 1)];
public byte[] tblMAINTREE_len = new byte[LZX_MAINTREE_MAXSYMBOLS + LZX_LENTABLE_SAFETY];
public ushort[] tblLENGTH_table = new ushort[(1 << LZX_LENGTH_TABLEBITS) + (LZX_LENGTH_MAXSYMBOLS << 1)];
public byte[] tblLENGTH_len = new byte[LZX_LENGTH_MAXSYMBOLS + LZX_LENTABLE_SAFETY];
public ushort[] tblALIGNED_table = new ushort[(1 << LZX_ALIGNED_TABLEBITS) + (LZX_ALIGNED_MAXSYMBOLS << 1)];
public byte[] tblALIGNED_len = new byte[LZX_ALIGNED_MAXSYMBOLS + LZX_LENTABLE_SAFETY];
#region Decompression Tables
/// <summary>
/// An index to the position slot bases
/// </summary>
public uint[] PositionSlotBases = new uint[]
{
0, 1, 2, 3, 4, 6, 8, 12,
16, 24, 32, 48, 64, 96, 128, 192,
256, 384, 512, 768, 1024, 1536, 2048, 3072,
4096, 6144, 8192, 12288, 16384, 24576, 32768, 49152,
65536, 98304, 131072, 196608, 262144, 393216, 524288, 655360,
786432, 917504, 1048576, 1179648, 1310720, 1441792, 1572864, 1703936,
1835008, 1966080, 2097152
};
/// <summary>
/// How many bits of offset-from-base data are needed
/// </summary>
public byte[] ExtraBits = new byte[]
{
0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6,
7, 7, 8, 8, 9, 9, 10, 10, 11, 11, 12, 12, 13, 13, 14, 14,
15, 15, 16, 16, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17,
17, 17, 17
};
#endregion
}
}

View File

@@ -0,0 +1,636 @@
using System;
using System.Runtime.InteropServices;
using BurnOutSharp.Models.Compression.MSZIP;
using static BurnOutSharp.Models.Compression.MSZIP.Constants;
namespace BurnOutSharp.Compression.MSZIP
{
/// <see href="https://github.com/wine-mirror/wine/blob/master/dlls/cabinet/fdi.c"/>
public unsafe class Decompressor
{
/// <summary>
/// Decompress a byte array using a given State
/// </summary>
public static bool Decompress(State state, int inlen, byte[] inbuf, int outlen, byte[] outbuf)
{
fixed (byte* inpos = inbuf)
{
state.inpos = inpos;
state.bb = state.bk = state.window_posn = 0;
if (outlen > ZIPWSIZE)
return false;
// CK = Chris Kirmse, official Microsoft purloiner
if (state.inpos[0] != 0x43 || state.inpos[1] != 0x48)
return false;
state.inpos += 2;
int lastBlockFlag = 0;
do
{
if (InflateBlock(&lastBlockFlag, state, inbuf, outbuf) != 0)
return false;
} while (lastBlockFlag == 0);
// Return success
return true;
}
}
/// <summary>
/// Decompress a deflated block
/// </summary>
private static uint InflateBlock(int* e, State state, byte[] inbuf, byte[] outbuf)
{
// Make local bit buffer
uint b = state.bb;
uint k = state.bk;
// Read the deflate block header
var header = new DeflateBlockHeader();
// Read in last block bit
ZIPNEEDBITS(1, state, ref b, ref k);
header.BFINAL = (*e = (int)b & 1) != 0;
ZIPDUMPBITS(1, ref b, ref k);
// Read in block type
ZIPNEEDBITS(2, state, ref b, ref k);
header.BTYPE = (CompressionType)(b & 3);
ZIPDUMPBITS(2, ref b, ref k);
// Restore the global bit buffer
state.bb = b;
state.bk = k;
// Inflate that block type
switch (header.BTYPE)
{
case CompressionType.NoCompression:
return (uint)DecompressStored(state, inbuf, outbuf);
case CompressionType.FixedHuffman:
return (uint)DecompressFixed(state, inbuf, outbuf);
case CompressionType.DynamicHuffman:
return (uint)DecompressDynamic(state, inbuf, outbuf);
// Bad block type
case CompressionType.Reserved:
default:
return 2;
}
}
/// <summary>
/// "Decompress" a stored block
/// </summary>
private static int DecompressStored(State state, byte[] inbuf, byte[] outbuf)
{
// Make local copies of globals
uint b = state.bb;
uint k = state.bk;
uint w = state.window_posn;
// Go to byte boundary
int n = (int)(k & 7);
ZIPDUMPBITS(n, ref b, ref k);
// Read the stored block header
var header = new NonCompressedBlockHeader();
// Get the length and its complement
ZIPNEEDBITS(16, state, ref b, ref k);
header.LEN = (ushort)(b & 0xffff);
ZIPDUMPBITS(16, ref b, ref k);
ZIPNEEDBITS(16, state, ref b, ref k);
header.NLEN = (ushort)(b & 0xffff);
if (header.LEN != (~header.NLEN & 0xffff))
return 1; // Error in compressed data
ZIPDUMPBITS(16, ref b, ref k);
// Read and output the stored data
n = header.LEN;
while (n-- > 0)
{
ZIPNEEDBITS(8, state, ref b, ref k);
outbuf[w++] = (byte)b;
ZIPDUMPBITS(8, ref b, ref k);
}
// Restore the globals from the locals
state.window_posn = w;
state.bb = b;
state.bk = k;
return 0;
}
/// <summary>
/// Decompress a block originally compressed with fixed Huffman codes
/// </summary>
private static int DecompressFixed(State state, byte[] inbuf, byte[] outbuf)
{
// Create the block header
FixedHuffmanCompressedBlockHeader header = new FixedHuffmanCompressedBlockHeader();
fixed (uint* l = state.ll)
fixed (ushort* Zipcplens = CopyLengths)
fixed (ushort* Zipcplext = LiteralExtraBits)
fixed (ushort* Zipcpdist = CopyOffsets)
fixed (ushort* Zipcpdext = DistanceExtraBits)
{
// Copy the fixed literal lengths into the pinned buffer; assigning a
// new array to state.ll would not change what the pinned pointer l sees
Array.Copy(header.LiteralLengths, state.ll, header.LiteralLengths.Length);
HuffmanNode* fixed_tl;
int fixed_bl = 7;
// Build the literal length tree
int i = BuildHuffmanTree(l, 288, 257, Zipcplens, Zipcplext, &fixed_tl, &fixed_bl, state);
if (i != 0)
return i;
// Copy the fixed distance codes into the pinned buffer for the same reason
Array.Copy(header.DistanceCodes, state.ll, header.DistanceCodes.Length);
HuffmanNode* fixed_td;
int fixed_bd = 5;
// Build the distance code tree
i = BuildHuffmanTree(l, 30, 0, Zipcpdist, Zipcpdext, &fixed_td, &fixed_bd, state);
if (i != 0)
return i;
// Decompress until an end-of-block code
return InflateCodes(fixed_tl, fixed_td, fixed_bl, fixed_bd, state, inbuf, outbuf);
}
}
/// <summary>
/// Decompress a block originally compressed with dynamic Huffman codes
/// </summary>
private static int DecompressDynamic(State state, byte[] inbuf, byte[] outbuf)
{
int i; /* temporary variables */
uint j;
uint l; /* last length */
uint m; /* mask for bit lengths table */
uint n; /* number of lengths to get */
HuffmanNode* tl; /* literal/length code table */
HuffmanNode* td; /* distance code table */
int bl; /* lookup bits for tl */
int bd; /* lookup bits for td */
uint nb; /* number of bit length codes */
uint nl; /* number of literal/length codes */
uint nd; /* number of distance codes */
uint b; /* bit buffer */
uint k; /* number of bits in bit buffer */
/* make local bit buffer */
b = state.bb;
k = state.bk;
fixed (uint* ll = state.ll)
{
/* read in table lengths */
ZIPNEEDBITS(5, state, ref b, ref k);
nl = 257 + (b & 0x1f); /* number of literal/length codes */
ZIPDUMPBITS(5, ref b, ref k);
ZIPNEEDBITS(5, state, ref b, ref k);
nd = 1 + (b & 0x1f); /* number of distance codes */
ZIPDUMPBITS(5, ref b, ref k);
ZIPNEEDBITS(4, state, ref b, ref k);
nb = 4 + (b & 0xf); /* number of bit length codes */
ZIPDUMPBITS(4, ref b, ref k);
if (nl > 288 || nd > 32)
return 1; /* bad lengths */
/* read in bit-length-code lengths */
for (j = 0; j < nb; j++)
{
ZIPNEEDBITS(3, state, ref b, ref k);
state.ll[BitLengthOrder[j]] = b & 7;
ZIPDUMPBITS(3, ref b, ref k);
}
for (; j < 19; j++)
state.ll[BitLengthOrder[j]] = 0;
/* build decoding table for trees--single level, 7 bit lookup */
bl = 7;
if ((i = BuildHuffmanTree(ll, 19, 19, null, null, &tl, &bl, state)) != 0)
return i; /* incomplete code set */
/* read in literal and distance code lengths */
n = nl + nd;
m = BitMasks[bl];
i = (int)(l = 0);
while ((uint)i < n)
{
ZIPNEEDBITS(bl, state, ref b, ref k);
j = (td = tl + (b & m))->b;
ZIPDUMPBITS((int)j, ref b, ref k);
j = td->n;
if (j < 16) /* length of code in bits (0..15) */
{
state.ll[i++] = l = j; /* save last length in l */
}
else if (j == 16) /* repeat last length 3 to 6 times */
{
ZIPNEEDBITS(2, state, ref b, ref k);
j = 3 + (b & 3);
ZIPDUMPBITS(2, ref b, ref k);
if ((uint)i + j > n)
return 1;
while (j-- > 0)
{
state.ll[i++] = l;
}
}
else if (j == 17) /* 3 to 10 zero length codes */
{
ZIPNEEDBITS(3, state, ref b, ref k);
j = 3 + (b & 7);
ZIPDUMPBITS(3, ref b, ref k);
if ((uint)i + j > n)
return 1;
while (j-- > 0)
state.ll[i++] = 0;
l = 0;
}
else /* j == 18: 11 to 138 zero length codes */
{
ZIPNEEDBITS(7, state, ref b, ref k);
j = 11 + (b & 0x7f);
ZIPDUMPBITS(7, ref b, ref k);
if ((uint)i + j > n)
return 1;
while (j-- > 0)
state.ll[i++] = 0;
l = 0;
}
}
/* restore the global bit buffer */
state.bb = b;
state.bk = k;
fixed (ushort* Zipcplens = CopyLengths)
fixed (ushort* Zipcplext = LiteralExtraBits)
fixed (ushort* Zipcpdist = CopyOffsets)
fixed (ushort* Zipcpdext = DistanceExtraBits)
{
/* build the decoding tables for literal/length and distance codes */
bl = ZIPLBITS;
if ((i = BuildHuffmanTree(ll, nl, 257, Zipcplens, Zipcplext, &tl, &bl, state)) != 0)
{
return i; /* incomplete code set */
}
bd = ZIPDBITS;
BuildHuffmanTree(ll + nl, nd, 0, Zipcpdist, Zipcpdext, &td, &bd, state);
/* decompress until an end-of-block code */
if (InflateCodes(tl, td, bl, bd, state, inbuf, outbuf) != 0)
return 1;
return 0;
}
}
}
/// <summary>
/// Build a Huffman tree from a set of lengths
/// </summary>
private static int BuildHuffmanTree(uint* b, uint n, uint s, ushort* d, ushort* e, HuffmanNode** t, int* m, State state)
{
uint a; /* counter for codes of length k */
uint el; /* length of EOB code (value 256) */
uint f; /* i repeats in table every f entries */
int g; /* maximum code length */
int h; /* table level */
uint i; /* counter, current code */
uint j; /* counter */
int k; /* number of bits in current code */
int* l; /* stack of bits per table */
uint* p; /* pointer into state.c[],state.b[],state.v[] */
HuffmanNode* q; /* points to current table */
HuffmanNode r = new HuffmanNode(); /* table entry for structure assignment */
int w; /* bits before this table == (l * h) */
uint* xp; /* pointer into x */
int y; /* number of dummy codes added */
uint z; /* number of entries in current table */
fixed (int* state_lx_ptr = state.lx)
{
l = state_lx_ptr + 1;
/* Generate counts for each bit length */
el = n > 256 ? b[256] : ZIPBMAX; /* set length of EOB code, if any */
for (i = 0; i < ZIPBMAX + 1; ++i)
state.c[i] = 0;
p = b; i = n;
do
{
state.c[*p]++; p++; /* assume all entries <= ZIPBMAX */
} while (--i > 0);
if (state.c[0] == n) /* null input--all zero length codes */
{
*t = null;
*m = 0;
return 0;
}
/* Find minimum and maximum length, bound *m by those */
for (j = 1; j <= ZIPBMAX; j++)
{
if (state.c[j] > 0)
break;
}
k = (int)j; /* minimum code length */
if ((uint)*m < j)
*m = (int)j;
for (i = ZIPBMAX; i > 0; i--)
{
if (state.c[i] > 0)
break;
}
g = (int)i; /* maximum code length */
if ((uint)*m > i)
*m = (int)i;
/* Adjust last length count to fill out codes, if needed */
for (y = 1 << (int)j; j < i; j++, y <<= 1)
{
if ((y -= (int)state.c[j]) < 0)
return 2; /* bad input: more codes than bits */
}
if ((y -= (int)state.c[i]) < 0)
return 2;
state.c[i] += (uint)y;
/* Generate starting offsets into the value table for each length */
state.x[1] = j = 0;
fixed (uint* state_c_ptr = state.c)
fixed (uint* state_x_ptr = state.x)
{
p = state_c_ptr + 1;
xp = state_x_ptr + 2;
while (--i > 0)
{
/* note that i == g from above */
*xp++ = (j += *p++);
}
}
/* Make a table of values in order of bit lengths */
p = b; i = 0;
do
{
if ((j = *p++) != 0)
state.v[state.x[j]++] = i;
} while (++i < n);
/* Generate the Huffman codes and for each, make the table entries */
state.x[0] = i = 0; /* first Huffman code is zero */
fixed (uint* state_v_ptr = state.v)
{
p = state_v_ptr; /* grab values in bit order */
h = -1; /* no tables yet--level -1 */
w = l[-1] = 0; /* no bits decoded yet */
state.u[0] = default; /* just to keep compilers happy */
q = null; /* ditto */
z = 0; /* ditto */
/* go through the bit lengths (k already is bits in shortest code) */
for (; k <= g; k++)
{
a = state.c[k];
while (a-- > 0)
{
/* here i is the Huffman code of length k bits for value *p */
/* make tables up to required level */
while (k > w + l[h])
{
w += l[h++]; /* add bits already decoded */
/* compute minimum size table less than or equal to *m bits */
if ((z = (uint)(g - w)) > (uint)*m) /* upper limit */
z = (uint)*m;
if ((f = (uint)(1 << (int)(j = (uint)(k - w)))) > a + 1) /* try a k-w bit table */
{ /* too few codes for k-w bit table */
f -= a + 1; /* deduct codes from patterns left */
fixed (uint* state_c_ptr = state.c)
{
xp = state_c_ptr + k;
while (++j < z) /* try smaller tables up to z bits */
{
if ((f <<= 1) <= *++xp)
break; /* enough codes to use up j bits */
f -= *xp; /* else deduct codes from patterns */
}
}
}
if ((uint)w + j > el && (uint)w < el)
j = (uint)(el - w); /* make EOB code end at table */
z = (uint)(1 << (int)j); /* table entries for j-bit table */
l[h] = (int)j; /* set table size in stack */
/* allocate and link in new table */
q = (HuffmanNode*)Marshal.AllocHGlobal((int)((z + 1) * sizeof(HuffmanNode)));
*t = q + 1; /* link to list for HuffmanNode_free() */
*(t = &(*q).t) = null;
state.u[h] = ++q; /* table starts after link */
/* connect to last table, if there is one */
if (h > 0)
{
state.x[h] = i; /* save pattern for backing up */
r.b = (byte)l[h - 1]; /* bits to dump before this table */
r.e = (byte)(16 + j); /* bits in this table */
r.t = q; /* pointer to this table */
j = (uint)((i & ((1 << w) - 1)) >> (w - l[h - 1]));
state.u[h - 1][j] = r; /* connect to last table */
}
}
/* set up table entry in r */
r.b = (byte)(k - w);
fixed (uint* state_v_ptr_comp = state.v)
{
if (p >= state_v_ptr_comp + n)
{
r.e = 99; /* out of values--invalid code */
}
else if (*p < s)
{
r.e = (byte)(*p < 256 ? 16 : 15); /* 256 is end-of-block code */
r.n = (ushort)*p++; /* simple code is just the value */
}
else
{
r.e = (byte)e[*p - s]; /* non-simple--look up in lists */
r.n = d[*p++ - s];
}
}
/* fill code-like entries with r */
f = (uint)(1 << (k - w));
for (j = i >> w; j < z; j += f)
{
q[j] = r;
}
/* backwards increment the k-bit code i */
for (j = (uint)(1 << (k - 1)); (i & j) != 0; j >>= 1)
{
i ^= j;
}
i ^= j;
/* backup over finished tables */
while ((i & ((1 << w) - 1)) != state.x[h])
w -= l[--h]; /* don't need to update q */
}
}
}
/* return actual size of base table */
*m = l[0];
}
/* Return true (1) if we were given an incomplete table */
return y != 0 && g != 1 ? 1 : 0;
}
/// <summary>
/// Inflate codes into Huffman trees
/// </summary>
private static int InflateCodes(HuffmanNode* tl, HuffmanNode* td, int bl, int bd, State state, byte[] inbuf, byte[] outbuf)
{
uint e; /* table entry flag/number of extra bits */
uint n, d; /* length and index for copy */
uint w; /* current window position */
HuffmanNode* t; /* pointer to table entry */
uint ml, md; /* masks for bl and bd bits */
uint b; /* bit buffer */
uint k; /* number of bits in bit buffer */
/* make local copies of globals */
b = state.bb; /* initialize bit buffer */
k = state.bk;
w = state.window_posn; /* initialize window position */
/* inflate the coded data */
ml = BitMasks[bl]; /* precompute masks for speed */
md = BitMasks[bd];
for (; ; )
{
ZIPNEEDBITS(bl, state, ref b, ref k);
if ((e = (t = tl + (b & ml))->e) > 16)
{
do
{
if (e == 99)
return 1;
ZIPDUMPBITS(t->b, ref b, ref k);
e -= 16;
ZIPNEEDBITS((int)e, state, ref b, ref k);
} while ((e = (*(t = t->t + (b & BitMasks[e]))).e) > 16);
}
ZIPDUMPBITS(t->b, ref b, ref k);
if (e == 16) /* then it's a literal */
{
outbuf[w++] = (byte)t->n;
}
else /* it's an EOB or a length */
{
/* exit if end of block */
if (e == 15)
break;
/* get length of block to copy */
ZIPNEEDBITS((int)e, state, ref b, ref k);
n = t->n + (b & BitMasks[e]);
ZIPDUMPBITS((int)e, ref b, ref k);
/* decode distance of block to copy */
ZIPNEEDBITS(bd, state, ref b, ref k);
if ((e = (*(t = td + (b & md))).e) > 16)
do
{
if (e == 99)
return 1;
ZIPDUMPBITS(t->b, ref b, ref k);
e -= 16;
ZIPNEEDBITS((int)e, state, ref b, ref k);
} while ((e = (*(t = t->t + (b & BitMasks[e]))).e) > 16);
ZIPDUMPBITS(t->b, ref b, ref k);
ZIPNEEDBITS((int)e, state, ref b, ref k);
d = w - t->n - (b & BitMasks[e]);
ZIPDUMPBITS((int)e, ref b, ref k);
do
{
d &= ZIPWSIZE - 1;
e = ZIPWSIZE - Math.Max(d, w);
e = Math.Min(e, n);
n -= e;
do
{
outbuf[w++] = outbuf[d++];
} while (--e > 0);
} while (n > 0);
}
}
/* restore the globals from the locals */
state.window_posn = w; /* restore global window pointer */
state.bb = b; /* restore global bit buffer */
state.bk = k;
/* done */
return 0;
}
#region Macros
private static void ZIPNEEDBITS(int n, State state, ref uint bitBuffer, ref uint bitCount)
{
while (bitCount < n)
{
int c = *state.inpos++;
bitBuffer |= (uint)(c << (int)bitCount);
bitCount += 8;
}
}
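// Illustrative trace: MSZIP fills the bit buffer LSB-first, one byte
// per iteration; with bitCount = 0 and next input byte 0x12, a single
// iteration leaves bitBuffer = 0x00000012 and bitCount = 8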
private static void ZIPDUMPBITS(int n, ref uint bitBuffer, ref uint bitCount)
{
bitBuffer >>= n;
bitCount -= (uint)n;
}
#endregion
}
}

View File

@@ -0,0 +1,29 @@
namespace BurnOutSharp.Compression.MSZIP
{
public unsafe struct HuffmanNode
{
/// <summary>
/// Number of extra bits or operation
/// </summary>
public byte e;
/// <summary>
/// Number of bits in this code or subcode
/// </summary>
public byte b;
#region v (a union in the original C struct)
/// <summary>
/// Literal, length base, or distance base
/// </summary>
public ushort n;
/// <summary>
/// Pointer to next level of table
/// </summary>
public HuffmanNode* t;
#endregion
}
}

View File

@@ -0,0 +1,56 @@
using static BurnOutSharp.Models.Compression.MSZIP.Constants;
namespace BurnOutSharp.Compression.MSZIP
{
/// <see href="https://github.com/wine-mirror/wine/blob/master/dlls/cabinet/cabinet.h"/>
public unsafe class State
{
/// <summary>
/// Current offset within the window
/// </summary>
public uint window_posn;
/// <summary>
/// Bit buffer
/// </summary>
public uint bb;
/// <summary>
/// Bits in bit buffer
/// </summary>
public uint bk;
/// <summary>
/// Literal/length and distance code lengths
/// </summary>
public uint[] ll = new uint[288 + 32];
/// <summary>
/// Bit length count table
/// </summary>
public uint[] c = new uint[ZIPBMAX + 1];
/// <summary>
/// Memory for l[-1..ZIPBMAX-1]
/// </summary>
public int[] lx = new int[ZIPBMAX + 1];
/// <summary>
/// Table stack
/// </summary>
public HuffmanNode*[] u = new HuffmanNode*[ZIPBMAX];
/// <summary>
/// Values in order of bit length
/// </summary>
public uint[] v = new uint[ZIPN_MAX];
/// <summary>
/// Bit offsets, then code stack
/// </summary>
public uint[] x = new uint[ZIPBMAX + 1];
/// <summary>
/// Current position in the input buffer
/// </summary>
public byte* inpos;
}
}

View File

@@ -0,0 +1,499 @@
using System;
using System.Linq;
using BurnOutSharp.Models.Compression.Quantum;
using BurnOutSharp.Models.MicrosoftCabinet;
namespace BurnOutSharp.Compression.Quantum
{
/// <see href="https://github.com/wine-mirror/wine/blob/master/dlls/cabinet/cabinet.h"/>
/// <see href="https://github.com/wine-mirror/wine/blob/master/dlls/cabinet/fdi.c"/>
/// <see href="https://github.com/wine-mirror/wine/blob/master/include/fdi.h"/>
/// <see href="http://www.russotto.net/quantumcomp.html"/>
public static class Decompressor
{
/// <summary>
/// Decompress a byte array using a given State
/// </summary>
public static int Decompress(State state, int inlen, byte[] inbuf, int outlen, byte[] outbuf)
{
int inpos = 0, outpos = 0; // inbuf[0], outbuf[0]
int window = 0; // state.Window[0]
int runsrc, rundest;
uint windowPosition = state.WindowPosition;
uint windowSize = state.WindowSize;
int extra, togo = outlen, matchLength = 0, copyLength;
byte selector, sym;
uint matchOffset = 0;
// Make local copies of state variables
uint bitBuffer = state.BitBuffer;
int bitsLeft = state.BitsLeft;
ushort H = 0xFFFF, L = 0;
// Read initial value of C
ushort C = (ushort)Q_READ_BITS(16, inbuf, ref inpos, ref bitsLeft, ref bitBuffer);
// Apply 2^x-1 mask
windowPosition &= windowSize - 1;
while (togo > 0)
{
selector = (byte)GET_SYMBOL(state.SelectorModel, ref H, ref L, ref C, inbuf, ref inpos, ref bitsLeft, ref bitBuffer);
switch (selector)
{
// Selector 0 = literal model, 64 entries, 0x00-0x3F
case 0:
sym = (byte)GET_SYMBOL(state.Model0, ref H, ref L, ref C, inbuf, ref inpos, ref bitsLeft, ref bitBuffer);
state.Window[window + windowPosition++] = sym;
togo--;
break;
// Selector 1 = literal model, 64 entries, 0x40-0x7F
case 1:
sym = (byte)GET_SYMBOL(state.Model1, ref H, ref L, ref C, inbuf, ref inpos, ref bitsLeft, ref bitBuffer);
state.Window[window + windowPosition++] = sym;
togo--;
break;
// Selector 2 = literal model, 64 entries, 0x80-0xBF
case 2:
sym = (byte)GET_SYMBOL(state.Model2, ref H, ref L, ref C, inbuf, ref inpos, ref bitsLeft, ref bitBuffer);
state.Window[window + windowPosition++] = sym;
togo--;
break;
// Selector 3 = literal model, 64 entries, 0xC0-0xFF
case 3:
sym = (byte)GET_SYMBOL(state.Model3, ref H, ref L, ref C, inbuf, ref inpos, ref bitsLeft, ref bitBuffer);
state.Window[window + windowPosition++] = sym;
togo--;
break;
// Selector 4 = fixed length of 3
case 4:
sym = (byte)GET_SYMBOL(state.Model4, ref H, ref L, ref C, inbuf, ref inpos, ref bitsLeft, ref bitBuffer);
extra = (int)Q_READ_BITS(state.ExtraBits[sym], inbuf, ref inpos, ref bitsLeft, ref bitBuffer);
matchOffset = (uint)(state.PositionSlotBases[sym] + extra + 1);
matchLength = 3;
break;
// Selector 5 = fixed length of 4
case 5:
sym = (byte)GET_SYMBOL(state.Model5, ref H, ref L, ref C, inbuf, ref inpos, ref bitsLeft, ref bitBuffer);
extra = (int)Q_READ_BITS(state.ExtraBits[sym], inbuf, ref inpos, ref bitsLeft, ref bitBuffer);
matchOffset = (uint)(state.PositionSlotBases[sym] + extra + 1);
matchLength = 4;
break;
// Selector 6 = variable length
case 6:
sym = (byte)GET_SYMBOL(state.Model6Length, ref H, ref L, ref C, inbuf, ref inpos, ref bitsLeft, ref bitBuffer);
extra = (int)Q_READ_BITS(state.LengthExtraBits[sym], inbuf, ref inpos, ref bitsLeft, ref bitBuffer);
matchLength = state.LengthBases[sym] + extra + 5;
sym = (byte)GET_SYMBOL(state.Model6Position, ref H, ref L, ref C, inbuf, ref inpos, ref bitsLeft, ref bitBuffer);
extra = (int)Q_READ_BITS(state.ExtraBits[sym], inbuf, ref inpos, ref bitsLeft, ref bitBuffer);
matchOffset = (uint)(state.PositionSlotBases[sym] + extra + 1);
break;
default:
return inpos;
}
// If this is a match
if (selector >= 4)
{
rundest = (int)(window + windowPosition);
togo -= matchLength;
// Copy any wrapped around source data
if (windowPosition >= matchOffset)
{
// No wrap
runsrc = (int)(rundest - matchOffset);
}
else
{
runsrc = (int)(rundest + (windowSize - matchOffset));
copyLength = (int)(matchOffset - windowPosition);
if (copyLength < matchLength)
{
matchLength -= copyLength;
windowPosition += (uint)copyLength;
while (copyLength-- > 0)
{
state.Window[rundest++] = state.Window[runsrc++];
}
runsrc = window;
}
}
windowPosition += (uint)matchLength;
// Copy match data - no worries about destination wraps
while (matchLength-- > 0)
{
state.Window[rundest++] = state.Window[runsrc++];
// Wrap within the logical window size, not the allocated buffer,
// since a reused window may be larger than WindowSize
if (rundest >= windowSize)
rundest = 0;
if (runsrc >= windowSize)
runsrc = 0;
}
}
// If we hit the end of the window, copy to the output and wrap
if (windowPosition >= windowSize)
{
Array.Copy(state.Window, 0, outbuf, outpos, Math.Min(windowSize, outlen));
outpos += (int)Math.Min(windowSize, outlen);
outlen -= (int)Math.Min(windowSize, outlen);
windowPosition = 0;
}
}
if (togo > 0)
return inpos;
if (outlen > 0)
{
int sourceIndex = (int)((windowPosition == 0 ? windowSize : windowPosition) - outlen);
Array.Copy(state.Window, sourceIndex, outbuf, outpos, outlen);
}
// Cache the decompression state variables
state.BitBuffer = bitBuffer;
state.BitsLeft = bitsLeft;
state.WindowPosition = windowPosition;
return inpos;
}
/// <summary>
/// Initialize a Quantum decompressor state
/// </summary>
public static bool InitState(State state, CFFOLDER folder)
{
int window = ((ushort)folder.CompressionType >> 8) & 0x1f;
int level = ((ushort)folder.CompressionType >> 4) & 0xF;
return InitState(state, window, level);
}
/// <summary>
/// Initialize a Quantum decompressor state
/// </summary>
public static bool InitState(State state, int window, int level)
{
uint windowSize = (uint)(1 << window);
int maxSize = window * 2;
// QTM supports window sizes of 2^10 (1Kb) through 2^21 (2Mb)
// If a previously allocated window is big enough, keep it
if (window < 10 || window > 21)
return false;
// If we don't have the proper window size
if (state.ActualSize < windowSize)
state.Window = null;
// If we have no window
if (state.Window == null)
{
state.Window = new byte[windowSize];
state.ActualSize = windowSize;
}
// Set the window size and position
state.WindowSize = windowSize;
state.WindowPosition = 0;
// Initialize arithmetic coding models
state.SelectorModel = CreateModel(state.SelectorModelSymbols, 7, 0);
state.Model0 = CreateModel(state.Model0Symbols, 0x40, 0x00);
state.Model1 = CreateModel(state.Model1Symbols, 0x40, 0x40);
state.Model2 = CreateModel(state.Model2Symbols, 0x40, 0x80);
state.Model3 = CreateModel(state.Model3Symbols, 0x40, 0xC0);
// Model 4 depends on table size, ranges from 20 to 24
state.Model4 = CreateModel(state.Model4Symbols, (maxSize < 24) ? maxSize : 24, 0);
// Model 5 depends on table size, ranges from 20 to 36
state.Model5 = CreateModel(state.Model5Symbols, (maxSize < 36) ? maxSize : 36, 0);
// Model 6 Position depends on table size, ranges from 20 to 42
state.Model6Position = CreateModel(state.Model6PositionSymbols, (maxSize < 42) ? maxSize : 42, 0);
state.Model6Length = CreateModel(state.Model6LengthSymbols, 27, 0);
return true;
}
/// <summary>
/// Initialize a Quantum model that decodes symbols from initialSymbol to (initialSymbol + entryCount - 1)
/// </summary>
private static Model CreateModel(ModelSymbol[] symbols, int entryCount, int initialSymbol)
{
// Set the basic values
Model model = new Model
{
TimeToReorder = 4,
Entries = entryCount,
Symbols = symbols,
};
// Clear out the look-up table
model.LookupTable = Enumerable.Repeat<ushort>(0xFFFF, model.LookupTable.Length).ToArray();
// Loop through and build the look-up table
for (ushort i = 0; i < entryCount; i++)
{
// Set up a look-up entry for symbol
model.LookupTable[i + initialSymbol] = i;
// Create the symbol in the table
model.Symbols[i] = new ModelSymbol
{
Symbol = (ushort)(i + initialSymbol),
CumulativeFrequency = (ushort)(entryCount - i),
};
}
// Set the last symbol frequency to 0
model.Symbols[entryCount] = new ModelSymbol { CumulativeFrequency = 0 };
return model;
}
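// Illustrative example: CreateModel(symbols, 4, 0) produces cumulative
// frequencies {4, 3, 2, 1, 0}, i.e. every symbol starts out equally
// likely with frequency 1, and LookupTable maps symbol i back to slot i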
/// <summary>
/// Update the Quantum model for a particular symbol
/// </summary>
private static void UpdateModel(Model model, int symbol)
{
// Update the cumulative frequency for all symbols less than the provided
for (int i = 0; i < symbol; i++)
{
model.Symbols[i].CumulativeFrequency += 8;
}
// If the total frequency (symbol 0's cumulative frequency) is at most 3800, no reordering is needed
if (model.Symbols[0].CumulativeFrequency <= 3800)
return;
// If we have more than 1 shift left in the model
if (--model.TimeToReorder != 0)
{
// Loop through the entries from highest to lowest,
// performing the shift on the cumulative frequencies
for (int i = model.Entries - 1; i >= 0; i--)
{
// -1, not -2; the 0 entry saves this
model.Symbols[i].CumulativeFrequency >>= 1;
if (model.Symbols[i].CumulativeFrequency <= model.Symbols[i + 1].CumulativeFrequency)
model.Symbols[i].CumulativeFrequency = (ushort)(model.Symbols[i + 1].CumulativeFrequency + 1);
}
}
// If we have no shifts left in the model
else
{
// Reset the shifts left value to 50
model.TimeToReorder = 50;
// Loop through the entries setting the cumulative frequencies
for (int i = 0; i < model.Entries; i++)
{
// No -1, want to include the 0 entry
// This converts cumfreqs into frequencies, then shifts right
model.Symbols[i].CumulativeFrequency -= model.Symbols[i + 1].CumulativeFrequency;
model.Symbols[i].CumulativeFrequency++; // Avoid losing things entirely
model.Symbols[i].CumulativeFrequency >>= 1;
}
// Now sort by frequencies, decreasing order -- this must be an
// inplace selection sort, or a sort with the same (in)stability
// characteristics
for (int i = 0; i < model.Entries - 1; i++)
{
for (int j = i + 1; j < model.Entries; j++)
{
if (model.Symbols[i].CumulativeFrequency < model.Symbols[j].CumulativeFrequency)
{
var temp = model.Symbols[i];
model.Symbols[i] = model.Symbols[j];
model.Symbols[j] = temp;
}
}
}
// Then convert frequencies back to cumfreq
for (int i = model.Entries - 1; i >= 0; i--)
{
model.Symbols[i].CumulativeFrequency += model.Symbols[i + 1].CumulativeFrequency;
}
// Then update the other part of the table
for (ushort i = 0; i < model.Entries; i++)
{
model.LookupTable[model.Symbols[i].Symbol] = i;
}
}
}
// Bitstream reading macros (Quantum / normal byte order)
#region Macros
/*
* These bit access routines work by using the area beyond the MSB and the
* LSB as a free source of zeroes. This avoids having to mask any bits.
* So we have to know the bit width of the bitbuffer variable; here it
* is the 32-bit C# uint.
*
* The buffer should be at least 16 bits wide. Unlike LZX's Huffman decoding,
* Quantum's arithmetic decoding only needs 1 bit at a time, it doesn't
* need an assured number. Retrieving larger bitstrings can be done with
* multiple reads and fills of the bitbuffer. The code should work fine
* for machines where Uint >= 32 bits.
*
* Also note that Quantum reads bytes in normal order; LZX is in
* little-endian order.
*/
/// <summary>
/// Should be used first to set up the system
/// </summary>
private static void Q_INIT_BITSTREAM(out int bitsleft, out uint bitbuf)
{
bitsleft = 0;
bitbuf = 0;
}
/// <summary>
/// Adds more data to the bit buffer, if there is room for another 16 bits.
/// </summary>
private static void Q_FILL_BUFFER(byte[] inbuf, ref int inpos, ref int bitsleft, ref uint bitbuf)
{
if (bitsleft > 8)
return;
byte b0 = inpos + 0 < inbuf.Length ? inbuf[inpos + 0] : (byte)0;
byte b1 = inpos + 1 < inbuf.Length ? inbuf[inpos + 1] : (byte)0;
bitbuf |= (uint)(((b0 << 8) | b1) << (16 - bitsleft));
bitsleft += 16;
inpos += 2;
}
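// Illustrative trace: with bitsleft = 0 and next input bytes 0x12 0x34,
// Quantum reads the pair in normal (big-endian) order:
// bitbuf = 0x12340000, bitsleft = 16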
/// <summary>
/// Extracts (without removing) N bits from the bit buffer
/// </summary>
private static uint Q_PEEK_BITS(int n, uint bitbuf)
{
return bitbuf >> (32 - n);
}
/// <summary>
/// Removes N bits from the bit buffer
/// </summary>
private static void Q_REMOVE_BITS(int n, ref int bitsleft, ref uint bitbuf)
{
bitbuf <<= n;
bitsleft -= n;
}
/// <summary>
/// Takes N bits from the buffer and puts them in v. Unlike LZX, this can loop
/// several times to get the requisite number of bits.
/// </summary>
private static uint Q_READ_BITS(int n, byte[] inbuf, ref int inpos, ref int bitsleft, ref uint bitbuf)
{
uint v = 0; int bitrun;
for (int bitsneed = n; bitsneed != 0; bitsneed -= bitrun)
{
Q_FILL_BUFFER(inbuf, ref inpos, ref bitsleft, ref bitbuf);
bitrun = (bitsneed > bitsleft) ? bitsleft : bitsneed;
v = (v << bitrun) | Q_PEEK_BITS(bitrun, bitbuf);
Q_REMOVE_BITS(bitrun, ref bitsleft, ref bitbuf);
}
return v;
}
/// <summary>
/// Fetches the next symbol from the stated model and puts it in symbol.
/// It may need to read the bitstream to do this.
/// </summary>
private static ushort GET_SYMBOL(Model model, ref ushort H, ref ushort L, ref ushort C, byte[] inbuf, ref int inpos, ref int bitsleft, ref uint bitbuf)
{
ushort symf = GetFrequency(model.Symbols[0].CumulativeFrequency, H, L, C);
int i;
for (i = 1; i < model.Entries; i++)
{
if (model.Symbols[i].CumulativeFrequency <= symf)
break;
}
ushort symbol = model.Symbols[i - 1].Symbol;
GetCode(model.Symbols[i - 1].CumulativeFrequency,
model.Symbols[i].CumulativeFrequency,
model.Symbols[0].CumulativeFrequency,
ref H, ref L, ref C,
inbuf, ref inpos, ref bitsleft, ref bitbuf);
UpdateModel(model, i);
return symbol;
}
/// <summary>
/// Get the frequency for a given range and total frequency
/// </summary>
private static ushort GetFrequency(ushort totalFrequency, ushort H, ushort L, ushort C)
{
uint range = (uint)(((H - L) & 0xFFFF) + 1);
uint freq = (uint)(((C - L + 1) * totalFrequency - 1) / range);
return (ushort)(freq & 0xFFFF);
}
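// Illustrative example: with L = 0, H = 0xFFFF and totalFrequency = 100,
// range = 0x10000, so a code value C = 0x7FFF maps to
// ((0x8000 * 100) - 1) / 0x10000 = 49, roughly the middle of the scale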
/// <summary>
/// The decoder renormalization loop
/// </summary>
private static void GetCode(int previousFrequency,
int cumulativeFrequency,
int totalFrequency,
ref ushort H,
ref ushort L,
ref ushort C,
byte[] inbuf,
ref int inpos,
ref int bitsleft,
ref uint bitbuf)
{
uint range = (uint)((H - L) + 1);
H = (ushort)(L + ((previousFrequency * range) / totalFrequency) - 1);
L = (ushort)(L + (cumulativeFrequency * range) / totalFrequency);
while (true)
{
if ((L & 0x8000) != (H & 0x8000))
{
if ((L & 0x4000) == 0 || (H & 0x4000) != 0)
break;
// Underflow case
C ^= 0x4000;
L &= 0x3FFF;
H |= 0x4000;
}
L <<= 1;
H = (ushort)((H << 1) | 1);
C = (ushort)((C << 1) | Q_READ_BITS(1, inbuf, ref inpos, ref bitsleft, ref bitbuf));
}
}
#endregion
}
}

View File

@@ -0,0 +1,193 @@
using BurnOutSharp.Models.Compression.Quantum;
namespace BurnOutSharp.Compression.Quantum
{
/// <see href="https://github.com/kyz/libmspack/blob/master/libmspack/mspack/qtmd.c"/>
/// <see href="https://github.com/wine-mirror/wine/blob/master/dlls/cabinet/cabinet.h"/>
public class State
{
/// <summary>
/// The actual decoding window
/// </summary>
public byte[] Window;
/// <summary>
/// Window size (1Kb through 2Mb)
/// </summary>
public uint WindowSize;
/// <summary>
/// Window size when it was first allocated
/// </summary>
public uint ActualSize;
/// <summary>
/// Current offset within the window
/// </summary>
public uint WindowPosition;
#region Models
/// <summary>
/// Symbol table for selector model
/// </summary>
public ModelSymbol[] SelectorModelSymbols = new ModelSymbol[7 + 1];
/// <summary>
/// Model for selector values
/// </summary>
public Model SelectorModel;
/// <summary>
/// Model for Selector 0
/// </summary>
public Model Model0;
/// <summary>
/// Model for Selector 1
/// </summary>
public Model Model1;
/// <summary>
/// Model for Selector 2
/// </summary>
public Model Model2;
/// <summary>
/// Model for Selector 3
/// </summary>
public Model Model3;
/// <summary>
/// Model for Selector 4
/// </summary>
public Model Model4;
/// <summary>
/// Model for Selector 5
/// </summary>
public Model Model5;
/// <summary>
/// Model for Selector 6 Position
/// </summary>
public Model Model6Position;
/// <summary>
/// Model for Selector 6 Length
/// </summary>
public Model Model6Length;
#endregion
#region Symbol Tables
/// <summary>
/// Symbol table for Selector 0
/// </summary>
public ModelSymbol[] Model0Symbols = new ModelSymbol[0x40 + 1];
/// <summary>
/// Symbol table for Selector 1
/// </summary>
public ModelSymbol[] Model1Symbols = new ModelSymbol[0x40 + 1];
/// <summary>
/// Symbol table for Selector 2
/// </summary>
public ModelSymbol[] Model2Symbols = new ModelSymbol[0x40 + 1];
/// <summary>
/// Symbol table for Selector 3
/// </summary>
public ModelSymbol[] Model3Symbols = new ModelSymbol[0x40 + 1];
/// <summary>
/// Symbol table for Selector 4
/// </summary>
public ModelSymbol[] Model4Symbols = new ModelSymbol[0x18 + 1];
/// <summary>
/// Symbol table for Selector 5
/// </summary>
public ModelSymbol[] Model5Symbols = new ModelSymbol[0x24 + 1];
/// <summary>
/// Symbol table for Selector 6 Position
/// </summary>
public ModelSymbol[] Model6PositionSymbols = new ModelSymbol[0x2a + 1];
/// <summary>
/// Symbol table for Selector 6 Length
/// </summary>
public ModelSymbol[] Model6LengthSymbols = new ModelSymbol[0x1b + 1];
#endregion
#region Decompression Tables
/// <summary>
/// An index to the position slot bases
/// </summary>
public uint[] PositionSlotBases = new uint[42]
{
0x00000, 0x00001, 0x00002, 0x00003, 0x00004, 0x00006, 0x00008, 0x0000c,
0x00010, 0x00018, 0x00020, 0x00030, 0x00040, 0x00060, 0x00080, 0x000c0,
0x00100, 0x00180, 0x00200, 0x00300, 0x00400, 0x00600, 0x00800, 0x00c00,
0x01000, 0x01800, 0x02000, 0x03000, 0x04000, 0x06000, 0x08000, 0x0c000,
0x10000, 0x18000, 0x20000, 0x30000, 0x40000, 0x60000, 0x80000, 0xc0000,
0x100000, 0x180000
};
/// <summary>
/// How many bits of offset-from-base data are needed
/// </summary>
public byte[] ExtraBits = new byte[42]
{
0, 0, 0, 0, 1, 1, 2, 2,
3, 3, 4, 4, 5, 5, 6, 6,
7, 7, 8, 8, 9, 9, 10, 10,
11, 11, 12, 12, 13, 13, 14, 14,
15, 15, 16, 16, 17, 17, 18, 18,
19, 19
};
/// <summary>
/// Base match lengths [Selector 6]
/// </summary>
public byte[] LengthBases = new byte[27]
{
0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x08,
0x0a, 0x0c, 0x0e, 0x12, 0x16, 0x1a, 0x1e, 0x26,
0x2e, 0x36, 0x3e, 0x4e, 0x5e, 0x6e, 0x7e, 0x9e,
0xbe, 0xde, 0xfe
};
/// <summary>
/// How many bits of length-from-base data are needed [Selector 6]
/// </summary>
public byte[] LengthExtraBits = new byte[27]
{
0, 0, 0, 0, 0, 0, 1, 1,
1, 1, 2, 2, 2, 2, 3, 3,
3, 3, 4, 4, 4, 4, 5, 5,
5, 5, 0
};
#endregion
#region Decompression State
/// <summary>
/// Bit buffer to persist between runs
/// </summary>
public uint BitBuffer = 0;
/// <summary>
/// Bits remaining to persist between runs
/// </summary>
public int BitsLeft = 0;
#endregion
}
}
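
The two tables above work as a pair: a decoded position slot indexes PositionSlotBases for a base offset, and the matching ExtraBits entry says how many verbatim bits to read and add on top. A minimal sketch of that step, with the bit reader passed in as a delegate (an assumption; the real decoder keeps its own bit buffer and position):

using System;

// Sketch: recover a match offset from a decoded position slot. The
// `readBits` delegate stands in for the decoder's bit reader and is
// an assumption, not part of the class above.
static uint DecodeMatchOffset(State state, int positionSlot, Func<int, uint> readBits)
{
    uint baseOffset = state.PositionSlotBases[positionSlot];
    byte extra = state.ExtraBits[positionSlot];
    uint verbatim = extra > 0 ? readBits(extra) : 0;
    return baseOffset + verbatim;
}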

View File

@@ -0,0 +1,304 @@
namespace BurnOutSharp.Compression.bzip2
{
/// <see href="https://github.com/ladislav-zezula/StormLib/blob/master/src/bzip2/bzlib.h"/>
/// <see href="https://github.com/ladislav-zezula/StormLib/blob/master/src/bzip2/bzlib_private.h"/>
/// <see href="https://github.com/ladislav-zezula/StormLib/blob/master/src/bzip2/blocksort.c"/>
/// <see href="https://github.com/ladislav-zezula/StormLib/blob/master/src/bzip2/crctable.c"/>
/// <see href="https://github.com/ladislav-zezula/StormLib/blob/master/src/bzip2/randtable.c"/>
public static class Constants
{
#region bzlib.h
public const int BZ_RUN = 0;
public const int BZ_FLUSH = 1;
public const int BZ_FINISH = 2;
public const int BZ_OK = 0;
public const int BZ_RUN_OK = 1;
public const int BZ_FLUSH_OK = 2;
public const int BZ_FINISH_OK = 3;
public const int BZ_STREAM_END = 4;
public const int BZ_SEQUENCE_ERROR = (-1);
public const int BZ_PARAM_ERROR = (-2);
public const int BZ_MEM_ERROR = (-3);
public const int BZ_DATA_ERROR = (-4);
public const int BZ_DATA_ERROR_MAGIC = (-5);
public const int BZ_IO_ERROR = (-6);
public const int BZ_UNEXPECTED_EOF = (-7);
public const int BZ_OUTBUFF_FULL = (-8);
public const int BZ_CONFIG_ERROR = (-9);
public const int BZ_MAX_UNUSED = 5000;
#endregion
#region bzlib_private.h
internal const string BZ_VERSION = "1.0.5, 10-Dec-2007";
/*-- Header bytes. --*/
internal const byte BZ_HDR_B = 0x42; /* 'B' */
internal const byte BZ_HDR_Z = 0x5a; /* 'Z' */
internal const byte BZ_HDR_h = 0x68; /* 'h' */
internal const byte BZ_HDR_0 = 0x30; /* '0' */
/*-- Constants for the back end. --*/
internal const int BZ_MAX_ALPHA_SIZE = 258;
internal const int BZ_MAX_CODE_LEN = 23;
internal const int BZ_RUNA = 0;
internal const int BZ_RUNB = 1;
internal const int BZ_N_GROUPS = 6;
internal const int BZ_G_SIZE = 50;
internal const int BZ_N_ITERS = 4;
internal const int BZ_MAX_SELECTORS = (2 + (900000 / BZ_G_SIZE));
/*-- States and modes for compression. --*/
internal const int BZ_M_IDLE = 1;
internal const int BZ_M_RUNNING = 2;
internal const int BZ_M_FLUSHING = 3;
internal const int BZ_M_FINISHING = 4;
internal const int BZ_S_OUTPUT = 1;
internal const int BZ_S_INPUT = 2;
internal const int BZ_N_RADIX = 2;
internal const int BZ_N_QSORT = 12;
internal const int BZ_N_SHELL = 18;
internal const int BZ_N_OVERSHOOT = (BZ_N_RADIX + BZ_N_QSORT + BZ_N_SHELL + 2);
/*-- states for decompression. --*/
internal const int BZ_X_IDLE = 1;
internal const int BZ_X_OUTPUT = 2;
internal const int BZ_X_MAGIC_1 = 10;
internal const int BZ_X_MAGIC_2 = 11;
internal const int BZ_X_MAGIC_3 = 12;
internal const int BZ_X_MAGIC_4 = 13;
internal const int BZ_X_BLKHDR_1 = 14;
internal const int BZ_X_BLKHDR_2 = 15;
internal const int BZ_X_BLKHDR_3 = 16;
internal const int BZ_X_BLKHDR_4 = 17;
internal const int BZ_X_BLKHDR_5 = 18;
internal const int BZ_X_BLKHDR_6 = 19;
internal const int BZ_X_BCRC_1 = 20;
internal const int BZ_X_BCRC_2 = 21;
internal const int BZ_X_BCRC_3 = 22;
internal const int BZ_X_BCRC_4 = 23;
internal const int BZ_X_RANDBIT = 24;
internal const int BZ_X_ORIGPTR_1 = 25;
internal const int BZ_X_ORIGPTR_2 = 26;
internal const int BZ_X_ORIGPTR_3 = 27;
internal const int BZ_X_MAPPING_1 = 28;
internal const int BZ_X_MAPPING_2 = 29;
internal const int BZ_X_SELECTOR_1 = 30;
internal const int BZ_X_SELECTOR_2 = 31;
internal const int BZ_X_SELECTOR_3 = 32;
internal const int BZ_X_CODING_1 = 33;
internal const int BZ_X_CODING_2 = 34;
internal const int BZ_X_CODING_3 = 35;
internal const int BZ_X_MTF_1 = 36;
internal const int BZ_X_MTF_2 = 37;
internal const int BZ_X_MTF_3 = 38;
internal const int BZ_X_MTF_4 = 39;
internal const int BZ_X_MTF_5 = 40;
internal const int BZ_X_MTF_6 = 41;
internal const int BZ_X_ENDHDR_2 = 42;
internal const int BZ_X_ENDHDR_3 = 43;
internal const int BZ_X_ENDHDR_4 = 44;
internal const int BZ_X_ENDHDR_5 = 45;
internal const int BZ_X_ENDHDR_6 = 46;
internal const int BZ_X_CCRC_1 = 47;
internal const int BZ_X_CCRC_2 = 48;
internal const int BZ_X_CCRC_3 = 49;
internal const int BZ_X_CCRC_4 = 50;
/*-- Constants for the fast MTF decoder. --*/
internal const int MTFA_SIZE = 4096;
internal const int MTFL_SIZE = 16;
#endregion
#region blocksort.c
internal const int FALLBACK_QSORT_SMALL_THRESH = 10;
internal const int FALLBACK_QSORT_STACK_SIZE = 100;
/*--
Knuth's increments seem to work better
than Incerpi-Sedgewick here. Possibly
because the number of elems to sort is
usually small, typically <= 20.
--*/
internal static readonly int[] incs = new int[14]
{
1, 4, 13, 40, 121, 364, 1093, 3280,
9841, 29524, 88573, 265720, 797161, 2391484
};
/*--
The following is an implementation of
an elegant 3-way quicksort for strings,
described in a paper "Fast Algorithms for
Sorting and Searching Strings", by Robert
Sedgewick and Jon L. Bentley.
--*/
internal const int MAIN_QSORT_SMALL_THRESH = 20;
internal const int MAIN_QSORT_DEPTH_THRESH = (BZ_N_RADIX + BZ_N_QSORT);
internal const int MAIN_QSORT_STACK_SIZE = 100;
internal const uint SETMASK = 1 << 21;
internal const uint CLEARMASK = ~SETMASK;
#endregion
#region crctable.c
/// <summary>
/// Table for doing CRCs
/// </summary>
internal static readonly uint[] BZ2_crc32Table = new uint[256]
{
0x00000000, 0x04c11db7, 0x09823b6e, 0x0d4326d9,
0x130476dc, 0x17c56b6b, 0x1a864db2, 0x1e475005,
0x2608edb8, 0x22c9f00f, 0x2f8ad6d6, 0x2b4bcb61,
0x350c9b64, 0x31cd86d3, 0x3c8ea00a, 0x384fbdbd,
0x4c11db70, 0x48d0c6c7, 0x4593e01e, 0x4152fda9,
0x5f15adac, 0x5bd4b01b, 0x569796c2, 0x52568b75,
0x6a1936c8, 0x6ed82b7f, 0x639b0da6, 0x675a1011,
0x791d4014, 0x7ddc5da3, 0x709f7b7a, 0x745e66cd,
0x9823b6e0, 0x9ce2ab57, 0x91a18d8e, 0x95609039,
0x8b27c03c, 0x8fe6dd8b, 0x82a5fb52, 0x8664e6e5,
0xbe2b5b58, 0xbaea46ef, 0xb7a96036, 0xb3687d81,
0xad2f2d84, 0xa9ee3033, 0xa4ad16ea, 0xa06c0b5d,
0xd4326d90, 0xd0f37027, 0xddb056fe, 0xd9714b49,
0xc7361b4c, 0xc3f706fb, 0xceb42022, 0xca753d95,
0xf23a8028, 0xf6fb9d9f, 0xfbb8bb46, 0xff79a6f1,
0xe13ef6f4, 0xe5ffeb43, 0xe8bccd9a, 0xec7dd02d,
0x34867077, 0x30476dc0, 0x3d044b19, 0x39c556ae,
0x278206ab, 0x23431b1c, 0x2e003dc5, 0x2ac12072,
0x128e9dcf, 0x164f8078, 0x1b0ca6a1, 0x1fcdbb16,
0x018aeb13, 0x054bf6a4, 0x0808d07d, 0x0cc9cdca,
0x7897ab07, 0x7c56b6b0, 0x71159069, 0x75d48dde,
0x6b93dddb, 0x6f52c06c, 0x6211e6b5, 0x66d0fb02,
0x5e9f46bf, 0x5a5e5b08, 0x571d7dd1, 0x53dc6066,
0x4d9b3063, 0x495a2dd4, 0x44190b0d, 0x40d816ba,
0xaca5c697, 0xa864db20, 0xa527fdf9, 0xa1e6e04e,
0xbfa1b04b, 0xbb60adfc, 0xb6238b25, 0xb2e29692,
0x8aad2b2f, 0x8e6c3698, 0x832f1041, 0x87ee0df6,
0x99a95df3, 0x9d684044, 0x902b669d, 0x94ea7b2a,
0xe0b41de7, 0xe4750050, 0xe9362689, 0xedf73b3e,
0xf3b06b3b, 0xf771768c, 0xfa325055, 0xfef34de2,
0xc6bcf05f, 0xc27dede8, 0xcf3ecb31, 0xcbffd686,
0xd5b88683, 0xd1799b34, 0xdc3abded, 0xd8fba05a,
0x690ce0ee, 0x6dcdfd59, 0x608edb80, 0x644fc637,
0x7a089632, 0x7ec98b85, 0x738aad5c, 0x774bb0eb,
0x4f040d56, 0x4bc510e1, 0x46863638, 0x42472b8f,
0x5c007b8a, 0x58c1663d, 0x558240e4, 0x51435d53,
0x251d3b9e, 0x21dc2629, 0x2c9f00f0, 0x285e1d47,
0x36194d42, 0x32d850f5, 0x3f9b762c, 0x3b5a6b9b,
0x0315d626, 0x07d4cb91, 0x0a97ed48, 0x0e56f0ff,
0x1011a0fa, 0x14d0bd4d, 0x19939b94, 0x1d528623,
0xf12f560e, 0xf5ee4bb9, 0xf8ad6d60, 0xfc6c70d7,
0xe22b20d2, 0xe6ea3d65, 0xeba91bbc, 0xef68060b,
0xd727bbb6, 0xd3e6a601, 0xdea580d8, 0xda649d6f,
0xc423cd6a, 0xc0e2d0dd, 0xcda1f604, 0xc960ebb3,
0xbd3e8d7e, 0xb9ff90c9, 0xb4bcb610, 0xb07daba7,
0xae3afba2, 0xaafbe615, 0xa7b8c0cc, 0xa379dd7b,
0x9b3660c6, 0x9ff77d71, 0x92b45ba8, 0x9675461f,
0x8832161a, 0x8cf30bad, 0x81b02d74, 0x857130c3,
0x5d8a9099, 0x594b8d2e, 0x5408abf7, 0x50c9b640,
0x4e8ee645, 0x4a4ffbf2, 0x470cdd2b, 0x43cdc09c,
0x7b827d21, 0x7f436096, 0x7200464f, 0x76c15bf8,
0x68860bfd, 0x6c47164a, 0x61043093, 0x65c52d24,
0x119b4be9, 0x155a565e, 0x18197087, 0x1cd86d30,
0x029f3d35, 0x065e2082, 0x0b1d065b, 0x0fdc1bec,
0x3793a651, 0x3352bbe6, 0x3e119d3f, 0x3ad08088,
0x2497d08d, 0x2056cd3a, 0x2d15ebe3, 0x29d4f654,
0xc5a92679, 0xc1683bce, 0xcc2b1d17, 0xc8ea00a0,
0xd6ad50a5, 0xd26c4d12, 0xdf2f6bcb, 0xdbee767c,
0xe3a1cbc1, 0xe760d676, 0xea23f0af, 0xeee2ed18,
0xf0a5bd1d, 0xf464a0aa, 0xf9278673, 0xfde69bc4,
0x89b8fd09, 0x8d79e0be, 0x803ac667, 0x84fbdbd0,
0x9abc8bd5, 0x9e7d9662, 0x933eb0bb, 0x97ffad0c,
0xafb010b1, 0xab710d06, 0xa6322bdf, 0xa2f33668,
0xbcb4666d, 0xb8757bda, 0xb5365d03, 0xb1f740b4
};
#endregion
#region randtable.c
/// <summary>
/// Table for randomising repetitive blocks
/// </summary>
internal static readonly int[] BZ2_rNums = new int[512]
{
619, 720, 127, 481, 931, 816, 813, 233, 566, 247,
985, 724, 205, 454, 863, 491, 741, 242, 949, 214,
733, 859, 335, 708, 621, 574, 73, 654, 730, 472,
419, 436, 278, 496, 867, 210, 399, 680, 480, 51,
878, 465, 811, 169, 869, 675, 611, 697, 867, 561,
862, 687, 507, 283, 482, 129, 807, 591, 733, 623,
150, 238, 59, 379, 684, 877, 625, 169, 643, 105,
170, 607, 520, 932, 727, 476, 693, 425, 174, 647,
73, 122, 335, 530, 442, 853, 695, 249, 445, 515,
909, 545, 703, 919, 874, 474, 882, 500, 594, 612,
641, 801, 220, 162, 819, 984, 589, 513, 495, 799,
161, 604, 958, 533, 221, 400, 386, 867, 600, 782,
382, 596, 414, 171, 516, 375, 682, 485, 911, 276,
98, 553, 163, 354, 666, 933, 424, 341, 533, 870,
227, 730, 475, 186, 263, 647, 537, 686, 600, 224,
469, 68, 770, 919, 190, 373, 294, 822, 808, 206,
184, 943, 795, 384, 383, 461, 404, 758, 839, 887,
715, 67, 618, 276, 204, 918, 873, 777, 604, 560,
951, 160, 578, 722, 79, 804, 96, 409, 713, 940,
652, 934, 970, 447, 318, 353, 859, 672, 112, 785,
645, 863, 803, 350, 139, 93, 354, 99, 820, 908,
609, 772, 154, 274, 580, 184, 79, 626, 630, 742,
653, 282, 762, 623, 680, 81, 927, 626, 789, 125,
411, 521, 938, 300, 821, 78, 343, 175, 128, 250,
170, 774, 972, 275, 999, 639, 495, 78, 352, 126,
857, 956, 358, 619, 580, 124, 737, 594, 701, 612,
669, 112, 134, 694, 363, 992, 809, 743, 168, 974,
944, 375, 748, 52, 600, 747, 642, 182, 862, 81,
344, 805, 988, 739, 511, 655, 814, 334, 249, 515,
897, 955, 664, 981, 649, 113, 974, 459, 893, 228,
433, 837, 553, 268, 926, 240, 102, 654, 459, 51,
686, 754, 806, 760, 493, 403, 415, 394, 687, 700,
946, 670, 656, 610, 738, 392, 760, 799, 887, 653,
978, 321, 576, 617, 626, 502, 894, 679, 243, 440,
680, 879, 194, 572, 640, 724, 926, 56, 204, 700,
707, 151, 457, 449, 797, 195, 791, 558, 945, 679,
297, 59, 87, 824, 713, 663, 412, 693, 342, 606,
134, 108, 571, 364, 631, 212, 174, 643, 304, 329,
343, 97, 430, 751, 497, 314, 983, 374, 822, 928,
140, 206, 73, 263, 980, 736, 876, 478, 430, 305,
170, 514, 364, 692, 829, 82, 855, 953, 676, 246,
369, 970, 294, 750, 807, 827, 150, 790, 288, 923,
804, 378, 215, 828, 592, 281, 565, 555, 710, 82,
896, 831, 547, 261, 524, 462, 293, 465, 502, 56,
661, 821, 976, 991, 658, 869, 905, 758, 745, 193,
768, 550, 608, 933, 378, 286, 215, 979, 792, 961,
61, 688, 793, 644, 986, 403, 106, 366, 905, 644,
372, 567, 466, 434, 645, 210, 389, 550, 919, 135,
780, 773, 635, 389, 707, 100, 626, 958, 165, 504,
920, 176, 193, 713, 857, 265, 203, 50, 668, 108,
645, 990, 626, 197, 510, 357, 358, 850, 858, 364,
936, 638
};
#endregion
}
}

View File

@@ -0,0 +1,100 @@
using static BurnOutSharp.Compression.bzip2.Constants;
namespace BurnOutSharp.Compression.bzip2
{
/// <summary>
/// Structure holding all the decompression-side stuff.
/// </summary>
/// <see href="https://github.com/ladislav-zezula/StormLib/blob/master/src/bzip2/bzlib_private.h"/>
internal unsafe class DState
{
/* pointer back to the struct bz_stream */
public bz_stream strm;
/* state indicator for this stream */
public int state;
/* for doing the final run-length decoding */
public byte state_out_ch;
public int state_out_len;
public bool blockRandomised;
public int rNToGo;
public int rTPos;
/* the buffer for bit stream reading */
public uint bsBuff;
public int bsLive;
/* misc administratium */
public int blockSize100k;
public bool smallDecompress;
public int currBlockNo;
public int verbosity;
/* for undoing the Burrows-Wheeler transform */
public int origPtr;
public uint tPos;
public int k0;
public int[] unzftab = new int[256];
public int nblock_used;
public int[] cftab = new int[257];
public int[] cftabCopy = new int[257];
/* for undoing the Burrows-Wheeler transform (FAST) */
public uint* tt;
/* for undoing the Burrows-Wheeler transform (SMALL) */
public ushort* ll16;
public byte* ll4;
/* stored and calculated CRCs */
public uint storedBlockCRC;
public uint storedCombinedCRC;
public uint calculatedBlockCRC;
public uint calculatedCombinedCRC;
/* map of bytes used in block */
public int nInUse;
public bool[] inUse = new bool[256];
public bool[] inUse16 = new bool[16];
public byte[] seqToUnseq = new byte[256];
/* for decoding the MTF values */
public byte[] mtfa = new byte[MTFA_SIZE];
public int[] mtfbase = new int[256 / MTFL_SIZE];
public byte[] selector = new byte[BZ_MAX_SELECTORS];
public byte[] selectorMtf = new byte[BZ_MAX_SELECTORS];
public byte[,] len = new byte[BZ_N_GROUPS, BZ_MAX_ALPHA_SIZE];
public int[,] limit = new int[BZ_N_GROUPS, BZ_MAX_ALPHA_SIZE];
public int[,] @base = new int[BZ_N_GROUPS, BZ_MAX_ALPHA_SIZE];
public int[,] perm = new int[BZ_N_GROUPS, BZ_MAX_ALPHA_SIZE];
public int[] minLens = new int[BZ_N_GROUPS];
/* save area for scalars in the main decompress code */
public int save_i;
public int save_j;
public int save_t;
public int save_alphaSize;
public int save_nGroups;
public int save_nSelectors;
public int save_EOB;
public int save_groupNo;
public int save_groupPos;
public int save_nextSym;
public int save_nblockMAX;
public int save_nblock;
public int save_es;
public int save_N;
public int save_curr;
public int save_zt;
public int save_zn;
public int save_zvec;
public int save_zj;
public int save_gSel;
public int save_gMinlen;
public int* save_gLimit;
public int* save_gBase;
public int* save_gPerm;
}
}

View File

@@ -0,0 +1,80 @@
using static BurnOutSharp.Compression.bzip2.Constants;
namespace BurnOutSharp.Compression.bzip2
{
/// <summary>
/// Structure holding all the compression-side stuff.
/// </summary>
/// <see href="https://github.com/ladislav-zezula/StormLib/blob/master/src/bzip2/bzlib_private.h"/>
internal unsafe class EState
{
/* pointer back to the struct bz_stream */
public bz_stream* strm;
/* mode this stream is in, and whether inputting */
/* or outputting data */
public int mode;
public int state;
/* remembers avail_in when flush/finish requested */
public uint avail_in_expect;
/* for doing the block sorting */
public uint* arr1;
public uint* arr2;
public uint* ftab;
public int origPtr;
/* aliases for arr1 and arr2 */
public uint* ptr;
public byte* block;
public ushort* mtfv;
public byte* zbits;
/* for deciding when to use the fallback sorting algorithm */
public int workFactor;
/* run-length-encoding of the input */
public uint state_in_ch;
public int state_in_len;
public int rNToGo;
public int rTPos;
/* input and output limits and current posns */
public int nblock;
public int nblockMAX;
public int numZ;
public int state_out_pos;
/* map of bytes used in block */
public int nInUse;
public bool[] inUse = new bool[256];
public byte[] unseqToSeq = new byte[256];
/* the buffer for bit stream creation */
public uint bsBuff;
public int bsLive;
/* block and combined CRCs */
public uint blockCRC;
public uint combinedCRC;
/* misc administratium */
public int verbosity;
public int blockNo;
public int blockSize100k;
/* stuff for coding the MTF values */
public int nMTF;
public int[] mtfFreq = new int[BZ_MAX_ALPHA_SIZE];
public byte[] selector = new byte[BZ_MAX_SELECTORS];
public byte[] selectorMtf = new byte[BZ_MAX_SELECTORS];
public byte[,] len = new byte[BZ_N_GROUPS, BZ_MAX_ALPHA_SIZE];
public int[,] code = new int[BZ_N_GROUPS, BZ_MAX_ALPHA_SIZE];
public int[,] rfreq = new int[BZ_N_GROUPS, BZ_MAX_ALPHA_SIZE];
/* second dimension: only 3 needed; 4 makes index calculations faster */
public uint[,] len_pack = new uint[BZ_MAX_ALPHA_SIZE, 4];
}
}

View File

@@ -0,0 +1,217 @@
using static BurnOutSharp.Compression.bzip2.Constants;
namespace BurnOutSharp.Compression.bzip2
{
/// <summary>
/// Huffman coding low-level stuff
/// </summary>
/// <see href="https://github.com/ladislav-zezula/StormLib/blob/master/src/bzip2/huffman.c"/>
internal static unsafe class Huffman
{
public static void BZ2_hbMakeCodeLengths(byte* len, int* freq, int alphaSize, int maxLen)
{
/*--
Nodes and heap entries run from 1. Entry 0
for both the heap and nodes is a sentinel.
--*/
int nNodes, nHeap, n1, n2, i, j, k;
bool tooLong;
int[] heap = new int[BZ_MAX_ALPHA_SIZE + 2];
int[] weight = new int[BZ_MAX_ALPHA_SIZE * 2];
int[] parent = new int[BZ_MAX_ALPHA_SIZE * 2];
for (i = 0; i < alphaSize; i++)
{
weight[i + 1] = (freq[i] == 0 ? 1 : freq[i]) << 8;
}
while (true)
{
nNodes = alphaSize;
nHeap = 0;
heap[0] = 0;
weight[0] = 0;
parent[0] = -2;
for (i = 1; i <= alphaSize; i++)
{
parent[i] = -1;
nHeap++;
heap[nHeap] = i;
UPHEAP(nHeap, heap, weight);
}
//AssertH(nHeap < (BZ_MAX_ALPHA_SIZE + 2), 2001);
while (nHeap > 1)
{
n1 = heap[1]; heap[1] = heap[nHeap]; nHeap--; DOWNHEAP(1, nHeap, heap, weight);
n2 = heap[1]; heap[1] = heap[nHeap]; nHeap--; DOWNHEAP(1, nHeap, heap, weight);
nNodes++;
parent[n1] = parent[n2] = nNodes;
weight[nNodes] = ADDWEIGHTS(weight[n1], weight[n2]);
parent[nNodes] = -1;
nHeap++;
heap[nHeap] = nNodes;
UPHEAP(nHeap, heap, weight);
}
//AssertH(nNodes < (BZ_MAX_ALPHA_SIZE * 2), 2002);
tooLong = false;
for (i = 1; i <= alphaSize; i++)
{
j = 0;
k = i;
while (parent[k] >= 0) { k = parent[k]; j++; }
len[i - 1] = (byte)j;
if (j > maxLen) tooLong = true;
}
if (!tooLong) break;
/* 17 Oct 04: keep-going condition for the following loop used
to be 'i < alphaSize', which missed the last element,
theoretically leading to the possibility of the compressor
looping. However, this count-scaling step is only needed if
one of the generated Huffman code words is longer than
maxLen, which up to and including version 1.0.2 was 20 bits,
which is extremely unlikely. In version 1.0.3 maxLen was
changed to 17 bits, which has minimal effect on compression
ratio, but does mean this scaling step is used from time to
time, enough to verify that it works.
This means that bzip2-1.0.3 and later will only produce
Huffman codes with a maximum length of 17 bits. However, in
order to preserve backwards compatibility with bitstreams
produced by versions pre-1.0.3, the decompressor must still
handle lengths of up to 20. */
for (i = 1; i <= alphaSize; i++)
{
j = weight[i] >> 8;
j = 1 + (j / 2);
weight[i] = j << 8;
}
}
}
public static void BZ2_hbAssignCodes(int* code, byte* length, int minLen, int maxLen, int alphaSize)
{
int n, vec, i;
vec = 0;
for (n = minLen; n <= maxLen; n++)
{
for (i = 0; i < alphaSize; i++)
{
if (length[i] == n)
{
code[i] = vec;
vec++;
}
};
vec <<= 1;
}
}
public static void BZ2_hbCreateDecodeTables(int* limit, int* @base, int* perm, byte* length, int minLen, int maxLen, int alphaSize)
{
int pp, i, j, vec;
pp = 0;
for (i = minLen; i <= maxLen; i++)
{
for (j = 0; j < alphaSize; j++)
{
if (length[j] == i) { perm[pp] = j; pp++; }
}
};
for (i = 0; i < BZ_MAX_CODE_LEN; i++)
{
@base[i] = 0;
}
for (i = 0; i < alphaSize; i++)
{
@base[length[i] + 1]++;
}
for (i = 1; i < BZ_MAX_CODE_LEN; i++)
{
@base[i] += @base[i - 1];
}
for (i = 0; i < BZ_MAX_CODE_LEN; i++)
{
limit[i] = 0;
}
vec = 0;
for (i = minLen; i <= maxLen; i++)
{
vec += (@base[i + 1] - @base[i]);
limit[i] = vec - 1;
vec <<= 1;
}
for (i = minLen + 1; i <= maxLen; i++)
{
@base[i] = ((limit[i - 1] + 1) << 1) - @base[i];
}
}
#region Macros
private static int WEIGHTOF(int zz0) => (int)(zz0 & 0xffffff00);
private static int DEPTHOF(int zz1) => zz1 & 0x000000ff;
private static int MYMAX(int zz2, int zz3) => zz2 > zz3 ? zz2 : zz3;
private static int ADDWEIGHTS(int zw1, int zw2) => (WEIGHTOF(zw1) + WEIGHTOF(zw2)) | (1 + MYMAX(DEPTHOF(zw1), DEPTHOF(zw2)));
private static void UPHEAP(int z, int[] heap, int[] weight)
{
int zz, tmp;
zz = z; tmp = heap[zz];
while (weight[tmp] < weight[heap[zz >> 1]])
{
heap[zz] = heap[zz >> 1];
zz >>= 1;
}
heap[zz] = tmp;
}
private static void DOWNHEAP(int z, int nHeap, int[] heap, int[] weight)
{
int zz, yy, tmp;
zz = z; tmp = heap[zz];
while (true)
{
yy = zz << 1;
if (yy > nHeap)
break;
if (yy < nHeap && weight[heap[yy + 1]] < weight[heap[yy]])
yy++;
if (weight[tmp] < weight[heap[yy]])
break;
heap[zz] = heap[yy];
zz = yy;
}
heap[zz] = tmp;
}
#endregion
}
}

File diff suppressed because it is too large

View File

@@ -0,0 +1,22 @@
namespace BurnOutSharp.Compression.bzip2
{
/// <see href="https://github.com/ladislav-zezula/StormLib/blob/master/src/bzip2/bzlib.h"/>
public unsafe struct bz_stream
{
public char* next_in;
public uint avail_in;
public uint total_in_lo32;
public uint total_in_hi32;
public char* next_out;
public uint avail_out;
public uint total_out_lo32;
public uint total_out_hi32;
public void* state;
// void *(*bzalloc)(void *,int,int);
// void (*bzfree)(void *,void *);
// void *opaque;
}
}

File diff suppressed because it is too large

View File

@@ -1,16 +1,17 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFrameworks>netstandard2.0;net6.0</TargetFrameworks>
<TargetFrameworks>net48;net6.0</TargetFrameworks>
<RuntimeIdentifiers>win-x86;win-x64;linux-x64;osx-x64</RuntimeIdentifiers>
<Title>BurnOutSharp.Matching</Title>
<AssemblyName>BurnOutSharp.Matching</AssemblyName>
<Authors>Matt Nadareski</Authors>
<Product>BurnOutSharp</Product>
<Copyright>Copyright (c)2018-2022 Matt Nadareski</Copyright>
<RepositoryUrl>https://github.com/mnadareski/BurnOutSharp</RepositoryUrl>
<Version>2.5</Version>
<AssemblyVersion>2.5</AssemblyVersion>
<FileVersion>2.5</FileVersion>
<Version>2.6</Version>
<AssemblyVersion>2.6</AssemblyVersion>
<FileVersion>2.6</FileVersion>
<IncludeSource>true</IncludeSource>
<IncludeSymbols>true</IncludeSymbols>
</PropertyGroup>

View File

@@ -0,0 +1,96 @@
using System.Collections.Generic;
using System.Linq;
namespace BurnOutSharp.Matching
{
public static class Extensions
{
/// <summary>
/// Find all positions of one array in another, if possible
/// </summary>
public static List<int> FindAllPositions(this byte[] stack, byte?[] needle, int start = 0, int end = -1)
{
// Get the outgoing list
List<int> positions = new List<int>();
// Initialize the loop variables
bool found = true;
int lastPosition = start;
var matcher = new ContentMatch(needle, end: end);
// Loop over and get all positions
while (found)
{
matcher.Start = lastPosition;
(found, lastPosition) = matcher.Match(stack, false);
if (found)
positions.Add(lastPosition);
}
return positions;
}
/// <summary>
/// Find the first position of one array in another, if possible
/// </summary>
public static bool FirstPosition(this byte[] stack, byte[] needle, out int position, int start = 0, int end = -1)
{
byte?[] nullableNeedle = needle != null ? needle.Select(b => (byte?)b).ToArray() : null;
return stack.FirstPosition(nullableNeedle, out position, start, end);
}
/// <summary>
/// Find the first position of one array in another, if possible
/// </summary>
public static bool FirstPosition(this byte[] stack, byte?[] needle, out int position, int start = 0, int end = -1)
{
var matcher = new ContentMatch(needle, start, end);
(bool found, int foundPosition) = matcher.Match(stack, false);
position = foundPosition;
return found;
}
/// <summary>
/// Find the last position of one array in another, if possible
/// </summary>
public static bool LastPosition(this byte[] stack, byte?[] needle, out int position, int start = 0, int end = -1)
{
var matcher = new ContentMatch(needle, start, end);
(bool found, int foundPosition) = matcher.Match(stack, true);
position = foundPosition;
return found;
}
/// <summary>
/// See if a byte array starts with another
/// </summary>
public static bool StartsWith(this byte[] stack, byte[] needle)
{
return stack.FirstPosition(needle, out int _, start: 0, end: 1);
}
/// <summary>
/// See if a byte array starts with another
/// </summary>
public static bool StartsWith(this byte[] stack, byte?[] needle)
{
return stack.FirstPosition(needle, out int _, start: 0, end: 1);
}
/// <summary>
/// See if a byte array ends with another
/// </summary>
public static bool EndsWith(this byte[] stack, byte[] needle)
{
return stack.FirstPosition(needle, out int _, start: stack.Length - needle.Length);
}
/// <summary>
/// See if a byte array ends with another
/// </summary>
public static bool EndsWith(this byte[] stack, byte?[] needle)
{
return stack.FirstPosition(needle, out int _, start: stack.Length - needle.Length);
}
}
}
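
A short usage sketch for the helpers above. The bytes are made up for illustration, and treating null needle entries as wildcards is an assumption about ContentMatch:

using BurnOutSharp.Matching;

byte[] stack = new byte[] { 0x42, 0x46, 0x50, 0x4b, 0x00, 0x42, 0x46 };
byte?[] needle = new byte?[] { 0x42, 0x46 };

bool found = stack.FirstPosition(needle, out int position); // match at offset 0
var all = stack.FindAllPositions(needle);                   // expect offsets 0 and 5
bool prefixed = stack.StartsWith(needle);                   // true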

View File

@@ -0,0 +1,19 @@
namespace BurnOutSharp.Models.BFPK
{
/// <summary>
/// BFPK custom archive format
/// </summary>
/// <see href="https://forum.xentax.com/viewtopic.php?t=5102"/>
public sealed class Archive
{
/// <summary>
/// Header
/// </summary>
public Header Header { get; set; }
/// <summary>
/// Files
/// </summary>
public FileEntry[] Files { get; set; }
}
}

View File

@@ -0,0 +1,11 @@
namespace BurnOutSharp.Models.BFPK
{
public static class Constants
{
public static readonly byte[] SignatureBytes = new byte[] { 0x42, 0x46, 0x50, 0x4b };
public const string SignatureString = "BFPK";
public const uint SignatureUInt32 = 0x4b504642;
}
}

View File

@@ -0,0 +1,34 @@
namespace BurnOutSharp.Models.BFPK
{
/// <summary>
/// File entry
/// </summary>
/// <see href="https://forum.xentax.com/viewtopic.php?t=5102"/>
public sealed class FileEntry
{
/// <summary>
/// Name size
/// </summary>
public int NameSize;
/// <summary>
/// Name
/// </summary>
public string Name;
/// <summary>
/// Uncompressed size
/// </summary>
public int UncompressedSize;
/// <summary>
/// Offset
/// </summary>
public int Offset;
/// <summary>
/// Compressed size
/// </summary>
public int CompressedSize;
}
}

View File

@@ -0,0 +1,27 @@
using System.Runtime.InteropServices;
namespace BurnOutSharp.Models.BFPK
{
/// <summary>
/// Header
/// </summary>
/// <see href="https://forum.xentax.com/viewtopic.php?t=5102"/>
[StructLayout(LayoutKind.Sequential)]
public sealed class Header
{
/// <summary>
/// "BFPK"
/// </summary>
public string Magic;
/// <summary>
/// Version
/// </summary>
public int Version;
/// <summary>
/// Files
/// </summary>
public int Files;
}
}
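
A hedged sketch of reading the three header fields in declaration order, assuming a 4-byte ASCII magic followed by two little-endian Int32 values:

using System.IO;
using System.Text;

// Sketch only: parse the BFPK header fields in the order declared above.
static Header ReadHeader(Stream stream)
{
    using (var reader = new BinaryReader(stream, Encoding.ASCII, leaveOpen: true))
    {
        return new Header
        {
            Magic = Encoding.ASCII.GetString(reader.ReadBytes(4)), // expected "BFPK"
            Version = reader.ReadInt32(),
            Files = reader.ReadInt32(),
        };
    }
}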

View File

@@ -0,0 +1,35 @@
namespace BurnOutSharp.Models.BMP
{
/// <summary>
/// The BITMAPFILEHEADER structure contains information about the type, size,
/// and layout of a file that contains a DIB.
/// </summary>
/// <see href="https://learn.microsoft.com/en-us/windows/win32/api/wingdi/ns-wingdi-bitmapfileheader"/>
public sealed class BITMAPFILEHEADER
{
/// <summary>
/// The file type; must be BM.
/// </summary>
public ushort Type;
/// <summary>
/// The size, in bytes, of the bitmap file.
/// </summary>
public uint Size;
/// <summary>
/// Reserved; must be zero.
/// </summary>
public ushort Reserved1;
/// <summary>
/// Reserved; must be zero.
/// </summary>
public ushort Reserved2;
/// <summary>
/// The offset, in bytes, from the beginning of the BITMAPFILEHEADER structure to the bitmap bits.
/// </summary>
public uint OffBits;
}
}
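
Since Type must be "BM", a quick validity check falls out directly; with a little-endian reader, 'B' (0x42) lands in the low byte, so the field compares against 0x4D42:

// Sketch: the documented "BM" check.
static bool IsBitmap(BITMAPFILEHEADER header) => header.Type == 0x4D42;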

View File

@@ -0,0 +1,94 @@
namespace BurnOutSharp.Models.BMP
{
/// <summary>
/// The BITMAPINFOHEADER structure contains information about the dimensions and
/// color format of a device-independent bitmap (DIB).
/// </summary>
public sealed class BITMAPINFOHEADER
{
/// <summary>
/// Specifies the number of bytes required by the structure. This value does
/// not include the size of the color table or the size of the color masks,
/// if they are appended to the end of structure.
/// </summary>
public uint Size;
/// <summary>
/// Specifies the width of the bitmap, in pixels.
/// </summary>
public int Width;
/// <summary>
/// Specifies the height of the bitmap, in pixels.
/// - For uncompressed RGB bitmaps, if biHeight is positive, the bitmap is a
/// bottom-up DIB with the origin at the lower left corner. If biHeight is
/// negative, the bitmap is a top-down DIB with the origin at the upper left
/// corner.
/// - For YUV bitmaps, the bitmap is always top-down, regardless of the sign of
/// biHeight. Decoders should offer YUV formats with positive biHeight, but for
/// backward compatibility they should accept YUV formats with either positive
/// or negative biHeight.
/// - For compressed formats, biHeight must be positive, regardless of image orientation.
/// </summary>
public int Height;
/// <summary>
/// Specifies the number of planes for the target device. This value must be set to 1.
/// </summary>
public ushort Planes;
/// <summary>
/// Specifies the number of bits per pixel (bpp). For uncompressed formats, this value
/// is the average number of bits per pixel. For compressed formats, this value is the
/// implied bit depth of the uncompressed image, after the image has been decoded.
/// </summary>
public ushort BitCount;
/// <summary>
/// For compressed video and YUV formats, this member is a FOURCC code, specified as a
/// DWORD in little-endian order. For example, YUYV video has the FOURCC 'VYUY' or
/// 0x56595559. For more information, see FOURCC Codes.
///
/// For uncompressed RGB formats, the following values are possible:
/// - BI_RGB: Uncompressed RGB.
/// - BI_BITFIELDS: Uncompressed RGB with color masks. Valid for 16-bpp and 32-bpp bitmaps.
///
/// Note that BI_JPG and BI_PNG are not valid video formats.
///
/// For 16-bpp bitmaps, if biCompression equals BI_RGB, the format is always RGB 555.
/// If biCompression equals BI_BITFIELDS, the format is either RGB 555 or RGB 565. Use
/// the subtype GUID in the AM_MEDIA_TYPE structure to determine the specific RGB type.
/// </summary>
public uint Compression;
/// <summary>
/// Specifies the size, in bytes, of the image. This can be set to 0 for uncompressed
/// RGB bitmaps.
/// </summary>
public uint SizeImage;
/// <summary>
/// Specifies the horizontal resolution, in pixels per meter, of the target device for
/// the bitmap.
/// </summary>
public int XPelsPerMeter;
/// <summary>
/// Specifies the vertical resolution, in pixels per meter, of the target device for
/// the bitmap.
/// </summary>
public int YPelsPerMeter;
/// <summary>
/// Specifies the number of color indices in the color table that are actually used by
/// the bitmap.
/// </summary>
public uint ClrUsed;
/// <summary>
/// Specifies the number of color indices that are considered important for displaying
/// the bitmap. If this value is zero, all colors are important.
/// </summary>
public uint ClrImportant;
}
}

View File

@@ -0,0 +1,25 @@
namespace BurnOutSharp.Models.BSP
{
public static class Constants
{
/// <summary>
/// Number of lumps in a BSP
/// </summary>
public const int HL_BSP_LUMP_COUNT = 15;
/// <summary>
/// Index for the entities lump
/// </summary>
public const int HL_BSP_LUMP_ENTITIES = 0;
/// <summary>
/// Index for the texture data lump
/// </summary>
public const int HL_BSP_LUMP_TEXTUREDATA = 2;
/// <summary>
/// Number of valid mipmap levels
/// </summary>
public const int HL_BSP_MIPMAP_COUNT = 4;
}
}

View File

@@ -0,0 +1,29 @@
namespace BurnOutSharp.Models.BSP
{
/// <summary>
/// Half-Life Level
/// </summary>
/// <see href="https://github.com/RavuAlHemio/hllib/blob/master/HLLib/BSPFile.h"/>
public sealed class File
{
/// <summary>
/// Header data
/// </summary>
public Header Header { get; set; }
/// <summary>
/// Lumps
/// </summary>
public Lump[] Lumps { get; set; }
/// <summary>
/// Texture header data
/// </summary>
public TextureHeader TextureHeader { get; set; }
/// <summary>
/// Textures
/// </summary>
public Texture[] Textures { get; set; }
}
}

View File

@@ -0,0 +1,11 @@
namespace BurnOutSharp.Models.BSP
{
/// <see href="https://github.com/RavuAlHemio/hllib/blob/master/HLLib/BSPFile.h"/>
public sealed class Header
{
/// <summary>
/// Version
/// </summary>
public uint Version;
}
}

View File

@@ -0,0 +1,16 @@
namespace BurnOutSharp.Models.BSP
{
/// <see href="https://github.com/RavuAlHemio/hllib/blob/master/HLLib/BSPFile.h"/>
public sealed class Lump
{
/// <summary>
/// Offset
/// </summary>
public uint Offset;
/// <summary>
/// Length
/// </summary>
public uint Length;
}
}

View File

@@ -0,0 +1,41 @@
namespace BurnOutSharp.Models.BSP
{
/// <see href="https://github.com/RavuAlHemio/hllib/blob/master/HLLib/BSPFile.h"/>
public sealed class Texture
{
/// <summary>
/// Name
/// </summary>
public string Name;
/// <summary>
/// Width
/// </summary>
public uint Width;
/// <summary>
/// Height
/// </summary>
public uint Height;
/// <summary>
/// Offsets
/// </summary>
public uint[] Offsets;
/// <summary>
/// Texture data
/// </summary>
public byte[] TextureData;
/// <summary>
/// Palette size
/// </summary>
public uint PaletteSize;
/// <summary>
/// Palette data
/// </summary>
public byte[] PaletteData;
}
}

View File

@@ -0,0 +1,16 @@
namespace BurnOutSharp.Models.BSP
{
/// <see href="https://github.com/RavuAlHemio/hllib/blob/master/HLLib/BSPFile.h"/>
public sealed class TextureHeader
{
/// <summary>
/// Texture count
/// </summary>
public uint TextureCount;
/// <summary>
/// Offsets
/// </summary>
public uint[] Offsets;
}
}

View File

@@ -1,16 +1,17 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFrameworks>netstandard2.0;net6.0</TargetFrameworks>
<TargetFrameworks>net48;net6.0</TargetFrameworks>
<RuntimeIdentifiers>win-x86;win-x64;linux-x64;osx-x64</RuntimeIdentifiers>
<Title>BurnOutSharp.Models</Title>
<AssemblyName>BurnOutSharp.Models</AssemblyName>
<Authors>Matt Nadareski</Authors>
<Product>BurnOutSharp</Product>
<Copyright>Copyright (c)2022 Matt Nadareski</Copyright>
<RepositoryUrl>https://github.com/mnadareski/BurnOutSharp</RepositoryUrl>
<Version>2.5</Version>
<AssemblyVersion>2.5</AssemblyVersion>
<FileVersion>2.5</FileVersion>
<Version>2.6</Version>
<AssemblyVersion>2.6</AssemblyVersion>
<FileVersion>2.6</FileVersion>
<IncludeSource>true</IncludeSource>
<IncludeSymbols>true</IncludeSymbols>
</PropertyGroup>

View File

@@ -0,0 +1,23 @@
namespace BurnOutSharp.Models.Compression.LZ
{
public static class Constants
{
public const int GETLEN = 2048;
public const int LZ_MAGIC_LEN = 8;
public const int LZ_HEADER_LEN = 14;
public static readonly byte[] MagicBytes = new byte[] { 0x53, 0x5a, 0x44, 0x44, 0x88, 0xf0, 0x27, 0x33 };
public static readonly string MagicString = System.Text.Encoding.ASCII.GetString(MagicBytes);
public const ulong MagicUInt64 = 0x3327f08844445a53;
public const int LZ_TABLE_SIZE = 0x1000;
public const int MAX_LZSTATES = 16;
public const int LZ_MIN_HANDLE = 0x400;
}
}
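
A small sketch that tests a buffer for the 8-byte SZDD magic using the constants above:

// Sketch: check for the LZ (SZDD) magic at the start of a buffer.
static bool HasLzMagic(byte[] data)
{
    if (data == null || data.Length < LZ_MAGIC_LEN)
        return false;

    for (int i = 0; i < LZ_MAGIC_LEN; i++)
    {
        if (data[i] != MagicBytes[i])
            return false;
    }

    return true;
}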

View File

@@ -0,0 +1,17 @@
namespace BurnOutSharp.Models.Compression.LZ
{
/// <see href="https://github.com/wine-mirror/wine/blob/master/include/lzexpand.h"/>
public enum LZERROR
{
LZERROR_OK = 1,
LZERROR_NOT_LZ = 0,
LZERROR_BADINHANDLE = -1,
LZERROR_BADOUTHANDLE = -2,
LZERROR_READ = -3,
LZERROR_WRITE = -4,
LZERROR_GLOBALLOC = -5,
LZERROR_GLOBLOCK = -6,
LZERROR_BADVALUE = -7,
LZERROR_UNKNOWNALG = -8,
}
}

View File

@@ -0,0 +1,17 @@
namespace BurnOutSharp.Models.Compression.LZ
{
/// <summary>
/// Format of the first 14 bytes of an LZ-compressed file
/// </summary>
/// <see href="https://github.com/wine-mirror/wine/blob/master/dlls/kernel32/lzexpand.c"/>
public sealed class FileHeader
{
public string Magic;
public byte CompressionType;
public char LastChar;
public uint RealLength;
}
}

View File

@@ -0,0 +1,72 @@
using System.IO;
namespace BurnOutSharp.Models.Compression.LZ
{
public sealed class State
{
/// <summary>
/// Internal backing stream
/// </summary>
public Stream Source { get; set; }
/// <summary>
/// The last char of the filename for replacement
/// </summary>
public char LastChar { get; set; }
/// <summary>
/// Decompressed length of the file
/// </summary>
public uint RealLength { get; set; }
/// <summary>
/// Position the decompressor currently is
/// </summary>
public uint RealCurrent { get; set; }
/// <summary>
/// Position the user wants to read from
/// </summary>
public uint RealWanted { get; set; }
/// <summary>
/// The rotating LZ table
/// </summary>
public byte[] Table { get; set; }
/// <summary>
/// CURrent TABle ENTry
/// </summary>
public uint CurrentTableEntry { get; set; }
/// <summary>
/// Length and position of current string
/// </summary>
public byte StringLength { get; set; }
/// <summary>
/// From stringtable
/// </summary>
public uint StringPosition { get; set; }
/// <summary>
/// Bitmask within blocks
/// </summary>
public ushort ByteType { get; set; }
/// <summary>
/// GETLEN bytes
/// </summary>
public byte[] Window { get; set; }
/// <summary>
/// Current read
/// </summary>
public uint WindowCurrent { get; set; }
/// <summary>
/// Length last got
/// </summary>
public uint WindowLength { get; set; }
}
}

View File

@@ -0,0 +1,61 @@
namespace BurnOutSharp.Models.Compression.LZX
{
/// <summary>
/// An aligned offset block is identical to the verbatim block except for the presence of the aligned offset
/// tree preceding the other trees.
/// </summary>
/// <see href="https://interoperability.blob.core.windows.net/files/MS-PATCH/%5bMS-PATCH%5d.pdf"/>
public class AlignedOffsetBlock
{
/// <summary>
/// Generic block header
/// </summary>
public BlockHeader Header;
/// <summary>
/// Aligned offset tree
/// </summary>
/// <remarks>8 elements, 3 bits each</remarks>
public byte[] AlignedOffsetTree;
/// <summary>
/// Pretree for first 256 elements of main tree
/// </summary>
/// <remarks>20 elements, 4 bits each</remarks>
public byte[] PretreeFirst256;
/// <summary>
/// Path lengths of first 256 elements of main tree
/// </summary>
/// <remarks>Encoded using pretree</remarks>
public int[] PathLengthsFirst256;
/// <summary>
/// Pretree for remainder of main tree
/// </summary>
/// <remarks>20 elements, 4 bits each</remarks>
public byte[] PretreeRemainder;
/// <summary>
/// Path lengths of remaining elements of main tree
/// </summary>
/// <remarks>Encoded using pretree</remarks>
public int[] PathLengthsRemainder;
/// <summary>
/// Pretree for length tree
/// </summary>
/// <remarks>20 elements, 4 bits each</remarks>
public byte[] PretreeLengthTree;
/// <summary>
/// Path lengths of elements in length tree
/// </summary>
/// <remarks>Encoded using pretree</remarks>
public int[] PathLengthsLengthTree;
// Entry Comments Size
// ---------------------------------------------------------------------------------------
// Token sequence (matches and literals) Specified in section 2.6 Variable
}
}

View File

@@ -0,0 +1,38 @@
namespace BurnOutSharp.Models.Compression.LZX
{
/// <summary>
/// An LZXD block represents a sequence of compressed data that is encoded with the same set of
/// Huffman trees, or a sequence of uncompressed data. There can be one or more LZXD blocks in a
/// compressed stream, each with its own set of Huffman trees. Blocks do not have to start or end on a
/// chunk boundary; blocks can span multiple chunks, or a single chunk can contain multiple blocks. The
/// number of chunks is related to the size of the data being compressed, while the number of blocks is
/// related to how well the data is compressed. The Block Type field, as specified in section 2.3.1.1,
/// indicates which type of block follows, and the Block Size field, as specified in section 2.3.1.2,
/// indicates the number of uncompressed bytes represented by the block. Following the generic block
/// header is a type-specific header that describes the remainder of the block.
/// </summary>
/// <see href="https://interoperability.blob.core.windows.net/files/MS-PATCH/%5bMS-PATCH%5d.pdf"/>
public class BlockHeader
{
/// <remarks>3 bits</remarks>
public BlockType BlockType;
/// <summary>
/// Block size is the high 8 bits of 24
/// </summary>
/// <remarks>8 bits</remarks>
public byte BlockSizeMSB;
/// <summary>
/// Block size is the middle 8 bits of 24
/// </summary>
/// <remarks>8 bits</remarks>
public byte BlockSizeByte2;
/// <summary>
/// Block size is the low 8 bits of 24
/// </summary>
/// <remarks>8 bits</remarks>
public byte BlockSizeLSB;
}
}
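
The three size bytes combine into the 24-bit uncompressed block size; a sketch:

// Sketch: assemble the 24-bit block size from the three header bytes.
static uint GetBlockSize(BlockHeader header) =>
    (uint)(header.BlockSizeMSB << 16 | header.BlockSizeByte2 << 8 | header.BlockSizeLSB);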

View File

@@ -0,0 +1,46 @@
namespace BurnOutSharp.Models.Compression.LZX
{
public static class Constants
{
/* some constants defined by the LZX specification */
public const int LZX_MIN_MATCH = (2);
public const int LZX_MAX_MATCH = (257);
public const int LZX_NUM_CHARS = (256);
/// <summary>
/// also blocktypes 4-7 invalid
/// </summary>
public const int LZX_BLOCKTYPE_INVALID = (0);
public const int LZX_BLOCKTYPE_VERBATIM = (1);
public const int LZX_BLOCKTYPE_ALIGNED = (2);
public const int LZX_BLOCKTYPE_UNCOMPRESSED = (3);
public const int LZX_PRETREE_NUM_ELEMENTS = (20);
/// <summary>
/// aligned offset tree #elements
/// </summary>
public const int LZX_ALIGNED_NUM_ELEMENTS = (8);
/// <summary>
/// this one missing from spec!
/// </summary>
public const int LZX_NUM_PRIMARY_LENGTHS = (7);
/// <summary>
/// length tree #elements
/// </summary>
public const int LZX_NUM_SECONDARY_LENGTHS = (249);
/* LZX huffman defines: tweak tablebits as desired */
public const int LZX_PRETREE_MAXSYMBOLS = (LZX_PRETREE_NUM_ELEMENTS);
public const int LZX_PRETREE_TABLEBITS = (6);
public const int LZX_MAINTREE_MAXSYMBOLS = (LZX_NUM_CHARS + 50 * 8);
public const int LZX_MAINTREE_TABLEBITS = (12);
public const int LZX_LENGTH_MAXSYMBOLS = (LZX_NUM_SECONDARY_LENGTHS + 1);
public const int LZX_LENGTH_TABLEBITS = (12);
public const int LZX_ALIGNED_MAXSYMBOLS = (LZX_ALIGNED_NUM_ELEMENTS);
public const int LZX_ALIGNED_TABLEBITS = (7);
public const int LZX_LENTABLE_SAFETY = (64); /* we allow length table decoding overruns */
}
}

View File

@@ -0,0 +1,48 @@
namespace BurnOutSharp.Models.Compression.LZX
{
/// <summary>
/// 3-bit block type
/// </summary>
public enum BlockType : byte
{
/// <summary>
/// Not valid
/// </summary>
INVALID_0 = 0b000,
/// <summary>
/// Verbatim block
/// </summary>
Verbatim = 0b001,
/// <summary>
/// Aligned offset block
/// </summary>
AlignedOffset = 0b010,
/// <summary>
/// Uncompressed block
/// </summary>
Uncompressed = 0b011,
/// <summary>
/// Not valid
/// </summary>
INVALID_4 = 0b100,
/// <summary>
/// Not valid
/// </summary>
INVALID_5 = 0b101,
/// <summary>
/// Not valid
/// </summary>
INVALID_6 = 0b110,
/// <summary>
/// Not valid
/// </summary>
INVALID_7 = 0b111,
}
}

View File

@@ -0,0 +1,102 @@
namespace BurnOutSharp.Models.Compression.LZX
{
public class Header
{
/*
2.2 Header
2.2.1 Chunk Size
The LZXD compressor emits chunks of compressed data. A chunk represents exactly 32 KB of
uncompressed data until the last chunk in the stream, which can represent less than 32 KB. To
ensure that an exact number of input bytes represent an exact number of output bytes for each
chunk, after each 32 KB of uncompressed data is represented in the output compressed bitstream, the
output bitstream is padded with up to 15 bits of zeros to realign the bitstream on a 16-bit boundary
(even byte boundary) for the next 32 KB of data. This results in a compressed chunk of a byte-aligned
size. The compressed chunk could be smaller than 32 KB or larger than 32 KB if the data is
incompressible when the chunk is not the last one.
The LZXD engine encodes a compressed, chunk-size prefix field preceding each compressed chunk in
the compressed byte stream. The compressed, chunk-size prefix field is a byte aligned, little-endian,
16-bit field. The chunk prefix chain could be followed in the compressed stream without
decompressing any data. The next chunk prefix is at a location computed by the absolute byte offset
location of this chunk prefix plus 2 (for the size of the chunk-size prefix field) plus the current chunk
size.
2.2.2 E8 Call Translation
E8 call translation is an optional feature that can be used when the data to compress contains x86
instruction sequences. E8 translation operates as a preprocessing stage before compressing each
chunk, and the compressed stream header contains a bit that indicates whether the decoder shall
reverse the translation as a postprocessing step after decompressing each chunk.
The x86 instruction beginning with a byte value of 0xE8 is followed by a 32-bit, little-endian relative
displacement to the call target. When E8 call translation is enabled, the following preprocessing steps
are performed on the uncompressed input before compression (assuming little-endian byte ordering):
Let chunk_offset refer to the total number of uncompressed bytes preceding this chunk.
Let E8_file_size refer to the caller-specified value given to the compressor or decoded from the header
of the compressed stream during decompression.
The following example shows how E8 translation is performed for each 32-KB chunk of uncompressed
data (or less than 32 KB if last chunk to compress).
if (( chunk_offset < 0x40000000 ) && ( chunk_size > 10 ))
for ( i = 0; i < (chunk_size - 10); i++ )
if ( chunk_byte[ i ] == 0xE8 )
long current_pointer = chunk_offset + i;
long displacement = chunk_byte[ i+1 ] |
chunk_byte[ i+2 ] << 8 |
chunk_byte[ i+3 ] << 16 |
chunk_byte[ i+4 ] << 24;
long target = current_pointer + displacement;
if (( target >= 0 ) && ( target < E8_file_size+current_pointer))
if ( target >= E8_file_size )
target = displacement - E8_file_size;
endif
chunk_byte[ i+1 ] = (byte)( target );
chunk_byte[ i+2 ] = (byte)( target >> 8 );
chunk_byte[ i+3 ] = (byte)( target >> 16 );
chunk_byte[ i+4 ] = (byte)( target >> 24 );
endif
i += 4;
endif
endfor
endif
After decompression, the E8 scanning algorithm is the same. The following example shows how E8
translation reversal is performed.
long value = chunk_byte[ i+1 ] |
chunk_byte[ i+2 ] << 8 |
chunk_byte[ i+3 ] << 16 |
chunk_byte[ i+4 ] << 24;
if (( value >= -current_pointer ) && ( value < E8_file_size ))
if ( value >= 0 )
displacement = value - current_pointer;
else
displacement = value + E8_file_size;
endif
chunk_byte[ i+1 ] = (byte)( displacement );
chunk_byte[ i+2 ] = (byte)( displacement >> 8 );
chunk_byte[ i+3 ] = (byte)( displacement >> 16 );
chunk_byte[ i+4 ] = (byte)( displacement >> 24 );
endif
The first bit in the first chunk in the LZXD bitstream (following the 2-byte, chunk-size prefix described
in section 2.2.1) indicates the presence or absence of two 16-bit fields immediately following the
single bit. If the bit is set, E8 translation is enabled for all the following chunks in the stream using the
32-bit value derived from the two 16-bit fields as the E8_file_size provided to the compressor when E8
translation was enabled. Note that E8_file_size is completely independent of the length of the
uncompressed data. E8 call translation is disabled after the 32,768th chunk (after 1 gigabyte (GB) of
uncompressed data).
Field Comments Size
----------------------------------------------------------------
E8 translation 0-disabled, 1-enabled 1 bit
Translation size high word Only present if enabled 0 or 16 bits
Translation size low word Only present if enabled 0 or 16 bits
*/
}
}
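
The translation-reversal pseudocode quoted above maps directly to C#. A sketch under the same assumptions as the quote (chunk offset and E8_file_size supplied by the caller; nothing here is specific to this repository's decompressor):

// Sketch: undo E8 call translation on one decompressed chunk,
// following the quoted pseudocode.
static void ReverseE8(byte[] chunk, long chunkOffset, long e8FileSize)
{
    if (chunkOffset >= 0x40000000 || chunk.Length <= 10)
        return;

    for (int i = 0; i < chunk.Length - 10; i++)
    {
        if (chunk[i] != 0xE8)
            continue;

        long currentPointer = chunkOffset + i;
        int value = chunk[i + 1] | (chunk[i + 2] << 8) |
                    (chunk[i + 3] << 16) | (chunk[i + 4] << 24);

        if (value >= -currentPointer && value < e8FileSize)
        {
            long displacement = value >= 0
                ? value - currentPointer
                : value + e8FileSize;

            chunk[i + 1] = (byte)displacement;
            chunk[i + 2] = (byte)(displacement >> 8);
            chunk[i + 3] = (byte)(displacement >> 16);
            chunk[i + 4] = (byte)(displacement >> 24);
        }

        i += 4;
    }
}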

View File

@@ -0,0 +1,59 @@
namespace BurnOutSharp.Models.Compression.LZX
{
/// <summary>
/// Following the generic block header, an uncompressed block begins with 1 to 16 bits of zero padding
/// to align the bit buffer on a 16-bit boundary. At this point, the bitstream ends and a byte stream
/// begins. Following the zero padding, new 32-bit values for R0, R1, and R2 are output in little-endian
/// form, followed by the uncompressed data bytes themselves. Finally, if the uncompressed data length
/// is odd, one extra byte of zero padding is encoded to realign the following bitstream.
///
/// Then the bitstream of byte-swapped 16-bit integers resumes for the next Block Type field (if there
/// are subsequent blocks).
///
/// The decoded R0, R1, and R2 values are used as initial repeated offset values to decode the
/// subsequent compressed block if present.
/// </summary>
/// <see href="https://interoperability.blob.core.windows.net/files/MS-PATCH/%5bMS-PATCH%5d.pdf"/>
public class UncompressedBlock
{
/// <summary>
/// Generic block header
/// </summary>
public BlockHeader Header;
/// <summary>
/// Padding to align following field on 16-bit boundary
/// </summary>
/// <remarks>Bits have a value of zero</remarks>
public ushort PaddingBits;
/// <summary>
/// Least significant to most significant byte (little-endian DWORD ([MS-DTYP]))
/// </summary>
/// <remarks>Encoded directly in the byte stream, not in the bitstream of byte-swapped 16-bit words</remarks>
public uint R0;
/// <summary>
/// Least significant to most significant byte (little-endian DWORD)
/// </summary>
/// <remarks>Encoded directly in the byte stream, not in the bitstream of byte-swapped 16-bit words</remarks>
public uint R1;
/// <summary>
/// Least significant to most significant byte (little-endian DWORD)
/// </summary>
/// <remarks>Encoded directly in the byte stream, not in the bitstream of byte-swapped 16-bit words</remarks>
public uint R2;
/// <summary>
/// Can use the direct memcpy function, as specified in [IEEE1003.1]
/// </summary>
/// <remarks>Encoded directly in the byte stream, not in the bitstream of byte-swapped 16-bit words</remarks>
public byte[] RawDataBytes;
/// <summary>
/// Only if uncompressed size is odd
/// </summary>
public byte AlignmentByte;
}
}
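
Once the padding bits realign the stream, the three repeated offsets are plain little-endian DWORDs in the byte stream. A sketch, with `data` and `offset` standing in for the decoder's byte cursor (both assumptions):

// Sketch: read R0, R1, R2 from an aligned position in the byte stream.
static (uint R0, uint R1, uint R2, int NewOffset) ReadRepeatedOffsets(byte[] data, int offset)
{
    uint r0 = ReadUInt32LE(data, offset);
    uint r1 = ReadUInt32LE(data, offset + 4);
    uint r2 = ReadUInt32LE(data, offset + 8);
    return (r0, r1, r2, offset + 12);
}

static uint ReadUInt32LE(byte[] data, int offset) =>
    (uint)(data[offset] | data[offset + 1] << 8 | data[offset + 2] << 16 | data[offset + 3] << 24);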

View File

@@ -0,0 +1,54 @@
namespace BurnOutSharp.Models.Compression.LZX
{
/// <summary>
/// The fields of a verbatim block that follow the generic block header
/// </summary>
/// <see href="https://interoperability.blob.core.windows.net/files/MS-PATCH/%5bMS-PATCH%5d.pdf"/>
public class VerbatimBlock
{
/// <summary>
/// Generic block header
/// </summary>
public BlockHeader Header;
/// <summary>
/// Pretree for first 256 elements of main tree
/// </summary>
/// <remarks>20 elements, 4 bits each</remarks>
public byte[] PretreeFirst256;
/// <summary>
/// Path lengths of first 256 elements of main tree
/// </summary>
/// <remarks>Encoded using pretree</remarks>
public int[] PathLengthsFirst256;
/// <summary>
/// Pretree for remainder of main tree
/// </summary>
/// <remarks>20 elements, 4 bits each</remarks>
public byte[] PretreeRemainder;
/// <summary>
/// Path lengths of remaining elements of main tree
/// </summary>
/// <remarks>Encoded using pretree</remarks>
public int[] PathLengthsRemainder;
/// <summary>
/// Pretree for length tree
/// </summary>
/// <remarks>20 elements, 4 bits each</remarks>
public byte[] PretreeLengthTree;
/// <summary>
/// Path lengths of elements in length tree
/// </summary>
/// <remarks>Encoded using pretree</remarks>
public int[] PathLengthsLengthTree;
// Entry Comments Size
// ---------------------------------------------------------------------------------------
// Token sequence (matches and literals) Specified in section 2.6 Variable
}
}

View File

@@ -0,0 +1,28 @@
namespace BurnOutSharp.Models.Compression.MSZIP
{
/// <summary>
/// Each MSZIP block MUST consist of a 2-byte MSZIP signature and one or more RFC 1951 blocks. The
/// 2-byte MSZIP signature MUST consist of the bytes 0x43 and 0x4B. The MSZIP signature MUST be
/// the first 2 bytes in the MSZIP block. The MSZIP signature is shown in the following packet diagram.
///
/// Each MSZIP block is the result of a single deflate compression operation, as defined in [RFC1951].
/// The compressor that performs the compression operation MUST generate one or more RFC 1951
/// blocks, as defined in [RFC1951]. The number, deflation mode, and type of RFC 1951 blocks in each
/// MSZIP block is determined by the compressor, as defined in [RFC1951]. The last RFC 1951 block in
/// each MSZIP block MUST be marked as the "end" of the stream, as defined by [RFC1951]
/// section 3.2.3. Decoding trees MUST be discarded after each RFC 1951 block, but the history buffer
/// MUST be maintained. Each MSZIP block MUST represent no more than 32 KB of uncompressed data.
///
/// The maximum compressed size of each MSZIP block is 32 KB + 12 bytes. This enables the MSZIP
/// block to contain 32 KB of data split between two noncompressed RFC 1951 blocks, each of which
/// has a value of BTYPE = 00.
/// </summary>
/// <see href="https://interoperability.blob.core.windows.net/files/MS-MCI/%5bMS-MCI%5d.pdf"/>
public class BlockHeader
{
/// <summary>
/// 'CK'
/// </summary>
public ushort Signature;
}
}
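
Because the signature is the two literal bytes 0x43 0x4B ('C', 'K'), a block can be sanity-checked before its RFC 1951 data is inflated. A minimal sketch:

// Sketch: verify the 2-byte MSZIP signature preceding the deflate data.
static bool HasMszipSignature(byte[] data, int offset)
{
    return offset >= 0
        && offset + 1 < data.Length
        && data[offset] == 0x43      // 'C'
        && data[offset + 1] == 0x4B; // 'K'
}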

View File

@@ -0,0 +1,89 @@
namespace BurnOutSharp.Models.Compression.MSZIP
{
/// <see href="https://github.com/wine-mirror/wine/blob/master/dlls/cabinet/cabinet.h"/>
public static class Constants
{
/// <summary>
/// Window size
/// </summary>
public const ushort ZIPWSIZE = 0x8000;
/// <summary>
/// Bits in base literal/length lookup table
/// </summary>
public const int ZIPLBITS = 9;
/// <summary>
/// Bits in base distance lookup table
/// </summary>
public const int ZIPDBITS = 6;
/// <summary>
/// Maximum bit length of any code
/// </summary>
public const int ZIPBMAX = 16;
/// <summary>
/// Maximum number of codes in any set
/// </summary>
public const int ZIPN_MAX = 288;
#region THOSE_ZIP_CONSTS
/// <summary>
/// Order of the bit length code lengths
/// </summary>
public static readonly byte[] BitLengthOrder = new byte[]
{
16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15
};
/// <summary>
/// Copy lengths for literal codes 257..285
/// </summary>
public static readonly ushort[] CopyLengths = new ushort[]
{
3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 15, 17, 19, 23, 27, 31, 35, 43, 51,
59, 67, 83, 99, 115, 131, 163, 195, 227, 258, 0, 0
};
/// <summary>
/// Extra bits for literal codes 257..285
/// </summary>
/// <remarks>99 == invalid</remarks>
public static readonly ushort[] LiteralExtraBits = new ushort[]
{
0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3, 4, 4, 4,
4, 5, 5, 5, 5, 0, 99, 99
};
/// <summary>
/// Copy offsets for distance codes 0..29
/// </summary>
public static readonly ushort[] CopyOffsets = new ushort[]
{
1, 2, 3, 4, 5, 7, 9, 13, 17, 25, 33, 49, 65, 97, 129, 193, 257, 385,
513, 769, 1025, 1537, 2049, 3073, 4097, 6145, 8193, 12289, 16385, 24577
};
/// <summary>
/// Extra bits for distance codes
/// </summary>
public static readonly ushort[] DistanceExtraBits = new ushort[]
{
0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7, 8, 8, 9, 9, 10,
10, 11, 11, 12, 12, 13, 13
};
/// <summary>
/// And'ing with Zipmask[n] masks the lower n bits
/// </summary>
public static readonly ushort[] BitMasks = new ushort[17]
{
0x0000, 0x0001, 0x0003, 0x0007, 0x000f, 0x001f, 0x003f, 0x007f, 0x00ff,
0x01ff, 0x03ff, 0x07ff, 0x0fff, 0x1fff, 0x3fff, 0x7fff, 0xffff
};
#endregion
}
}

View File

@@ -0,0 +1,18 @@
namespace BurnOutSharp.Models.Compression.MSZIP
{
/// <see href="https://www.rfc-editor.org/rfc/rfc1951"/>
public class DeflateBlockHeader
{
/// <summary>
/// Set if and only if this is the last block of the data set.
/// </summary>
/// <remarks>Bit 0</remarks>
public bool BFINAL { get; set; }
/// <summary>
/// Specifies how the data are compressed
/// </summary>
/// <remarks>Bits 1-2</remarks>
public CompressionType BTYPE { get; set; }
}
}
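
BFINAL and BTYPE occupy the three lowest bits of the stream, read least-significant-bit first. A sketch, with `readBits` standing in for the decoder's bit reader (an assumption):

using System;

// Sketch: decode the 3-bit deflate block header from a bit reader.
static DeflateBlockHeader ReadDeflateBlockHeader(Func<int, uint> readBits)
{
    return new DeflateBlockHeader
    {
        BFINAL = readBits(1) != 0,            // bit 0
        BTYPE = (CompressionType)readBits(2), // bits 1-2
    };
}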

View File

@@ -0,0 +1,19 @@
namespace BurnOutSharp.Models.Compression.MSZIP
{
/// <summary>
/// Compression with dynamic Huffman codes (BTYPE=10)
/// </summary>
/// <see href="https://www.rfc-editor.org/rfc/rfc1951"/>
public class DynamicHuffmanCompressedBlockHeader
{
/// <summary>
/// Huffman code lengths for the literal / length alphabet
/// </summary>
public int[] LiteralLengths;
/// <summary>
/// Huffman code lengths for the distance alphabet
/// </summary>
public int[] DistanceCodes;
}
}

View File

@@ -0,0 +1,25 @@
namespace BurnOutSharp.Models.Compression.MSZIP
{
public enum CompressionType : byte
{
/// <summary>
/// no compression
/// </summary>
NoCompression = 0b00,
/// <summary>
/// Compressed with fixed Huffman codes
/// </summary>
FixedHuffman = 0b01,
/// <summary>
/// Compressed with dynamic Huffman codes
/// </summary>
DynamicHuffman = 0b10,
/// <summary>
/// Reserved (error)
/// </summary>
Reserved = 0b11,
}
}

View File

@@ -0,0 +1,94 @@
using System;
namespace BurnOutSharp.Models.Compression.MSZIP
{
/// <summary>
/// Compression with fixed Huffman codes (BTYPE=01)
/// </summary>
/// <see href="https://interoperability.blob.core.windows.net/files/MS-MCI/%5bMS-MCI%5d.pdf"/>
/// <see href="https://www.rfc-editor.org/rfc/rfc1951"/>
public class FixedHuffmanCompressedBlockHeader
{
#region Properties
/// <summary>
/// Huffman code lengths for the literal / length alphabet
/// </summary>
public uint[] LiteralLengths
{
get
{
// If we have cached lengths, use those
if (_literalLengths != null)
return _literalLengths;
// Otherwise, build it from scratch
_literalLengths = new uint[288];
// Literal Value 0 - 143, 8 bits
for (int i = 0; i < 144; i++)
_literalLengths[i] = 8;
// Literal Value 144 - 255, 9 bits
for (int i = 144; i < 256; i++)
_literalLengths[i] = 9;
// Literal Value 256 - 279, 7 bits
for (int i = 256; i < 280; i++)
_literalLengths[i] = 7;
// Literal Value 280 - 287, 8 bits
for (int i = 280; i < 288; i++)
_literalLengths[i] = 8;
return _literalLengths;
}
set
{
throw new FieldAccessException();
}
}
/// <summary>
/// Huffman code lengths for the distance alphabet
/// </summary>
public uint[] DistanceCodes
{
get
{
// If we have cached distances, use those
if (_distanceCodes != null)
return _distanceCodes;
// Otherwise, build it from scratch
_distanceCodes = new uint[30];
// Fixed length, 5 bits
for (int i = 0; i < 30; i++)
_distanceCodes[i] = 5;
return _distanceCodes;
}
set
{
throw new FieldAccessException();
}
}
#endregion
#region Instance Variables
/// <summary>
/// Huffman code lengths for the literal / length alphabet
/// </summary>
private uint[] _literalLengths = null;
/// <summary>
/// Huffman code lengths for the distance alphabet
/// </summary>
private uint[] _distanceCodes = null;
#endregion
}
}
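
The throwing setters keep the property shape symmetric with the dynamic header while making the fixed tables effectively read-only. As a quick check, tallying the lazily built literal/length table should match RFC 1951 section 3.2.6 (24 seven-bit, 152 eight-bit, and 112 nine-bit codes); a sketch:

// Sketch: counting fixed literal/length codes per bit length.
var header = new FixedHuffmanCompressedBlockHeader();
var counts = new System.Collections.Generic.Dictionary<uint, int>();
foreach (uint length in header.LiteralLengths)
    counts[length] = counts.TryGetValue(length, out int n) ? n + 1 : 1;
// counts[7] == 24, counts[8] == 152, counts[9] == 112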

View File

@@ -0,0 +1,21 @@
namespace BurnOutSharp.Models.Compression.MSZIP
{
/// <summary>
/// Non-compressed blocks (BTYPE=00)
/// </summary>
/// <see href="https://www.rfc-editor.org/rfc/rfc1951"/>
public class NonCompressedBlockHeader
{
/// <summary>
/// The number of data bytes in the block
/// </summary>
/// <remarks>Bytes 0-1</remarks>
public ushort LEN;
/// <summary>
/// The one's complement of LEN
/// </summary>
/// <remarks>Bytes 2-3</remarks>
public ushort NLEN;
}
}
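
Because NLEN is the one's complement of LEN, a stored-block header can be sanity-checked with a single XOR; a minimal sketch:

// Sketch: XOR-ing LEN with its one's complement must set all sixteen bits.
public static bool IsValid(NonCompressedBlockHeader header)
    => (ushort)(header.LEN ^ header.NLEN) == 0xFFFF;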

View File

@@ -0,0 +1,45 @@
namespace BurnOutSharp.Models.Compression.Quantum
{
public static class Constants
{
/// <summary>
/// Mask for Quantum Compression Level
/// </summary>
public const ushort MASK_QUANTUM_LEVEL = 0x00F0;
/// <summary>
/// Lowest Quantum Level (1)
/// </summary>
public const ushort QUANTUM_LEVEL_LO = 0x0010;
/// <summary>
/// Highest Quantum Level (7)
/// </summary>
public const ushort QUANTUM_LEVEL_HI = 0x0070;
/// <summary>
/// Number of bits to shift right to extract the level as an integer
/// </summary>
public const ushort SHIFT_QUANTUM_LEVEL = 4;
/// <summary>
/// Mask for Quantum Compression Memory
/// </summary>
public const ushort MASK_QUANTUM_MEM = 0x1F00;
/// <summary>
/// Lowest Quantum Memory (10)
/// </summary>
public const ushort QUANTUM_MEM_LO = 0x0A00;
/// <summary>
/// Highest Quantum Memory (21)
/// </summary>
public const ushort QUANTUM_MEM_HI = 0x1500;
/// <summary>
/// Number of bits to shift right to extract the memory value as an integer
/// </summary>
public const ushort SHIFT_QUANTUM_MEM = 8;
}
}
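
A sketch of pulling both fields out of a Quantum compression-type word with the masks and shifts above; where the word itself comes from (e.g. a CAB folder record) is outside this model:

// Sketch: the level lands in 1..7 and the memory/window exponent in 10..21.
public static (int Level, int Memory) ParseQuantumType(ushort compressionType)
{
    int level = (compressionType & Constants.MASK_QUANTUM_LEVEL) >> Constants.SHIFT_QUANTUM_LEVEL;
    int memory = (compressionType & Constants.MASK_QUANTUM_MEM) >> Constants.SHIFT_QUANTUM_MEM;
    return (level, memory);
}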

View File

@@ -0,0 +1,45 @@
namespace BurnOutSharp.Models.Compression.Quantum
{
public enum SelectorModel
{
/// <summary>
/// Literal model, 64 entries, start at symbol 0
/// </summary>
SELECTOR_0 = 0,
/// <summary>
/// Literal model, 64 entries, start at symbol 64
/// </summary>
SELECTOR_1 = 1,
/// <summary>
/// Literal model, 64 entries, start at symbol 128
/// </summary>
SELECTOR_2 = 2,
/// <summary>
/// Literal model, 64 entries, start at symbol 192
/// </summary>
SELECTOR_3 = 3,
/// <summary>
/// LZ model, 3 character matches, max 24 entries, start at symbol 0
/// </summary>
SELECTOR_4 = 4,
/// <summary>
/// LZ model, 4 character matches, max 36 entries, start at symbol 0
/// </summary>
SELECTOR_5 = 5,
/// <summary>
/// LZ model, 5+ character matches, max 42 entries, start at symbol 0
/// </summary>
SELECTOR_6_POSITION = 6,
/// <summary>
/// LZ model, 5+ character matches, 27 entries, start at symbol 0
/// </summary>
SELECTOR_6_LENGTH = 7,
}
}
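
The entry counts in the comments above can be read back as a helper; a sketch with the values taken directly from those comments:

// Sketch: number of model entries implied by each selector.
public static int MaxEntries(SelectorModel selector) => selector switch
{
    SelectorModel.SELECTOR_0 => 64,
    SelectorModel.SELECTOR_1 => 64,
    SelectorModel.SELECTOR_2 => 64,
    SelectorModel.SELECTOR_3 => 64,
    SelectorModel.SELECTOR_4 => 24,
    SelectorModel.SELECTOR_5 => 36,
    SelectorModel.SELECTOR_6_POSITION => 42,
    SelectorModel.SELECTOR_6_LENGTH => 27,
    _ => 0,
};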

View File

@@ -0,0 +1,15 @@
namespace BurnOutSharp.Models.Compression.Quantum
{
/// <see href="https://github.com/wine-mirror/wine/blob/master/dlls/cabinet/cabinet.h"/>
/// <see href="http://www.russotto.net/quantumcomp.html"/>
public sealed class Model
{
public int TimeToReorder;                      // Model updates remaining before the symbols are reordered
public int Entries;                            // Number of symbols in the model
public ModelSymbol[] Symbols;                  // Symbols paired with their cumulative frequencies
public ushort[] LookupTable = new ushort[256]; // Maps a symbol value to its index in Symbols
}
}

View File

@@ -0,0 +1,11 @@
namespace BurnOutSharp.Models.Compression.Quantum
{
/// <see href="https://github.com/wine-mirror/wine/blob/master/dlls/cabinet/cabinet.h"/>
/// <see href="http://www.russotto.net/quantumcomp.html"/>
public sealed class ModelSymbol
{
public ushort Symbol;              // The symbol value
public ushort CumulativeFrequency; // Cumulative frequency of this symbol and all symbols after it
}
}
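
The Wine and russotto.net references initialize each model with symbols in order and linearly decreasing cumulative frequencies; a sketch under that assumption (the reorder countdown value is also an assumption taken from those sources):

// Sketch: building a fresh model; Symbols carries one sentinel entry past
// Entries, and CumulativeFrequency decreases linearly to zero.
public static Model CreateModel(ushort firstSymbol, int entries)
{
    var model = new Model
    {
        TimeToReorder = 4,
        Entries = entries,
        Symbols = new ModelSymbol[entries + 1],
    };

    for (int i = 0; i <= entries; i++)
    {
        if (i < entries)
            model.LookupTable[firstSymbol + i] = (ushort)i;

        model.Symbols[i] = new ModelSymbol
        {
            Symbol = (ushort)(firstSymbol + i),
            CumulativeFrequency = (ushort)(entries - i),
        };
    }

    return model;
}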

View File

@@ -0,0 +1,41 @@
namespace BurnOutSharp.Models.GCF
{
/// <see href="https://github.com/RavuAlHemio/hllib/blob/master/HLLib/GCFFile.h"/>
public sealed class BlockEntry
{
/// <summary>
/// Flags for the block entry. 0x200F0000 == Not used.
/// </summary>
public uint EntryFlags;
/// <summary>
/// The offset for the data contained in this block entry in the file.
/// </summary>
public uint FileDataOffset;
/// <summary>
/// The length of the data in this block entry.
/// </summary>
public uint FileDataSize;
/// <summary>
/// The offset to the first data block of this block entry's data.
/// </summary>
public uint FirstDataBlockIndex;
/// <summary>
/// The next block entry in the series. (N/A if == BlockCount.)
/// </summary>
public uint NextBlockEntryIndex;
/// <summary>
/// The previous block entry in the series. (N/A if == BlockCount.)
/// </summary>
public uint PreviousBlockEntryIndex;
/// <summary>
/// The offset of the block entry in the directory.
/// </summary>
public uint DirectoryIndex;
}
}
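
Block entries chain together through NextBlockEntryIndex, with an index equal to the total block count marking the end of a chain (per the comments above). A traversal sketch, where blockCount would come from the BlockEntryHeader:

// Sketch: walking one chain of block entries.
public static System.Collections.Generic.IEnumerable<BlockEntry> WalkChain(
    BlockEntry[] entries, uint startIndex, uint blockCount)
{
    uint index = startIndex;
    while (index < blockCount)
    {
        BlockEntry entry = entries[index];
        yield return entry;
        index = entry.NextBlockEntryIndex;
    }
}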

View File

@@ -0,0 +1,46 @@
namespace BurnOutSharp.Models.GCF
{
/// <see href="https://github.com/RavuAlHemio/hllib/blob/master/HLLib/GCFFile.h"/>
public sealed class BlockEntryHeader
{
/// <summary>
/// Number of data blocks.
/// </summary>
public uint BlockCount;
/// <summary>
/// Number of data blocks that point to data.
/// </summary>
public uint BlocksUsed;
/// <summary>
/// Reserved
/// </summary>
public uint Dummy0;
/// <summary>
/// Reserved
/// </summary>
public uint Dummy1;
/// <summary>
/// Reserved
/// </summary>
public uint Dummy2;
/// <summary>
/// Reserved
/// </summary>
public uint Dummy3;
/// <summary>
/// Reserved
/// </summary>
public uint Dummy4;
/// <summary>
/// Header checksum.
/// </summary>
public uint Checksum;
}
}

View File

@@ -0,0 +1,19 @@
namespace BurnOutSharp.Models.GCF
{
/// <remarks>
/// Part of version 5 but not version 6.
/// </remarks>
/// <see href="https://github.com/RavuAlHemio/hllib/blob/master/HLLib/GCFFile.h"/>
public sealed class BlockEntryMap
{
/// <summary>
/// The previous block entry. (N/A if == BlockCount.)
/// </summary>
public uint PreviousBlockEntryIndex;
/// <summary>
/// The next block entry. (N/A if == BlockCount.)
/// </summary>
public uint NextBlockEntryIndex;
}
}

View File

@@ -0,0 +1,34 @@
namespace BurnOutSharp.Models.GCF
{
/// <remarks>
/// Part of version 5 but not version 6.
/// </remarks>
/// <see href="https://github.com/RavuAlHemio/hllib/blob/master/HLLib/GCFFile.h"/>
public sealed class BlockEntryMapHeader
{
/// <summary>
/// Number of data blocks.
/// </summary>
public uint BlockCount;
/// <summary>
/// Index of the first block entry.
/// </summary>
public uint FirstBlockEntryIndex;
/// <summary>
/// Index of the last block entry.
/// </summary>
public uint LastBlockEntryIndex;
/// <summary>
/// Reserved
/// </summary>
public uint Dummy0;
/// <summary>
/// Header checksum.
/// </summary>
public uint Checksum;
}
}

View File

@@ -0,0 +1,11 @@
namespace BurnOutSharp.Models.GCF
{
/// <see href="https://github.com/RavuAlHemio/hllib/blob/master/HLLib/GCFFile.h"/>
public sealed class ChecksumEntry
{
/// <summary>
/// Checksum.
/// </summary>
public uint Checksum;
}
}

View File

@@ -0,0 +1,16 @@
namespace BurnOutSharp.Models.GCF
{
/// <see href="https://github.com/RavuAlHemio/hllib/blob/master/HLLib/GCFFile.h"/>
public sealed class ChecksumHeader
{
/// <summary>
/// Always 0x00000001
/// </summary>
public uint Dummy0;
/// <summary>
/// Size of LPGCFCHECKSUMHEADER & LPGCFCHECKSUMMAPHEADER, in bytes.
/// </summary>
public uint ChecksumSize;
}
}

View File

@@ -0,0 +1,16 @@
namespace BurnOutSharp.Models.GCF
{
/// <see href="https://github.com/RavuAlHemio/hllib/blob/master/HLLib/GCFFile.h"/>
public sealed class ChecksumMapEntry
{
/// <summary>
/// Number of checksums.
/// </summary>
public uint ChecksumCount;
/// <summary>
/// Index of first checksum.
/// </summary>
public uint FirstChecksumIndex;
}
}
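
Each map entry points at a contiguous run of ChecksumEntry records; a lookup sketch, assuming both arrays were already read from the checksum section:

// Sketch: resolving the ChecksumCount checksums starting at FirstChecksumIndex.
public static uint[] GetChecksums(ChecksumMapEntry mapEntry, ChecksumEntry[] checksums)
{
    var result = new uint[mapEntry.ChecksumCount];
    for (uint i = 0; i < mapEntry.ChecksumCount; i++)
        result[i] = checksums[mapEntry.FirstChecksumIndex + i].Checksum;
    return result;
}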

Some files were not shown because too many files have changed in this diff.