Compare commits


182 Commits
2.8.0 ... 3.1.0

Author SHA1 Message Date
Matt Nadareski
5cdf269a3e Bump version 2024-02-26 12:14:30 -05:00
TheRogueArchivist
b9d90ec35d Remove GetVersionFromSHA1Hash from SafeDisc (#284)
* Remove GetVersionFromSHA1Hash from SafeDisc.
* Minor SafeDisc comment cleanup.
2024-02-23 10:58:58 -08:00
Matt Nadareski
5fc1d3254d Remove outdated comments in build scripts 2024-02-21 19:24:33 -05:00
Matt Nadareski
d61bae8e61 Don't use the auto git hash 2024-02-21 00:04:16 -05:00
Matt Nadareski
f8f53869ae Fix build scripts, again 2024-02-20 23:22:10 -05:00
Matt Nadareski
f5146a6e35 osx-arm64 is such a pain 2024-02-20 22:48:12 -05:00
Matt Nadareski
faf96b9375 Wrong array 2024-02-20 22:10:51 -05:00
Matt Nadareski
2228e344f6 Or, not And 2024-02-20 21:53:52 -05:00
Matt Nadareski
9955bdcab1 Not all DLLs, oops 2024-02-20 21:47:59 -05:00
Matt Nadareski
4586d49a3f Use DLL filtering in publish scripts 2024-02-20 21:45:18 -05:00
Matt Nadareski
1f4e24452a Add non-DLL lists, not hooked up 2024-02-20 21:33:14 -05:00
Matt Nadareski
090bac4d59 Remove unbuildable runtime 2024-02-20 21:12:21 -05:00
Matt Nadareski
59bedf5fce Fix DLL bundling 2024-02-20 21:07:29 -05:00
Matt Nadareski
1bbc541957 Limit packing to just BOS library 2024-02-20 21:04:39 -05:00
Matt Nadareski
1bb0107ceb Remove net6.0 from AppVeyor, add win-x86 2024-02-20 21:03:28 -05:00
Matt Nadareski
10dad356cd Expand default and extended publish targets 2024-02-20 20:59:28 -05:00
Matt Nadareski
22b6971e51 Tabs lose this battle 2024-02-20 20:54:28 -05:00
Matt Nadareski
3203b56ef6 Update publish scripts 2024-02-20 20:51:59 -05:00
Matt Nadareski
d6db84152f Limit MS-CAB to x86 until IntPtr issue resolved 2024-02-20 20:14:38 -05:00
Matt Nadareski
b7afad5a4a Enable MS-CAB extraction on at least x86 2024-02-20 19:59:43 -05:00
Matt Nadareski
9d6c53f631 Update build after submodule update 2024-02-20 18:45:37 -05:00
Matt Nadareski
aa7b02dfc3 Add libmspack4n and LessIO as submodules 2024-02-20 18:44:51 -05:00
TheRogueArchivist
379ffaf61a Add more empty file checks for SafeDisc (#283) 2024-02-20 06:47:20 -08:00
TheRogueArchivist
1bdfccddbc Fix Hexalock false positive (#281) 2024-02-08 10:09:11 -08:00
Matt Nadareski
c83cdd590c Update libraries 2024-02-06 10:48:59 -05:00
Matt Nadareski
f4770374a7 Update copyright date 2024-02-06 10:44:36 -05:00
TheRogueArchivist
72880e93bc Add new Denuvo Anti-Cheat detections (#279) 2024-02-01 20:27:49 -08:00
TheRogueArchivist
6c9cd72948 Add CD-X notes (#278)
* Add CD-X notes

* Add additional note
2024-01-26 06:40:09 -08:00
TheRogueArchivist
2e71ef4635 Update CopyKiller detection and notes (WIP) (#277)
* Update CopyKiller detection and notes

* Cleanup Copykiller
2024-01-25 21:19:16 -08:00
TheRogueArchivist
04cd1098ea Improve CrypKey detection (#275)
* Improve CrypKey detection

* Add CrypKey file detections.
* Add new CrypKey executable detections.

* Fix CrypKey version parsing

* Address PR reviews

* Check for both "code" and "CODE" sections
2023-12-28 20:35:57 -08:00
TheRogueArchivist
e76ce64568 Update known versions for the first SafeDisc splash-screen (#274) 2023-12-25 17:31:09 -08:00
Matt Nadareski
8fe84abef3 Use more lenient file reading 2023-12-13 15:52:03 -05:00
TheRogueArchivist
1b1fa53547 Add small note about ProtectDISC using CSS (#272) 2023-12-01 21:28:21 -08:00
TheRogueArchivist
5019407f35 Add additional SafeDisc for Mac detections and notes (#271)
* Add additional SafeDisc for Mac detections and notes

* Slightly update notes

* Minor additions

* Update earliest known SafeDisc splash-screen version
2023-12-01 17:42:03 -08:00
Matt Nadareski
83ba19eccb Fix ancient .NET dictionary extensions 2023-11-29 13:13:27 -05:00
Matt Nadareski
936bf38521 Slight cleanup 2023-11-25 22:25:44 -05:00
TheRogueArchivist
f54b0d2bbb Add SafeDisc Splash-Screen detection and notes (#269)
* Add initial SafeDisc splash-screen scanning and notes

So far only SafeDisc 1 has been added.

* Further update SafeDisc splash screen notes

This gets part of the way through SafeDisc 2 at least

* Update SafeDisc splash-screen notes and detection up through SafeDisc 2

* Update splash-screen notes through SafeDisc 3

* Starting adding SafeDisc 4 splash-screen notes

* Finish adding SafeDisc 4 splash-screen support

* Update SafeDisc splash-screen notes

* oops

* oops again
2023-11-25 19:21:59 -08:00
Matt Nadareski
0e32abc76c Bump version 2023-11-22 13:28:56 -05:00
Matt Nadareski
94cb06a3bd Handle some messages 2023-11-22 13:28:13 -05:00
Matt Nadareski
907aea443e Support .NET Framework 2.0 2023-11-22 12:22:01 -05:00
Matt Nadareski
385922723c Update packages 2023-11-22 10:48:08 -05:00
Matt Nadareski
3061c2f009 Reenable .NET Framework 4.0 2023-11-21 10:59:29 -05:00
Matt Nadareski
5c0ccbde35 Temporarily remove .NET Framework 4.0 2023-11-21 10:19:15 -05:00
Matt Nadareski
7b998de2ca Handle more C# 12 syntax 2023-11-21 10:17:25 -05:00
Matt Nadareski
b0d49f52a5 Bump version 2023-11-20 12:16:53 -05:00
Matt Nadareski
6f9bcc2111 Fix multiple invocation bug
This bug arose when all of the libraries were consolidated into the same library for better packaging. Each set of classes was being instantiated 3 times as a result.
2023-11-18 20:55:32 -05:00
Matt Nadareski
0fb0ecd28a Update ST libraries for bugfixes 2023-11-15 12:54:32 -05:00
Matt Nadareski
6194d88aec Correct the excludes 2023-11-15 11:26:00 -05:00
Matt Nadareski
b02c3121fe Use official package for IProgress 2023-11-15 00:24:47 -05:00
Matt Nadareski
580db0cb65 Minor tweaks to reduce warnings 2023-11-14 23:48:35 -05:00
Matt Nadareski
6bcdc0e3c6 Bump version 2023-11-14 16:16:08 -05:00
Matt Nadareski
9b4fd91717 Update build scripts 2023-11-14 16:13:49 -05:00
Matt Nadareski
9421249b8e Support ancient .NET 2023-11-14 16:10:10 -05:00
Matt Nadareski
e823cbaee5 Expand supported RIDs 2023-11-08 22:51:47 -05:00
Matt Nadareski
c34618554b Update project comment 2023-11-08 12:08:22 -05:00
Matt Nadareski
6ab7b4a004 Enable latest language version 2023-11-08 11:59:50 -05:00
Matt Nadareski
5b6bf3b73e Rename to BinaryObjectScanner 2023-11-08 11:37:27 -05:00
Matt Nadareski
2a30a13f5f Omit submodule code for modern .NET 2023-10-27 00:58:03 -04:00
Matt Nadareski
3c05a112ca Remove DLLs from modern .NET builds 2023-10-27 00:55:31 -04:00
Matt Nadareski
66af2d83b8 Bump version 2023-10-26 00:35:18 -04:00
Matt Nadareski
fc3be76657 Figure out last couple of nullability issues 2023-10-26 00:27:38 -04:00
Matt Nadareski
259a91dd77 Ensure that the nupkg has the right name 2023-10-26 00:18:48 -04:00
Matt Nadareski
e957b29bae Remove unnecessary/broken appveyor zips 2023-10-26 00:10:18 -04:00
Matt Nadareski
33060d9787 Sync appveyor script further to build release 2023-10-26 00:08:22 -04:00
Matt Nadareski
71ca79a456 Update SharpCompress and SharpZipLib 2023-10-26 00:00:34 -04:00
Matt Nadareski
fc744d241a Normalize archive naming with MPF 2023-10-25 23:56:20 -04:00
Matt Nadareski
78b5b3dbc1 Add publish scripts for easier distribution 2023-10-25 23:51:07 -04:00
Matt Nadareski
cb3846261a Fix more nullability locations 2023-10-25 23:33:51 -04:00
Matt Nadareski
9a3dcf70de Slight reordering of XMID/XeMID 2023-10-25 23:16:51 -04:00
Matt Nadareski
670c78302f Add printing for IRD, XMID, XeMID 2023-10-25 23:03:40 -04:00
Matt Nadareski
f4cb97b3bf Bump all ST library versions 2023-10-25 23:03:20 -04:00
TheRogueArchivist
be6d44ed04 Add WEB-Cops detection to CD/DVD-Cops (#268) 2023-10-04 17:55:10 -07:00
TheRogueArchivist
cfc9092479 Add DigiGuard detection (#267)
* Add DigiGuard detection

* Use FilePathMatch instead of PathMatch
2023-10-01 19:05:04 -07:00
TheRogueArchivist
d674ae5b1f Add new CD/DVD-Cops detections (#266) 2023-10-01 11:59:20 -07:00
Matt Nadareski
864972e575 Update packages to fix printing issues 2023-09-29 11:48:42 -04:00
Matt Nadareski
7d97850cb0 Fix some more nullability warnings 2023-09-26 15:31:43 -04:00
Matt Nadareski
3e33f098a6 Fix solution file 2023-09-26 15:25:30 -04:00
Matt Nadareski
adf9ce5e2a Matching to 1.1.1, OpenMcdf to 2.3.0 2023-09-18 15:53:24 -04:00
Matt Nadareski
0c5dff71e9 More nullability fixes 2023-09-18 14:58:33 -04:00
Matt Nadareski
715f773672 Fix DLL inclusion issues 2023-09-18 14:15:51 -04:00
Matt Nadareski
a54b89d380 Make the leap to BinaryObjectScanner 2023-09-18 13:56:07 -04:00
Matt Nadareski
2085c306ab Move stormlibsharp to main library 2023-09-18 12:52:42 -04:00
Matt Nadareski
98b99da0bc Remove stormlibsharp from Compression 2023-09-18 12:15:24 -04:00
Matt Nadareski
e00238b24e Migrate to compression library, mostly 2023-09-18 12:09:54 -04:00
Matt Nadareski
aef2b756c9 Add warning in Extractor 2023-09-18 11:35:52 -04:00
Matt Nadareski
f1f1e20a3e Remove reference to incomplete compressions 2023-09-18 11:33:25 -04:00
Matt Nadareski
62a2fdeaa6 Remove incomplete compressions 2023-09-18 11:24:12 -04:00
Matt Nadareski
37c1852058 Remove unnecessary package references 2023-09-18 01:24:44 -04:00
Matt Nadareski
99c4a08d83 Remove two incomplete compressions 2023-09-18 01:14:01 -04:00
Matt Nadareski
9d4bc6bfab Move psxt001z to its own library 2023-09-18 00:59:37 -04:00
Matt Nadareski
71fd5af48e Move some classes to new library 2023-09-18 00:33:24 -04:00
Matt Nadareski
995521b789 Move Utilities to new library 2023-09-18 00:13:23 -04:00
Matt Nadareski
5031985883 Shift references around to be more accurate 2023-09-18 00:06:07 -04:00
Matt Nadareski
5759090291 Move FileType to new library 2023-09-17 23:58:42 -04:00
Matt Nadareski
008c1c89fb Move Protection to new library 2023-09-17 23:53:14 -04:00
Matt Nadareski
624eb40315 Move Packer to new library 2023-09-17 23:40:29 -04:00
Matt Nadareski
aaa12ae817 Move GameEngine to new library 2023-09-17 23:35:59 -04:00
Matt Nadareski
d37c90878c Move Interfaces to new library 2023-09-17 23:32:29 -04:00
Matt Nadareski
668a631c11 Create new base library 2023-09-17 23:28:27 -04:00
Matt Nadareski
f06c0f4553 Simplify some directives 2023-09-17 23:11:32 -04:00
Matt Nadareski
a0b13a6e6f Fix templated nullability issues 2023-09-17 22:37:01 -04:00
Matt Nadareski
1bd9f3fd88 Open the nullability floodgates 2023-09-17 00:20:33 -04:00
Matt Nadareski
93ba88a35f Enable nullable context and move WrapperFactory 2023-09-17 00:18:04 -04:00
Matt Nadareski
c86b1251a3 Extraction can go in FileType 2023-09-16 23:09:07 -04:00
Matt Nadareski
6f6954b270 Printing is only used by Test 2023-09-16 22:33:43 -04:00
Matt Nadareski
1d4ed425f1 Remove unnecessary dependency 2023-09-16 22:24:00 -04:00
Matt Nadareski
ba657e28ad Migrate to matching library 2023-09-16 22:08:18 -04:00
Matt Nadareski
7575353597 Add nullability to matching library, fix warnings 2023-09-16 21:25:50 -04:00
Matt Nadareski
10d3c09cfa Migrate to printing library 2023-09-16 16:28:41 -04:00
Matt Nadareski
5001c4a881 Fix build from previous updates 2023-09-16 02:04:47 -04:00
Matt Nadareski
7ecd0d1893 Fix using statements in interfaces 2023-09-16 01:00:47 -04:00
Matt Nadareski
ae802d5d75 Update serialization library 2023-09-16 00:58:24 -04:00
Matt Nadareski
3b4266246d Update serialization library 2023-09-16 00:50:48 -04:00
Matt Nadareski
a801e720b2 Use wrappers from Serialization (nw) 2023-09-16 00:44:22 -04:00
Matt Nadareski
a52d45f7c2 Use exposed model directly in more places 2023-09-15 22:21:05 -04:00
Matt Nadareski
57eaa1f04c Make wrapper model visible 2023-09-15 14:15:28 -04:00
Matt Nadareski
6cc2cc5be2 Port XZP to new printing 2023-09-15 13:55:56 -04:00
Matt Nadareski
ebcdc08a77 Port WAD to new printing 2023-09-15 12:01:56 -04:00
Matt Nadareski
7aebdf56fc Port VPK to new printing 2023-09-15 11:50:40 -04:00
Matt Nadareski
6de36eb71c Port VBSP to new printing 2023-09-15 11:37:04 -04:00
Matt Nadareski
5ebd392c5b Port SGA to new printing 2023-09-15 11:25:21 -04:00
Matt Nadareski
5fc2029725 Port Quantum to new printing 2023-09-15 02:48:19 -04:00
Matt Nadareski
45e4a01fc1 Port PE to new printing 2023-09-15 02:37:45 -04:00
Matt Nadareski
c4ea7891ea Port PlayJ to new printing 2023-09-15 01:29:29 -04:00
Matt Nadareski
de871fb8c1 Port PFF to new printing 2023-09-15 01:03:48 -04:00
Matt Nadareski
c322eebb98 Port PAK to new printing 2023-09-15 00:54:40 -04:00
Matt Nadareski
ea6b0f1ca3 Port Nitro to new printing 2023-09-15 00:47:44 -04:00
Matt Nadareski
a35a9a4ab6 Add byte array to encoding extension 2023-09-15 00:23:03 -04:00
Matt Nadareski
9a93c7b15d Port NE to new printing 2023-09-15 00:21:12 -04:00
Matt Nadareski
43eed75635 Port NCF to new printing 2023-09-14 23:52:06 -04:00
Matt Nadareski
74ea6e6002 Port N3DS to new printing 2023-09-14 23:36:29 -04:00
Matt Nadareski
91e2157622 Retrofit existing printers to use extensions 2023-09-14 23:25:39 -04:00
Matt Nadareski
87961e5451 Add StringBuilder extensions to be more consistent 2023-09-14 21:21:08 -04:00
Matt Nadareski
4ff203f393 Port MZ to new printing 2023-09-13 22:46:46 -04:00
Matt Nadareski
410b2bef2b Fix one nullability issue 2023-09-13 22:40:00 -04:00
Matt Nadareski
a9792fdff1 Port MS-CAB to new printing 2023-09-13 22:38:01 -04:00
Matt Nadareski
92e36527fd Port LE/LX to new printing 2023-09-13 22:03:11 -04:00
Matt Nadareski
aa1d7d475c Fix build from oversight 2023-09-13 21:28:09 -04:00
Matt Nadareski
91185c4fe1 Port IS-CAB to new printing 2023-09-13 21:21:19 -04:00
Matt Nadareski
c527b1911f Port GCF to new printing 2023-09-13 13:52:54 -04:00
Matt Nadareski
d1501b2e3e Remove .NET 8 entirely (thanks AppVeyor) 2023-09-13 13:17:08 -04:00
Matt Nadareski
03ac117844 Disable .NET 8 for Test 2023-09-13 13:14:11 -04:00
Matt Nadareski
515be8b025 Port CIA to new printing 2023-09-13 13:12:31 -04:00
Matt Nadareski
c3479450f5 Disable AppVeyor restore until .NET 8 support 2023-09-13 12:52:57 -04:00
Matt Nadareski
611aa3229c Port CFB to new printing 2023-09-13 12:49:46 -04:00
Matt Nadareski
5a865f3d08 Fix one nullability issue 2023-09-13 12:35:08 -04:00
Matt Nadareski
9d8d5c23c8 Clean up project files a little 2023-09-13 11:21:06 -04:00
Matt Nadareski
3fed1a3282 Allow building with .NET 8 2023-09-13 10:48:21 -04:00
Matt Nadareski
f47387c3a4 Port BSP to new printing 2023-09-13 10:41:24 -04:00
Matt Nadareski
133272acb7 Flatten some of the printing code 2023-09-13 10:32:02 -04:00
Matt Nadareski
c49ae98df5 Port BFPK to new printing 2023-09-13 01:37:13 -04:00
Matt Nadareski
a1672a9bc0 Port BD+ to new printing 2023-09-13 01:29:50 -04:00
Matt Nadareski
ffc2e23b2f Let's try a new library for this instead 2023-09-13 01:24:42 -04:00
Matt Nadareski
c481e73418 Take it one step further (test) 2023-09-13 01:15:57 -04:00
Matt Nadareski
06b3124b62 Attempt to make most printing static (test) 2023-09-13 01:14:25 -04:00
Matt Nadareski
d7d81665a0 Fix miscellaneous nullability warnings 2023-09-13 00:29:21 -04:00
Matt Nadareski
24c542c22f Fix "converting null literal" warnings 2023-09-13 00:16:27 -04:00
Matt Nadareski
ec616fcdac Fix "dereference" warnings 2023-09-13 00:08:11 -04:00
Matt Nadareski
24e9455733 Fix "possible null reference" warnings 2023-09-12 17:12:23 -04:00
Matt Nadareski
58aaf46a0e Merge branch 'master' of https://github.com/mnadareski/BurnOutSharp 2023-09-12 12:35:41 -04:00
Matt Nadareski
54e92fe9c8 Fix "nullability of reference types" warnings 2023-09-12 12:35:19 -04:00
Matt Nadareski
4bb83c5d86 Don't allow model to be nullable 2023-09-11 23:38:51 -04:00
Matt Nadareski
2f9280460e Make wrappers more type-defined 2023-09-11 23:25:09 -04:00
Matt Nadareski
30e8e79cf7 Fix PE references 2023-09-11 21:08:08 -04:00
Matt Nadareski
7bb0d4f39a Migrate to ASN1 package 2023-09-11 20:59:11 -04:00
TheRogueArchivist
046814b7c4 Add new EasyAntiCheat file detections (#265)
* Add new EasyAntiCheat file detections

* Add additional EasyAntiCheat file detections
2023-09-11 07:42:54 -07:00
Matt Nadareski
cf00348d46 Update Serialization to 1.1.1 2023-09-11 01:21:38 -04:00
Matt Nadareski
8466edf80f Migrate to Serialization package 2023-09-10 23:51:38 -04:00
Matt Nadareski
ccdf539ed4 Merge branch 'master' of https://github.com/mnadareski/BurnOutSharp 2023-09-10 23:26:54 -04:00
Matt Nadareski
aeee6e9cda Fix build from package update 2023-09-10 23:26:32 -04:00
TheRogueArchivist
2af21cb245 Add DRML link to engine32 (#264) 2023-09-10 20:21:27 -07:00
Matt Nadareski
96fb5a2f93 Update Models to 1.1.1 2023-09-10 21:56:14 -04:00
Matt Nadareski
1eaefb16ba Use IO package for array and stream extensions 2023-09-08 16:33:06 -04:00
Matt Nadareski
1c972a29a7 Migrate INI reading to Nuget 2023-09-08 12:19:31 -04:00
Matt Nadareski
aa33c083fe Fix package reference for Models 2023-09-08 10:36:45 -04:00
Matt Nadareski
a9454e96ed Migrate to Nuget package for Models 2023-09-04 23:44:45 -04:00
Matt Nadareski
fb6fa85cd3 Merge branch 'master' of https://github.com/mnadareski/BurnOutSharp 2023-09-04 22:58:25 -04:00
TheRogueArchivist
3c12bdc212 Add Engine32 detection (#263)
* Add Engine32 detection

* Address PR comment
2023-08-28 12:09:25 -07:00
Matt Nadareski
5eeee760f7 Remove unnecessary param summaries 2023-08-26 22:53:35 -04:00
Matt Nadareski
cfe889d5b3 Add and use FilePathMatch (fixes #262) 2023-08-26 22:51:55 -04:00
TheRogueArchivist
3045c41eda Fix Uplay false positive (#261)
Fix a false positive that resulted in "yuPlay.exe" being detected as Uplay.
2023-08-26 19:02:26 -07:00
TheRogueArchivist
d194ef9dd8 Add new SafeCast version (#260) 2023-07-19 10:30:48 -07:00
716 changed files with 10084 additions and 91807 deletions

.gitmodules (vendored): 10 changed lines

@@ -1,3 +1,9 @@
-[submodule "BinaryObjectScanner.Compression/_EXTERNAL/stormlibsharp"]
-	path = BinaryObjectScanner.Compression/_EXTERNAL/stormlibsharp
+[submodule "BinaryObjectScanner/_EXTERNAL/stormlibsharp"]
+	path = BinaryObjectScanner/_EXTERNAL/stormlibsharp
 	url = https://github.com/robpaveza/stormlibsharp.git
+[submodule "BinaryObjectScanner/_EXTERNAL/libmspack4n"]
+	path = BinaryObjectScanner/_EXTERNAL/libmspack4n
+	url = https://github.com/activescott/libmspack4n.git
+[submodule "BinaryObjectScanner/_EXTERNAL/LessIO"]
+	path = BinaryObjectScanner/_EXTERNAL/LessIO
+	url = https://github.com/activescott/LessIO.git

.vscode/settings.json (vendored, new file): 3 changed lines

@@ -0,0 +1,3 @@
+{
+  "dotnet.defaultSolution": "BinaryObjectScanner.sln"
+}

.vscode/tasks.json (vendored): 6 changed lines

@@ -7,7 +7,7 @@
"type": "process",
"args": [
"build",
"${workspaceFolder}/BurnOutSharp.sln",
"${workspaceFolder}/BinaryObjectScanner.sln",
"/property:GenerateFullPaths=true",
"/consoleloggerparameters:NoSummary"
],
@@ -19,7 +19,7 @@
"type": "process",
"args": [
"publish",
"${workspaceFolder}/BurnOutSharp.sln",
"${workspaceFolder}/BinaryObjectScanner.sln",
"/property:GenerateFullPaths=true",
"/consoleloggerparameters:NoSummary"
],
@@ -32,7 +32,7 @@
"args": [
"watch",
"run",
"${workspaceFolder}/BurnOutSharp.sln",
"${workspaceFolder}/BinaryObjectScanner.sln",
"/property:GenerateFullPaths=true",
"/consoleloggerparameters:NoSummary"
],


@@ -1,31 +0,0 @@
using System.Collections.Generic;
namespace BinaryObjectScanner.ASN1
{
/// <summary>
/// ASN.1 Parser
/// </summary>
public static class AbstractSyntaxNotationOne
{
/// <summary>
/// Parse a byte array into a DER-encoded ASN.1 structure
/// </summary>
/// <param name="data">Byte array representing the data</param>
/// <param name="pointer">Current pointer into the data</param>
/// <returns></returns>
public static List<TypeLengthValue> Parse(byte[] data, int pointer)
{
// Create the output list to return
var topLevelValues = new List<TypeLengthValue>();
// Loop through the data and return all top-level values
while (pointer < data.Length)
{
var topLevelValue = new TypeLengthValue(data, ref pointer);
topLevelValues.Add(topLevelValue);
}
return topLevelValues;
}
}
}
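
For orientation, the deleted parser above was driven roughly as in the following minimal C# sketch; the driver loop and input file name are illustrative, not code from this repository.

using System;
using System.IO;
using BinaryObjectScanner.ASN1;

// Hypothetical driver: parse a DER-encoded blob and print each top-level TLV.
byte[] der = File.ReadAllBytes("signature.der"); // illustrative input
var topLevelValues = AbstractSyntaxNotationOne.Parse(der, pointer: 0);
foreach (var tlv in topLevelValues)
    Console.WriteLine(tlv.Format(paddingLevel: 0));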


@@ -1,27 +0,0 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFrameworks>net48;net6.0;net7.0</TargetFrameworks>
<RuntimeIdentifiers>win-x86;win-x64;linux-x64;osx-x64</RuntimeIdentifiers>
<Title>BinaryObjectScanner.ASN1</Title>
<AssemblyName>BinaryObjectScanner.ASN1</AssemblyName>
<Authors>Matt Nadareski</Authors>
<Product>BurnOutSharp</Product>
<Copyright>Copyright (c)2022 Matt Nadareski</Copyright>
<RepositoryUrl>https://github.com/mnadareski/BurnOutSharp</RepositoryUrl>
<Version>2.8</Version>
<AssemblyVersion>2.8</AssemblyVersion>
<FileVersion>2.8</FileVersion>
<IncludeSource>true</IncludeSource>
<IncludeSymbols>true</IncludeSymbols>
</PropertyGroup>
<PropertyGroup>
<AllowUnsafeBlocks>true</AllowUnsafeBlocks>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\BinaryObjectScanner.Utilities\BinaryObjectScanner.Utilities.csproj" />
</ItemGroup>
</Project>


@@ -1,55 +0,0 @@
using System;
namespace BinaryObjectScanner.ASN1
{
/// <summary>
/// ASN.1 type indicators
/// </summary>
[Flags]
public enum ASN1Type : byte
{
#region Modifiers
V_ASN1_UNIVERSAL = 0x00,
V_ASN1_PRIMITIVE_TAG = 0x1F,
V_ASN1_CONSTRUCTED = 0x20,
V_ASN1_APPLICATION = 0x40,
V_ASN1_CONTEXT_SPECIFIC = 0x80,
V_ASN1_PRIVATE = 0xC0,
#endregion
#region Types
V_ASN1_EOC = 0x00,
V_ASN1_BOOLEAN = 0x01,
V_ASN1_INTEGER = 0x02,
V_ASN1_BIT_STRING = 0x03,
V_ASN1_OCTET_STRING = 0x04,
V_ASN1_NULL = 0x05,
V_ASN1_OBJECT = 0x06,
V_ASN1_OBJECT_DESCRIPTOR = 0x07,
V_ASN1_EXTERNAL = 0x08,
V_ASN1_REAL = 0x09,
V_ASN1_ENUMERATED = 0x0A,
V_ASN1_UTF8STRING = 0x0C,
V_ASN1_SEQUENCE = 0x10,
V_ASN1_SET = 0x11,
V_ASN1_NUMERICSTRING = 0x12,
V_ASN1_PRINTABLESTRING = 0x13,
V_ASN1_T61STRING = 0x14,
V_ASN1_TELETEXSTRING = 0x14,
V_ASN1_VIDEOTEXSTRING = 0x15,
V_ASN1_IA5STRING = 0x16,
V_ASN1_UTCTIME = 0x17,
V_ASN1_GENERALIZEDTIME = 0x18,
V_ASN1_GRAPHICSTRING = 0x19,
V_ASN1_ISO64STRING = 0x1A,
V_ASN1_VISIBLESTRING = 0x1A,
V_ASN1_GENERALSTRING = 0x1B,
V_ASN1_UNIVERSALSTRING = 0x1C,
V_ASN1_BMPSTRING = 0x1E,
#endregion
}
}
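
Because the enum above is a flags type, a raw DER tag byte splits into modifier bits and a primitive tag. A small sketch of that decomposition (the surrounding statements are illustrative; 0x30 is the standard tag byte for a constructed SEQUENCE):

using System;
using BinaryObjectScanner.ASN1;

byte tag = 0x30; // constructed modifier OR'd with the SEQUENCE type
var type = (ASN1Type)tag;
bool constructed = type.HasFlag(ASN1Type.V_ASN1_CONSTRUCTED); // true
var primitiveTag = type & ASN1Type.V_ASN1_PRIMITIVE_TAG;      // V_ASN1_SEQUENCE (0x10)
Console.WriteLine($"constructed={constructed}, tag={primitiveTag}");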


@@ -1,25 +0,0 @@
namespace BinaryObjectScanner.ASN1
{
#pragma warning disable IDE0011
/// <summary>
/// Methods related to Object Identifiers (OID) and ASN.1 notation
/// </summary>
public static partial class ObjectIdentifier
{
/// <summary>
/// Parse an OID in separated-value notation into ASN.1 notation
/// </summary>
/// <param name="values">List of values to check against</param>
/// <param name="index">Current index into the list</param>
/// <returns>ASN.1 formatted string, if possible</returns>
/// <remarks>
public static string ParseOIDToASN1Notation(ulong[] values, ref int index)
{
// TODO: Once the modified OID-IRI formatting is done, make an ASN.1 notation version
return null;
}
}
#pragma warning restore IDE0011
}

File diff suppressed because it is too large.


@@ -1,870 +0,0 @@
using System.Linq;
using System.Text;
namespace BinaryObjectScanner.ASN1
{
#pragma warning disable IDE0011
/// <summary>
/// Methods related to Object Identifiers (OID) and OID-IRI formatting
/// </summary>
public static partial class ObjectIdentifier
{
/// <summary>
/// Parse an OID in separated-value notation into OID-IRI notation
/// </summary>
/// <param name="values">List of values to check against</param>
/// <param name="index">Current index into the list</param>
/// <returns>OID-IRI formatted string, if possible</returns>
/// <see href="http://www.oid-info.com/index.htm"/>
public static string ParseOIDToOIDIRINotation(ulong[] values)
{
// If we have an invalid set of values, we can't do anything
if (values == null || values.Length == 0)
return null;
// Set the initial index
int index = 0;
// Get a string builder for the path
var nameBuilder = new StringBuilder();
// Try to parse the standard value
string standard = ParseOIDToOIDIRINotation(values, ref index);
if (standard == null)
return null;
// Add the standard value to the output
nameBuilder.Append(standard);
// If we have no more items
if (index == values.Length)
return nameBuilder.ToString();
// Add trailing items as just values
nameBuilder.Append("/");
nameBuilder.Append(string.Join("/", values.Skip(index)));
// Create and return the string
return nameBuilder.ToString();
}
/// <summary>
/// Parse an OID in separated-value notation into OID-IRI notation
/// </summary>
/// <param name="values">List of values to check against</param>
/// <param name="index">Current index into the list</param>
/// <returns>OID-IRI formatted string, if possible</returns>
/// <see href="http://www.oid-info.com/index.htm"/>
private static string ParseOIDToOIDIRINotation(ulong[] values, ref int index)
{
// If we have an invalid set of values, we can't do anything
if (values == null || values.Length == 0)
return null;
// If we have an invalid index, we can't do anything
if (index < 0 || index >= values.Length)
return null;
#region Start
switch (values[index++])
{
case 0: goto oid_0;
case 1: goto oid_1;
case 2: goto oid_2;
default: return $"/{values[index - 1]}";
}
#endregion
// itu-t, ccitt, itu-r
#region 0.*
oid_0:
if (index == values.Length) return "/ITU-T";
switch (values[index++])
{
case 0: goto oid_0_0;
case 2: return "/ITU-T/Administration";
case 3: return "/ITU-T/Network-Operator";
case 4: return "/ITU-T/Identified-Organization";
case 5: return "/ITU-R/R-Recommendation";
case 9: return "/ITU-T/Data";
default: return $"/ITU-T/{values[index - 1]}";
};
// recommendation
#region 0.0.*
oid_0_0:
if (index == values.Length) return "/ITU-T/Recommendation";
switch (values[index++])
{
case 1: return "/ITU-T/Recommendation/A";
case 2: return "/ITU-T/Recommendation/B";
case 3: return "/ITU-T/Recommendation/C";
case 4: return "/ITU-T/Recommendation/D";
case 5: return "/ITU-T/Recommendation/E";
case 6: return "/ITU-T/Recommendation/F";
case 7: return "/ITU-T/Recommendation/G";
case 8: return "/ITU-T/Recommendation/H";
case 9: return "/ITU-T/Recommendation/I";
case 10: return "/ITU-T/Recommendation/J";
case 11: return "/ITU-T/Recommendation/K";
case 12: return "/ITU-T/Recommendation/L";
case 13: return "/ITU-T/Recommendation/M";
case 14: return "/ITU-T/Recommendation/N";
case 15: return "/ITU-T/Recommendation/O";
case 16: return "/ITU-T/Recommendation/P";
case 17: return "/ITU-T/Recommendation/Q";
case 18: return "/ITU-T/Recommendation/R";
case 19: return "/ITU-T/Recommendation/S";
case 20: return "/ITU-T/Recommendation/T";
case 21: return "/ITU-T/Recommendation/U";
case 22: return "/ITU-T/Recommendation/V";
case 24: return "/ITU-T/Recommendation/X";
case 25: return "/ITU-T/Recommendation/Y";
case 26: return "/ITU-T/Recommendation/Z";
default: return $"/ITU-T/Recommendation/{values[index - 1]}";
}
#endregion
#endregion
// iso
#region 1.*
oid_1:
if (index == values.Length) return "/ISO";
switch (values[index++])
{
case 0: return "/ISO/Standard";
case 1: return "/ISO/Registration-Authority";
case 2: goto oid_1_2;
case 3: return "/ISO/Identified-Organization";
default: return $"/ISO/{values[index - 1]}";
}
// member-body
#region 1.2.*
oid_1_2:
if (index == values.Length) return "/ISO/Member-Body";
switch (values[index++])
{
case 36: return "/ISO/Member-Body/AU";
case 40: return "/ISO/Member-Body/AT";
case 56: return "/ISO/Member-Body/BE";
case 124: return "/ISO/Member-Body/CA";
case 156: return "/ISO/Member-Body/CN";
case 203: return "/ISO/Member-Body/CZ";
case 208: return "/ISO/Member-Body/DK";
case 246: return "/ISO/Member-Body/FI";
case 250: return "/ISO/Member-Body/FR";
case 276: return "/ISO/Member-Body/DE";
case 300: return "/ISO/Member-Body/GR";
case 344: return "/ISO/Member-Body/HK";
case 372: return "/ISO/Member-Body/IE";
case 392: return "/ISO/Member-Body/JP";
case 398: return "/ISO/Member-Body/KZ";
case 410: return "/ISO/Member-Body/KR";
case 498: return "/ISO/Member-Body/MD";
case 528: return "/ISO/Member-Body/NL";
case 566: return "/ISO/Member-Body/NG";
case 578: return "/ISO/Member-Body/NO";
case 616: return "/ISO/Member-Body/PL";
case 643: return "/ISO/Member-Body/RU";
case 702: return "/ISO/Member-Body/SG";
case 752: return "/ISO/Member-Body/SE";
case 804: return "/ISO/Member-Body/UA";
case 826: return "/ISO/Member-Body/GB";
case 840: return "/ISO/Member-Body/US";
default: return $"/ISO/Member-Body/{values[index - 1]}";
}
#endregion
#endregion
// joint-iso-itu-t, joint-iso-ccitt
#region 2.*
oid_2:
if (index == values.Length) return "/Joint-ISO-ITU-T";
switch (values[index++])
{
case 1: return "/ASN.1";
case 16: goto oid_2_16;
case 17: return "/Joint-ISO-ITU-T/Registration-Procedures";
case 23: return "/Joint-ISO-ITU-T/International-Organizations";
case 25: goto oid_2_25;
case 27: return "/Tag-Based";
case 28: return "/Joint-ISO-ITU-T/ITS";
case 41: return "/BIP";
case 42: goto oid_2_42;
case 48: goto oid_2_48;
case 49: goto oid_2_49;
case 50: return "/OIDResolutionSystem";
case 51: return "/GS1";
case 52: return "/Joint-ISO-ITU-T/UAV";
case 999: return "/Joint-ISO-ITU-T/Example";
default: return $"/Joint-ISO-ITU-T/{values[index - 1]}";
}
// country
#region 2.16.*
oid_2_16:
if (index == values.Length) return "/Country";
switch (values[index++])
{
case 4: return "/Country/AF";
case 8: return "/Country/AL";
case 12: return "/Country/DZ";
case 20: return "/Country/AD";
case 24: return "/Country/AO";
case 28: return "/Country/AG";
case 31: return "/Country/AZ";
case 32: return "/Country/AR";
case 36: return "/Country/AU";
case 40: return "/Country/AT";
case 44: return "/Country/BS";
case 48: return "/Country/BH";
case 50: return "/Country/BD";
case 51: return "/Country/AM";
case 52: return "/Country/BB";
case 56: return "/Country/BE";
case 60: return "/Country/BM";
case 64: return "/Country/BT";
case 68: return "/Country/BO";
case 70: return "/Country/BA";
case 72: return "/Country/BW";
case 76: return "/Country/BR";
case 84: return "/Country/BZ";
case 90: return "/Country/SB";
case 96: return "/Country/BN";
case 100: return "/Country/BG";
case 104: return "/Country/MM";
case 108: return "/Country/BI";
case 112: return "/Country/BY";
case 116: return "/Country/KH";
case 120: return "/Country/CM";
case 124: return "/Country/CA";
case 132: return "/Country/CV";
case 140: return "/Country/CF";
case 144: return "/Country/LK";
case 148: return "/Country/TD";
case 152: return "/Country/CL";
case 156: return "/Country/CN";
case 158: return "/Country/TW";
case 170: return "/Country/CO";
case 174: return "/Country/KM";
case 178: return "/Country/CG";
case 180: return "/Country/CD";
case 188: return "/Country/CR";
case 191: return "/Country/HR";
case 192: return "/Country/CU";
case 196: return "/Country/CY";
case 203: return "/Country/CZ";
case 204: return "/Country/BJ";
case 208: return "/Country/DK";
case 212: return "/Country/DM";
case 214: return "/Country/DO";
case 218: return "/Country/EC";
case 222: return "/Country/SV";
case 226: return "/Country/GQ";
case 231: return "/Country/ET";
case 232: return "/Country/ER";
case 233: return "/Country/EE";
case 242: return "/Country/FJ";
case 246: return "/Country/FI";
case 250: return "/Country/FR";
case 262: return "/Country/DJ";
case 266: return "/Country/GA";
case 268: return "/Country/GE";
case 270: return "/Country/GM";
case 275: return "/Country/PS";
case 276: return "/Country/DE";
case 288: return "/Country/GH";
case 296: return "/Country/KI";
case 300: return "/Country/GR";
case 308: return "/Country/GD";
case 320: return "/Country/GT";
case 324: return "/Country/GN";
case 328: return "/Country/GY";
case 332: return "/Country/HT";
case 336: return "/Country/VA";
case 340: return "/Country/HN";
case 344: return "/Country/HK";
case 348: return "/Country/HU";
case 352: return "/Country/IS";
case 356: return "/Country/IN";
case 360: return "/Country/ID";
case 364: return "/Country/IR";
case 368: return "/Country/IQ";
case 372: return "/Country/IE";
case 376: return "/Country/IL";
case 380: return "/Country/IT";
case 384: return "/Country/CI";
case 388: return "/Country/JM";
case 392: return "/Country/JP";
case 398: return "/Country/KZ";
case 400: return "/Country/JO";
case 404: return "/Country/KE";
case 408: return "/Country/KP";
case 410: return "/Country/KR";
case 414: return "/Country/KW";
case 417: return "/Country/KG";
case 418: return "/Country/LA";
case 422: return "/Country/LB";
case 426: return "/Country/LS";
case 428: return "/Country/LV";
case 430: return "/Country/LR";
case 434: return "/Country/LY";
case 438: return "/Country/LI";
case 440: return "/Country/LT";
case 442: return "/Country/LU";
case 450: return "/Country/MG";
case 454: return "/Country/MW";
case 458: return "/Country/MY";
case 462: return "/Country/MV";
case 466: return "/Country/ML";
case 470: return "/Country/MT";
case 478: return "/Country/MR";
case 480: return "/Country/MU";
case 484: return "/Country/MX";
case 492: return "/Country/MC";
case 496: return "/Country/MN";
case 498: return "/Country/MD";
case 499: return "/Country/ME";
case 504: return "/Country/MA";
case 508: return "/Country/MZ";
case 512: return "/Country/OM";
case 516: return "/Country/NA";
case 520: return "/Country/NR";
case 524: return "/Country/NP";
case 528: return "/Country/NL";
case 530: return "/Country/AN";
case 548: return "/Country/VU";
case 554: return "/Country/NZ";
case 558: return "/Country/NI";
case 562: return "/Country/NE";
case 566: return "/Country/NG";
case 578: return "/Country/NO";
case 583: return "/Country/FM";
case 584: return "/Country/MH";
case 585: return "/Country/PW";
case 586: return "/Country/PK";
case 591: return "/Country/PA";
case 598: return "/Country/PG";
case 600: return "/Country/PY";
case 604: return "/Country/PE";
case 608: return "/Country/PH";
case 616: return "/Country/PL";
case 620: return "/Country/PT";
case 624: return "/Country/GW";
case 626: return "/Country/TL";
case 634: return "/Country/QA";
case 642: return "/Country/RO";
case 643: return "/Country/RU";
case 646: return "/Country/RW";
case 659: return "/Country/KN";
case 662: return "/Country/LC";
case 670: return "/Country/VC";
case 674: return "/Country/SM";
case 678: return "/Country/ST";
case 682: return "/Country/SA";
case 686: return "/Country/SN";
case 688: return "/Country/RS";
case 690: return "/Country/SC";
case 694: return "/Country/SL";
case 702: return "/Country/SG";
case 703: return "/Country/SK";
case 704: return "/Country/VN";
case 705: return "/Country/SI";
case 706: return "/Country/SO";
case 710: return "/Country/ZA";
case 716: return "/Country/ZW";
case 724: return "/Country/ES";
case 728: return "/Country/SS";
case 729: return "/Country/SD";
case 740: return "/Country/SR";
case 748: return "/Country/SZ";
case 752: return "/Country/SE";
case 756: return "/Country/CH";
case 760: return "/Country/SY";
case 762: return "/Country/TJ";
case 764: return "/Country/TH";
case 768: return "/Country/TG";
case 776: return "/Country/TO";
case 780: return "/Country/TT";
case 784: return "/Country/AE";
case 788: return "/Country/TN";
case 792: return "/Country/TR";
case 795: return "/Country/TM";
case 798: return "/Country/TV";
case 800: return "/Country/UG";
case 804: return "/Country/UA";
case 807: return "/Country/MK";
case 818: return "/Country/EG";
case 826: return "/Country/GB";
case 834: return "/Country/TZ";
case 840: return "/Country/US";
case 854: return "/Country/BF";
case 858: return "/Country/UY";
case 860: return "/Country/UZ";
case 862: return "/Country/VE";
case 882: return "/Country/WS";
case 887: return "/Country/YE";
case 894: return "/Country/ZM";
default: return $"/Country/{values[index - 1]}";
}
#endregion
// uuid [TODO: Requires 128-bit values]
#region 2.25.*
oid_2_25:
if (index == values.Length) return "/Joint-ISO-ITU-T/UUID";
switch (values[index++])
{
case 0: return "/Joint-ISO-ITU-T/UUID/00000000-0000-0000-0000-000000000000";
//case 288786655511405443130567505384701230: return "/Joint-ISO-ITU-T/UUID/00379e48-0a2b-1085-b288-0002a5d5fd2e";
//case 987895962269883002155146617097157934: return "/Joint-ISO-ITU-T/UUID/00be4308-0c89-1085-8ea0-0002a5d5fd2e";
//case 1858228783942312576083372383319475483: return "/Joint-ISO-ITU-T/UUID/0165e1c0-a655-11e0-95b8-0002a5d5c51b";
//case 2474299330026746002885628159579243803: return "/Joint-ISO-ITU-T/UUID/01dc8860-25fb-11da-82b2-0002a5d5c51b";
//case 3263645701162998421821186056373271854: return "/Joint-ISO-ITU-T/UUID/02748e28-08c4-1085-b21d-0002a5d5fd2e";
//case 3325839809379844461264382260940242222: return "/Joint-ISO-ITU-T/UUID/02808890-0ad8-1085-9bdf-0002a5d5fd2e";
// TODO: Left off at http://www.oid-info.com/get/2.25.3664154270495270126161055518190585115
default: return $"/Joint-ISO-ITU-T/UUID/{values[index - 1]}";
}
#endregion
// telebiometrics
#region 2.42.*
oid_2_42:
if (index == values.Length) return "/Telebiometrics";
switch (values[index++])
{
case 0: goto oid_2_42_0;
case 1: goto oid_2_42_1;
case 2: goto oid_2_42_2;
case 3: goto oid_2_42_3;
default: return $"/Telebiometrics/{values[index - 1]}";
}
// modules
#region 2.42.0.*
oid_2_42_0:
if (index == values.Length) return "/Telebiometrics/Modules";
switch (values[index++])
{
case 0: goto oid_2_42_0_0;
default: return $"/Telebiometrics/Modules/{values[index - 1]}";
}
// main
#region 2.42.0.0.*
oid_2_42_0_0:
if (index == values.Length) return "/Telebiometrics/Modules/Main_Module";
switch (values[index++])
{
case 1: return "/Telebiometrics/Modules/Main_Module/Version1";
default: return $"/Telebiometrics/Modules/Main_Module/{values[index - 1]}";
}
#endregion
#endregion
// tmm
#region 2.42.1.*
oid_2_42_1:
if (index == values.Length) return "/Telebiometrics/TMM";
switch (values[index++])
{
case 0: goto oid_2_42_1_0;
case 1: goto oid_2_42_1_1;
case 2: goto oid_2_42_1_2;
case 3: goto oid_2_42_1_3;
case 4: return "/Telebiometrics/TMM/Practitioners";
default: return $"/Telebiometrics/TMM/{values[index - 1]}";
}
// modules
#region 2.42.1.0.*
oid_2_42_1_0:
if (index == values.Length) return "/Telebiometrics/TMM/Modules";
switch (values[index++])
{
case 0: goto oid_2_42_1_0_0;
default: return $"/Telebiometrics/TMM/Modules/{values[index - 1]}";
}
// main
#region 2.42.1.0.0.*
oid_2_42_1_0_0:
if (index == values.Length) return "/Telebiometrics/TMM/Modules/Main";
switch (values[index++])
{
case 0: return "/Telebiometrics/TMM/Modules/Main/First_Version";
default: return $"/Telebiometrics/TMM/Modules/Main/{values[index - 1]}";
}
#endregion
#endregion
// measures, metric
#region 2.42.1.1.*
oid_2_42_1_1:
if (index == values.Length) return "/Telebiometrics/TMM/Measures";
switch (values[index++])
{
case 1: goto oid_2_42_1_1_1;
case 2: return "/Telebiometrics/TMM/Measures/Units";
case 3: return "/Telebiometrics/TMM/Measures/Symbols";
case 4: return "/Telebiometrics/TMM/Measures/Conditions";
case 5: goto oid_2_42_1_1_5;
default: return $"/Telebiometrics/TMM/Measures/{values[index - 1]}";
}
// quantities
#region 2.42.1.1.1.*
oid_2_42_1_1_1:
if (index == values.Length) return "/Telebiometrics/TMM/Measures/Quantities";
switch (values[index++])
{
case 1: return "/Telebiometrics/TMM/Measures/Quantities/Physics";
case 2: return "/Telebiometrics/TMM/Measures/Quantities/Chemistry";
case 3: return "/Telebiometrics/TMM/Measures/Quantities/Biology";
case 4: return "/Telebiometrics/TMM/Measures/Quantities/Culturology";
case 5: return "/Telebiometrics/TMM/Measures/Quantities/Psychology";
default: return $"/Telebiometrics/TMM/Measures/Quantities/{values[index - 1]}";
}
#endregion
// methods
#region 2.42.1.1.5.*
oid_2_42_1_1_5:
if (index == values.Length) return "/Telebiometrics/TMM/Measures/Methods";
switch (values[index++])
{
case 1: return "/Telebiometrics/TMM/Measures/Methods/Physics";
case 2: return "/Telebiometrics/TMM/Measures/Methods/Chemistry";
case 3: return "/Telebiometrics/TMM/Measures/Methods/Biology";
case 4: return "/Telebiometrics/TMM/Measures/Methods/Culturology";
case 5: return "/Telebiometrics/TMM/Measures/Methods/Psychology";
default: return $"/Telebiometrics/TMM/Measures/Methods/{values[index - 1]}";
}
#endregion
#endregion
// fields-of-study, scientific
#region 2.42.1.2.*
oid_2_42_1_2:
if (index == values.Length) return "/Telebiometrics/TMM/Fields_of_Study";
switch (values[index++])
{
case 1: return "/Telebiometrics/TMM/Fields_of_Study/Physics";
case 2: return "/Telebiometrics/TMM/Fields_of_Study/Chemistry";
case 3: return "/Telebiometrics/TMM/Fields_of_Study/Biology";
case 4: return "/Telebiometrics/TMM/Fields_of_Study/Culturology";
case 5: return "/Telebiometrics/TMM/Fields_of_Study/Psychology";
default: return $"/Telebiometrics/TMM/Fields_of_Study/{values[index - 1]}";
}
#endregion
// modalities, sensory
#region 2.42.1.3.*
oid_2_42_1_3:
if (index == values.Length) return "/Telebiometrics/TMM/Modalities";
switch (values[index++])
{
case 1: return "/Telebiometrics/TMM/Modalities/Tango";
case 2: return "/Telebiometrics/TMM/Modalities/Video";
case 3: return "/Telebiometrics/TMM/Modalities/Audio";
case 4: return "/Telebiometrics/TMM/Modalities/Chemo";
case 5: return "/Telebiometrics/TMM/Modalities/Radio";
case 6: return "/Telebiometrics/TMM/Modalities/Calor";
case 7: return "/Telebiometrics/TMM/Modalities/Electro";
default: return $"/Telebiometrics/TMM/Modalities/{values[index - 1]}";
}
#endregion
#endregion
// human-physiology
#region 2.42.2.*
oid_2_42_2:
if (index == values.Length) return "/Telebiometrics/Human_Physiology";
switch (values[index++])
{
case 0: goto oid_2_42_2_0;
case 1: goto oid_2_42_2_1;
case 2: return "/Telebiometrics/Human_Physiology/Symbol_Combinations";
default: return $"/Telebiometrics/Human_Physiology/{values[index - 1]}";
}
// modules
#region 2.42.2.0.*
oid_2_42_2_0:
if (index == values.Length) return "/Telebiometrics/Human_Physiology/Modules";
switch (values[index++])
{
case 0: goto oid_2_42_2_0_0;
default: return $"/Telebiometrics/Human_Physiology/Modules/{values[index - 1]}";
}
// main
#region 2.42.2.0.0.*
oid_2_42_2_0_0:
if (index == values.Length) return "/Telebiometrics/Human_Physiology/Modules/Main_Module";
switch (values[index++])
{
case 0: return "/Telebiometrics/Human_Physiology/Modules/Main_Module/First_Version";
default: return $"/Telebiometrics/Human_Physiology/Modules/Main_Module/{values[index - 1]}";
}
#endregion
#endregion
// symbols
#region 2.42.2.1.*
oid_2_42_2_1:
if (index == values.Length) return "/Telebiometrics/Human_Physiology/Symbols";
switch (values[index++])
{
case 1: return "/Telebiometrics/Human_Physiology/Symbols/Tango_in";
case 2: return "/Telebiometrics/Human_Physiology/Symbols/Video_in";
case 3: return "/Telebiometrics/Human_Physiology/Symbols/Audio_in";
case 4: return "/Telebiometrics/Human_Physiology/Symbols/Chemo_in";
case 5: return "/Telebiometrics/Human_Physiology/Symbols/Radio_in";
case 6: return "/Telebiometrics/Human_Physiology/Symbols/Calor_in";
case 7: return "/Telebiometrics/Human_Physiology/Symbols/Tango_out";
case 8: return "/Telebiometrics/Human_Physiology/Symbols/Video_out";
case 9: return "/Telebiometrics/Human_Physiology/Symbols/Audio_out";
case 10: return "/Telebiometrics/Human_Physiology/Symbols/Chemo_out";
case 11: return "/Telebiometrics/Human_Physiology/Symbols/Radio_out";
case 12: return "/Telebiometrics/Human_Physiology/Symbols/Calor_out";
case 13: return "/Telebiometrics/Human_Physiology/Symbols/Safe";
case 14: return "/Telebiometrics/Human_Physiology/Symbols/Threshold";
default: return $"/Telebiometrics/Human_Physiology/Symbols/{values[index - 1]}";
}
#endregion
#endregion
// obj-cat, telehealth, e-health-protocol, th
#region 2.42.3.*
oid_2_42_3:
if (index == values.Length) return "/Telebiometrics/E_Health_Protocol";
switch (values[index++])
{
case 0: goto oid_2_42_3_0;
case 1: return "/Telebiometrics/E_Health_Protocol/[Patient schemes]";
case 2: return "/Telebiometrics/E_Health_Protocol/[Medical staff schemes]";
case 3: return "/Telebiometrics/E_Health_Protocol/[Observer schemes]";
case 4: return "/Telebiometrics/E_Health_Protocol/[Pharmaceutical schemes]";
case 5: return "/Telebiometrics/E_Health_Protocol/[Laboratory schemes]";
case 6: return "/Telebiometrics/E_Health_Protocol/[Drug manufacturer schemes]";
case 7: return "/Telebiometrics/E_Health_Protocol/[Medical device schemes]";
case 8: return "/Telebiometrics/E_Health_Protocol/[Medical software schemes]";
case 9: return "/Telebiometrics/E_Health_Protocol/[Medical insurance schemes]";
case 10: return "/Telebiometrics/E_Health_Protocol/[Medical record schemes]";
default: return $"/Telebiometrics/E_Health_Protocol/{values[index - 1]}";
}
// obj-cat, telehealth, e-health-protocol, th
#region 2.42.3.0.*
oid_2_42_3_0:
if (index == values.Length) return "/Telebiometrics/E_Health_Protocol/Modules";
switch (values[index++])
{
case 0: goto oid_2_42_3_0_0;
case 1: goto oid_2_42_3_0_1;
case 2: goto oid_2_42_3_0_2;
case 3: goto oid_2_42_3_0_3;
case 4: goto oid_2_42_3_0_4;
case 5: goto oid_2_42_3_0_5;
default: return $"/Telebiometrics/E_Health_Protocol/Modules/{values[index - 1]}";
}
// identification
#region 2.42.3.0.0.*
oid_2_42_3_0_0:
if (index == values.Length) return "/Telebiometrics/E_Health_Protocol/Modules/Identification";
switch (values[index++])
{
case 1: return "/Telebiometrics/E_Health_Protocol/Modules/Identification/Version1";
default: return $"/Telebiometrics/E_Health_Protocol/Modules/Identification/{values[index - 1]}";
}
#endregion
// set-up
#region 2.42.3.0.1.*
oid_2_42_3_0_1:
if (index == values.Length) return "/Telebiometrics/E_Health_Protocol/Modules/Setup";
switch (values[index++])
{
case 1: return "/Telebiometrics/E_Health_Protocol/Modules/Setup/Version1";
default: return $"/Telebiometrics/E_Health_Protocol/Modules/Setup/{values[index - 1]}";
}
#endregion
// send-and-ack
#region 2.42.3.0.2.*
oid_2_42_3_0_2:
if (index == values.Length) return "/Telebiometrics/E_Health_Protocol/Modules/Send-and-ack";
switch (values[index++])
{
case 1: return "/Telebiometrics/E_Health_Protocol/Modules/Send-and-ack/Version1";
default: return $"/Telebiometrics/E_Health_Protocol/Modules/Send-and-ack/{values[index - 1]}";
}
#endregion
// command-response
#region 2.42.3.0.3.*
oid_2_42_3_0_3:
if (index == values.Length) return "/Telebiometrics/E_Health_Protocol/Modules/Command-response";
switch (values[index++])
{
case 1: return "/Telebiometrics/E_Health_Protocol/Modules/Command-response/Version1";
default: return $"/Telebiometrics/E_Health_Protocol/Modules/Command-response/{values[index - 1]}";
}
#endregion
// quantity-and-units
#region 2.42.3.0.4.*
oid_2_42_3_0_4:
if (index == values.Length) return "/Telebiometrics/E_Health_Protocol/Modules/Quantities_And_Units";
switch (values[index++])
{
case 1: return "/Telebiometrics/E_Health_Protocol/Modules/Quantities_And_Units/Version1";
default: return $"/Telebiometrics/E_Health_Protocol/Modules/Quantities_And_Units/{values[index - 1]}";
}
#endregion
// examples
#region 2.42.3.0.5.*
oid_2_42_3_0_5:
if (index == values.Length) return "/Telebiometrics/E_Health_Protocol/Modules/Examples";
switch (values[index++])
{
case 0: return "/Telebiometrics/E_Health_Protocol/Modules/Examples/Command_Response";
case 1: return "/Telebiometrics/E_Health_Protocol/Modules/Examples/Data_Message";
default: return $"/Telebiometrics/E_Health_Protocol/Modules/Examples/{values[index - 1]}";
}
#endregion
#endregion
#endregion
#endregion
// cybersecurity
#region 2.48.*
oid_2_48:
if (index == values.Length) return "/Cybersecurity";
switch (values[index++])
{
case 1: return "/Cybersecurity/Country";
case 2: return "/Cybersecurity/International-Org";
default: return $"/Cybersecurity/{values[index - 1]}";
}
#endregion
// alerting
#region 2.49.*
oid_2_49:
if (index == values.Length) return "/Alerting";
switch (values[index++])
{
case 0: return "/Alerting/WMO";
default: return $"/Alerting/{values[index - 1]}";
}
#endregion
#endregion
}
}
#pragma warning restore IDE0011
}
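
A worked example against the cases above (the call itself is illustrative): the OID 2.16.840 follows the 2.* and 2.16.* branches and resolves to the United States country arc.

ulong[] oid = { 2, 16, 840 };
string iri = ObjectIdentifier.ParseOIDToOIDIRINotation(oid);
// iri == "/Country/US"; any values past the resolved arc are appended as
// plain numbers, so { 2, 16, 840, 1 } would yield "/Country/US/1".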


@@ -1,71 +0,0 @@
using System;
using System.Collections.Generic;
namespace BinaryObjectScanner.ASN1
{
/// <summary>
/// Methods related to Object Identifiers (OID)
/// </summary>
public static partial class ObjectIdentifier
{
// TODO: ulong[] isn't going to work. If we can use .NET 7, we can use UInt128
// We might want to look into storing all values as GUID? I don't remember if
// you can do value comparisions between an integral value and a GUID, though.
/// <summary>
/// Parse an OID in DER-encoded byte notation into a list of values
/// </summary>
/// <param name="data">Byte array representing the data to read</param>
/// <param name="length">Total length of the data according to the DER TLV</param>
/// <returns>Array of values representing the OID</returns>
public static ulong[] ParseDERIntoArray(byte[] data, ulong length)
{
// The first byte contains nodes 1 and 2
int firstNode = Math.DivRem(data[0], 40, out int secondNode);
// Create a list for all nodes
List<ulong> nodes = new List<ulong> { (ulong)firstNode, (ulong)secondNode };
// All other nodes are encoded uniquely
int offset = 1;
while (offset < (long)length)
{
// If bit 7 is not set
if ((data[offset] & 0x80) == 0)
{
nodes.Add(data[offset]);
offset++;
continue;
}
// Otherwise, read the encoded value in a loop
ulong dotValue = 0;
bool doneProcessing = false;
do
{
// Shift the current encoded value
dotValue <<= 7;
// If we have a leading zero byte, we're at the end
if ((data[offset] & 0x80) == 0)
doneProcessing = true;
// Clear the top byte
unchecked { data[offset] &= (byte)~0x80; }
// Add the new value to the result
dotValue |= data[offset];
// Increment the offset
offset++;
} while (offset < data.Length && !doneProcessing);
// Add the parsed value to the output
nodes.Add(dotValue);
}
return nodes.ToArray();
}
}
}
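
The encoding handled above can be sanity-checked against a familiar OID. In the standard DER encoding of 1.2.840.113549 (the RSA arc), the first byte packs the first two nodes as 1 * 40 + 2, and larger nodes use 7 data bits per byte with the high bit as a continuation flag. A short verification sketch (the driver statements are illustrative):

byte[] encoded = { 0x2A, 0x86, 0x48, 0x86, 0xF7, 0x0D };
ulong[] nodes = ObjectIdentifier.ParseDERIntoArray(encoded, (ulong)encoded.Length);
// nodes == { 1, 2, 840, 113549 }:
//   0x2A = 42      -> 42 / 40 = 1 remainder 2
//   0x86 0x48      -> (6 << 7) | 0x48 = 840
//   0x86 0xF7 0x0D -> ((6 << 7 | 0x77) << 7) | 0x0D = 113549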


@@ -1,26 +0,0 @@
namespace BinaryObjectScanner.ASN1
{
#pragma warning disable IDE0011
/// <summary>
/// Methods related to Object Identifiers (OID) and dot notation
/// </summary>
public static partial class ObjectIdentifier
{
/// <summary>
/// Parse an OID in separated-value notation into dot notation
/// </summary>
/// <param name="values">List of values to check against</param>
/// <returns>List of values representing the dot notation</returns>
public static string ParseOIDToDotNotation(ulong[] values)
{
// If we have an invalid set of values, we can't do anything
if (values == null || values.Length == 0)
return null;
return string.Join(".", values);
}
}
#pragma warning restore IDE0011
}
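
Dot notation is the simplest of the renderings; continuing the example from the DER parsing method (illustrative call):

string dotted = ObjectIdentifier.ParseOIDToDotNotation(new ulong[] { 1, 2, 840, 113549 });
// dotted == "1.2.840.113549"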


@@ -1,259 +0,0 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Numerics;
using System.Text;
using BinaryObjectScanner.Utilities;
namespace BinaryObjectScanner.ASN1
{
/// <summary>
/// ASN.1 type/length/value class that all types are based on
/// </summary>
public class TypeLengthValue
{
/// <summary>
/// The ASN.1 type
/// </summary>
public ASN1Type Type { get; private set; }
/// <summary>
/// Length of the value
/// </summary>
public ulong Length { get; private set; }
/// <summary>
/// Generic value associated with <see cref="Type"/>
/// </summary>
public object Value { get; private set; }
/// <summary>
/// Read from the source data array at an index
/// </summary>
/// <param name="data">Byte array representing data to read</param>
/// <param name="index">Index within the array to read at</param>
public TypeLengthValue(byte[] data, ref int index)
{
// Get the type and modifiers
this.Type = (ASN1Type)data[index++];
// If we have an end indicator, we just return
if (this.Type == ASN1Type.V_ASN1_EOC)
return;
// Get the length of the value
this.Length = ReadLength(data, ref index);
// Read the value
if (this.Type.HasFlag(ASN1Type.V_ASN1_CONSTRUCTED))
{
var valueList = new List<TypeLengthValue>();
int currentIndex = index;
while (index < currentIndex + (int)this.Length)
{
valueList.Add(new TypeLengthValue(data, ref index));
}
this.Value = valueList.ToArray();
}
else
{
// TODO: Get more granular based on type
this.Value = data.ReadBytes(ref index, (int)this.Length);
}
}
/// <summary>
/// Format the TLV as a string
/// </summary>
/// <param name="paddingLevel">Padding level of the item when formatting</param>
/// <returns>String representing the TLV, if possible</returns>
public string Format(int paddingLevel = 0)
{
// Create the left-padding string
string padding = new string(' ', paddingLevel);
// If we have an invalid item
if (this.Type == 0)
return $"{padding}UNKNOWN TYPE";
// Create the string builder
StringBuilder formatBuilder = new StringBuilder();
// Append the type
formatBuilder.Append($"{padding}Type: {this.Type}");
if (this.Type == ASN1Type.V_ASN1_EOC)
return formatBuilder.ToString();
// Append the length
formatBuilder.Append($", Length: {this.Length}");
if (this.Length == 0)
return formatBuilder.ToString();
// If we have a constructed type
if (this.Type.HasFlag(ASN1Type.V_ASN1_CONSTRUCTED))
{
var valueAsObjectArray = this.Value as TypeLengthValue[];
if (valueAsObjectArray == null)
{
formatBuilder.Append(", Value: [INVALID DATA TYPE]");
return formatBuilder.ToString();
}
formatBuilder.Append(", Value:\n");
for (int i = 0; i < valueAsObjectArray.Length; i++)
{
var child = valueAsObjectArray[i];
string childString = child.Format(paddingLevel + 1);
formatBuilder.Append($"{childString}\n");
}
return formatBuilder.ToString().TrimEnd('\n');
}
// Get the value as a byte array
byte[] valueAsByteArray = this.Value as byte[];
if (valueAsByteArray == null)
{
formatBuilder.Append(", Value: [INVALID DATA TYPE]");
return formatBuilder.ToString();
}
// If we have a primitive type
switch (this.Type)
{
/// <see href="https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-boolean"/>
case ASN1Type.V_ASN1_BOOLEAN:
if (this.Length > 1 || valueAsByteArray.Length > 1)
formatBuilder.Append($" [Expected length of 1]");
bool booleanValue = valueAsByteArray[0] == 0x00 ? false : true;
formatBuilder.Append($", Value: {booleanValue}");
break;
/// <see href="https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-integer"/>
case ASN1Type.V_ASN1_INTEGER:
Array.Reverse(valueAsByteArray);
BigInteger integerValue = new BigInteger(valueAsByteArray);
formatBuilder.Append($", Value: {integerValue}");
break;
/// <see href="https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-bit-string"/>
case ASN1Type.V_ASN1_BIT_STRING:
// TODO: Read into a BitArray and print that out instead?
int unusedBits = valueAsByteArray[0];
formatBuilder.Append($", Value with {unusedBits} unused bits: {BitConverter.ToString(valueAsByteArray.Skip(1).ToArray()).Replace('-', ' ')}");
break;
/// <see href="https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-octet-string"/>
case ASN1Type.V_ASN1_OCTET_STRING:
formatBuilder.Append($", Value: {BitConverter.ToString(valueAsByteArray).Replace('-', ' ')}");
break;
/// <see href="https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-object-identifier"/>
/// <see cref="http://snmpsharpnet.com/index.php/2009/03/02/ber-encoding-and-decoding-oid-values/"/>
case ASN1Type.V_ASN1_OBJECT:
// Derive array of values
ulong[] objectNodes = ObjectIdentifier.ParseDERIntoArray(valueAsByteArray, this.Length);
// Append the dot and modified OID-IRI notations
string dotNotationString = ObjectIdentifier.ParseOIDToDotNotation(objectNodes);
string oidIriString = ObjectIdentifier.ParseOIDToOIDIRINotation(objectNodes);
formatBuilder.Append($", Value: {dotNotationString} ({oidIriString})");
break;
/// <see href="https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-utf8string"/>
case ASN1Type.V_ASN1_UTF8STRING:
formatBuilder.Append($", Value: {Encoding.UTF8.GetString(valueAsByteArray)}");
break;
/// <see href="https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-printablestring"/>
case ASN1Type.V_ASN1_PRINTABLESTRING:
formatBuilder.Append($", Value: {Encoding.ASCII.GetString(valueAsByteArray)}");
break;
//case ASN1Type.V_ASN1_T61STRING:
case ASN1Type.V_ASN1_TELETEXSTRING:
formatBuilder.Append($", Value: {Encoding.ASCII.GetString(valueAsByteArray)}");
break;
/// <see href="https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-ia5string"/>
case ASN1Type.V_ASN1_IA5STRING:
formatBuilder.Append($", Value: {Encoding.ASCII.GetString(valueAsByteArray)}");
break;
case ASN1Type.V_ASN1_UTCTIME:
string utctimeString = Encoding.ASCII.GetString(valueAsByteArray);
if (DateTime.TryParse(utctimeString, out DateTime utctimeDateTime))
formatBuilder.Append($", Value: {utctimeDateTime}");
else
formatBuilder.Append($", Value: {utctimeString}");
break;
/// <see href="https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-bmpstring"/>
case ASN1Type.V_ASN1_BMPSTRING:
formatBuilder.Append($", Value: {Encoding.Unicode.GetString(valueAsByteArray)}");
break;
default:
formatBuilder.Append($", Value (Unknown Format): {BitConverter.ToString(this.Value as byte[]).Replace('-', ' ')}");
break;
}
// Return the formatted string
return formatBuilder.ToString();
}
/// <summary>
/// Reads the length field for a type
/// </summary>
/// <param name="data">Byte array representing data to read</param>
/// <param name="index">Index within the array to read at</param>
/// <returns>The length value read from the array</returns>
private static ulong ReadLength(byte[] data, ref int index)
{
// If we have invalid data, throw an exception
if (data == null || index < 0 && index >= data.Length)
throw new ArgumentException();
// Read the first byte, assuming it's the length
byte length = data[index++];
// If the bit 7 is not set, then use the value as it is
if ((length & 0x80) == 0)
return length;
// Otherwise, use the value as the number of remaining bytes to read
int bytesToRead = length & ~0x80;
byte[] bytesRead = data.ReadBytes(ref index, bytesToRead);
// TODO: Write extensions to read big-endian
// Reverse the big-endian bytes so BitConverter can read them as little-endian
Array.Reverse(bytesRead);
switch (bytesRead.Length)
{
case 1:
return bytesRead[0];
case 2:
return BitConverter.ToUInt16(bytesRead, 0);
case 3:
Array.Resize(ref bytesRead, 4);
goto case 4;
case 4:
return BitConverter.ToUInt32(bytesRead, 0);
case 5:
case 6:
case 7:
Array.Resize(ref bytesRead, 8);
goto case 8;
case 8:
return BitConverter.ToUInt64(bytesRead, 0);
default:
throw new InvalidOperationException();
}
}
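/// <summary>
/// Illustrative only, not part of the original file: a minimal sketch of the two
/// DER length forms handled by ReadLength. A leading byte below 0x80 is the
/// length itself (short form); 0x82 announces two length bytes (long form),
/// so 0x82 0x01 0xF4 decodes to 0x01F4 == 500.
/// </summary>
private static void DemonstrateReadLength()
{
int index = 0;
ulong shortForm = ReadLength(new byte[] { 0x2A }, ref index); // 42, index advances to 1
index = 0;
ulong longForm = ReadLength(new byte[] { 0x82, 0x01, 0xF4 }, ref index); // 500, index advances to 3
}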
}
}


@@ -1,470 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
using BinaryObjectScanner.Models.AACS;
using BinaryObjectScanner.Utilities;
namespace BinaryObjectScanner.Builders
{
public class AACS
{
#region Byte Data
/// <summary>
/// Parse a byte array into an AACS media key block
/// </summary>
/// <param name="data">Byte array to parse</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>Filled media key block on success, null on error</returns>
public static MediaKeyBlock ParseMediaKeyBlock(byte[] data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return ParseMediaKeyBlock(dataStream);
}
#endregion
#region Stream Data
/// <summary>
/// Parse a Stream into an AACS media key block
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled media key block on success, null on error</returns>
public static MediaKeyBlock ParseMediaKeyBlock(Stream data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
return null;
// If the offset is out of bounds
if (data.Position < 0 || data.Position >= data.Length)
return null;
// Cache the current offset
int initialOffset = (int)data.Position;
// Create a new media key block to fill
var mediaKeyBlock = new MediaKeyBlock();
#region Records
// Create the records list
var records = new List<Record>();
// Try to parse the records
while (data.Position < data.Length)
{
// Try to parse the record
var record = ParseRecord(data);
if (record == null)
return null;
// Add the record
records.Add(record);
// If we have an end of media key block record
if (record.RecordType == RecordType.EndOfMediaKeyBlock)
break;
// Align to the 4-byte boundary if we're not at the end
if (data.Position != data.Length)
{
while ((data.Position % 4) != 0)
_ = data.ReadByteValue();
}
else
{
break;
}
}
// Set the records
mediaKeyBlock.Records = records.ToArray();
#endregion
return mediaKeyBlock;
}
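/// <summary>
/// Illustrative usage sketch, not part of the original file; the method name,
/// path argument, and console output are assumptions for demonstration only.
/// </summary>
public static void PrintRecordCount(string path)
{
using (FileStream stream = File.OpenRead(path))
{
MediaKeyBlock mediaKeyBlock = ParseMediaKeyBlock(stream);
if (mediaKeyBlock != null)
Console.WriteLine($"Parsed {mediaKeyBlock.Records.Length} record(s)");
}
}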
/// <summary>
/// Parse a Stream into a record
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled record on success, null on error</returns>
private static Record ParseRecord(Stream data)
{
// TODO: Use marshalling here instead of building
// The first 4 bytes make up the type and length
byte[] typeAndLength = data.ReadBytes(4);
RecordType type = (RecordType)typeAndLength[0];
// Remove the first byte and parse as big-endian
typeAndLength[0] = 0x00;
Array.Reverse(typeAndLength);
uint length = BitConverter.ToUInt32(typeAndLength, 0);
// Create a record based on the type
switch (type)
{
// Recognized record types
case RecordType.EndOfMediaKeyBlock: return ParseEndOfMediaKeyBlockRecord(data, type, length);
case RecordType.ExplicitSubsetDifference: return ParseExplicitSubsetDifferenceRecord(data, type, length);
case RecordType.MediaKeyData: return ParseMediaKeyDataRecord(data, type, length);
case RecordType.SubsetDifferenceIndex: return ParseSubsetDifferenceIndexRecord(data, type, length);
case RecordType.TypeAndVersion: return ParseTypeAndVersionRecord(data, type, length);
case RecordType.DriveRevocationList: return ParseDriveRevocationListRecord(data, type, length);
case RecordType.HostRevocationList: return ParseHostRevocationListRecord(data, type, length);
case RecordType.VerifyMediaKey: return ParseVerifyMediaKeyRecord(data, type, length);
case RecordType.Copyright: return ParseCopyrightRecord(data, type, length);
// Unrecognized record type
default:
return null;
}
}
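// Illustrative note, not part of the original file: the four-byte record header
// packs a one-byte type followed by a three-byte big-endian length that covers
// the header itself. Assuming RecordType mirrors the AACS specification values,
// the bytes 10 00 00 0C describe a TypeAndVersion record (type 0x10) with a
// total length of 12 bytes.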
/// <summary>
/// Parse a Stream into an end of media key block record
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled end of media key block record on success, null on error</returns>
private static EndOfMediaKeyBlockRecord ParseEndOfMediaKeyBlockRecord(Stream data, RecordType type, uint length)
{
// Verify we're calling the right parser
if (type != RecordType.EndOfMediaKeyBlock)
return null;
// TODO: Use marshalling here instead of building
var record = new EndOfMediaKeyBlockRecord();
record.RecordType = type;
record.RecordLength = length;
if (length > 4)
record.SignatureData = data.ReadBytes((int)(length - 4));
return record;
}
/// <summary>
/// Parse a Stream into an explicit subset-difference record
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled explicit subset-difference record on success, null on error</returns>
private static ExplicitSubsetDifferenceRecord ParseExplicitSubsetDifferenceRecord(Stream data, RecordType type, uint length)
{
// Verify we're calling the right parser
if (type != RecordType.ExplicitSubsetDifference)
return null;
// TODO: Use marshalling here instead of building
var record = new ExplicitSubsetDifferenceRecord();
record.RecordType = type;
record.RecordLength = length;
// Cache the current offset
long initialOffset = data.Position - 4;
// Create the subset difference list
var subsetDifferences = new List<SubsetDifference>();
// Try to parse the subset differences
while (data.Position < initialOffset + length - 5)
{
var subsetDifference = new SubsetDifference();
subsetDifference.Mask = data.ReadByteValue();
subsetDifference.Number = data.ReadUInt32BE();
subsetDifferences.Add(subsetDifference);
}
// Set the subset differences
record.SubsetDifferences = subsetDifferences.ToArray();
// If there's any data left, discard it
if (data.Position < initialOffset + length)
_ = data.ReadBytes((int)(initialOffset + length - data.Position));
return record;
}
/// <summary>
/// Parse a Stream into a media key data record
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled media key data record on success, null on error</returns>
private static MediaKeyDataRecord ParseMediaKeyDataRecord(Stream data, RecordType type, uint length)
{
// Verify we're calling the right parser
if (type != RecordType.MediaKeyData)
return null;
// TODO: Use marshalling here instead of building
var record = new MediaKeyDataRecord();
record.RecordType = type;
record.RecordLength = length;
// Cache the current offset
long initialOffset = data.Position - 4;
// Create the media key list
var mediaKeys = new List<byte[]>();
// Try to parse the media keys
while (data.Position < initialOffset + length)
{
byte[] mediaKey = data.ReadBytes(0x10);
mediaKeys.Add(mediaKey);
}
// Set the media keys
record.MediaKeyData = mediaKeys.ToArray();
return record;
}
/// <summary>
/// Parse a Stream into a subset-difference index record
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled subset-difference index record on success, null on error</returns>
private static SubsetDifferenceIndexRecord ParseSubsetDifferenceIndexRecord(Stream data, RecordType type, uint length)
{
// Verify we're calling the right parser
if (type != RecordType.SubsetDifferenceIndex)
return null;
// TODO: Use marshalling here instead of building
var record = new SubsetDifferenceIndexRecord();
record.RecordType = type;
record.RecordLength = length;
// Cache the current offset
long initialOffset = data.Position - 4;
record.Span = data.ReadUInt32BE();
// Create the offset list
var offsets = new List<uint>();
// Try to parse the offsets
while (data.Position < initialOffset + length)
{
uint offset = data.ReadUInt32BE();
offsets.Add(offset);
}
// Set the offsets
record.Offsets = offsets.ToArray();
return record;
}
/// <summary>
/// Parse a Stream into a type and version record
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled type and version record on success, null on error</returns>
private static TypeAndVersionRecord ParseTypeAndVersionRecord(Stream data, RecordType type, uint length)
{
// Verify we're calling the right parser
if (type != RecordType.TypeAndVersion)
return null;
// TODO: Use marshalling here instead of building
var record = new TypeAndVersionRecord();
record.RecordType = type;
record.RecordLength = length;
record.MediaKeyBlockType = (MediaKeyBlockType)data.ReadUInt32BE();
record.VersionNumber = data.ReadUInt32BE();
return record;
}
/// <summary>
/// Parse a Stream into a drive revocation list record
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled drive revocation list record on success, null on error</returns>
private static DriveRevocationListRecord ParseDriveRevocationListRecord(Stream data, RecordType type, uint length)
{
// Verify we're calling the right parser
if (type != RecordType.DriveRevocationList)
return null;
// TODO: Use marshalling here instead of building
var record = new DriveRevocationListRecord();
record.RecordType = type;
record.RecordLength = length;
// Cache the current offset
long initialOffset = data.Position - 4;
record.TotalNumberOfEntries = data.ReadUInt32BE();
// Create the signature blocks list
var blocks = new List<DriveRevocationSignatureBlock>();
// Try to parse the signature blocks
int entryCount = 0;
while (entryCount < record.TotalNumberOfEntries && data.Position < initialOffset + length)
{
var block = new DriveRevocationSignatureBlock();
block.NumberOfEntries = data.ReadUInt32BE();
block.EntryFields = new DriveRevocationListEntry[block.NumberOfEntries];
for (int i = 0; i < block.EntryFields.Length; i++)
{
var entry = new DriveRevocationListEntry();
entry.Range = data.ReadUInt16BE();
entry.DriveID = data.ReadBytes(6);
block.EntryFields[i] = entry;
entryCount++;
}
blocks.Add(block);
// If we have an empty block
if (block.NumberOfEntries == 0)
break;
}
// Set the signature blocks
record.SignatureBlocks = blocks.ToArray();
// If there's any data left, discard it
if (data.Position < initialOffset + length)
_ = data.ReadBytes((int)(initialOffset + length - data.Position));
return record;
}
/// <summary>
/// Parse a Stream into a host revocation list record
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled host revocation list record on success, null on error</returns>
private static HostRevocationListRecord ParseHostRevocationListRecord(Stream data, RecordType type, uint length)
{
// Verify we're calling the right parser
if (type != RecordType.HostRevocationList)
return null;
// TODO: Use marshalling here instead of building
var record = new HostRevocationListRecord();
record.RecordType = type;
record.RecordLength = length;
// Cache the current offset
long initialOffset = data.Position - 4;
record.TotalNumberOfEntries = data.ReadUInt32BE();
// Create the signature blocks list
var blocks = new List<HostRevocationSignatureBlock>();
// Try to parse the signature blocks
int entryCount = 0;
while (entryCount < record.TotalNumberOfEntries && data.Position < initialOffset + length)
{
var block = new HostRevocationSignatureBlock();
block.NumberOfEntries = data.ReadUInt32BE();
block.EntryFields = new HostRevocationListEntry[block.NumberOfEntries];
for (int i = 0; i < block.EntryFields.Length; i++)
{
var entry = new HostRevocationListEntry();
entry.Range = data.ReadUInt16BE();
entry.HostID = data.ReadBytes(6);
block.EntryFields[i] = entry;
entryCount++;
}
blocks.Add(block);
// If we have an empty block
if (block.NumberOfEntries == 0)
break;
}
// Set the signature blocks
record.SignatureBlocks = blocks.ToArray();
// If there's any data left, discard it
if (data.Position < initialOffset + length)
_ = data.ReadBytes((int)(initialOffset + length - data.Position));
return record;
}
/// <summary>
/// Parse a Stream into a verify media key record
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled verify media key record on success, null on error</returns>
private static VerifyMediaKeyRecord ParseVerifyMediaKeyRecord(Stream data, RecordType type, uint length)
{
// Verify we're calling the right parser
if (type != RecordType.VerifyMediaKey)
return null;
// TODO: Use marshalling here instead of building
var record = new VerifyMediaKeyRecord();
record.RecordType = type;
record.RecordLength = length;
record.CiphertextValue = data.ReadBytes(0x10);
return record;
}
/// <summary>
/// Parse a Stream into a copyright record
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled copyright record on success, null on error</returns>
private static CopyrightRecord ParseCopyrightRecord(Stream data, RecordType type, uint length)
{
// Verify we're calling the right parser
if (type != RecordType.Copyright)
return null;
// TODO: Use marshalling here instead of building
var record = new CopyrightRecord();
record.RecordType = type;
record.RecordLength = length;
if (length > 4)
{
byte[] copyright = data.ReadBytes((int)(length - 4));
record.Copyright = Encoding.ASCII.GetString(copyright).TrimEnd('\0');
}
return record;
}
#endregion
}
}


@@ -1,95 +0,0 @@
using System.IO;
using System.Text;
using BinaryObjectScanner.Models.BDPlus;
using BinaryObjectScanner.Utilities;
using static BinaryObjectScanner.Models.BDPlus.Constants;
namespace BinaryObjectScanner.Builders
{
public class BDPlus
{
#region Byte Data
/// <summary>
/// Parse a byte array into a BD+ SVM
/// </summary>
/// <param name="data">Byte array to parse</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>Filled BD+ SVM on success, null on error</returns>
public static SVM ParseSVM(byte[] data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return ParseSVM(dataStream);
}
#endregion
#region Stream Data
/// <summary>
/// Parse a Stream into a BD+ SVM
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled BD+ SVM on success, null on error</returns>
public static SVM ParseSVM(Stream data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
return null;
// If the offset is out of bounds
if (data.Position < 0 || data.Position >= data.Length)
return null;
// Cache the current offset
int initialOffset = (int)data.Position;
// Try to parse the SVM
return ParseSVMData(data);
}
/// <summary>
/// Parse a Stream into an SVM
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled SVM on success, null on error</returns>
private static SVM ParseSVMData(Stream data)
{
// TODO: Use marshalling here instead of building
var svm = new SVM();
byte[] signature = data.ReadBytes(8);
svm.Signature = Encoding.ASCII.GetString(signature);
if (svm.Signature != SignatureString)
return null;
svm.Unknown1 = data.ReadBytes(5);
svm.Year = data.ReadUInt16BE();
svm.Month = data.ReadByteValue();
if (svm.Month < 1 || svm.Month > 12)
return null;
svm.Day = data.ReadByteValue();
if (svm.Day < 1 || svm.Day > 31)
return null;
svm.Unknown2 = data.ReadBytes(4);
svm.Length = data.ReadUInt32();
// if (svm.Length > 0)
// svm.Data = data.ReadBytes((int)svm.Length);
return svm;
}
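/// <summary>
/// Illustrative usage sketch, not part of the original file; the method name,
/// path argument, and console output are assumptions for demonstration only.
/// </summary>
public static void PrintSVMDate(string path)
{
using (FileStream stream = System.IO.File.OpenRead(path))
{
SVM svm = ParseSVM(stream);
if (svm != null)
System.Console.WriteLine($"SVM generated {svm.Year:D4}-{svm.Month:D2}-{svm.Day:D2}");
}
}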
#endregion
}
}


@@ -1,150 +0,0 @@
using System.IO;
using System.Text;
using BinaryObjectScanner.Models.BFPK;
using BinaryObjectScanner.Utilities;
using static BinaryObjectScanner.Models.BFPK.Constants;
namespace BinaryObjectScanner.Builders
{
public class BFPK
{
#region Byte Data
/// <summary>
/// Parse a byte array into a BFPK archive
/// </summary>
/// <param name="data">Byte array to parse</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>Filled archive on success, null on error</returns>
public static Archive ParseArchive(byte[] data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return ParseArchive(dataStream);
}
#endregion
#region Stream Data
/// <summary>
/// Parse a Stream into a BFPK archive
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled archive on success, null on error</returns>
public static Archive ParseArchive(Stream data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
return null;
// If the offset is out of bounds
if (data.Position < 0 || data.Position >= data.Length)
return null;
// Cache the current offset
int initialOffset = (int)data.Position;
// Create a new archive to fill
var archive = new Archive();
#region Header
// Try to parse the header
var header = ParseHeader(data);
if (header == null)
return null;
// Set the archive header
archive.Header = header;
#endregion
#region Files
// If we have any files
if (header.Files > 0)
{
var files = new FileEntry[header.Files];
// Read all entries in turn
for (int i = 0; i < header.Files; i++)
{
var file = ParseFileEntry(data);
if (file == null)
return null;
files[i] = file;
}
// Set the files
archive.Files = files;
}
#endregion
return archive;
}
/// <summary>
/// Parse a Stream into a header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled header on success, null on error</returns>
private static Header ParseHeader(Stream data)
{
// TODO: Use marshalling here instead of building
Header header = new Header();
byte[] magic = data.ReadBytes(4);
header.Magic = Encoding.ASCII.GetString(magic);
if (header.Magic != SignatureString)
return null;
header.Version = data.ReadInt32();
header.Files = data.ReadInt32();
return header;
}
/// <summary>
/// Parse a Stream into a file entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled file entry on success, null on error</returns>
private static FileEntry ParseFileEntry(Stream data)
{
// TODO: Use marshalling here instead of building
FileEntry fileEntry = new FileEntry();
fileEntry.NameSize = data.ReadInt32();
if (fileEntry.NameSize > 0)
{
byte[] name = data.ReadBytes(fileEntry.NameSize);
fileEntry.Name = Encoding.ASCII.GetString(name);
}
fileEntry.UncompressedSize = data.ReadInt32();
fileEntry.Offset = data.ReadInt32();
if (fileEntry.Offset > 0)
{
long currentOffset = data.Position;
data.Seek(fileEntry.Offset, SeekOrigin.Begin);
fileEntry.CompressedSize = data.ReadInt32();
data.Seek(currentOffset, SeekOrigin.Begin);
}
return fileEntry;
}
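// Illustrative note, not part of the original file: a file entry stores the
// name, uncompressed size, and absolute data offset, but not the compressed
// size; that value sits in the first four bytes at the data offset, which is
// why the parser seeks there and immediately seeks back.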
#endregion
}
}


@@ -1,250 +0,0 @@
using System.IO;
using System.Linq;
using System.Text;
using BinaryObjectScanner.Models.BSP;
using BinaryObjectScanner.Utilities;
using static BinaryObjectScanner.Models.BSP.Constants;
namespace BinaryObjectScanner.Builders
{
public static class BSP
{
#region Byte Data
/// <summary>
/// Parse a byte array into a Half-Life Level
/// </summary>
/// <param name="data">Byte array to parse</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>Filled Half-Life Level on success, null on error</returns>
public static Models.BSP.File ParseFile(byte[] data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return ParseFile(dataStream);
}
#endregion
#region Stream Data
/// <summary>
/// Parse a Stream into a Half-Life Level
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Level on success, null on error</returns>
public static Models.BSP.File ParseFile(Stream data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
return null;
// If the offset is out of bounds
if (data.Position < 0 || data.Position >= data.Length)
return null;
// Cache the current offset
int initialOffset = (int)data.Position;
// Create a new Half-Life Level to fill
var file = new Models.BSP.File();
#region Header
// Try to parse the header
var header = ParseHeader(data);
if (header == null)
return null;
// Set the level header
file.Header = header;
#endregion
#region Lumps
// Create the lump array
file.Lumps = new Lump[HL_BSP_LUMP_COUNT];
// Try to parse the lumps
for (int i = 0; i < HL_BSP_LUMP_COUNT; i++)
{
var lump = ParseLump(data);
file.Lumps[i] = lump;
}
#endregion
#region Texture header
// Try to get the texture header lump
var textureDataLump = file.Lumps[HL_BSP_LUMP_TEXTUREDATA];
if (textureDataLump.Offset == 0 || textureDataLump.Length == 0)
return null;
// Seek to the texture header
data.Seek(textureDataLump.Offset, SeekOrigin.Begin);
// Try to parse the texture header
var textureHeader = ParseTextureHeader(data);
if (textureHeader == null)
return null;
// Set the texture header
file.TextureHeader = textureHeader;
#endregion
#region Textures
// Create the texture array
file.Textures = new Texture[textureHeader.TextureCount];
// Try to parse the textures
for (int i = 0; i < textureHeader.TextureCount; i++)
{
// Get the texture offset
int offset = (int)(textureHeader.Offsets[i] + file.Lumps[HL_BSP_LUMP_TEXTUREDATA].Offset);
if (offset < 0 || offset >= data.Length)
continue;
// Seek to the texture
data.Seek(offset, SeekOrigin.Begin);
var texture = ParseTexture(data);
file.Textures[i] = texture;
}
#endregion
return file;
}
/// <summary>
/// Parse a Stream into a Half-Life Level header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Level header on success, null on error</returns>
private static Header ParseHeader(Stream data)
{
// TODO: Use marshalling here instead of building
Header header = new Header();
// Only recognized versions are 29 and 30
header.Version = data.ReadUInt32();
if (header.Version != 29 && header.Version != 30)
return null;
return header;
}
/// <summary>
/// Parse a Stream into a lump
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled lump on success, null on error</returns>
private static Lump ParseLump(Stream data)
{
// TODO: Use marshalling here instead of building
Lump lump = new Lump();
lump.Offset = data.ReadUInt32();
lump.Length = data.ReadUInt32();
return lump;
}
/// <summary>
/// Parse a Stream into a Half-Life Level texture header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Level texture header on success, null on error</returns>
private static TextureHeader ParseTextureHeader(Stream data)
{
// TODO: Use marshalling here instead of building
TextureHeader textureHeader = new TextureHeader();
textureHeader.TextureCount = data.ReadUInt32();
var offsets = new uint[textureHeader.TextureCount];
for (int i = 0; i < textureHeader.TextureCount; i++)
{
offsets[i] = data.ReadUInt32();
}
textureHeader.Offsets = offsets;
return textureHeader;
}
/// <summary>
/// Parse a Stream into a texture
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="mipmap">Mipmap level</param>
/// <returns>Filled texture on success, null on error</returns>
private static Texture ParseTexture(Stream data, uint mipmap = 0)
{
// TODO: Use marshalling here instead of building
Texture texture = new Texture();
byte[] name = data.ReadBytes(16).TakeWhile(c => c != '\0').ToArray();
texture.Name = Encoding.ASCII.GetString(name);
texture.Width = data.ReadUInt32();
texture.Height = data.ReadUInt32();
texture.Offsets = new uint[4];
for (int i = 0; i < 4; i++)
{
texture.Offsets[i] = data.ReadUInt32();
}
// Get the size of the pixel data
uint pixelSize = 0;
for (int i = 0; i < HL_BSP_MIPMAP_COUNT; i++)
{
if (texture.Offsets[i] != 0)
{
pixelSize += (texture.Width >> i) * (texture.Height >> i);
}
}
// If we have no pixel data
if (pixelSize == 0)
return texture;
texture.TextureData = data.ReadBytes((int)pixelSize);
texture.PaletteSize = data.ReadUInt16();
texture.PaletteData = data.ReadBytes((int)(texture.PaletteSize * 3));
// Adjust the dimensions based on mipmap level
switch (mipmap)
{
case 1:
texture.Width /= 2;
texture.Height /= 2;
break;
case 2:
texture.Width /= 4;
texture.Height /= 4;
break;
case 3:
texture.Width /= 8;
texture.Height /= 8;
break;
}
return texture;
}
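// Illustrative note, not part of the original file: each mipmap level halves
// both dimensions, so a 64x64 texture with all four offsets populated carries
// 64*64 + 32*32 + 16*16 + 8*8 = 5,440 bytes of pixel data, followed by a
// two-byte palette size and PaletteSize * 3 bytes of RGB palette data.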
#endregion
}
}


@@ -1,28 +0,0 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFrameworks>net48;net6.0;net7.0</TargetFrameworks>
<RuntimeIdentifiers>win-x86;win-x64;linux-x64;osx-x64</RuntimeIdentifiers>
<Title>BinaryObjectScanner.Builders</Title>
<AssemblyName>BinaryObjectScanner.Builders</AssemblyName>
<Authors>Matt Nadareski</Authors>
<Product>BurnOutSharp</Product>
<Copyright>Copyright (c)2022 Matt Nadareski</Copyright>
<RepositoryUrl>https://github.com/mnadareski/BurnOutSharp</RepositoryUrl>
<Version>2.8</Version>
<AssemblyVersion>2.8</AssemblyVersion>
<FileVersion>2.8</FileVersion>
<IncludeSource>true</IncludeSource>
<IncludeSymbols>true</IncludeSymbols>
</PropertyGroup>
<PropertyGroup>
<AllowUnsafeBlocks>true</AllowUnsafeBlocks>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\BinaryObjectScanner.Models\BinaryObjectScanner.Models.csproj" />
<ProjectReference Include="..\BinaryObjectScanner.Utilities\BinaryObjectScanner.Utilities.csproj" />
</ItemGroup>
</Project>


@@ -1,391 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
using BinaryObjectScanner.Models.CFB;
using BinaryObjectScanner.Utilities;
using static BinaryObjectScanner.Models.CFB.Constants;
namespace BinaryObjectScanner.Builders
{
public class CFB
{
#region Byte Data
/// <summary>
/// Parse a byte array into a Compound File Binary
/// </summary>
/// <param name="data">Byte array to parse</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>Filled Compound File Binary on success, null on error</returns>
public static Binary ParseBinary(byte[] data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return ParseBinary(dataStream);
}
#endregion
#region Stream Data
/// <summary>
/// Parse a Stream into a Compound File Binary
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Compound File Binary on success, null on error</returns>
public static Binary ParseBinary(Stream data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
return null;
// If the offset is out of bounds
if (data.Position < 0 || data.Position >= data.Length)
return null;
// Cache the current offset
int initialOffset = (int)data.Position;
// Create a new binary to fill
var binary = new Binary();
#region Header
// Try to parse the file header
var fileHeader = ParseFileHeader(data);
if (fileHeader == null)
return null;
// Set the file header
binary.Header = fileHeader;
#endregion
#region DIFAT Sector Numbers
// Create a DIFAT sector table
var difatSectors = new List<SectorNumber>();
// Add the sectors from the header
difatSectors.AddRange(fileHeader.DIFAT);
// Loop through and add the DIFAT sectors
SectorNumber currentSector = (SectorNumber)fileHeader.FirstDIFATSectorLocation;
for (int i = 0; i < fileHeader.NumberOfDIFATSectors; i++)
{
// If we have a readable sector
if (currentSector <= SectorNumber.MAXREGSECT)
{
// Get the new next sector information
long sectorOffset = (long)((long)(currentSector + 1) * Math.Pow(2, fileHeader.SectorShift));
if (sectorOffset < 0 || sectorOffset >= data.Length)
return null;
// Seek to the next sector
data.Seek(sectorOffset, SeekOrigin.Begin);
// Try to parse the sectors
var sectorNumbers = ParseSectorNumbers(data, fileHeader.SectorShift);
if (sectorNumbers == null)
return null;
// Add the sector shifts
difatSectors.AddRange(sectorNumbers);
}
// Get the next sector from the DIFAT
currentSector = difatSectors[i];
}
// Assign the DIFAT sectors table
binary.DIFATSectorNumbers = difatSectors.ToArray();
#endregion
#region FAT Sector Numbers
// Create a FAT sector table
var fatSectors = new List<SectorNumber>();
// Loop through and add the FAT sectors
currentSector = binary.DIFATSectorNumbers[0];
for (int i = 0; i < fileHeader.NumberOfFATSectors; i++)
{
// If we have a readable sector
if (currentSector <= SectorNumber.MAXREGSECT)
{
// Get the new next sector information
long sectorOffset = (long)((long)(currentSector + 1) * Math.Pow(2, fileHeader.SectorShift));
if (sectorOffset < 0 || sectorOffset >= data.Length)
return null;
// Seek to the next sector
data.Seek(sectorOffset, SeekOrigin.Begin);
// Try to parse the sectors
var sectorNumbers = ParseSectorNumbers(data, fileHeader.SectorShift);
if (sectorNumbers == null)
return null;
// Add the sector shifts
fatSectors.AddRange(sectorNumbers);
}
// Get the next sector from the DIFAT
currentSector = binary.DIFATSectorNumbers[i];
}
// Assign the FAT sectors table
binary.FATSectorNumbers = fatSectors.ToArray();
#endregion
#region Mini FAT Sector Numbers
// Create a mini FAT sector table
var miniFatSectors = new List<SectorNumber>();
// Loop through and add the mini FAT sectors
currentSector = (SectorNumber)fileHeader.FirstMiniFATSectorLocation;
for (int i = 0; i < fileHeader.NumberOfMiniFATSectors; i++)
{
// If we have a readable sector
if (currentSector <= SectorNumber.MAXREGSECT)
{
// Get the new next sector information
long sectorOffset = (long)((long)(currentSector + 1) * Math.Pow(2, fileHeader.SectorShift));
if (sectorOffset < 0 || sectorOffset >= data.Length)
return null;
// Seek to the next sector
data.Seek(sectorOffset, SeekOrigin.Begin);
// Try to parse the sectors
var sectorNumbers = ParseSectorNumbers(data, fileHeader.SectorShift);
if (sectorNumbers == null)
return null;
// Add the sector shifts
miniFatSectors.AddRange(sectorNumbers);
}
// Get the next sector from the DIFAT
currentSector = binary.DIFATSectorNumbers[i];
}
// Assign the mini FAT sectors table
binary.MiniFATSectorNumbers = miniFatSectors.ToArray();
#endregion
#region Directory Entries
// Get the offset of the first directory sector
long firstDirectoryOffset = (long)(fileHeader.FirstDirectorySectorLocation * Math.Pow(2, fileHeader.SectorShift));
if (firstDirectoryOffset < 0 || firstDirectoryOffset >= data.Length)
return null;
// Seek to the first directory sector
data.Seek(firstDirectoryOffset, SeekOrigin.Begin);
// Create a directory sector table
var directorySectors = new List<DirectoryEntry>();
// Get the number of directory sectors
uint directorySectorCount = 0;
switch (fileHeader.MajorVersion)
{
case 3:
directorySectorCount = int.MaxValue;
break;
case 4:
directorySectorCount = fileHeader.NumberOfDirectorySectors;
break;
}
// Loop through and add the directory sectors
currentSector = (SectorNumber)fileHeader.FirstDirectorySectorLocation;
for (int i = 0; i < directorySectorCount; i++)
{
// If we have an end of chain
if (currentSector == SectorNumber.ENDOFCHAIN)
break;
// If we have a readable sector
if (currentSector <= SectorNumber.MAXREGSECT)
{
// Get the new next sector information
long sectorOffset = (long)((long)(currentSector + 1) * Math.Pow(2, fileHeader.SectorShift));
if (sectorOffset < 0 || sectorOffset >= data.Length)
return null;
// Seek to the next sector
data.Seek(sectorOffset, SeekOrigin.Begin);
// Try to parse the sectors
var directoryEntries = ParseDirectoryEntries(data, fileHeader.SectorShift, fileHeader.MajorVersion);
if (directoryEntries == null)
return null;
// Add the sector shifts
directorySectors.AddRange(directoryEntries);
}
// Get the next sector from the DIFAT
currentSector = binary.DIFATSectorNumbers[i];
}
// Assign the Directory sectors table
binary.DirectoryEntries = directorySectors.ToArray();
#endregion
return binary;
}
/// <summary>
/// Parse a Stream into a file header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled file header on success, null on error</returns>
private static FileHeader ParseFileHeader(Stream data)
{
// TODO: Use marshalling here instead of building
FileHeader header = new FileHeader();
header.Signature = data.ReadUInt64();
if (header.Signature != SignatureUInt64)
return null;
header.CLSID = data.ReadGuid();
header.MinorVersion = data.ReadUInt16();
header.MajorVersion = data.ReadUInt16();
header.ByteOrder = data.ReadUInt16();
if (header.ByteOrder != 0xFFFE)
return null;
header.SectorShift = data.ReadUInt16();
if (header.MajorVersion == 3 && header.SectorShift != 0x0009)
return null;
else if (header.MajorVersion == 4 && header.SectorShift != 0x000C)
return null;
header.MiniSectorShift = data.ReadUInt16();
header.Reserved = data.ReadBytes(6);
header.NumberOfDirectorySectors = data.ReadUInt32();
if (header.MajorVersion == 3 && header.NumberOfDirectorySectors != 0)
return null;
header.NumberOfFATSectors = data.ReadUInt32();
header.FirstDirectorySectorLocation = data.ReadUInt32();
header.TransactionSignatureNumber = data.ReadUInt32();
header.MiniStreamCutoffSize = data.ReadUInt32();
if (header.MiniStreamCutoffSize != 0x00001000)
return null;
header.FirstMiniFATSectorLocation = data.ReadUInt32();
header.NumberOfMiniFATSectors = data.ReadUInt32();
header.FirstDIFATSectorLocation = data.ReadUInt32();
header.NumberOfDIFATSectors = data.ReadUInt32();
header.DIFAT = new SectorNumber[109];
for (int i = 0; i < header.DIFAT.Length; i++)
{
header.DIFAT[i] = (SectorNumber)data.ReadUInt32();
}
// Skip rest of sector for version 4
if (header.MajorVersion == 4)
_ = data.ReadBytes(3584);
return header;
}
/// <summary>
/// Parse a Stream into a sector full of sector numbers
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="sectorShift">Sector shift from the header</param>
/// <returns>Filled sector full of sector numbers on success, null on error</returns>
private static SectorNumber[] ParseSectorNumbers(Stream data, ushort sectorShift)
{
// TODO: Use marshalling here instead of building
int sectorCount = (int)(Math.Pow(2, sectorShift) / sizeof(uint));
SectorNumber[] sectorNumbers = new SectorNumber[sectorCount];
for (int i = 0; i < sectorNumbers.Length; i++)
{
sectorNumbers[i] = (SectorNumber)data.ReadUInt32();
}
return sectorNumbers;
}
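// Illustrative note, not part of the original file: SectorShift is the log2 of
// the sector size, so a version 3 file (shift 0x0009) uses 512-byte sectors and
// each FAT sector holds 512 / 4 = 128 sector numbers. Sector n starts at byte
// (n + 1) * 512 because the 512-byte header precedes sector 0, which matches
// the offset math in ParseBinary above.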
/// <summary>
/// Parse a Stream into a sector full of directory entries
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="sectorShift">Sector shift from the header</param>
/// <param name="majorVersion">Major version from the header</param>
/// <returns>Filled sector full of directory entries on success, null on error</returns>
private static DirectoryEntry[] ParseDirectoryEntries(Stream data, ushort sectorShift, ushort majorVersion)
{
// TODO: Use marshalling here instead of building
const int directoryEntrySize = 64 + 2 + 1 + 1 + 4 + 4 + 4 + 16 + 4 + 8 + 8 + 4 + 8;
int sectorCount = (int)(Math.Pow(2, sectorShift) / directoryEntrySize);
DirectoryEntry[] directoryEntries = new DirectoryEntry[sectorCount];
for (int i = 0; i < directoryEntries.Length; i++)
{
var directoryEntry = ParseDirectoryEntry(data, majorVersion);
if (directoryEntry == null)
return null;
directoryEntries[i] = directoryEntry;
}
return directoryEntries;
}
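// Illustrative note, not part of the original file: the field sizes above sum
// to 128 bytes per directory entry, so a 512-byte version 3 sector holds
// 512 / 128 = 4 entries and a 4096-byte version 4 sector holds 32.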
/// <summary>
/// Parse a Stream into a directory entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="majorVersion">Major version from the header</param>
/// <returns>Filled directory entry on success, null on error</returns>
private static DirectoryEntry ParseDirectoryEntry(Stream data, ushort majorVersion)
{
// TODO: Use marshalling here instead of building
DirectoryEntry directoryEntry = new DirectoryEntry();
byte[] name = data.ReadBytes(64);
directoryEntry.Name = Encoding.Unicode.GetString(name).TrimEnd('\0');
directoryEntry.NameLength = data.ReadUInt16();
directoryEntry.ObjectType = (ObjectType)data.ReadByteValue();
directoryEntry.ColorFlag = (ColorFlag)data.ReadByteValue();
directoryEntry.LeftSiblingID = (StreamID)data.ReadUInt32();
directoryEntry.RightSiblingID = (StreamID)data.ReadUInt32();
directoryEntry.ChildID = (StreamID)data.ReadUInt32();
directoryEntry.CLSID = data.ReadGuid();
directoryEntry.StateBits = data.ReadUInt32();
directoryEntry.CreationTime = data.ReadUInt64();
directoryEntry.ModifiedTime = data.ReadUInt64();
directoryEntry.StartingSectorLocation = data.ReadUInt32();
directoryEntry.StreamSize = data.ReadUInt64();
// Version 3 readers ignore the most significant 32 bits of the stream size
if (majorVersion == 3)
directoryEntry.StreamSize &= 0x00000000FFFFFFFF;
return directoryEntry;
}
#endregion
}
}

File diff suppressed because it is too large


@@ -1,775 +0,0 @@
using System.Collections.Generic;
using System.IO;
using System.Text;
using BinaryObjectScanner.Models.GCF;
using BinaryObjectScanner.Utilities;
namespace BinaryObjectScanner.Builders
{
public static class GCF
{
#region Byte Data
/// <summary>
/// Parse a byte array into a Half-Life Game Cache
/// </summary>
/// <param name="data">Byte array to parse</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>Filled Half-Life Game Cache on success, null on error</returns>
public static Models.GCF.File ParseFile(byte[] data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return ParseFile(dataStream);
}
#endregion
#region Stream Data
/// <summary>
/// Parse a Stream into a Half-Life Game Cache
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache on success, null on error</returns>
public static Models.GCF.File ParseFile(Stream data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
return null;
// If the offset is out of bounds
if (data.Position < 0 || data.Position >= data.Length)
return null;
// Cache the current offset
long initialOffset = data.Position;
// Create a new Half-Life Game Cache to fill
var file = new Models.GCF.File();
#region Header
// Try to parse the header
var header = ParseHeader(data);
if (header == null)
return null;
// Set the game cache header
file.Header = header;
#endregion
#region Block Entry Header
// Try to parse the block entry header
var blockEntryHeader = ParseBlockEntryHeader(data);
if (blockEntryHeader == null)
return null;
// Set the game cache block entry header
file.BlockEntryHeader = blockEntryHeader;
#endregion
#region Block Entries
// Create the block entry array
file.BlockEntries = new BlockEntry[blockEntryHeader.BlockCount];
// Try to parse the block entries
for (int i = 0; i < blockEntryHeader.BlockCount; i++)
{
var blockEntry = ParseBlockEntry(data);
file.BlockEntries[i] = blockEntry;
}
#endregion
#region Fragmentation Map Header
// Try to parse the fragmentation map header
var fragmentationMapHeader = ParseFragmentationMapHeader(data);
if (fragmentationMapHeader == null)
return null;
// Set the game cache fragmentation map header
file.FragmentationMapHeader = fragmentationMapHeader;
#endregion
#region Fragmentation Maps
// Create the fragmentation map array
file.FragmentationMaps = new FragmentationMap[fragmentationMapHeader.BlockCount];
// Try to parse the fragmentation maps
for (int i = 0; i < fragmentationMapHeader.BlockCount; i++)
{
var fragmentationMap = ParseFragmentationMap(data);
file.FragmentationMaps[i] = fragmentationMap;
}
#endregion
#region Block Entry Map Header
if (header.MinorVersion < 6)
{
// Try to parse the block entry map header
var blockEntryMapHeader = ParseBlockEntryMapHeader(data);
if (blockEntryMapHeader == null)
return null;
// Set the game cache block entry map header
file.BlockEntryMapHeader = blockEntryMapHeader;
}
#endregion
#region Block Entry Maps
if (header.MinorVersion < 6)
{
// Create the block entry map array
file.BlockEntryMaps = new BlockEntryMap[file.BlockEntryMapHeader.BlockCount];
// Try to parse the block entry maps
for (int i = 0; i < file.BlockEntryMapHeader.BlockCount; i++)
{
var blockEntryMap = ParseBlockEntryMap(data);
file.BlockEntryMaps[i] = blockEntryMap;
}
}
#endregion
// Cache the current offset
initialOffset = data.Position;
#region Directory Header
// Try to parse the directory header
var directoryHeader = ParseDirectoryHeader(data);
if (directoryHeader == null)
return null;
// Set the game cache directory header
file.DirectoryHeader = directoryHeader;
#endregion
#region Directory Entries
// Create the directory entry array
file.DirectoryEntries = new DirectoryEntry[directoryHeader.ItemCount];
// Try to parse the directory entries
for (int i = 0; i < directoryHeader.ItemCount; i++)
{
var directoryEntry = ParseDirectoryEntry(data);
file.DirectoryEntries[i] = directoryEntry;
}
#endregion
#region Directory Names
if (directoryHeader.NameSize > 0)
{
// Get the current offset for adjustment
long directoryNamesStart = data.Position;
// Get the ending offset
long directoryNamesEnd = data.Position + directoryHeader.NameSize;
// Create the string dictionary
file.DirectoryNames = new Dictionary<long, string>();
// Loop and read the null-terminated strings
while (data.Position < directoryNamesEnd)
{
long nameOffset = data.Position - directoryNamesStart;
string directoryName = data.ReadString(Encoding.ASCII);
if (data.Position > directoryNamesEnd)
{
data.Seek(-directoryName.Length, SeekOrigin.Current);
byte[] endingData = data.ReadBytes((int)(directoryNamesEnd - data.Position));
if (endingData != null)
directoryName = Encoding.ASCII.GetString(endingData);
else
directoryName = null;
}
file.DirectoryNames[nameOffset] = directoryName;
}
// Loop and assign to entries
foreach (var directoryEntry in file.DirectoryEntries)
{
directoryEntry.Name = file.DirectoryNames[directoryEntry.NameOffset];
}
}
#endregion
#region Directory Info 1 Entries
// Create the directory info 1 entry array
file.DirectoryInfo1Entries = new DirectoryInfo1Entry[directoryHeader.Info1Count];
// Try to parse the directory info 1 entries
for (int i = 0; i < directoryHeader.Info1Count; i++)
{
var directoryInfo1Entry = ParseDirectoryInfo1Entry(data);
file.DirectoryInfo1Entries[i] = directoryInfo1Entry;
}
#endregion
#region Directory Info 2 Entries
// Create the directory info 2 entry array
file.DirectoryInfo2Entries = new DirectoryInfo2Entry[directoryHeader.ItemCount];
// Try to parse the directory info 2 entries
for (int i = 0; i < directoryHeader.ItemCount; i++)
{
var directoryInfo2Entry = ParseDirectoryInfo2Entry(data);
file.DirectoryInfo2Entries[i] = directoryInfo2Entry;
}
#endregion
#region Directory Copy Entries
// Create the directory copy entry array
file.DirectoryCopyEntries = new DirectoryCopyEntry[directoryHeader.CopyCount];
// Try to parse the directory copy entries
for (int i = 0; i < directoryHeader.CopyCount; i++)
{
var directoryCopyEntry = ParseDirectoryCopyEntry(data);
file.DirectoryCopyEntries[i] = directoryCopyEntry;
}
#endregion
#region Directory Local Entries
// Create the directory local entry array
file.DirectoryLocalEntries = new DirectoryLocalEntry[directoryHeader.LocalCount];
// Try to parse the directory local entries
for (int i = 0; i < directoryHeader.LocalCount; i++)
{
var directoryLocalEntry = ParseDirectoryLocalEntry(data);
file.DirectoryLocalEntries[i] = directoryLocalEntry;
}
#endregion
// Seek to end of directory section, just in case
data.Seek(initialOffset + directoryHeader.DirectorySize, SeekOrigin.Begin);
#region Directory Map Header
if (header.MinorVersion >= 5)
{
// Try to parse the directory map header
var directoryMapHeader = ParseDirectoryMapHeader(data);
if (directoryMapHeader == null)
return null;
// Set the game cache directory map header
file.DirectoryMapHeader = directoryMapHeader;
}
#endregion
#region Directory Map Entries
// Create the directory map entry array
file.DirectoryMapEntries = new DirectoryMapEntry[directoryHeader.ItemCount];
// Try to parse the directory map entries
for (int i = 0; i < directoryHeader.ItemCount; i++)
{
var directoryMapEntry = ParseDirectoryMapEntry(data);
file.DirectoryMapEntries[i] = directoryMapEntry;
}
#endregion
#region Checksum Header
// Try to parse the checksum header
var checksumHeader = ParseChecksumHeader(data);
if (checksumHeader == null)
return null;
// Set the game cache checksum header
file.ChecksumHeader = checksumHeader;
#endregion
// Cache the current offset
initialOffset = data.Position;
#region Checksum Map Header
// Try to parse the checksum map header
var checksumMapHeader = ParseChecksumMapHeader(data);
if (checksumMapHeader == null)
return null;
// Set the game cache checksum map header
file.ChecksumMapHeader = checksumMapHeader;
#endregion
#region Checksum Map Entries
// Create the checksum map entry array
file.ChecksumMapEntries = new ChecksumMapEntry[checksumMapHeader.ItemCount];
// Try to parse the checksum map entries
for (int i = 0; i < checksumMapHeader.ItemCount; i++)
{
var checksumMapEntry = ParseChecksumMapEntry(data);
file.ChecksumMapEntries[i] = checksumMapEntry;
}
#endregion
#region Checksum Entries
// Create the checksum entry array
file.ChecksumEntries = new ChecksumEntry[checksumMapHeader.ChecksumCount];
// Try to parse the checksum entries
for (int i = 0; i < checksumMapHeader.ChecksumCount; i++)
{
var checksumEntry = ParseChecksumEntry(data);
file.ChecksumEntries[i] = checksumEntry;
}
#endregion
// Seek to end of checksum section, just in case
data.Seek(initialOffset + checksumHeader.ChecksumSize, SeekOrigin.Begin);
#region Data Block Header
// Try to parse the data block header
var dataBlockHeader = ParseDataBlockHeader(data, header.MinorVersion);
if (dataBlockHeader == null)
return null;
// Set the game cache data block header
file.DataBlockHeader = dataBlockHeader;
#endregion
return file;
}
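/// <summary>
/// Illustrative usage sketch, not part of the original file; the method name,
/// path argument, and console output are assumptions for demonstration only.
/// </summary>
public static void PrintItemCount(string path)
{
using (FileStream stream = System.IO.File.OpenRead(path))
{
Models.GCF.File file = ParseFile(stream);
if (file != null)
System.Console.WriteLine($"Cache holds {file.DirectoryHeader.ItemCount} item(s)");
}
}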
/// <summary>
/// Parse a Stream into a Half-Life Game Cache header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache header on success, null on error</returns>
private static Header ParseHeader(Stream data)
{
// TODO: Use marshalling here instead of building
Header header = new Header();
header.Dummy0 = data.ReadUInt32();
if (header.Dummy0 != 0x00000001)
return null;
header.MajorVersion = data.ReadUInt32();
if (header.MajorVersion != 0x00000001)
return null;
header.MinorVersion = data.ReadUInt32();
if (header.MinorVersion != 3 && header.MinorVersion != 5 && header.MinorVersion != 6)
return null;
header.CacheID = data.ReadUInt32();
header.LastVersionPlayed = data.ReadUInt32();
header.Dummy1 = data.ReadUInt32();
header.Dummy2 = data.ReadUInt32();
header.FileSize = data.ReadUInt32();
header.BlockSize = data.ReadUInt32();
header.BlockCount = data.ReadUInt32();
header.Dummy3 = data.ReadUInt32();
return header;
}
/// <summary>
/// Parse a Stream into a Half-Life Game Cache block entry header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache block entry header on success, null on error</returns>
private static BlockEntryHeader ParseBlockEntryHeader(Stream data)
{
// TODO: Use marshalling here instead of building
BlockEntryHeader blockEntryHeader = new BlockEntryHeader();
blockEntryHeader.BlockCount = data.ReadUInt32();
blockEntryHeader.BlocksUsed = data.ReadUInt32();
blockEntryHeader.Dummy0 = data.ReadUInt32();
blockEntryHeader.Dummy1 = data.ReadUInt32();
blockEntryHeader.Dummy2 = data.ReadUInt32();
blockEntryHeader.Dummy3 = data.ReadUInt32();
blockEntryHeader.Dummy4 = data.ReadUInt32();
blockEntryHeader.Checksum = data.ReadUInt32();
return blockEntryHeader;
}
/// <summary>
/// Parse a Stream into a Half-Life Game Cache block entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache block entry on success, null on error</returns>
private static BlockEntry ParseBlockEntry(Stream data)
{
// TODO: Use marshalling here instead of building
BlockEntry blockEntry = new BlockEntry();
blockEntry.EntryFlags = data.ReadUInt32();
blockEntry.FileDataOffset = data.ReadUInt32();
blockEntry.FileDataSize = data.ReadUInt32();
blockEntry.FirstDataBlockIndex = data.ReadUInt32();
blockEntry.NextBlockEntryIndex = data.ReadUInt32();
blockEntry.PreviousBlockEntryIndex = data.ReadUInt32();
blockEntry.DirectoryIndex = data.ReadUInt32();
return blockEntry;
}
/// <summary>
/// Parse a Stream into a Half-Life Game Cache fragmentation map header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache fragmentation map header on success, null on error</returns>
private static FragmentationMapHeader ParseFragmentationMapHeader(Stream data)
{
// TODO: Use marshalling here instead of building
FragmentationMapHeader fragmentationMapHeader = new FragmentationMapHeader();
fragmentationMapHeader.BlockCount = data.ReadUInt32();
fragmentationMapHeader.FirstUnusedEntry = data.ReadUInt32();
fragmentationMapHeader.Terminator = data.ReadUInt32();
fragmentationMapHeader.Checksum = data.ReadUInt32();
return fragmentationMapHeader;
}
/// <summary>
/// Parse a Stream into a Half-Life Game Cache fragmentation map
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache fragmentation map on success, null on error</returns>
private static FragmentationMap ParseFragmentationMap(Stream data)
{
// TODO: Use marshalling here instead of building
FragmentationMap fragmentationMap = new FragmentationMap();
fragmentationMap.NextDataBlockIndex = data.ReadUInt32();
return fragmentationMap;
}
/// <summary>
/// Parse a Stream into a Half-Life Game Cache block entry map header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache block entry map header on success, null on error</returns>
private static BlockEntryMapHeader ParseBlockEntryMapHeader(Stream data)
{
// TODO: Use marshalling here instead of building
BlockEntryMapHeader blockEntryMapHeader = new BlockEntryMapHeader();
blockEntryMapHeader.BlockCount = data.ReadUInt32();
blockEntryMapHeader.FirstBlockEntryIndex = data.ReadUInt32();
blockEntryMapHeader.LastBlockEntryIndex = data.ReadUInt32();
blockEntryMapHeader.Dummy0 = data.ReadUInt32();
blockEntryMapHeader.Checksum = data.ReadUInt32();
return blockEntryMapHeader;
}
/// <summary>
/// Parse a Stream into a Half-Life Game Cache block entry map
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache block entry map on success, null on error</returns>
private static BlockEntryMap ParseBlockEntryMap(Stream data)
{
// TODO: Use marshalling here instead of building
BlockEntryMap blockEntryMap = new BlockEntryMap();
blockEntryMap.PreviousBlockEntryIndex = data.ReadUInt32();
blockEntryMap.NextBlockEntryIndex = data.ReadUInt32();
return blockEntryMap;
}
/// <summary>
/// Parse a Stream into a Half-Life Game Cache directory header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache directory header on success, null on error</returns>
private static DirectoryHeader ParseDirectoryHeader(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryHeader directoryHeader = new DirectoryHeader();
directoryHeader.Dummy0 = data.ReadUInt32();
directoryHeader.CacheID = data.ReadUInt32();
directoryHeader.LastVersionPlayed = data.ReadUInt32();
directoryHeader.ItemCount = data.ReadUInt32();
directoryHeader.FileCount = data.ReadUInt32();
directoryHeader.Dummy1 = data.ReadUInt32();
directoryHeader.DirectorySize = data.ReadUInt32();
directoryHeader.NameSize = data.ReadUInt32();
directoryHeader.Info1Count = data.ReadUInt32();
directoryHeader.CopyCount = data.ReadUInt32();
directoryHeader.LocalCount = data.ReadUInt32();
directoryHeader.Dummy2 = data.ReadUInt32();
directoryHeader.Dummy3 = data.ReadUInt32();
directoryHeader.Checksum = data.ReadUInt32();
return directoryHeader;
}
/// <summary>
/// Parse a Stream into a Half-Life Game Cache directory entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache directory entry on success, null on error</returns>
private static DirectoryEntry ParseDirectoryEntry(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryEntry directoryEntry = new DirectoryEntry();
directoryEntry.NameOffset = data.ReadUInt32();
directoryEntry.ItemSize = data.ReadUInt32();
directoryEntry.ChecksumIndex = data.ReadUInt32();
directoryEntry.DirectoryFlags = (HL_GCF_FLAG)data.ReadUInt32();
directoryEntry.ParentIndex = data.ReadUInt32();
directoryEntry.NextIndex = data.ReadUInt32();
directoryEntry.FirstIndex = data.ReadUInt32();
return directoryEntry;
}
/// <summary>
/// Parse a Stream into a Half-Life Game Cache directory info 1 entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache directory info 1 entry on success, null on error</returns>
private static DirectoryInfo1Entry ParseDirectoryInfo1Entry(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryInfo1Entry directoryInfo1Entry = new DirectoryInfo1Entry();
directoryInfo1Entry.Dummy0 = data.ReadUInt32();
return directoryInfo1Entry;
}
/// <summary>
/// Parse a Stream into a Half-Life Game Cache directory info 2 entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache directory info 2 entry on success, null on error</returns>
private static DirectoryInfo2Entry ParseDirectoryInfo2Entry(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryInfo2Entry directoryInfo2Entry = new DirectoryInfo2Entry();
directoryInfo2Entry.Dummy0 = data.ReadUInt32();
return directoryInfo2Entry;
}
/// <summary>
/// Parse a Stream into a Half-Life Game Cache directory copy entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache directory copy entry on success, null on error</returns>
private static DirectoryCopyEntry ParseDirectoryCopyEntry(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryCopyEntry directoryCopyEntry = new DirectoryCopyEntry();
directoryCopyEntry.DirectoryIndex = data.ReadUInt32();
return directoryCopyEntry;
}
/// <summary>
/// Parse a Stream into a Half-Life Game Cache directory local entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache directory local entry on success, null on error</returns>
private static DirectoryLocalEntry ParseDirectoryLocalEntry(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryLocalEntry directoryLocalEntry = new DirectoryLocalEntry();
directoryLocalEntry.DirectoryIndex = data.ReadUInt32();
return directoryLocalEntry;
}
/// <summary>
/// Parse a Stream into a Half-Life Game Cache directory map header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache directory map header on success, null on error</returns>
private static DirectoryMapHeader ParseDirectoryMapHeader(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryMapHeader directoryMapHeader = new DirectoryMapHeader();
directoryMapHeader.Dummy0 = data.ReadUInt32();
if (directoryMapHeader.Dummy0 != 0x00000001)
return null;
directoryMapHeader.Dummy1 = data.ReadUInt32();
if (directoryMapHeader.Dummy1 != 0x00000000)
return null;
return directoryMapHeader;
}
/// <summary>
/// Parse a Stream into a Half-Life Game Cache directory map entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache directory map entry on success, null on error</returns>
private static DirectoryMapEntry ParseDirectoryMapEntry(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryMapEntry directoryMapEntry = new DirectoryMapEntry();
directoryMapEntry.FirstBlockIndex = data.ReadUInt32();
return directoryMapEntry;
}
/// <summary>
/// Parse a Stream into a Half-Life Game Cache checksum header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache checksum header on success, null on error</returns>
private static ChecksumHeader ParseChecksumHeader(Stream data)
{
// TODO: Use marshalling here instead of building
ChecksumHeader checksumHeader = new ChecksumHeader();
checksumHeader.Dummy0 = data.ReadUInt32();
if (checksumHeader.Dummy0 != 0x00000001)
return null;
checksumHeader.ChecksumSize = data.ReadUInt32();
return checksumHeader;
}
/// <summary>
/// Parse a Stream into a Half-Life Game Cache checksum map header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache checksum map header on success, null on error</returns>
private static ChecksumMapHeader ParseChecksumMapHeader(Stream data)
{
// TODO: Use marshalling here instead of building
ChecksumMapHeader checksumMapHeader = new ChecksumMapHeader();
checksumMapHeader.Dummy0 = data.ReadUInt32();
if (checksumMapHeader.Dummy0 != 0x14893721)
return null;
checksumMapHeader.Dummy1 = data.ReadUInt32();
if (checksumMapHeader.Dummy1 != 0x00000001)
return null;
checksumMapHeader.ItemCount = data.ReadUInt32();
checksumMapHeader.ChecksumCount = data.ReadUInt32();
return checksumMapHeader;
}
/// <summary>
/// Parse a Stream into a Half-Life Game Cache checksum map entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache checksum map entry on success, null on error</returns>
private static ChecksumMapEntry ParseChecksumMapEntry(Stream data)
{
// TODO: Use marshalling here instead of building
ChecksumMapEntry checksumMapEntry = new ChecksumMapEntry();
checksumMapEntry.ChecksumCount = data.ReadUInt32();
checksumMapEntry.FirstChecksumIndex = data.ReadUInt32();
return checksumMapEntry;
}
/// <summary>
/// Parse a Stream into a Half-Life Game Cache checksum entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache checksum entry on success, null on error</returns>
private static ChecksumEntry ParseChecksumEntry(Stream data)
{
// TODO: Use marshalling here instead of building
ChecksumEntry checksumEntry = new ChecksumEntry();
checksumEntry.Checksum = data.ReadUInt32();
return checksumEntry;
}
/// <summary>
/// Parse a Stream into a Half-Life Game Cache data block header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="minorVersion">Minor version field from the header</param>
/// <returns>Filled Half-Life Game Cache data block header on success, null on error</returns>
private static DataBlockHeader ParseDataBlockHeader(Stream data, uint minorVersion)
{
// TODO: Use marshalling here instead of building
DataBlockHeader dataBlockHeader = new DataBlockHeader();
// In minor version 3 the DataBlockHeader is missing the LastVersionPlayed field; it only appears in minor version 5 and later.
if (minorVersion >= 5)
dataBlockHeader.LastVersionPlayed = data.ReadUInt32();
dataBlockHeader.BlockCount = data.ReadUInt32();
dataBlockHeader.BlockSize = data.ReadUInt32();
dataBlockHeader.FirstBlockOffset = data.ReadUInt32();
dataBlockHeader.BlocksUsed = data.ReadUInt32();
dataBlockHeader.Checksum = data.ReadUInt32();
return dataBlockHeader;
}
#endregion
}
}

@@ -1,808 +0,0 @@
using System.Collections.Generic;
using System.IO;
using System.Text;
using BinaryObjectScanner.Models.InstallShieldCabinet;
using BinaryObjectScanner.Utilities;
using static BinaryObjectScanner.Models.InstallShieldCabinet.Constants;
namespace BinaryObjectScanner.Builders
{
// TODO: Add multi-cabinet reading
public class InstallShieldCabinet
{
#region Byte Data
/// <summary>
/// Parse a byte array into an InstallShield Cabinet file
/// </summary>
/// <param name="data">Byte array to parse</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>Filled cabinet on success, null on error</returns>
public static Cabinet ParseCabinet(byte[] data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return ParseCabinet(dataStream);
}
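// Example usage (a sketch; the header file name is hypothetical):
//
//     byte[] fileBytes = System.IO.File.ReadAllBytes("data1.hdr");
//     Cabinet cabinet = InstallShieldCabinet.ParseCabinet(fileBytes, 0);
//     if (cabinet != null)
//     {
//         // Inspect cabinet.Descriptor, cabinet.FileDescriptors, etc.
//     }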
#endregion
#region Stream Data
/// <summary>
/// Parse a Stream into an InstallShield Cabinet file
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled cabinet on success, null on error</returns>
public static Cabinet ParseCabinet(Stream data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
return null;
// If the offset is out of bounds
if (data.Position < 0 || data.Position >= data.Length)
return null;
// Cache the current offset
int initialOffset = (int)data.Position;
// Create a new cabinet to fill
var cabinet = new Cabinet();
#region Common Header
// Try to parse the cabinet header
var commonHeader = ParseCommonHeader(data);
if (commonHeader == null)
return null;
// Set the cabinet header
cabinet.CommonHeader = commonHeader;
#endregion
#region Volume Header
// Try to parse the volume header
var volumeHeader = ParseVolumeHeader(data, GetMajorVersion(commonHeader));
if (volumeHeader == null)
return null;
// Set the volume header
cabinet.VolumeHeader = volumeHeader;
#endregion
#region Descriptor
// Get the descriptor offset
uint descriptorOffset = commonHeader.DescriptorOffset;
if (descriptorOffset >= data.Length)
return null;
// Seek to the descriptor
data.Seek(descriptorOffset, SeekOrigin.Begin);
// Try to parse the descriptor
var descriptor = ParseDescriptor(data);
if (descriptor == null)
return null;
// Set the descriptor
cabinet.Descriptor = descriptor;
#endregion
#region File Descriptor Offsets
// Get the file table offset
uint fileTableOffset = commonHeader.DescriptorOffset + descriptor.FileTableOffset;
if (fileTableOffset >= data.Length)
return null;
// Seek to the file table
data.Seek(fileTableOffset, SeekOrigin.Begin);
// Get the number of file table items
uint fileTableItems;
if (GetMajorVersion(commonHeader) <= 5)
fileTableItems = descriptor.DirectoryCount + descriptor.FileCount;
else
fileTableItems = descriptor.DirectoryCount;
// Create and fill the file table
cabinet.FileDescriptorOffsets = new uint[fileTableItems];
for (int i = 0; i < cabinet.FileDescriptorOffsets.Length; i++)
{
cabinet.FileDescriptorOffsets[i] = data.ReadUInt32();
}
#endregion
#region Directory Descriptors
// Create and fill the directory descriptors
cabinet.DirectoryNames = new string[descriptor.DirectoryCount];
for (int i = 0; i < descriptor.DirectoryCount; i++)
{
// Get the directory descriptor offset
uint offset = descriptorOffset
+ descriptor.FileTableOffset
+ cabinet.FileDescriptorOffsets[i];
// If we have an invalid offset
if (offset >= data.Length)
continue;
// Seek to the file descriptor offset
data.Seek(offset, SeekOrigin.Begin);
// Create and add the file descriptor
string directoryName = ParseDirectoryName(data, GetMajorVersion(commonHeader));
cabinet.DirectoryNames[i] = directoryName;
}
#endregion
#region File Descriptors
// Create and fill the file descriptors
cabinet.FileDescriptors = new FileDescriptor[descriptor.FileCount];
for (int i = 0; i < descriptor.FileCount; i++)
{
// Get the file descriptor offset
uint offset;
if (GetMajorVersion(commonHeader) <= 5)
{
offset = descriptorOffset
+ descriptor.FileTableOffset
+ cabinet.FileDescriptorOffsets[descriptor.DirectoryCount + i];
}
else
{
offset = descriptorOffset
+ descriptor.FileTableOffset
+ descriptor.FileTableOffset2
+ (uint)(i * 0x57);
}
// If we have an invalid offset
if (offset >= data.Length)
continue;
// Seek to the file descriptor offset
data.Seek(offset, SeekOrigin.Begin);
// Create and add the file descriptor
FileDescriptor fileDescriptor = ParseFileDescriptor(data, GetMajorVersion(commonHeader), descriptorOffset + descriptor.FileTableOffset);
cabinet.FileDescriptors[i] = fileDescriptor;
}
#endregion
#region File Group Offsets
// Create and fill the file group offsets
cabinet.FileGroupOffsets = new Dictionary<long, OffsetList>();
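// The descriptor holds fixed-size arrays of list-head offsets; each nonzero
// head starts a chain of OffsetList nodes linked through NextOffset, so this
// loop walks every chain and keys the dictionary by the unadjusted offset.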
for (int i = 0; i < descriptor.FileGroupOffsets.Length; i++)
{
// Get the file group offset
uint offset = descriptor.FileGroupOffsets[i];
if (offset == 0)
continue;
// Adjust the file group offset
offset += commonHeader.DescriptorOffset;
if (offset >= data.Length)
continue;
// Seek to the file group offset
data.Seek(offset, SeekOrigin.Begin);
// Create and add the offset
OffsetList offsetList = ParseOffsetList(data, GetMajorVersion(commonHeader), descriptorOffset);
cabinet.FileGroupOffsets[descriptor.FileGroupOffsets[i]] = offsetList;
// If we have a nonzero next offset
uint nextOffset = offsetList.NextOffset;
while (nextOffset != 0)
{
// Get the next offset to read
uint internalOffset = nextOffset + commonHeader.DescriptorOffset;
// Seek to the file group offset
data.Seek(internalOffset, SeekOrigin.Begin);
// Create and add the offset
offsetList = ParseOffsetList(data, GetMajorVersion(commonHeader), descriptorOffset);
cabinet.FileGroupOffsets[nextOffset] = offsetList;
// Set the next offset
nextOffset = offsetList.NextOffset;
}
}
#endregion
#region File Groups
// Create the file groups array
cabinet.FileGroups = new FileGroup[cabinet.FileGroupOffsets.Count];
// Create and fill the file groups
int fileGroupId = 0;
foreach (var kvp in cabinet.FileGroupOffsets)
{
// Get the offset
OffsetList list = kvp.Value;
if (list == null)
{
fileGroupId++;
continue;
}
// If we have an invalid offset
if (list.DescriptorOffset <= 0)
{
fileGroupId++;
continue;
}
// Seek to the file group
data.Seek(list.DescriptorOffset + descriptorOffset, SeekOrigin.Begin);
// Try to parse the file group
var fileGroup = ParseFileGroup(data, GetMajorVersion(commonHeader), descriptorOffset);
if (fileGroup == null)
return null;
// Add the file group
cabinet.FileGroups[fileGroupId++] = fileGroup;
}
#endregion
#region Component Offsets
// Create and fill the component offsets
cabinet.ComponentOffsets = new Dictionary<long, OffsetList>();
for (int i = 0; i < descriptor.ComponentOffsets.Length; i++)
{
// Get the component offset
uint offset = descriptor.ComponentOffsets[i];
if (offset == 0)
continue;
// Adjust the component offset
offset += commonHeader.DescriptorOffset;
if (offset >= data.Length)
continue;
// Seek to the component offset
data.Seek(offset, SeekOrigin.Begin);
// Create and add the offset
OffsetList offsetList = ParseOffsetList(data, GetMajorVersion(commonHeader), descriptorOffset);
cabinet.ComponentOffsets[descriptor.ComponentOffsets[i]] = offsetList;
// If we have a nonzero next offset
uint nextOffset = offsetList.NextOffset;
while (nextOffset != 0)
{
// Get the next offset to read
uint internalOffset = nextOffset + commonHeader.DescriptorOffset;
// Seek to the component offset
data.Seek(internalOffset, SeekOrigin.Begin);
// Create and add the offset
offsetList = ParseOffsetList(data, GetMajorVersion(commonHeader), descriptorOffset);
cabinet.ComponentOffsets[nextOffset] = offsetList;
// Set the next offset
nextOffset = offsetList.NextOffset;
}
}
#endregion
#region Components
// Create the components array
cabinet.Components = new Component[cabinet.ComponentOffsets.Count];
// Create and fill the components
int componentId = 0;
foreach (KeyValuePair<long, OffsetList> kvp in cabinet.ComponentOffsets)
{
// Get the offset
OffsetList list = kvp.Value;
if (list == null)
{
componentId++;
continue;
}
// If we have an invalid offset
if (list.DescriptorOffset <= 0)
{
componentId++;
continue;
}
// Seek to the component
data.Seek(list.DescriptorOffset + descriptorOffset, SeekOrigin.Begin);
// Try to parse the component
var component = ParseComponent(data, GetMajorVersion(commonHeader), descriptorOffset);
if (component == null)
return null;
// Add the component
cabinet.Components[componentId++] = component;
}
#endregion
// TODO: Parse setup types
return cabinet;
}
/// <summary>
/// Parse a Stream into a common header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled common header on success, null on error</returns>
private static CommonHeader ParseCommonHeader(Stream data)
{
CommonHeader commonHeader = new CommonHeader();
byte[] signature = data.ReadBytes(4);
commonHeader.Signature = Encoding.ASCII.GetString(signature);
if (commonHeader.Signature != SignatureString)
return null;
commonHeader.Version = data.ReadUInt32();
commonHeader.VolumeInfo = data.ReadUInt32();
commonHeader.DescriptorOffset = data.ReadUInt32();
commonHeader.DescriptorSize = data.ReadUInt32();
return commonHeader;
}
/// <summary>
/// Parse a Stream into a volume header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="majorVersion">Major version of the cabinet</param>
/// <returns>Filled volume header on success, null on error</returns>
private static VolumeHeader ParseVolumeHeader(Stream data, int majorVersion)
{
VolumeHeader volumeHeader = new VolumeHeader();
// Read the descriptor based on version
if (majorVersion <= 5)
{
volumeHeader.DataOffset = data.ReadUInt32();
_ = data.ReadBytes(0x04); // Skip 0x04 bytes, unknown data?
volumeHeader.FirstFileIndex = data.ReadUInt32();
volumeHeader.LastFileIndex = data.ReadUInt32();
volumeHeader.FirstFileOffset = data.ReadUInt32();
volumeHeader.FirstFileSizeExpanded = data.ReadUInt32();
volumeHeader.FirstFileSizeCompressed = data.ReadUInt32();
volumeHeader.LastFileOffset = data.ReadUInt32();
volumeHeader.LastFileSizeExpanded = data.ReadUInt32();
volumeHeader.LastFileSizeCompressed = data.ReadUInt32();
}
else
{
// TODO: Should standard and high values be combined?
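// (The *High fields presumably hold the upper 32 bits of 64-bit values for
// cabinets over 4 GiB; if combined, each pair would be ((ulong)high << 32) | low.)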
volumeHeader.DataOffset = data.ReadUInt32();
volumeHeader.DataOffsetHigh = data.ReadUInt32();
volumeHeader.FirstFileIndex = data.ReadUInt32();
volumeHeader.LastFileIndex = data.ReadUInt32();
volumeHeader.FirstFileOffset = data.ReadUInt32();
volumeHeader.FirstFileOffsetHigh = data.ReadUInt32();
volumeHeader.FirstFileSizeExpanded = data.ReadUInt32();
volumeHeader.FirstFileSizeExpandedHigh = data.ReadUInt32();
volumeHeader.FirstFileSizeCompressed = data.ReadUInt32();
volumeHeader.FirstFileSizeCompressedHigh = data.ReadUInt32();
volumeHeader.LastFileOffset = data.ReadUInt32();
volumeHeader.LastFileOffsetHigh = data.ReadUInt32();
volumeHeader.LastFileSizeExpanded = data.ReadUInt32();
volumeHeader.LastFileSizeExpandedHigh = data.ReadUInt32();
volumeHeader.LastFileSizeCompressed = data.ReadUInt32();
volumeHeader.LastFileSizeCompressedHigh = data.ReadUInt32();
}
return volumeHeader;
}
/// <summary>
/// Parse a Stream into a descriptor
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled descriptor on success, null on error</returns>
private static Descriptor ParseDescriptor(Stream data)
{
Descriptor descriptor = new Descriptor();
descriptor.StringsOffset = data.ReadUInt32();
descriptor.Reserved0 = data.ReadBytes(4);
descriptor.ComponentListOffset = data.ReadUInt32();
descriptor.FileTableOffset = data.ReadUInt32();
descriptor.Reserved1 = data.ReadBytes(4);
descriptor.FileTableSize = data.ReadUInt32();
descriptor.FileTableSize2 = data.ReadUInt32();
descriptor.DirectoryCount = data.ReadUInt16();
descriptor.Reserved2 = data.ReadBytes(4);
descriptor.Reserved3 = data.ReadBytes(2);
descriptor.Reserved4 = data.ReadBytes(4);
descriptor.FileCount = data.ReadUInt32();
descriptor.FileTableOffset2 = data.ReadUInt32();
descriptor.ComponentTableInfoCount = data.ReadUInt16();
descriptor.ComponentTableOffset = data.ReadUInt32();
descriptor.Reserved5 = data.ReadBytes(4);
descriptor.Reserved6 = data.ReadBytes(4);
descriptor.FileGroupOffsets = new uint[MAX_FILE_GROUP_COUNT];
for (int i = 0; i < descriptor.FileGroupOffsets.Length; i++)
{
descriptor.FileGroupOffsets[i] = data.ReadUInt32();
}
descriptor.ComponentOffsets = new uint[MAX_COMPONENT_COUNT];
for (int i = 0; i < descriptor.ComponentOffsets.Length; i++)
{
descriptor.ComponentOffsets[i] = data.ReadUInt32();
}
descriptor.SetupTypesOffset = data.ReadUInt32();
descriptor.SetupTableOffset = data.ReadUInt32();
descriptor.Reserved7 = data.ReadBytes(4);
descriptor.Reserved8 = data.ReadBytes(4);
return descriptor;
}
/// <summary>
/// Parse a Stream into an offset list
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="majorVersion">Major version of the cabinet</param>
/// <param name="descriptorOffset">Offset of the cabinet descriptor</param>
/// <returns>Filled offset list on success, null on error</returns>
private static OffsetList ParseOffsetList(Stream data, int majorVersion, uint descriptorOffset)
{
OffsetList offsetList = new OffsetList();
offsetList.NameOffset = data.ReadUInt32();
offsetList.DescriptorOffset = data.ReadUInt32();
offsetList.NextOffset = data.ReadUInt32();
// Cache the current offset
long currentOffset = data.Position;
// Seek to the name offset
data.Seek(offsetList.NameOffset + descriptorOffset, SeekOrigin.Begin);
// Read the string
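// (Major version 17 and later appear to store names as UTF-16; earlier
// versions use ASCII. The same cutoff is applied to every string read
// throughout this builder.)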
if (majorVersion >= 17)
offsetList.Name = data.ReadString(Encoding.Unicode);
else
offsetList.Name = data.ReadString(Encoding.ASCII);
// Seek back to the correct offset
data.Seek(currentOffset, SeekOrigin.Begin);
return offsetList;
}
/// <summary>
/// Parse a Stream into a file group
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="majorVersion">Major version of the cabinet</param>
/// <param name="descriptorOffset">Offset of the cabinet descriptor</param>
/// <returns>Filled file group on success, null on error</returns>
private static FileGroup ParseFileGroup(Stream data, int majorVersion, uint descriptorOffset)
{
FileGroup fileGroup = new FileGroup();
fileGroup.NameOffset = data.ReadUInt32();
fileGroup.ExpandedSize = data.ReadUInt32();
fileGroup.Reserved0 = data.ReadBytes(4);
fileGroup.CompressedSize = data.ReadUInt32();
fileGroup.Reserved1 = data.ReadBytes(4);
fileGroup.Reserved2 = data.ReadBytes(2);
fileGroup.Attribute1 = data.ReadUInt16();
fileGroup.Attribute2 = data.ReadUInt16();
// TODO: Figure out what data lives in this area for V5 and below
if (majorVersion <= 5)
data.Seek(0x36, SeekOrigin.Current);
fileGroup.FirstFile = data.ReadUInt32();
fileGroup.LastFile = data.ReadUInt32();
fileGroup.UnknownOffset = data.ReadUInt32();
fileGroup.Var4Offset = data.ReadUInt32();
fileGroup.Var1Offset = data.ReadUInt32();
fileGroup.HTTPLocationOffset = data.ReadUInt32();
fileGroup.FTPLocationOffset = data.ReadUInt32();
fileGroup.MiscOffset = data.ReadUInt32();
fileGroup.Var2Offset = data.ReadUInt32();
fileGroup.TargetDirectoryOffset = data.ReadUInt32();
fileGroup.Reserved3 = data.ReadBytes(2);
fileGroup.Reserved4 = data.ReadBytes(2);
fileGroup.Reserved5 = data.ReadBytes(2);
fileGroup.Reserved6 = data.ReadBytes(2);
fileGroup.Reserved7 = data.ReadBytes(2);
// Cache the current position
long currentPosition = data.Position;
// Read the name, if possible
if (fileGroup.NameOffset != 0)
{
// Seek to the name
data.Seek(fileGroup.NameOffset + descriptorOffset, SeekOrigin.Begin);
// Read the string
if (majorVersion >= 17)
fileGroup.Name = data.ReadString(Encoding.Unicode);
else
fileGroup.Name = data.ReadString(Encoding.ASCII);
}
// Seek back to the correct offset
data.Seek(currentPosition, SeekOrigin.Begin);
return fileGroup;
}
/// <summary>
/// Parse a Stream into a component
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="majorVersion">Major version of the cabinet</param>
/// <param name="descriptorOffset">Offset of the cabinet descriptor</param>
/// <returns>Filled component on success, null on error</returns>
private static Component ParseComponent(Stream data, int majorVersion, uint descriptorOffset)
{
Component component = new Component();
component.IdentifierOffset = data.ReadUInt32();
component.DescriptorOffset = data.ReadUInt32();
component.DisplayNameOffset = data.ReadUInt32();
component.Reserved0 = data.ReadBytes(2);
component.ReservedOffset0 = data.ReadUInt32();
component.ReservedOffset1 = data.ReadUInt32();
component.ComponentIndex = data.ReadUInt16();
component.NameOffset = data.ReadUInt32();
component.ReservedOffset2 = data.ReadUInt32();
component.ReservedOffset3 = data.ReadUInt32();
component.ReservedOffset4 = data.ReadUInt32();
component.Reserved1 = data.ReadBytes(32);
component.CLSIDOffset = data.ReadUInt32();
component.Reserved2 = data.ReadBytes(28);
component.Reserved3 = data.ReadBytes(majorVersion <= 5 ? 2 : 1);
component.DependsCount = data.ReadUInt16();
component.DependsOffset = data.ReadUInt32();
component.FileGroupCount = data.ReadUInt16();
component.FileGroupNamesOffset = data.ReadUInt32();
component.X3Count = data.ReadUInt16();
component.X3Offset = data.ReadUInt32();
component.SubComponentsCount = data.ReadUInt16();
component.SubComponentsOffset = data.ReadUInt32();
component.NextComponentOffset = data.ReadUInt32();
component.ReservedOffset5 = data.ReadUInt32();
component.ReservedOffset6 = data.ReadUInt32();
component.ReservedOffset7 = data.ReadUInt32();
component.ReservedOffset8 = data.ReadUInt32();
// Cache the current position
long currentPosition = data.Position;
// Read the identifier, if possible
if (component.IdentifierOffset != 0)
{
// Seek to the identifier
data.Seek(component.IdentifierOffset + descriptorOffset, SeekOrigin.Begin);
// Read the string
if (majorVersion >= 17)
component.Identifier = data.ReadString(Encoding.Unicode);
else
component.Identifier = data.ReadString(Encoding.ASCII);
}
// Read the display name, if possible
if (component.DisplayNameOffset != 0)
{
// Seek to the name
data.Seek(component.DisplayNameOffset + descriptorOffset, SeekOrigin.Begin);
// Read the string
if (majorVersion >= 17)
component.DisplayName = data.ReadString(Encoding.Unicode);
else
component.DisplayName = data.ReadString(Encoding.ASCII);
}
// Read the name, if possible
if (component.NameOffset != 0)
{
// Seek to the name
data.Seek(component.NameOffset + descriptorOffset, SeekOrigin.Begin);
// Read the string
if (majorVersion >= 17)
component.Name = data.ReadString(Encoding.Unicode);
else
component.Name = data.ReadString(Encoding.ASCII);
}
// Read the CLSID, if possible
if (component.CLSIDOffset != 0)
{
// Seek to the CLSID
data.Seek(component.CLSIDOffset + descriptorOffset, SeekOrigin.Begin);
// Read the GUID
component.CLSID = data.ReadGuid();
}
// Read the file group names, if possible
if (component.FileGroupCount != 0 && component.FileGroupNamesOffset != 0)
{
// Seek to the file group table offset
data.Seek(component.FileGroupNamesOffset + descriptorOffset, SeekOrigin.Begin);
// Read the file group names table
component.FileGroupNames = new string[component.FileGroupCount];
for (int j = 0; j < component.FileGroupCount; j++)
{
// Get the name offset
uint nameOffset = data.ReadUInt32();
// Cache the current offset
long preNameOffset = data.Position;
// Seek to the name offset
data.Seek(nameOffset + descriptorOffset, SeekOrigin.Begin);
if (majorVersion >= 17)
component.FileGroupNames[j] = data.ReadString(Encoding.Unicode);
else
component.FileGroupNames[j] = data.ReadString(Encoding.ASCII);
// Seek back to the original position
data.Seek(preNameOffset, SeekOrigin.Begin);
}
}
// Seek back to the correct offset
data.Seek(currentPosition, SeekOrigin.Begin);
return component;
}
/// <summary>
/// Parse a Stream into a directory name
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="majorVersion">Major version of the cabinet</param>
/// <returns>Filled directory name on success, null on error</returns>
private static string ParseDirectoryName(Stream data, int majorVersion)
{
// Read the string
if (majorVersion >= 17)
return data.ReadString(Encoding.Unicode);
else
return data.ReadString(Encoding.ASCII);
}
/// <summary>
/// Parse a Stream into a file descriptor
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="majorVersion">Major version of the cabinet</param>
/// <param name="descriptorOffset">Offset of the cabinet descriptor</param>
/// <returns>Filled file descriptor on success, null on error</returns>
private static FileDescriptor ParseFileDescriptor(Stream data, int majorVersion, uint descriptorOffset)
{
FileDescriptor fileDescriptor = new FileDescriptor();
// Read the descriptor based on version
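// Version 5 and earlier descriptors are variable-size and reached through the
// offset table parsed in ParseCabinet; version 6 and later use fixed-size
// records, matching the 0x57-byte stride used to compute offsets there.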
if (majorVersion <= 5)
{
fileDescriptor.Volume = 0xFFFF; // Set by the header index
fileDescriptor.NameOffset = data.ReadUInt32();
fileDescriptor.DirectoryIndex = data.ReadUInt32();
fileDescriptor.Flags = (FileFlags)data.ReadUInt16();
fileDescriptor.ExpandedSize = data.ReadUInt32();
fileDescriptor.CompressedSize = data.ReadUInt32();
_ = data.ReadBytes(0x14); // Skip 0x14 bytes, unknown data?
fileDescriptor.DataOffset = data.ReadUInt32();
if (majorVersion == 5)
fileDescriptor.MD5 = data.ReadBytes(0x10);
}
else
{
fileDescriptor.Flags = (FileFlags)data.ReadUInt16();
fileDescriptor.ExpandedSize = data.ReadUInt64();
fileDescriptor.CompressedSize = data.ReadUInt64();
fileDescriptor.DataOffset = data.ReadUInt64();
fileDescriptor.MD5 = data.ReadBytes(0x10);
_ = data.ReadBytes(0x10); // Skip 0x10 bytes, unknown data?
fileDescriptor.NameOffset = data.ReadUInt32();
fileDescriptor.DirectoryIndex = data.ReadUInt16();
_ = data.ReadBytes(0x0C); // Skip 0x0C bytes, unknown data?
fileDescriptor.LinkPrevious = data.ReadUInt32();
fileDescriptor.LinkNext = data.ReadUInt32();
fileDescriptor.LinkFlags = (LinkFlags)data.ReadByteValue();
fileDescriptor.Volume = data.ReadUInt16();
}
// Cache the current position
long currentPosition = data.Position;
// Read the name, if possible
if (fileDescriptor.NameOffset != 0)
{
// Seek to the name
data.Seek(fileDescriptor.NameOffset + descriptorOffset, SeekOrigin.Begin);
// Read the string
if (majorVersion >= 17)
fileDescriptor.Name = data.ReadString(Encoding.Unicode);
else
fileDescriptor.Name = data.ReadString(Encoding.ASCII);
}
// Seek back to the correct offset
data.Seek(currentPosition, SeekOrigin.Begin);
return fileDescriptor;
}
#endregion
#region Helpers
/// <summary>
/// Get the major version of the cabinet
/// </summary>
/// <remarks>This should live in the wrapper but is needed during parsing</remarks>
private static int GetMajorVersion(CommonHeader commonHeader)
{
uint majorVersion = commonHeader.Version;
if (majorVersion >> 24 == 1)
{
majorVersion = (majorVersion >> 12) & 0x0F;
}
else if (majorVersion >> 24 == 2 || majorVersion >> 24 == 4)
{
majorVersion = majorVersion & 0xFFFF;
if (majorVersion != 0)
majorVersion /= 100;
}
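// Worked examples (encodings inferred from the masks above, not from any
// official documentation): 0x0100600C has high byte 1, so the major version
// is (0x0100600C >> 12) & 0x0F = 6; 0x02000907 has high byte 2, so the low
// word 0x0907 (2311) divided by 100 gives 23.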
return (int)majorVersion;
}
#endregion
}
}

@@ -1,943 +0,0 @@
using System.Collections.Generic;
using System.IO;
using System.Text;
using BinaryObjectScanner.Models.LinearExecutable;
using BinaryObjectScanner.Utilities;
using static BinaryObjectScanner.Models.LinearExecutable.Constants;
namespace BinaryObjectScanner.Builders
{
public static class LinearExecutable
{
#region Byte Data
/// <summary>
/// Parse a byte array into a Linear Executable
/// </summary>
/// <param name="data">Byte array to parse</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>Filled executable on success, null on error</returns>
public static Executable ParseExecutable(byte[] data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return ParseExecutable(dataStream);
}
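// Example usage (a sketch; the file name is hypothetical):
//
//     byte[] exeBytes = System.IO.File.ReadAllBytes("SAMPLE.EXE");
//     Executable lex = LinearExecutable.ParseExecutable(exeBytes, 0);
//     if (lex != null)
//     {
//         // Inspect lex.InformationBlock, lex.ObjectTable, etc.
//     }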
#endregion
#region Stream Data
/// <summary>
/// Parse a Stream into a Linear Executable
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled executable on success, null on error</returns>
public static Executable ParseExecutable(Stream data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
return null;
// If the offset is out of bounds
if (data.Position < 0 || data.Position >= data.Length)
return null;
// Cache the current offset
int initialOffset = (int)data.Position;
// Create a new executable to fill
var executable = new Executable();
#region MS-DOS Stub
// Parse the MS-DOS stub
var stub = MSDOS.ParseExecutable(data);
if (stub?.Header == null || stub.Header.NewExeHeaderAddr == 0)
return null;
// Set the MS-DOS stub
executable.Stub = stub;
#endregion
#region Information Block
// Try to parse the executable header
data.Seek(initialOffset + stub.Header.NewExeHeaderAddr, SeekOrigin.Begin);
var informationBlock = ParseInformationBlock(data);
if (informationBlock == null)
return null;
// Set the executable header
executable.InformationBlock = informationBlock;
#endregion
#region Object Table
// Get the object table offset
long offset = informationBlock.ObjectTableOffset + stub.Header.NewExeHeaderAddr;
if (offset > stub.Header.NewExeHeaderAddr && offset < data.Length)
{
// Seek to the object table
data.Seek(offset, SeekOrigin.Begin);
// Create the object table
executable.ObjectTable = new ObjectTableEntry[informationBlock.ObjectTableCount];
// Try to parse the object table
for (int i = 0; i < executable.ObjectTable.Length; i++)
{
var entry = ParseObjectTableEntry(data);
if (entry == null)
return null;
executable.ObjectTable[i] = entry;
}
}
#endregion
#region Object Page Map
// Get the object page map offset
offset = informationBlock.ObjectPageMapOffset + stub.Header.NewExeHeaderAddr;
if (offset > stub.Header.NewExeHeaderAddr && offset < data.Length)
{
// Seek to the object page map
data.Seek(offset, SeekOrigin.Begin);
// Create the object page map (one entry per page in the module)
executable.ObjectPageMap = new ObjectPageMapEntry[informationBlock.ModuleNumberPages];
// Try to parse the object page map
for (int i = 0; i < executable.ObjectPageMap.Length; i++)
{
var entry = ParseObjectPageMapEntry(data);
if (entry == null)
return null;
executable.ObjectPageMap[i] = entry;
}
}
#endregion
#region Object Iterate Data Map
offset = informationBlock.ObjectIterateDataMapOffset + stub.Header.NewExeHeaderAddr;
if (offset > stub.Header.NewExeHeaderAddr && offset < data.Length)
{
// Seek to the object iterate data map
data.Seek(offset, SeekOrigin.Begin);
// TODO: Implement when model found
// No model has been found in the documentation about what
// each of the entries looks like for this map.
}
#endregion
#region Resource Table
// Get the resource table offset
offset = informationBlock.ResourceTableOffset + stub.Header.NewExeHeaderAddr;
if (offset > stub.Header.NewExeHeaderAddr && offset < data.Length)
{
// Seek to the resource table
data.Seek(offset, SeekOrigin.Begin);
// Create the resource table
executable.ResourceTable = new ResourceTableEntry[informationBlock.ResourceTableCount];
// Try to parse the resource table
for (int i = 0; i < executable.ResourceTable.Length; i++)
{
var entry = ParseResourceTableEntry(data);
if (entry == null)
return null;
executable.ResourceTable[i] = entry;
}
}
#endregion
#region Resident Names Table
// Get the resident names table offset
offset = informationBlock.ResidentNamesTableOffset + stub.Header.NewExeHeaderAddr;
if (offset > stub.Header.NewExeHeaderAddr && offset < data.Length)
{
// Seek to the resident names table
data.Seek(offset, SeekOrigin.Begin);
// Create the resident names table
var residentNamesTable = new List<ResidentNamesTableEntry>();
// Try to parse the resident names table
while (true)
{
var entry = ParseResidentNamesTableEntry(data);
residentNamesTable.Add(entry);
// If we have a 0-length entry
if (entry.Length == 0)
break;
}
// Assign the resident names table
executable.ResidentNamesTable = residentNamesTable.ToArray();
}
#endregion
#region Entry Table
// Get the entry table offset
offset = informationBlock.EntryTableOffset + stub.Header.NewExeHeaderAddr;
if (offset > stub.Header.NewExeHeaderAddr && offset < data.Length)
{
// Seek to the entry table
data.Seek(offset, SeekOrigin.Begin);
// Create the entry table
var entryTable = new List<EntryTableBundle>();
// Try to parse the entry table
while (true)
{
var bundle = ParseEntryTableBundle(data);
entryTable.Add(bundle);
// If we have a 0-length entry
if (bundle.Entries == 0)
break;
}
// Assign the entry table
executable.EntryTable = entryTable.ToArray();
}
#endregion
#region Module Format Directives Table
// Get the module format directives table offset
offset = informationBlock.ModuleDirectivesTableOffset + stub.Header.NewExeHeaderAddr;
if (offset > stub.Header.NewExeHeaderAddr && offset < data.Length)
{
// Seek to the module format directives table
data.Seek(offset, SeekOrigin.Begin);
// Create the module format directives table
executable.ModuleFormatDirectivesTable = new ModuleFormatDirectivesTableEntry[informationBlock.ModuleDirectivesCount];
// Try to parse the module format directives table
for (int i = 0; i < executable.ModuleFormatDirectivesTable.Length; i++)
{
var entry = ParseModuleFormatDirectivesTableEntry(data);
if (entry == null)
return null;
executable.ModuleFormatDirectivesTable[i] = entry;
}
}
#endregion
#region Verify Record Directive Table
// TODO: Figure out where the offset to this table is stored
// The documentation suggests it's either part of or immediately following
// the Module Format Directives Table
#endregion
#region Fix-up Page Table
// Get the fix-up page table offset
offset = informationBlock.FixupPageTableOffset + stub.Header.NewExeHeaderAddr;
if (executable.ObjectPageMap != null && offset > stub.Header.NewExeHeaderAddr && offset < data.Length)
{
// Seek to the fix-up page table
data.Seek(offset, SeekOrigin.Begin);
// Create the fix-up page table
executable.FixupPageTable = new FixupPageTableEntry[executable.ObjectPageMap.Length + 1];
// Try to parse the fix-up page table
for (int i = 0; i < executable.FixupPageTable.Length; i++)
{
var entry = ParseFixupPageTableEntry(data);
if (entry == null)
return null;
executable.FixupPageTable[i] = entry;
}
}
#endregion
#region Fix-up Record Table
// Get the fix-up record table offset
offset = informationBlock.FixupRecordTableOffset + stub.Header.NewExeHeaderAddr;
if (executable.ObjectPageMap != null && offset > stub.Header.NewExeHeaderAddr && offset < data.Length)
{
// Seek to the fix-up record table
data.Seek(offset, SeekOrigin.Begin);
// Create the fix-up record table
executable.FixupRecordTable = new FixupRecordTableEntry[executable.ObjectPageMap.Length + 1];
// Try to parse the fix-up record table
for (int i = 0; i < executable.FixupRecordTable.Length; i++)
{
var entry = ParseFixupRecordTableEntry(data);
if (entry == null)
return null;
executable.FixupRecordTable[i] = entry;
}
}
#endregion
#region Imported Module Name Table
// Get the imported module name table offset
offset = informationBlock.ImportedModulesNameTableOffset + stub.Header.NewExeHeaderAddr;
if (offset > stub.Header.NewExeHeaderAddr && offset < data.Length)
{
// Seek to the imported module name table
data.Seek(offset, SeekOrigin.Begin);
// Create the imported module name table
executable.ImportModuleNameTable = new ImportModuleNameTableEntry[informationBlock.ImportedModulesCount];
// Try to parse the imported module name table
for (int i = 0; i < executable.ImportModuleNameTable.Length; i++)
{
var entry = ParseImportModuleNameTableEntry(data);
if (entry == null)
return null;
executable.ImportModuleNameTable[i] = entry;
}
}
#endregion
#region Imported Module Procedure Name Table
// Get the imported module procedure name table offset
offset = informationBlock.ImportProcedureNameTableOffset + stub.Header.NewExeHeaderAddr;
if (offset > stub.Header.NewExeHeaderAddr && offset < data.Length)
{
// Seek to the imported module procedure name table
data.Seek(offset, SeekOrigin.Begin);
// Get the size of the imported module procedure name table
long tableSize = informationBlock.FixupPageTableOffset
+ informationBlock.FixupSectionSize
- informationBlock.ImportProcedureNameTableOffset;
// Create the imported module procedure name table
var importModuleProcedureNameTable = new List<ImportModuleProcedureNameTableEntry>();
// Try to parse the imported module procedure name table
while (data.Position < offset + tableSize)
{
var entry = ParseImportModuleProcedureNameTableEntry(data);
if (entry == null)
return null;
importModuleProcedureNameTable.Add(entry);
}
// Assign the import module procedure name table
executable.ImportModuleProcedureNameTable = importModuleProcedureNameTable.ToArray();
}
#endregion
#region Per-Page Checksum Table
// Get the per-page checksum table offset
offset = informationBlock.PerPageChecksumTableOffset + stub.Header.NewExeHeaderAddr;
if (offset > stub.Header.NewExeHeaderAddr && offset < data.Length)
{
// Seek to the per-page checksum table
data.Seek(offset, SeekOrigin.Begin);
// Create the per-page checksum table
executable.PerPageChecksumTable = new PerPageChecksumTableEntry[informationBlock.ModuleNumberPages];
// Try to parse the per-page checksum table
for (int i = 0; i < executable.PerPageChecksumTable.Length; i++)
{
var entry = ParsePerPageChecksumTableEntry(data);
if (entry == null)
return null;
executable.PerPageChecksumTable[i] = entry;
}
}
#endregion
#region Non-Resident Names Table
// Get the non-resident names table offset
offset = informationBlock.NonResidentNamesTableOffset + stub.Header.NewExeHeaderAddr;
if (offset > stub.Header.NewExeHeaderAddr && offset < data.Length)
{
// Seek to the non-resident names table
data.Seek(offset, SeekOrigin.Begin);
// Create the non-resident names table
var nonResidentNamesTable = new List<NonResidentNamesTableEntry>();
// Try to parse the non-resident names table
while (true)
{
var entry = ParseNonResidentNameTableEntry(data);
nonResidentNamesTable.Add(entry);
// If we have a 0-length entry
if (entry.Length == 0)
break;
}
// Assign the non-resident names table
executable.NonResidentNamesTable = nonResidentNamesTable.ToArray();
}
#endregion
#region Debug Information
// Get the debug information offset
offset = informationBlock.DebugInformationOffset + stub.Header.NewExeHeaderAddr;
if (offset > stub.Header.NewExeHeaderAddr && offset < data.Length)
{
// Seek to the debug information
data.Seek(offset, SeekOrigin.Begin);
// Try to parse the debug information
var debugInformation = ParseDebugInformation(data, informationBlock.DebugInformationLength);
if (debugInformation == null)
return null;
// Set the debug information
executable.DebugInformation = debugInformation;
}
#endregion
return executable;
}
/// <summary>
/// Parse a Stream into an information block
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled information block on success, null on error</returns>
private static InformationBlock ParseInformationBlock(Stream data)
{
// TODO: Use marshalling here instead of building
var informationBlock = new InformationBlock();
byte[] magic = data.ReadBytes(2);
informationBlock.Signature = Encoding.ASCII.GetString(magic);
if (informationBlock.Signature != LESignatureString && informationBlock.Signature != LXSignatureString)
return null;
informationBlock.ByteOrder = (ByteOrder)data.ReadByteValue();
informationBlock.WordOrder = (WordOrder)data.ReadByteValue();
informationBlock.ExecutableFormatLevel = data.ReadUInt32();
informationBlock.CPUType = (CPUType)data.ReadUInt16();
informationBlock.ModuleOS = (OperatingSystem)data.ReadUInt16();
informationBlock.ModuleVersion = data.ReadUInt32();
informationBlock.ModuleTypeFlags = (ModuleFlags)data.ReadUInt32();
informationBlock.ModuleNumberPages = data.ReadUInt32();
informationBlock.InitialObjectCS = data.ReadUInt32();
informationBlock.InitialEIP = data.ReadUInt32();
informationBlock.InitialObjectSS = data.ReadUInt32();
informationBlock.InitialESP = data.ReadUInt32();
informationBlock.MemoryPageSize = data.ReadUInt32();
informationBlock.BytesOnLastPage = data.ReadUInt32();
informationBlock.FixupSectionSize = data.ReadUInt32();
informationBlock.FixupSectionChecksum = data.ReadUInt32();
informationBlock.LoaderSectionSize = data.ReadUInt32();
informationBlock.LoaderSectionChecksum = data.ReadUInt32();
informationBlock.ObjectTableOffset = data.ReadUInt32();
informationBlock.ObjectTableCount = data.ReadUInt32();
informationBlock.ObjectPageMapOffset = data.ReadUInt32();
informationBlock.ObjectIterateDataMapOffset = data.ReadUInt32();
informationBlock.ResourceTableOffset = data.ReadUInt32();
informationBlock.ResourceTableCount = data.ReadUInt32();
informationBlock.ResidentNamesTableOffset = data.ReadUInt32();
informationBlock.EntryTableOffset = data.ReadUInt32();
informationBlock.ModuleDirectivesTableOffset = data.ReadUInt32();
informationBlock.ModuleDirectivesCount = data.ReadUInt32();
informationBlock.FixupPageTableOffset = data.ReadUInt32();
informationBlock.FixupRecordTableOffset = data.ReadUInt32();
informationBlock.ImportedModulesNameTableOffset = data.ReadUInt32();
informationBlock.ImportedModulesCount = data.ReadUInt32();
informationBlock.ImportProcedureNameTableOffset = data.ReadUInt32();
informationBlock.PerPageChecksumTableOffset = data.ReadUInt32();
informationBlock.DataPagesOffset = data.ReadUInt32();
informationBlock.PreloadPageCount = data.ReadUInt32();
informationBlock.NonResidentNamesTableOffset = data.ReadUInt32();
informationBlock.NonResidentNamesTableLength = data.ReadUInt32();
informationBlock.NonResidentNamesTableChecksum = data.ReadUInt32();
informationBlock.AutomaticDataObject = data.ReadUInt32();
informationBlock.DebugInformationOffset = data.ReadUInt32();
informationBlock.DebugInformationLength = data.ReadUInt32();
informationBlock.PreloadInstancePagesNumber = data.ReadUInt32();
informationBlock.DemandInstancePagesNumber = data.ReadUInt32();
informationBlock.ExtraHeapAllocation = data.ReadUInt32();
return informationBlock;
}
/// <summary>
/// Parse a Stream into an object table entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled object table entry on success, null on error</returns>
private static ObjectTableEntry ParseObjectTableEntry(Stream data)
{
// TODO: Use marshalling here instead of building
var entry = new ObjectTableEntry();
entry.VirtualSegmentSize = data.ReadUInt32();
entry.RelocationBaseAddress = data.ReadUInt32();
entry.ObjectFlags = (ObjectFlags)data.ReadUInt16();
entry.PageTableIndex = data.ReadUInt32();
entry.PageTableEntries = data.ReadUInt32();
entry.Reserved = data.ReadUInt32();
return entry;
}
/// <summary>
/// Parse a Stream into an object page map entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled object page map entry on success, null on error</returns>
private static ObjectPageMapEntry ParseObjectPageMapEntry(Stream data)
{
// TODO: Use marshalling here instead of building
var entry = new ObjectPageMapEntry();
entry.PageDataOffset = data.ReadUInt32();
entry.DataSize = data.ReadUInt16();
entry.Flags = (ObjectPageFlags)data.ReadUInt16();
return entry;
}
/// <summary>
/// Parse a Stream into a resource table entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled resource table entry on success, null on error</returns>
private static ResourceTableEntry ParseResourceTableEntry(Stream data)
{
// TODO: Use marshalling here instead of building
var entry = new ResourceTableEntry();
entry.TypeID = (ResourceTableEntryType)data.ReadUInt32();
entry.NameID = data.ReadUInt16();
entry.ResourceSize = data.ReadUInt32();
entry.ObjectNumber = data.ReadUInt16();
entry.Offset = data.ReadUInt32();
return entry;
}
/// <summary>
/// Parse a Stream into a resident names table entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled resident names table entry on success, null on error</returns>
private static ResidentNamesTableEntry ParseResidentNamesTableEntry(Stream data)
{
// TODO: Use marshalling here instead of building
var entry = new ResidentNamesTableEntry();
entry.Length = data.ReadByteValue();
if (entry.Length > 0)
{
byte[] name = data.ReadBytes(entry.Length);
entry.Name = Encoding.ASCII.GetString(name).TrimEnd('\0');
}
entry.OrdinalNumber = data.ReadUInt16();
return entry;
}
/// <summary>
/// Parse a Stream into an entry table bundle
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled entry table bundle on success, null on error</returns>
private static EntryTableBundle ParseEntryTableBundle(Stream data)
{
// TODO: Use marshalling here instead of building
var bundle = new EntryTableBundle();
bundle.Entries = data.ReadByteValue();
if (bundle.Entries == 0)
return bundle;
bundle.BundleType = (BundleType)data.ReadByteValue();
bundle.TableEntries = new EntryTableEntry[bundle.Entries];
for (int i = 0; i < bundle.Entries; i++)
{
var entry = new EntryTableEntry();
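// The ParameterTypingInformationPresent bit is masked off below because it
// appears to act as a modifier on top of the base bundle type rather than
// as a bundle type of its own.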
switch (bundle.BundleType & ~BundleType.ParameterTypingInformationPresent)
{
case BundleType.UnusedEntry:
// Empty entry with no information
break;
case BundleType.SixteenBitEntry:
entry.SixteenBitObjectNumber = data.ReadUInt16();
entry.SixteenBitEntryFlags = (EntryFlags)data.ReadByteValue();
entry.SixteenBitOffset = data.ReadUInt16();
break;
case BundleType.TwoEightySixCallGateEntry:
entry.TwoEightySixObjectNumber = data.ReadUInt16();
entry.TwoEightySixEntryFlags = (EntryFlags)data.ReadByteValue();
entry.TwoEightySixOffset = data.ReadUInt16();
entry.TwoEightySixCallgate = data.ReadUInt16();
break;
case BundleType.ThirtyTwoBitEntry:
entry.ThirtyTwoBitObjectNumber = data.ReadUInt16();
entry.ThirtyTwoBitEntryFlags = (EntryFlags)data.ReadByteValue();
entry.ThirtyTwoBitOffset = data.ReadUInt32();
break;
case BundleType.ForwarderEntry:
entry.ForwarderReserved = data.ReadUInt16();
entry.ForwarderFlags = (ForwarderFlags)data.ReadByteValue();
entry.ForwarderModuleOrdinalNumber = data.ReadUInt16();
entry.ProcedureNameOffset = data.ReadUInt32();
entry.ImportOrdinalNumber = data.ReadUInt32();
break;
default:
return null;
}
bundle.TableEntries[i] = entry;
}
return bundle;
}
/// <summary>
/// Parse a Stream into a module format directives table entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled module format directives table entry on success, null on error</returns>
private static ModuleFormatDirectivesTableEntry ParseModuleFormatDirectivesTableEntry(Stream data)
{
// TODO: Use marshalling here instead of building
var entry = new ModuleFormatDirectivesTableEntry();
entry.DirectiveNumber = (DirectiveNumber)data.ReadUInt16();
entry.DirectiveDataLength = data.ReadUInt16();
entry.DirectiveDataOffset = data.ReadUInt32();
return entry;
}
/// <summary>
/// Parse a Stream into a verify record directive table entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled verify record directive table entry on success, null on error</returns>
private static VerifyRecordDirectiveTableEntry ParseVerifyRecordDirectiveTableEntry(Stream data)
{
// TODO: Use marshalling here instead of building
var entry = new VerifyRecordDirectiveTableEntry();
entry.EntryCount = data.ReadUInt16();
entry.OrdinalIndex = data.ReadUInt16();
entry.Version = data.ReadUInt16();
entry.ObjectEntriesCount = data.ReadUInt16();
entry.ObjectNumberInModule = data.ReadUInt16();
entry.ObjectLoadBaseAddress = data.ReadUInt16();
entry.ObjectVirtualAddressSize = data.ReadUInt16();
return entry;
}
/// <summary>
/// Parse a Stream into a fix-up page table entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled fix-up page table entry on success, null on error</returns>
private static FixupPageTableEntry ParseFixupPageTableEntry(Stream data)
{
// TODO: Use marshalling here instead of building
var entry = new FixupPageTableEntry();
entry.Offset = data.ReadUInt32();
return entry;
}
/// <summary>
/// Parse a Stream into a fix-up record table entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled fix-up record table entry on success, null on error</returns>
private static FixupRecordTableEntry ParseFixupRecordTableEntry(Stream data)
{
// TODO: Use marshalling here instead of building
var entry = new FixupRecordTableEntry();
entry.SourceType = (FixupRecordSourceType)data.ReadByteValue();
entry.TargetFlags = (FixupRecordTargetFlags)data.ReadByteValue();
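// Fix-up records are variable-length: which of the remaining fields are
// present, and how wide they are, depends entirely on the source type and
// target flags read above.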
// Source list flag
if (entry.SourceType.HasFlag(FixupRecordSourceType.SourceListFlag))
entry.SourceOffsetListCount = data.ReadByteValue();
else
entry.SourceOffset = data.ReadUInt16();
// OBJECT / TRGOFF
if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.InternalReference))
{
// 16-bit Object Number/Module Ordinal Flag
if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.SixteenBitObjectNumberModuleOrdinalFlag))
entry.TargetObjectNumberWORD = data.ReadUInt16();
else
entry.TargetObjectNumberByte = data.ReadByteValue();
// 16-bit Selector fixup
if (!entry.SourceType.HasFlag(FixupRecordSourceType.SixteenBitSelectorFixup))
{
// 32-bit Target Offset Flag
if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.ThirtyTwoBitTargetOffsetFlag))
entry.TargetOffsetDWORD = data.ReadUInt32();
else
entry.TargetOffsetWORD = data.ReadUInt16();
}
}
// MOD ORD# / IMPORT ORD / ADDITIVE
else if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.ImportedReferenceByOrdinal))
{
// 16-bit Object Number/Module Ordinal Flag
if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.SixteenBitObjectNumberModuleOrdinalFlag))
entry.OrdinalIndexImportModuleNameTableWORD = data.ReadUInt16();
else
entry.OrdinalIndexImportModuleNameTableByte = data.ReadByteValue();
// 8-bit Ordinal Flag & 32-bit Target Offset Flag
if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.EightBitOrdinalFlag))
entry.ImportedOrdinalNumberByte = data.ReadByteValue();
else if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.ThirtyTwoBitTargetOffsetFlag))
entry.ImportedOrdinalNumberDWORD = data.ReadUInt32();
else
entry.ImportedOrdinalNumberWORD = data.ReadUInt16();
// Additive Fixup Flag
if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.AdditiveFixupFlag))
{
// 32-bit Additive Flag
if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.ThirtyTwoBitAdditiveFixupFlag))
entry.AdditiveFixupValueDWORD = data.ReadUInt32();
else
entry.AdditiveFixupValueWORD = data.ReadUInt16();
}
}
// MOD ORD# / PROCEDURE NAME OFFSET / ADDITIVE
else if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.ImportedReferenceByName))
{
// 16-bit Object Number/Module Ordinal Flag
if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.SixteenBitObjectNumberModuleOrdinalFlag))
entry.OrdinalIndexImportModuleNameTableWORD = data.ReadUInt16();
else
entry.OrdinalIndexImportModuleNameTableByte = data.ReadByteValue();
// 32-bit Target Offset Flag
if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.ThirtyTwoBitTargetOffsetFlag))
entry.OffsetImportProcedureNameTableDWORD = data.ReadUInt32();
else
entry.OffsetImportProcedureNameTableWORD = data.ReadUInt16();
// Additive Fixup Flag
if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.AdditiveFixupFlag))
{
// 32-bit Additive Flag
if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.ThirtyTwoBitAdditiveFixupFlag))
entry.AdditiveFixupValueDWORD = data.ReadUInt32();
else
entry.AdditiveFixupValueWORD = data.ReadUInt16();
}
}
// ORD # / ADDITIVE
else if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.InternalReferenceViaEntryTable))
{
// 16-bit Object Number/Module Ordinal Flag
if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.SixteenBitObjectNumberModuleOrdinalFlag))
entry.OrdinalIndexImportModuleNameTableWORD = data.ReadUInt16();
else
entry.OrdinalIndexImportModuleNameTableByte = data.ReadByteValue();
// Additive Fixup Flag
if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.AdditiveFixupFlag))
{
// 32-bit Additive Flag
if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.ThirtyTwoBitAdditiveFixupFlag))
entry.AdditiveFixupValueDWORD = data.ReadUInt32();
else
entry.AdditiveFixupValueWORD = data.ReadUInt16();
}
}
// No other top-level flags recognized
else
{
return null;
}
#region SCROFFn
if (entry.SourceType.HasFlag(FixupRecordSourceType.SourceListFlag))
{
entry.SourceOffsetList = new ushort[entry.SourceOffsetListCount];
for (int i = 0; i < entry.SourceOffsetList.Length; i++)
{
entry.SourceOffsetList[i] = data.ReadUInt16();
}
}
#endregion
return entry;
}
/// <summary>
/// Parse a Stream into an import module name table entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled import module name table entry on success, null on error</returns>
private static ImportModuleNameTableEntry ParseImportModuleNameTableEntry(Stream data)
{
// TODO: Use marshalling here instead of building
var entry = new ImportModuleNameTableEntry();
entry.Length = data.ReadByteValue();
if (entry.Length > 0)
{
byte[] name = data.ReadBytes(entry.Length);
entry.Name = Encoding.ASCII.GetString(name).TrimEnd('\0');
}
return entry;
}
/// <summary>
/// Parse a Stream into an import module procedure name table entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled import module procedure name table entry on success, null on error</returns>
private static ImportModuleProcedureNameTableEntry ParseImportModuleProcedureNameTableEntry(Stream data)
{
// TODO: Use marshalling here instead of building
var entry = new ImportModuleProcedureNameTableEntry();
entry.Length = data.ReadByteValue();
if (entry.Length > 0)
{
byte[] name = data.ReadBytes(entry.Length);
entry.Name = Encoding.ASCII.GetString(name).TrimEnd('\0');
}
return entry;
}
/// <summary>
/// Parse a Stream into a per-page checksum table entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled per-page checksum table entry on success, null on error</returns>
private static PerPageChecksumTableEntry ParsePerPageChecksumTableEntry(Stream data)
{
// TODO: Use marshalling here instead of building
var entry = new PerPageChecksumTableEntry();
entry.Checksum = data.ReadUInt32();
return entry;
}
/// <summary>
/// Parse a Stream into a non-resident names table entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled non-resident names table entry on success, null on error</returns>
private static NonResidentNamesTableEntry ParseNonResidentNameTableEntry(Stream data)
{
// TODO: Use marshalling here instead of building
var entry = new NonResidentNamesTableEntry();
entry.Length = data.ReadByteValue();
if (entry.Length > 0)
{
byte[] name = data.ReadBytes(entry.Length);
entry.Name = Encoding.ASCII.GetString(name).TrimEnd('\0');
}
entry.OrdinalNumber = data.ReadUInt16();
return entry;
}
/// <summary>
/// Parse a Stream into debug information
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="size">Total size of the debug information</param>
/// <returns>Filled debug information on success, null on error</returns>
private static DebugInformation ParseDebugInformation(Stream data, long size)
{
// TODO: Use marshalling here instead of building
var debugInformation = new DebugInformation();
byte[] signature = data.ReadBytes(3);
debugInformation.Signature = Encoding.ASCII.GetString(signature);
if (debugInformation.Signature != DebugInformationSignatureString)
return null;
debugInformation.FormatType = (DebugFormatType)data.ReadByteValue();
debugInformation.DebuggerData = data.ReadBytes((int)(size - 4));
return debugInformation;
}
#endregion
}
}

@@ -1,175 +0,0 @@
using System.IO;
using System.Text;
using BinaryObjectScanner.Models.MSDOS;
using BinaryObjectScanner.Utilities;
using static BinaryObjectScanner.Models.MSDOS.Constants;
namespace BinaryObjectScanner.Builders
{
public static class MSDOS
{
#region Byte Data
/// <summary>
/// Parse a byte array into an MS-DOS executable
/// </summary>
/// <param name="data">Byte array to parse</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>Filled executable on success, null on error</returns>
public static Executable ParseExecutable(byte[] data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return ParseExecutable(dataStream);
}
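// Example usage (a sketch; the file name is hypothetical):
//
//     byte[] exeBytes = System.IO.File.ReadAllBytes("GAME.EXE");
//     Executable exe = MSDOS.ParseExecutable(exeBytes, 0);
//     if (exe != null)
//     {
//         // Inspect exe.Header and exe.RelocationTable
//     }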
#endregion
#region Stream Data
/// <summary>
/// Parse a Stream into an MS-DOS executable
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled executable on success, null on error</returns>
public static Executable ParseExecutable(Stream data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
return null;
// If the offset is out of bounds
if (data.Position < 0 || data.Position >= data.Length)
return null;
// Cache the current offset
int initialOffset = (int)data.Position;
// Create a new executable to fill
var executable = new Executable();
#region Executable Header
// Try to parse the executable header
var executableHeader = ParseExecutableHeader(data);
if (executableHeader == null)
return null;
// Set the executable header
executable.Header = executableHeader;
#endregion
#region Relocation Table
// If the offset for the relocation table doesn't exist
int tableAddress = initialOffset + executableHeader.RelocationTableAddr;
if (tableAddress >= data.Length)
return executable;
// Try to parse the relocation table
data.Seek(tableAddress, SeekOrigin.Begin);
var relocationTable = ParseRelocationTable(data, executableHeader.RelocationItems);
if (relocationTable == null)
return null;
// Set the relocation table
executable.RelocationTable = relocationTable;
#endregion
// Return the executable
return executable;
}
/// <summary>
/// Parse a Stream into an MS-DOS executable header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled executable header on success, null on error</returns>
private static ExecutableHeader ParseExecutableHeader(Stream data)
{
// TODO: Use marshalling here instead of building
var header = new ExecutableHeader();
#region Standard Fields
byte[] magic = data.ReadBytes(2);
header.Magic = Encoding.ASCII.GetString(magic);
if (header.Magic != SignatureString)
return null;
header.LastPageBytes = data.ReadUInt16();
header.Pages = data.ReadUInt16();
header.RelocationItems = data.ReadUInt16();
header.HeaderParagraphSize = data.ReadUInt16();
header.MinimumExtraParagraphs = data.ReadUInt16();
header.MaximumExtraParagraphs = data.ReadUInt16();
header.InitialSSValue = data.ReadUInt16();
header.InitialSPValue = data.ReadUInt16();
header.Checksum = data.ReadUInt16();
header.InitialIPValue = data.ReadUInt16();
header.InitialCSValue = data.ReadUInt16();
header.RelocationTableAddr = data.ReadUInt16();
header.OverlayNumber = data.ReadUInt16();
#endregion
// If we don't have enough data for PE extensions
if (data.Position >= data.Length || data.Length - data.Position < 36)
return header;
#region PE Extensions
header.Reserved1 = new ushort[4];
for (int i = 0; i < header.Reserved1.Length; i++)
{
header.Reserved1[i] = data.ReadUInt16();
}
header.OEMIdentifier = data.ReadUInt16();
header.OEMInformation = data.ReadUInt16();
header.Reserved2 = new ushort[10];
for (int i = 0; i < header.Reserved2.Length; i++)
{
header.Reserved2[i] = data.ReadUInt16();
}
header.NewExeHeaderAddr = data.ReadUInt32();
#endregion
return header;
}
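// Note: when the PE extensions above are present, NewExeHeaderAddr gives the
// file offset of a newer-format header (e.g. "NE" or "PE\0\0"), which is how
// the NewExecutable builder below locates its own header. A minimal sketch,
// assuming the stream starts at the MZ signature:
//
//     if (header.NewExeHeaderAddr > 0)
//         data.Seek(header.NewExeHeaderAddr, SeekOrigin.Begin);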
/// <summary>
/// Parse a Stream into a relocation table
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="count">Number of relocation table entries to read</param>
/// <returns>Filled relocation table on success, null on error</returns>
private static RelocationEntry[] ParseRelocationTable(Stream data, int count)
{
// TODO: Use marshalling here instead of building
var relocationTable = new RelocationEntry[count];
for (int i = 0; i < count; i++)
{
var entry = new RelocationEntry();
entry.Offset = data.ReadUInt16();
entry.Segment = data.ReadUInt16();
relocationTable[i] = entry;
}
return relocationTable;
}
#endregion
}
}

@@ -1,258 +0,0 @@
using System.IO;
using System.Text;
using BinaryObjectScanner.Models.MicrosoftCabinet;
using BinaryObjectScanner.Utilities;
using static BinaryObjectScanner.Models.MicrosoftCabinet.Constants;
namespace BinaryObjectScanner.Builders
{
// TODO: Add multi-cabinet reading
public class MicrosoftCabinet
{
#region Byte Data
/// <summary>
/// Parse a byte array into a Microsoft Cabinet file
/// </summary>
/// <param name="data">Byte array to parse</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>Filled cabinet on success, null on error</returns>
public static Cabinet ParseCabinet(byte[] data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return ParseCabinet(dataStream);
}
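// A minimal usage sketch (illustrative only; "data1.cab" is a placeholder):
//
//     var cab = MicrosoftCabinet.ParseCabinet(System.IO.File.ReadAllBytes("data1.cab"), 0);
//     if (cab != null)
//         System.Console.WriteLine($"{cab.Header.FileCount} file(s) in {cab.Header.FolderCount} folder(s)");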
#endregion
#region Stream Data
/// <summary>
/// Parse a Stream into a Microsoft Cabinet file
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled cabinet on success, null on error</returns>
public static Cabinet ParseCabinet(Stream data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
return null;
// If the offset is out of bounds
if (data.Position < 0 || data.Position >= data.Length)
return null;
// Cache the current offset
int initialOffset = (int)data.Position;
// Create a new cabinet to fill
var cabinet = new Cabinet();
#region Cabinet Header
// Try to parse the cabinet header
var cabinetHeader = ParseCabinetHeader(data);
if (cabinetHeader == null)
return null;
// Set the cabinet header
cabinet.Header = cabinetHeader;
#endregion
#region Folders
// Set the folder array
cabinet.Folders = new CFFOLDER[cabinetHeader.FolderCount];
// Try to parse each folder, if we have any
for (int i = 0; i < cabinetHeader.FolderCount; i++)
{
var folder = ParseFolder(data, cabinetHeader);
if (folder == null)
return null;
// Set the folder
cabinet.Folders[i] = folder;
}
#endregion
#region Files
// Get the files offset
int filesOffset = (int)cabinetHeader.FilesOffset + initialOffset;
if (filesOffset > data.Length)
return null;
// Seek to the offset
data.Seek(filesOffset, SeekOrigin.Begin);
// Set the file array
cabinet.Files = new CFFILE[cabinetHeader.FileCount];
// Try to parse each file, if we have any
for (int i = 0; i < cabinetHeader.FileCount; i++)
{
var file = ParseFile(data);
if (file == null)
return null;
// Set the file
cabinet.Files[i] = file;
}
#endregion
return cabinet;
}
/// <summary>
/// Parse a Stream into a cabinet header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled cabinet header on success, null on error</returns>
private static CFHEADER ParseCabinetHeader(Stream data)
{
CFHEADER header = new CFHEADER();
byte[] signature = data.ReadBytes(4);
header.Signature = Encoding.ASCII.GetString(signature);
if (header.Signature != SignatureString)
return null;
header.Reserved1 = data.ReadUInt32();
header.CabinetSize = data.ReadUInt32();
header.Reserved2 = data.ReadUInt32();
header.FilesOffset = data.ReadUInt32();
header.Reserved3 = data.ReadUInt32();
header.VersionMinor = data.ReadByteValue();
header.VersionMajor = data.ReadByteValue();
header.FolderCount = data.ReadUInt16();
header.FileCount = data.ReadUInt16();
header.Flags = (HeaderFlags)data.ReadUInt16();
header.SetID = data.ReadUInt16();
header.CabinetIndex = data.ReadUInt16();
if (header.Flags.HasFlag(HeaderFlags.RESERVE_PRESENT))
{
header.HeaderReservedSize = data.ReadUInt16();
if (header.HeaderReservedSize > 60_000)
return null;
header.FolderReservedSize = data.ReadByteValue();
header.DataReservedSize = data.ReadByteValue();
if (header.HeaderReservedSize > 0)
header.ReservedData = data.ReadBytes(header.HeaderReservedSize);
}
if (header.Flags.HasFlag(HeaderFlags.PREV_CABINET))
{
header.CabinetPrev = data.ReadString(Encoding.ASCII);
header.DiskPrev = data.ReadString(Encoding.ASCII);
}
if (header.Flags.HasFlag(HeaderFlags.NEXT_CABINET))
{
header.CabinetNext = data.ReadString(Encoding.ASCII);
header.DiskNext = data.ReadString(Encoding.ASCII);
}
return header;
}
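// Note: the CabinetPrev/DiskPrev and CabinetNext/DiskNext strings read above
// name the neighboring cabinets in a spanning set; following that chain is
// what the multi-cabinet TODO at the top of this class would involve.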
/// <summary>
/// Parse a Stream into a folder
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="header">Cabinet header to get flags and sizes from</param>
/// <returns>Filled folder on success, null on error</returns>
private static CFFOLDER ParseFolder(Stream data, CFHEADER header)
{
CFFOLDER folder = new CFFOLDER();
folder.CabStartOffset = data.ReadUInt32();
folder.DataCount = data.ReadUInt16();
folder.CompressionType = (CompressionType)data.ReadUInt16();
if (header.FolderReservedSize > 0)
folder.ReservedData = data.ReadBytes(header.FolderReservedSize);
if (folder.CabStartOffset > 0)
{
long currentPosition = data.Position;
data.Seek(folder.CabStartOffset, SeekOrigin.Begin);
folder.DataBlocks = new CFDATA[folder.DataCount];
for (int i = 0; i < folder.DataCount; i++)
{
CFDATA dataBlock = ParseDataBlock(data, header.DataReservedSize);
folder.DataBlocks[i] = dataBlock;
}
data.Seek(currentPosition, SeekOrigin.Begin);
}
return folder;
}
/// <summary>
/// Parse a Stream into a data block
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="dataReservedSize">Reserved byte size for data blocks</param>
/// <returns>Filled data block on success, null on error</returns>
private static CFDATA ParseDataBlock(Stream data, byte dataReservedSize)
{
CFDATA dataBlock = new CFDATA();
dataBlock.Checksum = data.ReadUInt32();
dataBlock.CompressedSize = data.ReadUInt16();
dataBlock.UncompressedSize = data.ReadUInt16();
if (dataReservedSize > 0)
dataBlock.ReservedData = data.ReadBytes(dataReservedSize);
if (dataBlock.CompressedSize > 0)
dataBlock.CompressedData = data.ReadBytes(dataBlock.CompressedSize);
return dataBlock;
}
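/// <summary>
/// Compute the checksum over a block payload. This is a sketch of the
/// CSUMCompute routine from the MS-CAB specification (XOR over whole
/// little-endian 32-bit words, folding in any 1-3 trailing bytes); it is
/// not part of the original builder, and the name is illustrative.
/// </summary>
/// <param name="bytes">Data to checksum</param>
/// <param name="seed">Checksum seed (the spec chains partial checksums)</param>
/// <returns>Computed checksum</returns>
private static uint ComputeChecksum(byte[] bytes, uint seed)
{
uint csum = seed;
int i = 0;
// XOR in each whole little-endian uint
while (bytes.Length - i >= 4)
{
csum ^= (uint)(bytes[i] | (bytes[i + 1] << 8) | (bytes[i + 2] << 16) | (bytes[i + 3] << 24));
i += 4;
}
// Fold in the remaining 0-3 bytes
uint trailing = 0;
switch (bytes.Length - i)
{
case 3: trailing |= (uint)bytes[i++] << 16; goto case 2;
case 2: trailing |= (uint)bytes[i++] << 8; goto case 1;
case 1: trailing |= bytes[i]; break;
}
return csum ^ trailing;
}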
/// <summary>
/// Parse a Stream into a file
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled file on success, null on error</returns>
private static CFFILE ParseFile(Stream data)
{
CFFILE file = new CFFILE();
file.FileSize = data.ReadUInt32();
file.FolderStartOffset = data.ReadUInt32();
file.FolderIndex = (FolderIndex)data.ReadUInt16();
file.Date = data.ReadUInt16();
file.Time = data.ReadUInt16();
file.Attributes = (Models.MicrosoftCabinet.FileAttributes)data.ReadUInt16();
if (file.Attributes.HasFlag(Models.MicrosoftCabinet.FileAttributes.NAME_IS_UTF))
file.Name = data.ReadString(Encoding.Unicode);
else
file.Name = data.ReadString(Encoding.ASCII);
return file;
}
#endregion
}
}

@@ -1,651 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
using BinaryObjectScanner.Models.MoPaQ;
using BinaryObjectScanner.Utilities;
using static BinaryObjectScanner.Models.MoPaQ.Constants;
namespace BinaryObjectScanner.Builders
{
public class MoPaQ
{
#region Byte Data
/// <summary>
/// Parse a byte array into a MoPaQ archive
/// </summary>
/// <param name="data">Byte array to parse</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>Filled archive on success, null on error</returns>
public static Archive ParseArchive(byte[] data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return ParseArchive(dataStream);
}
#endregion
#region Stream Data
/// <summary>
/// Parse a Stream into a MoPaQ archive
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled archive on success, null on error</returns>
public static Archive ParseArchive(Stream data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
return null;
// If the offset is out of bounds
if (data.Position < 0 || data.Position >= data.Length)
return null;
// Cache the current offset
int initialOffset = (int)data.Position;
// Create a new archive to fill
var archive = new Archive();
#region User Data
// Check for User Data
uint possibleSignature = data.ReadUInt32();
data.Seek(-4, SeekOrigin.Current);
if (possibleSignature == 0x1B51504D)
{
// Save the current position for offset correction
long basePtr = data.Position;
// Deserialize the user data, returning null if invalid
var userData = ParseUserData(data);
if (userData == null)
return null;
// Set the user data
archive.UserData = userData;
// Set the starting position according to the header offset
data.Seek(basePtr + (int)archive.UserData.HeaderOffset, SeekOrigin.Begin);
}
#endregion
#region Archive Header
// Check for the Header
possibleSignature = data.ReadUInt32();
data.Seek(-4, SeekOrigin.Current);
if (possibleSignature == 0x1A51504D)
{
// Try to parse the archive header
var archiveHeader = ParseArchiveHeader(data);
if (archiveHeader == null)
return null;
// Set the archive header
archive.ArchiveHeader = archiveHeader;
}
else
{
return null;
}
#endregion
#region Hash Table
// TODO: The hash table has to be decrypted before reading
// Version 1
if (archive.ArchiveHeader.FormatVersion == FormatVersion.Format1)
{
// If we have a hash table
long hashTableOffset = archive.ArchiveHeader.HashTablePosition;
if (hashTableOffset != 0)
{
// Seek to the offset
data.Seek(hashTableOffset, SeekOrigin.Begin);
// Find the ending offset based on the entry count (0x10 bytes per entry)
long hashTableEnd = hashTableOffset + ((long)archive.ArchiveHeader.HashTableSize * 0x10);
// Read in the hash table
var hashTable = new List<HashEntry>();
while (data.Position < hashTableEnd)
{
var hashEntry = ParseHashEntry(data);
if (hashEntry == null)
return null;
hashTable.Add(hashEntry);
}
archive.HashTable = hashTable.ToArray();
}
}
// Version 2 and 3
else if (archive.ArchiveHeader.FormatVersion == FormatVersion.Format2 || archive.ArchiveHeader.FormatVersion == FormatVersion.Format3)
{
// If we have a hash table
long hashTableOffset = ((long)archive.ArchiveHeader.HashTablePositionHi << 32) | archive.ArchiveHeader.HashTablePosition;
if (hashTableOffset != 0)
{
// Seek to the offset
data.Seek(hashTableOffset, SeekOrigin.Begin);
// Find the ending offset based on the entry count (0x10 bytes per entry)
long hashTableEnd = hashTableOffset + ((long)archive.ArchiveHeader.HashTableSize * 0x10);
// Read in the hash table
var hashTable = new List<HashEntry>();
while (data.Position < hashTableEnd)
{
var hashEntry = ParseHashEntry(data);
if (hashEntry == null)
return null;
hashTable.Add(hashEntry);
}
archive.HashTable = hashTable.ToArray();
}
}
// Version 4
else if (archive.ArchiveHeader.FormatVersion == FormatVersion.Format4)
{
// If we have a hash table
long hashTableOffset = ((long)archive.ArchiveHeader.HashTablePositionHi << 32) | archive.ArchiveHeader.HashTablePosition;
if (hashTableOffset != 0)
{
// Seek to the offset
data.Seek(hashTableOffset, SeekOrigin.Begin);
// Find the ending offset based on size
long hashTableEnd = hashTableOffset + (long)archive.ArchiveHeader.HashTableSizeLong;
// Read in the hash table
var hashTable = new List<HashEntry>();
while (data.Position < hashTableEnd)
{
var hashEntry = ParseHashEntry(data);
if (hashEntry == null)
return null;
hashTable.Add(hashEntry);
}
archive.HashTable = hashTable.ToArray();
}
}
#endregion
#region Block Table
// Version 1
if (archive.ArchiveHeader.FormatVersion == FormatVersion.Format1)
{
// If we have a block table
long blockTableOffset = archive.ArchiveHeader.BlockTablePosition;
if (blockTableOffset != 0)
{
// Seek to the offset
data.Seek(blockTableOffset, SeekOrigin.Begin);
// Find the ending offset based on the entry count (0x10 bytes per entry)
long blockTableEnd = blockTableOffset + ((long)archive.ArchiveHeader.BlockTableSize * 0x10);
// Read in the block table
var blockTable = new List<BlockEntry>();
while (data.Position < blockTableEnd)
{
var blockEntry = ParseBlockEntry(data);
if (blockEntry == null)
return null;
blockTable.Add(blockEntry);
}
archive.BlockTable = blockTable.ToArray();
}
}
// Version 2 and 3
else if (archive.ArchiveHeader.FormatVersion == FormatVersion.Format2 || archive.ArchiveHeader.FormatVersion == FormatVersion.Format3)
{
// If we have a block table
long blockTableOffset = ((long)archive.ArchiveHeader.BlockTablePositionHi << 32) | archive.ArchiveHeader.BlockTablePosition;
if (blockTableOffset != 0)
{
// Seek to the offset
data.Seek(blockTableOffset, SeekOrigin.Begin);
// Find the ending offset based on the entry count (0x10 bytes per entry)
long blockTableEnd = blockTableOffset + ((long)archive.ArchiveHeader.BlockTableSize * 0x10);
// Read in the block table
var blockTable = new List<BlockEntry>();
while (data.Position < blockTableEnd)
{
var blockEntry = ParseBlockEntry(data);
if (blockEntry == null)
return null;
blockTable.Add(blockEntry);
}
archive.BlockTable = blockTable.ToArray();
}
}
// Version 4
else if (archive.ArchiveHeader.FormatVersion == FormatVersion.Format4)
{
// If we have a block table
long blockTableOffset = ((long)archive.ArchiveHeader.BlockTablePositionHi << 32) | archive.ArchiveHeader.BlockTablePosition;
if (blockTableOffset != 0)
{
// Seek to the offset
data.Seek(blockTableOffset, SeekOrigin.Begin);
// Find the ending offset based on size
long blockTableEnd = blockTableOffset + (long)archive.ArchiveHeader.BlockTableSizeLong;
// Read in the block table
var blockTable = new List<BlockEntry>();
while (data.Position < blockTableEnd)
{
var blockEntry = ParseBlockEntry(data);
if (blockEntry == null)
return null;
blockTable.Add(blockEntry);
}
archive.BlockTable = blockTable.ToArray();
}
}
#endregion
#region Hi-Block Table
// Version 2, 3, and 4
if (archive.ArchiveHeader.FormatVersion >= FormatVersion.Format2)
{
// If we have a hi-block table
long hiBlockTableOffset = (long)archive.ArchiveHeader.HiBlockTablePosition;
if (hiBlockTableOffset != 0)
{
// Seek to the offset
data.Seek(hiBlockTableOffset, SeekOrigin.Begin);
// Read in the hi-block table
var hiBlockTable = new List<short>();
for (int i = 0; i < archive.BlockTable.Length; i++)
{
short hiBlockEntry = data.ReadInt16();
hiBlockTable.Add(hiBlockEntry);
}
archive.HiBlockTable = hiBlockTable.ToArray();
}
}
#endregion
#region BET Table
// Version 3 and 4
if (archive.ArchiveHeader.FormatVersion >= FormatVersion.Format3)
{
// If we have a BET table
long betTableOffset = (long)archive.ArchiveHeader.BetTablePosition;
if (betTableOffset != 0)
{
// Seek to the offset
data.Seek(betTableOffset, SeekOrigin.Begin);
// Read in the BET table
var betTable = ParseBetTable(data);
if (betTable == null)
return null;
archive.BetTable = betTable;
}
}
#endregion
#region HET Table
// Version 3 and 4
if (archive.ArchiveHeader.FormatVersion >= FormatVersion.Format3)
{
// If we have a HET table
long hetTableOffset = (long)archive.ArchiveHeader.HetTablePosition;
if (hetTableOffset != 0)
{
// Seek to the offset
data.Seek(hetTableOffset, SeekOrigin.Begin);
// Read in the HET table
var hetTable = ParseHetTable(data);
if (hetTable == null)
return null;
archive.HetTable = hetTable;
}
}
#endregion
return archive;
}
/// <summary>
/// Parse a Stream into an archive header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled archive header on success, null on error</returns>
private static ArchiveHeader ParseArchiveHeader(Stream data)
{
ArchiveHeader archiveHeader = new ArchiveHeader();
// V1 - Common
byte[] signature = data.ReadBytes(4);
archiveHeader.Signature = Encoding.ASCII.GetString(signature);
if (archiveHeader.Signature != ArchiveHeaderSignatureString)
return null;
archiveHeader.HeaderSize = data.ReadUInt32();
archiveHeader.ArchiveSize = data.ReadUInt32();
archiveHeader.FormatVersion = (FormatVersion)data.ReadUInt16();
archiveHeader.BlockSize = data.ReadUInt16();
archiveHeader.HashTablePosition = data.ReadUInt32();
archiveHeader.BlockTablePosition = data.ReadUInt32();
archiveHeader.HashTableSize = data.ReadUInt32();
archiveHeader.BlockTableSize = data.ReadUInt32();
// V2
if (archiveHeader.FormatVersion >= FormatVersion.Format2)
{
archiveHeader.HiBlockTablePosition = data.ReadUInt64();
archiveHeader.HashTablePositionHi = data.ReadUInt16();
archiveHeader.BlockTablePositionHi = data.ReadUInt16();
}
// V3
if (archiveHeader.FormatVersion >= FormatVersion.Format3)
{
archiveHeader.ArchiveSizeLong = data.ReadUInt64();
archiveHeader.BetTablePosition = data.ReadUInt64();
archiveHeader.HetTablePosition = data.ReadUInt64();
}
// V4
if (archiveHeader.FormatVersion >= FormatVersion.Format4)
{
archiveHeader.HashTableSizeLong = data.ReadUInt64();
archiveHeader.BlockTableSizeLong = data.ReadUInt64();
archiveHeader.HiBlockTableSize = data.ReadUInt64();
archiveHeader.HetTableSize = data.ReadUInt64();
archiveHeader.BetTablesize = data.ReadUInt64();
archiveHeader.RawChunkSize = data.ReadUInt32();
archiveHeader.BlockTableMD5 = data.ReadBytes(0x10);
archiveHeader.HashTableMD5 = data.ReadBytes(0x10);
archiveHeader.HiBlockTableMD5 = data.ReadBytes(0x10);
archiveHeader.BetTableMD5 = data.ReadBytes(0x10);
archiveHeader.HetTableMD5 = data.ReadBytes(0x10);
data.ReadBytes(0x10); // Sixth digest is the MD5 of the MPQ header itself per the v4 layout; no model field, so the value is skipped
}
return archiveHeader;
}
/// <summary>
/// Parse a Stream into a user data object
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled user data on success, null on error</returns>
private static UserData ParseUserData(Stream data)
{
UserData userData = new UserData();
byte[] signature = data.ReadBytes(4);
userData.Signature = Encoding.ASCII.GetString(signature);
if (userData.Signature != UserDataSignatureString)
return null;
userData.UserDataSize = data.ReadUInt32();
userData.HeaderOffset = data.ReadUInt32();
userData.UserDataHeaderSize = data.ReadUInt32();
return userData;
}
/// <summary>
/// Parse a Stream into a HET table
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled HET table on success, null on error</returns>
private static HetTable ParseHetTable(Stream data)
{
HetTable hetTable = new HetTable();
// Common Headers
byte[] signature = data.ReadBytes(4);
hetTable.Signature = Encoding.ASCII.GetString(signature);
if (hetTable.Signature != HetTableSignatureString)
return null;
hetTable.Version = data.ReadUInt32();
hetTable.DataSize = data.ReadUInt32();
// HET-Specific
hetTable.TableSize = data.ReadUInt32();
hetTable.MaxFileCount = data.ReadUInt32();
hetTable.HashTableSize = data.ReadUInt32();
hetTable.TotalIndexSize = data.ReadUInt32();
hetTable.IndexSizeExtra = data.ReadUInt32();
hetTable.IndexSize = data.ReadUInt32();
hetTable.BlockTableSize = data.ReadUInt32();
hetTable.HashTable = data.ReadBytes((int)hetTable.HashTableSize);
// TODO: Populate the file indexes array
hetTable.FileIndexes = new byte[(int)hetTable.HashTableSize][];
return hetTable;
}
/// <summary>
/// Parse a Stream into a BET table
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled BET table on success, null on error</returns>
private static BetTable ParseBetTable(Stream data)
{
BetTable betTable = new BetTable();
// Common Headers
byte[] signature = data.ReadBytes(4);
betTable.Signature = Encoding.ASCII.GetString(signature);
if (betTable.Signature != BetTableSignatureString)
return null;
betTable.Version = data.ReadUInt32();
betTable.DataSize = data.ReadUInt32();
// BET-Specific
betTable.TableSize = data.ReadUInt32();
betTable.FileCount = data.ReadUInt32();
betTable.Unknown = data.ReadUInt32();
betTable.TableEntrySize = data.ReadUInt32();
betTable.FilePositionBitIndex = data.ReadUInt32();
betTable.FileSizeBitIndex = data.ReadUInt32();
betTable.CompressedSizeBitIndex = data.ReadUInt32();
betTable.FlagIndexBitIndex = data.ReadUInt32();
betTable.UnknownBitIndex = data.ReadUInt32();
betTable.FilePositionBitCount = data.ReadUInt32();
betTable.FileSizeBitCount = data.ReadUInt32();
betTable.CompressedSizeBitCount = data.ReadUInt32();
betTable.FlagIndexBitCount = data.ReadUInt32();
betTable.UnknownBitCount = data.ReadUInt32();
betTable.TotalBetHashSize = data.ReadUInt32();
betTable.BetHashSizeExtra = data.ReadUInt32();
betTable.BetHashSize = data.ReadUInt32();
betTable.BetHashArraySize = data.ReadUInt32();
betTable.FlagCount = data.ReadUInt32();
betTable.FlagsArray = new uint[betTable.FlagCount];
byte[] flagsArray = data.ReadBytes((int)betTable.FlagCount * 4);
Buffer.BlockCopy(flagsArray, 0, betTable.FlagsArray, 0, (int)betTable.FlagCount * 4);
// TODO: Populate the file table
// TODO: Populate the hash table
return betTable;
}
/// <summary>
/// Parse a Stream into a hash entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled hash entry on success, null on error</returns>
private static HashEntry ParseHashEntry(Stream data)
{
// TODO: Use marshalling here instead of building
HashEntry hashEntry = new HashEntry();
hashEntry.NameHashPartA = data.ReadUInt32();
hashEntry.NameHashPartB = data.ReadUInt32();
hashEntry.Locale = (Locale)data.ReadUInt16();
hashEntry.Platform = data.ReadUInt16();
hashEntry.BlockIndex = data.ReadUInt32();
return hashEntry;
}
/// <summary>
/// Parse a Stream into a block entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled block entry on success, null on error</returns>
private static BlockEntry ParseBlockEntry(Stream data)
{
BlockEntry blockEntry = new BlockEntry();
blockEntry.FilePosition = data.ReadUInt32();
blockEntry.CompressedSize = data.ReadUInt32();
blockEntry.UncompressedSize = data.ReadUInt32();
blockEntry.Flags = (FileFlags)data.ReadUInt32();
return blockEntry;
}
/// <summary>
/// Parse a Stream into a patch info
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled patch info on success, null on error</returns>
private static PatchInfo ParsePatchInfo(Stream data)
{
// TODO: Use marshalling here instead of building
PatchInfo patchInfo = new PatchInfo();
patchInfo.Length = data.ReadUInt32();
patchInfo.Flags = data.ReadUInt32();
patchInfo.DataSize = data.ReadUInt32();
patchInfo.MD5 = data.ReadBytes(0x10);
// TODO: Fill the sector offset table
return patchInfo;
}
#endregion
#region Helpers
/// <summary>
/// Buffer for encryption and decryption
/// </summary>
private uint[] _stormBuffer = new uint[STORM_BUFFER_SIZE];
/// <summary>
/// Prepare the encryption table
/// </summary>
private void PrepareCryptTable()
{
uint seed = 0x00100001;
for (uint index1 = 0; index1 < 0x100; index1++)
{
for (uint index2 = index1, i = 0; i < 5; i++, index2 += 0x100)
{
seed = (seed * 125 + 3) % 0x2AAAAB;
uint temp1 = (seed & 0xFFFF) << 0x10;
seed = (seed * 125 + 3) % 0x2AAAAB;
uint temp2 = (seed & 0xFFFF);
_stormBuffer[index2] = (temp1 | temp2);
}
}
}
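/// <summary>
/// One-way hash of a string using the prepared crypt table. This is a
/// sketch of the StormLib-style HashString companion to DecryptBlock and is
/// not part of the original builder; <paramref name="hashType"/> is assumed
/// to be a pre-shifted table offset (0x000, 0x100, 0x200, 0x300), matching
/// how MPQ_HASH_KEY2_MIX is indexed below.
/// </summary>
private uint HashString(string input, uint hashType)
{
uint seed1 = 0x7FED7FED;
uint seed2 = 0xEEEEEEEE;
foreach (char c in input.ToUpperInvariant())
{
// Only the low byte of each character participates, as in StormLib
seed1 = _stormBuffer[hashType + (byte)c] ^ (seed1 + seed2);
seed2 = (byte)c + seed1 + seed2 + (seed2 << 5) + 3;
}
return seed1;
}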
/// <summary>
/// Decrypt a single block of data
/// </summary>
private byte[] DecryptBlock(byte[] block, uint length, uint key)
{
uint seed = 0xEEEEEEEE;
// Only whole uints are decrypted; any 1-3 trailing bytes are left as-is
int wholeUintBytes = (int)(length & ~3u);
uint[] castBlock = new uint[length / 4];
Buffer.BlockCopy(block, 0, castBlock, 0, wholeUintBytes);
int castBlockPtr = 0;
// Round to uints
length >>= 2;
while (length-- > 0)
{
seed += _stormBuffer[MPQ_HASH_KEY2_MIX + (key & 0xFF)];
uint ch = castBlock[castBlockPtr] ^ (key + seed);
key = ((~key << 0x15) + 0x11111111) | (key >> 0x0B);
seed = ch + seed + (seed << 5) + 3;
castBlock[castBlockPtr++] = ch;
}
// Copy back using the cached byte count, since length was consumed above
Buffer.BlockCopy(castBlock, 0, block, 0, wholeUintBytes);
return block;
}
#endregion
}
}

File diff suppressed because it is too large

@@ -1,544 +0,0 @@
using System.Collections.Generic;
using System.IO;
using System.Text;
using BinaryObjectScanner.Models.NCF;
using BinaryObjectScanner.Utilities;
namespace BinaryObjectScanner.Builders
{
public static class NCF
{
#region Byte Data
/// <summary>
/// Parse a byte array into a Half-Life No Cache
/// </summary>
/// <param name="data">Byte array to parse</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>Filled Half-Life No Cache on success, null on error</returns>
public static Models.NCF.File ParseFile(byte[] data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return ParseFile(dataStream);
}
#endregion
#region Stream Data
/// <summary>
/// Parse a Stream into a Half-Life No Cache
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life No Cache on success, null on error</returns>
public static Models.NCF.File ParseFile(Stream data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
return null;
// If the offset is out of bounds
if (data.Position < 0 || data.Position >= data.Length)
return null;
// Cache the current offset
long initialOffset = data.Position;
// Create a new Half-Life No Cache to fill
var file = new Models.NCF.File();
#region Header
// Try to parse the header
var header = ParseHeader(data);
if (header == null)
return null;
// Set the no cache header
file.Header = header;
#endregion
// Cache the current offset
initialOffset = data.Position;
#region Directory Header
// Try to parse the directory header
var directoryHeader = ParseDirectoryHeader(data);
if (directoryHeader == null)
return null;
// Set the game cache directory header
file.DirectoryHeader = directoryHeader;
#endregion
#region Directory Entries
// Create the directory entry array
file.DirectoryEntries = new DirectoryEntry[directoryHeader.ItemCount];
// Try to parse the directory entries
for (int i = 0; i < directoryHeader.ItemCount; i++)
{
var directoryEntry = ParseDirectoryEntry(data);
file.DirectoryEntries[i] = directoryEntry;
}
#endregion
#region Directory Names
if (directoryHeader.NameSize > 0)
{
// Get the current offset for adjustment
long directoryNamesStart = data.Position;
// Get the ending offset
long directoryNamesEnd = data.Position + directoryHeader.NameSize;
// Create the string dictionary
file.DirectoryNames = new Dictionary<long, string>();
// Loop and read the null-terminated strings
while (data.Position < directoryNamesEnd)
{
long nameOffset = data.Position - directoryNamesStart;
string directoryName = data.ReadString(Encoding.ASCII);
if (data.Position > directoryNamesEnd)
{
data.Seek(-directoryName.Length, SeekOrigin.Current);
byte[] endingData = data.ReadBytes((int)(directoryNamesEnd - data.Position));
if (endingData != null)
directoryName = Encoding.ASCII.GetString(endingData);
else
directoryName = null;
}
file.DirectoryNames[nameOffset] = directoryName;
}
// Loop and assign to entries
foreach (var directoryEntry in file.DirectoryEntries)
{
directoryEntry.Name = file.DirectoryNames[directoryEntry.NameOffset];
}
}
#endregion
#region Directory Info 1 Entries
// Create the directory info 1 entry array
file.DirectoryInfo1Entries = new DirectoryInfo1Entry[directoryHeader.Info1Count];
// Try to parse the directory info 1 entries
for (int i = 0; i < directoryHeader.Info1Count; i++)
{
var directoryInfo1Entry = ParseDirectoryInfo1Entry(data);
file.DirectoryInfo1Entries[i] = directoryInfo1Entry;
}
#endregion
#region Directory Info 2 Entries
// Create the directory info 2 entry array
file.DirectoryInfo2Entries = new DirectoryInfo2Entry[directoryHeader.ItemCount];
// Try to parse the directory info 2 entries
for (int i = 0; i < directoryHeader.ItemCount; i++)
{
var directoryInfo2Entry = ParseDirectoryInfo2Entry(data);
file.DirectoryInfo2Entries[i] = directoryInfo2Entry;
}
#endregion
#region Directory Copy Entries
// Create the directory copy entry array
file.DirectoryCopyEntries = new DirectoryCopyEntry[directoryHeader.CopyCount];
// Try to parse the directory copy entries
for (int i = 0; i < directoryHeader.CopyCount; i++)
{
var directoryCopyEntry = ParseDirectoryCopyEntry(data);
file.DirectoryCopyEntries[i] = directoryCopyEntry;
}
#endregion
#region Directory Local Entries
// Create the directory local entry array
file.DirectoryLocalEntries = new DirectoryLocalEntry[directoryHeader.LocalCount];
// Try to parse the directory local entries
for (int i = 0; i < directoryHeader.LocalCount; i++)
{
var directoryLocalEntry = ParseDirectoryLocalEntry(data);
file.DirectoryLocalEntries[i] = directoryLocalEntry;
}
#endregion
// Seek to end of directory section, just in case
data.Seek(initialOffset + directoryHeader.DirectorySize, SeekOrigin.Begin);
#region Unknown Header
// Try to parse the unknown header
var unknownHeader = ParseUnknownHeader(data);
if (unknownHeader == null)
return null;
// Set the game cache unknown header
file.UnknownHeader = unknownHeader;
#endregion
#region Unknown Entries
// Create the unknown entry array
file.UnknownEntries = new UnknownEntry[directoryHeader.ItemCount];
// Try to parse the unknown entries
for (int i = 0; i < directoryHeader.ItemCount; i++)
{
var unknownEntry = ParseUnknownEntry(data);
file.UnknownEntries[i] = unknownEntry;
}
#endregion
#region Checksum Header
// Try to parse the checksum header
var checksumHeader = ParseChecksumHeader(data);
if (checksumHeader == null)
return null;
// Set the game cache checksum header
file.ChecksumHeader = checksumHeader;
#endregion
// Cache the current offset
initialOffset = data.Position;
#region Checksum Map Header
// Try to parse the checksum map header
var checksumMapHeader = ParseChecksumMapHeader(data);
if (checksumMapHeader == null)
return null;
// Set the game cache checksum map header
file.ChecksumMapHeader = checksumMapHeader;
#endregion
#region Checksum Map Entries
// Create the checksum map entry array
file.ChecksumMapEntries = new ChecksumMapEntry[checksumMapHeader.ItemCount];
// Try to parse the checksum map entries
for (int i = 0; i < checksumMapHeader.ItemCount; i++)
{
var checksumMapEntry = ParseChecksumMapEntry(data);
file.ChecksumMapEntries[i] = checksumMapEntry;
}
#endregion
#region Checksum Entries
// Create the checksum entry array
file.ChecksumEntries = new ChecksumEntry[checksumMapHeader.ChecksumCount];
// Try to parse the checksum entries
for (int i = 0; i < checksumMapHeader.ChecksumCount; i++)
{
var checksumEntry = ParseChecksumEntry(data);
file.ChecksumEntries[i] = checksumEntry;
}
#endregion
// Seek to end of checksum section, just in case
data.Seek(initialOffset + checksumHeader.ChecksumSize, SeekOrigin.Begin);
return file;
}
/// <summary>
/// Parse a Stream into a Half-Life No Cache header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life No Cache header on success, null on error</returns>
private static Header ParseHeader(Stream data)
{
// TODO: Use marshalling here instead of building
Header header = new Header();
header.Dummy0 = data.ReadUInt32();
if (header.Dummy0 != 0x00000001)
return null;
header.MajorVersion = data.ReadUInt32();
if (header.MajorVersion != 0x00000002)
return null;
header.MinorVersion = data.ReadUInt32();
if (header.MinorVersion != 1)
return null;
header.CacheID = data.ReadUInt32();
header.LastVersionPlayed = data.ReadUInt32();
header.Dummy1 = data.ReadUInt32();
header.Dummy2 = data.ReadUInt32();
header.FileSize = data.ReadUInt32();
header.BlockSize = data.ReadUInt32();
header.BlockCount = data.ReadUInt32();
header.Dummy3 = data.ReadUInt32();
return header;
}
/// <summary>
/// Parse a Stream into a Half-Life No Cache directory header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life No Cache directory header on success, null on error</returns>
private static DirectoryHeader ParseDirectoryHeader(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryHeader directoryHeader = new DirectoryHeader();
directoryHeader.Dummy0 = data.ReadUInt32();
if (directoryHeader.Dummy0 != 0x00000004)
return null;
directoryHeader.CacheID = data.ReadUInt32();
directoryHeader.LastVersionPlayed = data.ReadUInt32();
directoryHeader.ItemCount = data.ReadUInt32();
directoryHeader.FileCount = data.ReadUInt32();
directoryHeader.ChecksumDataLength = data.ReadUInt32();
directoryHeader.DirectorySize = data.ReadUInt32();
directoryHeader.NameSize = data.ReadUInt32();
directoryHeader.Info1Count = data.ReadUInt32();
directoryHeader.CopyCount = data.ReadUInt32();
directoryHeader.LocalCount = data.ReadUInt32();
directoryHeader.Dummy1 = data.ReadUInt32();
directoryHeader.Dummy2 = data.ReadUInt32();
directoryHeader.Checksum = data.ReadUInt32();
return directoryHeader;
}
/// <summary>
/// Parse a Stream into a Half-Life No Cache directory entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life No Cache directory entry on success, null on error</returns>
private static DirectoryEntry ParseDirectoryEntry(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryEntry directoryEntry = new DirectoryEntry();
directoryEntry.NameOffset = data.ReadUInt32();
directoryEntry.ItemSize = data.ReadUInt32();
directoryEntry.ChecksumIndex = data.ReadUInt32();
directoryEntry.DirectoryFlags = (HL_NCF_FLAG)data.ReadUInt32();
directoryEntry.ParentIndex = data.ReadUInt32();
directoryEntry.NextIndex = data.ReadUInt32();
directoryEntry.FirstIndex = data.ReadUInt32();
return directoryEntry;
}
/// <summary>
/// Parse a Stream into a Half-Life No Cache directory info 1 entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life No Cache directory info 1 entry on success, null on error</returns>
private static DirectoryInfo1Entry ParseDirectoryInfo1Entry(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryInfo1Entry directoryInfo1Entry = new DirectoryInfo1Entry();
directoryInfo1Entry.Dummy0 = data.ReadUInt32();
return directoryInfo1Entry;
}
/// <summary>
/// Parse a Stream into a Half-Life No Cache directory info 2 entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life No Cache directory info 2 entry on success, null on error</returns>
private static DirectoryInfo2Entry ParseDirectoryInfo2Entry(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryInfo2Entry directoryInfo2Entry = new DirectoryInfo2Entry();
directoryInfo2Entry.Dummy0 = data.ReadUInt32();
return directoryInfo2Entry;
}
/// <summary>
/// Parse a Stream into a Half-Life No Cache directory copy entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life No Cache directory copy entry on success, null on error</returns>
private static DirectoryCopyEntry ParseDirectoryCopyEntry(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryCopyEntry directoryCopyEntry = new DirectoryCopyEntry();
directoryCopyEntry.DirectoryIndex = data.ReadUInt32();
return directoryCopyEntry;
}
/// <summary>
/// Parse a Stream into a Half-Life No Cache directory local entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life No Cache directory local entry on success, null on error</returns>
private static DirectoryLocalEntry ParseDirectoryLocalEntry(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryLocalEntry directoryLocalEntry = new DirectoryLocalEntry();
directoryLocalEntry.DirectoryIndex = data.ReadUInt32();
return directoryLocalEntry;
}
/// <summary>
/// Parse a Stream into a Half-Life No Cache unknown header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life No Cache unknown header on success, null on error</returns>
private static UnknownHeader ParseUnknownHeader(Stream data)
{
// TODO: Use marshalling here instead of building
UnknownHeader unknownHeader = new UnknownHeader();
unknownHeader.Dummy0 = data.ReadUInt32();
if (unknownHeader.Dummy0 != 0x00000001)
return null;
unknownHeader.Dummy1 = data.ReadUInt32();
if (unknownHeader.Dummy1 != 0x00000000)
return null;
return unknownHeader;
}
/// <summary>
/// Parse a Stream into a Half-Life No Cache unknown entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life No Cache unknown entry on success, null on error</returns>
private static UnknownEntry ParseUnknownEntry(Stream data)
{
// TODO: Use marshalling here instead of building
UnknownEntry unknownEntry = new UnknownEntry();
unknownEntry.Dummy0 = data.ReadUInt32();
return unknownEntry;
}
/// <summary>
/// Parse a Stream into a Half-Life No Cache checksum header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life No Cache checksum header on success, null on error</returns>
private static ChecksumHeader ParseChecksumHeader(Stream data)
{
// TODO: Use marshalling here instead of building
ChecksumHeader checksumHeader = new ChecksumHeader();
checksumHeader.Dummy0 = data.ReadUInt32();
if (checksumHeader.Dummy0 != 0x00000001)
return null;
checksumHeader.ChecksumSize = data.ReadUInt32();
return checksumHeader;
}
/// <summary>
/// Parse a Stream into a Half-Life No Cache checksum map header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life No Cache checksum map header on success, null on error</returns>
private static ChecksumMapHeader ParseChecksumMapHeader(Stream data)
{
// TODO: Use marshalling here instead of building
ChecksumMapHeader checksumMapHeader = new ChecksumMapHeader();
checksumMapHeader.Dummy0 = data.ReadUInt32();
if (checksumMapHeader.Dummy0 != 0x14893721)
return null;
checksumMapHeader.Dummy1 = data.ReadUInt32();
if (checksumMapHeader.Dummy1 != 0x00000001)
return null;
checksumMapHeader.ItemCount = data.ReadUInt32();
checksumMapHeader.ChecksumCount = data.ReadUInt32();
return checksumMapHeader;
}
/// <summary>
/// Parse a Stream into a Half-Life No Cache checksum map entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life No Cache checksum map entry on success, null on error</returns>
private static ChecksumMapEntry ParseChecksumMapEntry(Stream data)
{
// TODO: Use marshalling here instead of building
ChecksumMapEntry checksumMapEntry = new ChecksumMapEntry();
checksumMapEntry.ChecksumCount = data.ReadUInt32();
checksumMapEntry.FirstChecksumIndex = data.ReadUInt32();
return checksumMapEntry;
}
/// <summary>
/// Parse a Stream into a Half-Life No Cache checksum entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life No Cache checksum entry on success, null on error</returns>
private static ChecksumEntry ParseChecksumEntry(Stream data)
{
// TODO: Use marshalling here instead of building
ChecksumEntry checksumEntry = new ChecksumEntry();
checksumEntry.Checksum = data.ReadUInt32();
return checksumEntry;
}
#endregion
}
}

@@ -1,508 +0,0 @@
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using BinaryObjectScanner.Models.NewExecutable;
using BinaryObjectScanner.Utilities;
using static BinaryObjectScanner.Models.NewExecutable.Constants;
namespace BinaryObjectScanner.Builders
{
public static class NewExecutable
{
#region Byte Data
/// <summary>
/// Parse a byte array into a New Executable
/// </summary>
/// <param name="data">Byte array to parse</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>Filled executable on success, null on error</returns>
public static Executable ParseExecutable(byte[] data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return ParseExecutable(dataStream);
}
#endregion
#region Stream Data
/// <summary>
/// Parse a Stream into a New Executable
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled executable on success, null on error</returns>
public static Executable ParseExecutable(Stream data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
return null;
// If the offset is out of bounds
if (data.Position < 0 || data.Position >= data.Length)
return null;
// Cache the current offset
int initialOffset = (int)data.Position;
// Create a new executable to fill
var executable = new Executable();
#region MS-DOS Stub
// Parse the MS-DOS stub
var stub = MSDOS.ParseExecutable(data);
if (stub?.Header == null || stub.Header.NewExeHeaderAddr == 0)
return null;
// Set the MS-DOS stub
executable.Stub = stub;
#endregion
#region Executable Header
// Try to parse the executable header
data.Seek(initialOffset + stub.Header.NewExeHeaderAddr, SeekOrigin.Begin);
var executableHeader = ParseExecutableHeader(data);
if (executableHeader == null)
return null;
// Set the executable header
executable.Header = executableHeader;
#endregion
#region Segment Table
// If the offset for the segment table doesn't exist
int tableAddress = initialOffset
+ (int)stub.Header.NewExeHeaderAddr
+ executableHeader.SegmentTableOffset;
if (tableAddress >= data.Length)
return executable;
// Try to parse the segment table
data.Seek(tableAddress, SeekOrigin.Begin);
var segmentTable = ParseSegmentTable(data, executableHeader.FileSegmentCount);
if (segmentTable == null)
return null;
// Set the segment table
executable.SegmentTable = segmentTable;
#endregion
#region Resource Table
// If the offset for the resource table doesn't exist
tableAddress = initialOffset
+ (int)stub.Header.NewExeHeaderAddr
+ executableHeader.ResourceTableOffset;
if (tableAddress >= data.Length)
return executable;
// Try to parse the resource table
data.Seek(tableAddress, SeekOrigin.Begin);
var resourceTable = ParseResourceTable(data, executableHeader.ResourceEntriesCount);
if (resourceTable == null)
return null;
// Set the resource table
executable.ResourceTable = resourceTable;
#endregion
#region Resident-Name Table
// If the offset for the resident-name table doesn't exist
tableAddress = initialOffset
+ (int)stub.Header.NewExeHeaderAddr
+ executableHeader.ResidentNameTableOffset;
int endOffset = initialOffset
+ (int)stub.Header.NewExeHeaderAddr
+ executableHeader.ModuleReferenceTableOffset;
if (tableAddress >= data.Length)
return executable;
// Try to parse the resident-name table
data.Seek(tableAddress, SeekOrigin.Begin);
var residentNameTable = ParseResidentNameTable(data, endOffset);
if (residentNameTable == null)
return null;
// Set the resident-name table
executable.ResidentNameTable = residentNameTable;
#endregion
#region Module-Reference Table
// If the offset for the module-reference table doesn't exist
tableAddress = initialOffset
+ (int)stub.Header.NewExeHeaderAddr
+ executableHeader.ModuleReferenceTableOffset;
if (tableAddress >= data.Length)
return executable;
// Try to parse the module-reference table
data.Seek(tableAddress, SeekOrigin.Begin);
var moduleReferenceTable = ParseModuleReferenceTable(data, executableHeader.ModuleReferenceTableSize);
if (moduleReferenceTable == null)
return null;
// Set the module-reference table
executable.ModuleReferenceTable = moduleReferenceTable;
#endregion
#region Imported-Name Table
// If the offset for the imported-name table doesn't exist
tableAddress = initialOffset
+ (int)stub.Header.NewExeHeaderAddr
+ executableHeader.ImportedNamesTableOffset;
endOffset = initialOffset
+ (int)stub.Header.NewExeHeaderAddr
+ executableHeader.EntryTableOffset;
if (tableAddress >= data.Length)
return executable;
// Try to parse the imported-name table
data.Seek(tableAddress, SeekOrigin.Begin);
var importedNameTable = ParseImportedNameTable(data, endOffset);
if (importedNameTable == null)
return null;
// Set the imported-name table
executable.ImportedNameTable = importedNameTable;
#endregion
#region Entry Table
// If the offset for the entry table doesn't exist
tableAddress = initialOffset
+ (int)stub.Header.NewExeHeaderAddr
+ executableHeader.EntryTableOffset;
endOffset = initialOffset
+ (int)stub.Header.NewExeHeaderAddr
+ executableHeader.EntryTableOffset
+ executableHeader.EntryTableSize;
if (tableAddress >= data.Length)
return executable;
// Try to parse the entry table
data.Seek(tableAddress, SeekOrigin.Begin);
var entryTable = ParseEntryTable(data, endOffset);
if (entryTable == null)
return null;
// Set the entry table
executable.EntryTable = entryTable;
#endregion
#region Nonresident-Name Table
// If the offset for the nonresident-name table doesn't exist
tableAddress = initialOffset
+ (int)executableHeader.NonResidentNamesTableOffset;
endOffset = initialOffset
+ (int)executableHeader.NonResidentNamesTableOffset
+ executableHeader.NonResidentNameTableSize;
if (tableAddress >= data.Length)
return executable;
// Try to parse the nonresident-name table
data.Seek(tableAddress, SeekOrigin.Begin);
var nonResidentNameTable = ParseNonResidentNameTable(data, endOffset);
if (nonResidentNameTable == null)
return null;
// Set the nonresident-name table
executable.NonResidentNameTable = nonResidentNameTable;
#endregion
return executable;
}
/// <summary>
/// Parse a Stream into a New Executable header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled executable header on success, null on error</returns>
private static ExecutableHeader ParseExecutableHeader(Stream data)
{
// TODO: Use marshalling here instead of building
var header = new ExecutableHeader();
byte[] magic = data.ReadBytes(2);
header.Magic = Encoding.ASCII.GetString(magic);
if (header.Magic != SignatureString)
return null;
header.LinkerVersion = data.ReadByteValue();
header.LinkerRevision = data.ReadByteValue();
header.EntryTableOffset = data.ReadUInt16();
header.EntryTableSize = data.ReadUInt16();
header.CrcChecksum = data.ReadUInt32();
header.FlagWord = (HeaderFlag)data.ReadUInt16();
header.AutomaticDataSegmentNumber = data.ReadUInt16();
header.InitialHeapAlloc = data.ReadUInt16();
header.InitialStackAlloc = data.ReadUInt16();
header.InitialCSIPSetting = data.ReadUInt32();
header.InitialSSSPSetting = data.ReadUInt32();
header.FileSegmentCount = data.ReadUInt16();
header.ModuleReferenceTableSize = data.ReadUInt16();
header.NonResidentNameTableSize = data.ReadUInt16();
header.SegmentTableOffset = data.ReadUInt16();
header.ResourceTableOffset = data.ReadUInt16();
header.ResidentNameTableOffset = data.ReadUInt16();
header.ModuleReferenceTableOffset = data.ReadUInt16();
header.ImportedNamesTableOffset = data.ReadUInt16();
header.NonResidentNamesTableOffset = data.ReadUInt32();
header.MovableEntriesCount = data.ReadUInt16();
header.SegmentAlignmentShiftCount = data.ReadUInt16();
header.ResourceEntriesCount = data.ReadUInt16();
header.TargetOperatingSystem = (OperatingSystem)data.ReadByteValue();
header.AdditionalFlags = (OS2Flag)data.ReadByteValue();
header.ReturnThunkOffset = data.ReadUInt16();
header.SegmentReferenceThunkOffset = data.ReadUInt16();
header.MinCodeSwapAreaSize = data.ReadUInt16();
header.WindowsSDKRevision = data.ReadByteValue();
header.WindowsSDKVersion = data.ReadByteValue();
return header;
}
/// <summary>
/// Parse a Stream into a segment table
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="count">Number of segment table entries to read</param>
/// <returns>Filled segment table on success, null on error</returns>
private static SegmentTableEntry[] ParseSegmentTable(Stream data, int count)
{
// TODO: Use marshalling here instead of building
var segmentTable = new SegmentTableEntry[count];
for (int i = 0; i < count; i++)
{
var entry = new SegmentTableEntry();
entry.Offset = data.ReadUInt16();
entry.Length = data.ReadUInt16();
entry.FlagWord = (SegmentTableEntryFlag)data.ReadUInt16();
entry.MinimumAllocationSize = data.ReadUInt16();
segmentTable[i] = entry;
}
return segmentTable;
}
/// <summary>
/// Parse a Stream into a resource table
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="count">Number of resource table entries to read</param>
/// <returns>Filled resource table on success, null on error</returns>
private static ResourceTable ParseResourceTable(Stream data, int count)
{
long initialOffset = data.Position;
// TODO: Use marshalling here instead of building
var resourceTable = new ResourceTable();
resourceTable.AlignmentShiftCount = data.ReadUInt16();
resourceTable.ResourceTypes = new ResourceTypeInformationEntry[count];
for (int i = 0; i < resourceTable.ResourceTypes.Length; i++)
{
var entry = new ResourceTypeInformationEntry();
entry.TypeID = data.ReadUInt16();
entry.ResourceCount = data.ReadUInt16();
entry.Reserved = data.ReadUInt32();
entry.Resources = new ResourceTypeResourceEntry[entry.ResourceCount];
for (int j = 0; j < entry.ResourceCount; j++)
{
// TODO: Should we read and store the resource data?
var resource = new ResourceTypeResourceEntry();
resource.Offset = data.ReadUInt16();
resource.Length = data.ReadUInt16();
resource.FlagWord = (ResourceTypeResourceFlag)data.ReadUInt16();
resource.ResourceID = data.ReadUInt16();
resource.Reserved = data.ReadUInt32();
entry.Resources[j] = resource;
}
resourceTable.ResourceTypes[i] = entry;
}
// Get the full list of unique string offsets
var stringOffsets = resourceTable.ResourceTypes
.Where(rt => rt.IsIntegerType() == false)
.Select(rt => rt.TypeID)
.Union(resourceTable.ResourceTypes
.SelectMany(rt => rt.Resources)
.Where(r => r.IsIntegerType() == false)
.Select(r => r.ResourceID))
.Distinct()
.OrderBy(o => o)
.ToList();
// Populate the type and name string dictionary
resourceTable.TypeAndNameStrings = new Dictionary<ushort, ResourceTypeAndNameString>();
for (int i = 0; i < stringOffsets.Count; i++)
{
int stringOffset = (int)(stringOffsets[i] + initialOffset);
data.Seek(stringOffset, SeekOrigin.Begin);
var str = new ResourceTypeAndNameString();
str.Length = data.ReadByteValue();
str.Text = data.ReadBytes(str.Length);
resourceTable.TypeAndNameStrings[stringOffsets[i]] = str;
}
return resourceTable;
}
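// Note: per the NE resource table layout, a type or resource ID with the
// high bit (0x8000) set is an integer identifier; otherwise the value is an
// offset, relative to the start of the resource table, to a length-prefixed
// string. IsIntegerType() is assumed to encode that high-bit check, which is
// why only non-integer IDs feed the string-offset pass above.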
/// <summary>
/// Parse a Stream into a resident-name table
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="endOffset">First address not part of the resident-name table</param>
/// <returns>Filled resident-name table on success, null on error</returns>
private static ResidentNameTableEntry[] ParseResidentNameTable(Stream data, int endOffset)
{
// TODO: Use marshalling here instead of building
var residentNameTable = new List<ResidentNameTableEntry>();
while (data.Position < endOffset)
{
var entry = new ResidentNameTableEntry();
entry.Length = data.ReadByteValue();
entry.NameString = data.ReadBytes(entry.Length);
entry.OrdinalNumber = data.ReadUInt16();
residentNameTable.Add(entry);
}
return residentNameTable.ToArray();
}
/// <summary>
/// Parse a Stream into a module-reference table
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="count">Number of module-reference table entries to read</param>
/// <returns>Filled module-reference table on success, null on error</returns>
private static ModuleReferenceTableEntry[] ParseModuleReferenceTable(Stream data, int count)
{
// TODO: Use marshalling here instead of building
var moduleReferenceTable = new ModuleReferenceTableEntry[count];
for (int i = 0; i < count; i++)
{
var entry = new ModuleReferenceTableEntry();
entry.Offset = data.ReadUInt16();
moduleReferenceTable[i] = entry;
}
return moduleReferenceTable;
}
/// <summary>
/// Parse a Stream into an imported-name table
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="endOffset">First address not part of the imported-name table</param>
/// <returns>Filled imported-name table on success, null on error</returns>
private static Dictionary<ushort, ImportedNameTableEntry> ParseImportedNameTable(Stream data, int endOffset)
{
// TODO: Use marshalling here instead of building
var importedNameTable = new Dictionary<ushort, ImportedNameTableEntry>();
while (data.Position < endOffset)
{
ushort currentOffset = (ushort)data.Position;
var entry = new ImportedNameTableEntry();
entry.Length = data.ReadByteValue();
entry.NameString = data.ReadBytes(entry.Length);
importedNameTable[currentOffset] = entry;
}
return importedNameTable;
}
/// <summary>
/// Parse a Stream into an entry table
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="endOffset">First address not part of the entry table</param>
/// <returns>Filled entry table on success, null on error</returns>
private static EntryTableBundle[] ParseEntryTable(Stream data, int endOffset)
{
// TODO: Use marshalling here instead of building
var entryTable = new List<EntryTableBundle>();
while (data.Position < endOffset)
{
var entry = new EntryTableBundle();
entry.EntryCount = data.ReadByteValue();
entry.SegmentIndicator = data.ReadByteValue();
switch (entry.GetEntryType())
{
case SegmentEntryType.Unused:
break;
case SegmentEntryType.FixedSegment:
entry.FixedFlagWord = (FixedSegmentEntryFlag)data.ReadByteValue();
entry.FixedOffset = data.ReadUInt16();
break;
case SegmentEntryType.MoveableSegment:
entry.MoveableFlagWord = (MoveableSegmentEntryFlag)data.ReadByteValue();
entry.MoveableReserved = data.ReadUInt16();
entry.MoveableSegmentNumber = data.ReadByteValue();
entry.MoveableOffset = data.ReadUInt16();
break;
}
entryTable.Add(entry);
}
return entryTable.ToArray();
}
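// Note: per the NE entry table layout, each bundle's segment indicator
// selects the shape parsed above: 0x00 marks unused entries, 0xFF marks
// moveable-segment entries, and any other value is the segment number of a
// fixed-segment entry; GetEntryType() is assumed to wrap that mapping.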
/// <summary>
/// Parse a Stream into a nonresident-name table
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="endOffset">First address not part of the nonresident-name table</param>
/// <returns>Filled nonresident-name table on success, null on error</returns>
private static NonResidentNameTableEntry[] ParseNonResidentNameTable(Stream data, int endOffset)
{
// TODO: Use marshalling here instead of building
var nonResidentNameTable = new List<NonResidentNameTableEntry>();
while (data.Position < endOffset)
{
var entry = new NonResidentNameTableEntry();
entry.Length = data.ReadByteValue();
entry.NameString = data.ReadBytes(entry.Length);
entry.OrdinalNumber = data.ReadUInt16();
nonResidentNameTable.Add(entry);
}
return nonResidentNameTable.ToArray();
}
#endregion
}
}

@@ -1,393 +0,0 @@
using System.Collections.Generic;
using System.IO;
using System.Text;
using BinaryObjectScanner.Models.Nitro;
using BinaryObjectScanner.Utilities;
namespace BinaryObjectScanner.Builders
{
public class Nitro
{
#region Byte Data
/// <summary>
/// Parse a byte array into an NDS cart image
/// </summary>
/// <param name="data">Byte array to parse</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>Filled cart image on success, null on error</returns>
public static Cart ParseCart(byte[] data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return ParseCart(dataStream);
}
#endregion
#region Stream Data
/// <summary>
/// Parse a Stream into an NDS cart image
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled cart image on success, null on error</returns>
public static Cart ParseCart(Stream data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
return null;
// If the offset is out of bounds
if (data.Position < 0 || data.Position >= data.Length)
return null;
// Cache the current offset
int initialOffset = (int)data.Position;
// Create a new cart image to fill
var cart = new Cart();
#region Header
// Try to parse the header
var header = ParseCommonHeader(data);
if (header == null)
return null;
// Set the cart image header
cart.CommonHeader = header;
#endregion
#region Extended DSi Header
// If we have a DSi-compatible cartridge
if (header.UnitCode == Unitcode.NDSPlusDSi || header.UnitCode == Unitcode.DSi)
{
var extendedDSiHeader = ParseExtendedDSiHeader(data);
if (extendedDSiHeader == null)
return null;
cart.ExtendedDSiHeader = extendedDSiHeader;
}
#endregion
#region Secure Area
// Try to get the secure area offset
long secureAreaOffset = 0x4000;
if (secureAreaOffset > data.Length)
return null;
// Seek to the secure area
data.Seek(secureAreaOffset, SeekOrigin.Begin);
// Read the secure area without processing
cart.SecureArea = data.ReadBytes(0x800);
#endregion
#region Name Table
// Try to get the name table offset
long nameTableOffset = header.FileNameTableOffset;
if (nameTableOffset < 0 || nameTableOffset > data.Length)
return null;
// Seek to the name table
data.Seek(nameTableOffset, SeekOrigin.Begin);
// Try to parse the name table
var nameTable = ParseNameTable(data);
if (nameTable == null)
return null;
// Set the name table
cart.NameTable = nameTable;
#endregion
#region File Allocation Table
// Try to get the file allocation table offset
long fileAllocationTableOffset = header.FileAllocationTableOffset;
if (fileAllocationTableOffset < 0 || fileAllocationTableOffset > data.Length)
return null;
// Seek to the file allocation table
data.Seek(fileAllocationTableOffset, SeekOrigin.Begin);
// Create the file allocation table
var fileAllocationTable = new List<FileAllocationTableEntry>();
// Try to parse the file allocation table
while (data.Position - fileAllocationTableOffset < header.FileAllocationTableLength)
{
var entry = ParseFileAllocationTableEntry(data);
fileAllocationTable.Add(entry);
}
// Set the file allocation table
cart.FileAllocationTable = fileAllocationTable.ToArray();
#endregion
// TODO: Read and optionally parse out the other areas
// Look for offsets and lengths in the header pieces
return cart;
}
/// <summary>
/// Parse a Stream into a common header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled common header on success, null on error</returns>
private static CommonHeader ParseCommonHeader(Stream data)
{
// TODO: Use marshalling here instead of building
CommonHeader commonHeader = new CommonHeader();
byte[] gameTitle = data.ReadBytes(12);
commonHeader.GameTitle = Encoding.ASCII.GetString(gameTitle).TrimEnd('\0');
commonHeader.GameCode = data.ReadUInt32();
byte[] makerCode = data.ReadBytes(2);
commonHeader.MakerCode = Encoding.ASCII.GetString(bytes: makerCode).TrimEnd('\0');
commonHeader.UnitCode = (Unitcode)data.ReadByteValue();
commonHeader.EncryptionSeedSelect = data.ReadByteValue();
commonHeader.DeviceCapacity = data.ReadByteValue();
commonHeader.Reserved1 = data.ReadBytes(7);
commonHeader.GameRevision = data.ReadUInt16();
commonHeader.RomVersion = data.ReadByteValue();
commonHeader.InternalFlags = data.ReadByteValue();
commonHeader.ARM9RomOffset = data.ReadUInt32();
commonHeader.ARM9EntryAddress = data.ReadUInt32();
commonHeader.ARM9LoadAddress = data.ReadUInt32();
commonHeader.ARM9Size = data.ReadUInt32();
commonHeader.ARM7RomOffset = data.ReadUInt32();
commonHeader.ARM7EntryAddress = data.ReadUInt32();
commonHeader.ARM7LoadAddress = data.ReadUInt32();
commonHeader.ARM7Size = data.ReadUInt32();
commonHeader.FileNameTableOffset = data.ReadUInt32();
commonHeader.FileNameTableLength = data.ReadUInt32();
commonHeader.FileAllocationTableOffset = data.ReadUInt32();
commonHeader.FileAllocationTableLength = data.ReadUInt32();
commonHeader.ARM9OverlayOffset = data.ReadUInt32();
commonHeader.ARM9OverlayLength = data.ReadUInt32();
commonHeader.ARM7OverlayOffset = data.ReadUInt32();
commonHeader.ARM7OverlayLength = data.ReadUInt32();
commonHeader.NormalCardControlRegisterSettings = data.ReadUInt32();
commonHeader.SecureCardControlRegisterSettings = data.ReadUInt32();
commonHeader.IconBannerOffset = data.ReadUInt32();
commonHeader.SecureAreaCRC = data.ReadUInt16();
commonHeader.SecureTransferTimeout = data.ReadUInt16();
commonHeader.ARM9Autoload = data.ReadUInt32();
commonHeader.ARM7Autoload = data.ReadUInt32();
commonHeader.SecureDisable = data.ReadBytes(8);
commonHeader.NTRRegionRomSize = data.ReadUInt32();
commonHeader.HeaderSize = data.ReadUInt32();
commonHeader.Reserved2 = data.ReadBytes(56);
commonHeader.NintendoLogo = data.ReadBytes(156);
commonHeader.NintendoLogoCRC = data.ReadUInt16();
commonHeader.HeaderCRC = data.ReadUInt16();
commonHeader.DebuggerReserved = data.ReadBytes(0x20);
return commonHeader;
}
/// <summary>
/// Parse a Stream into an extended DSi header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled extended DSi header on success, null on error</returns>
private static ExtendedDSiHeader ParseExtendedDSiHeader(Stream data)
{
// TODO: Use marshalling here instead of building
ExtendedDSiHeader extendedDSiHeader = new ExtendedDSiHeader();
extendedDSiHeader.GlobalMBK15Settings = new uint[5];
for (int i = 0; i < 5; i++)
{
extendedDSiHeader.GlobalMBK15Settings[i] = data.ReadUInt32();
}
extendedDSiHeader.LocalMBK68SettingsARM9 = new uint[3];
for (int i = 0; i < 3; i++)
{
extendedDSiHeader.LocalMBK68SettingsARM9[i] = data.ReadUInt32();
}
extendedDSiHeader.LocalMBK68SettingsARM7 = new uint[3];
for (int i = 0; i < 3; i++)
{
extendedDSiHeader.LocalMBK68SettingsARM7[i] = data.ReadUInt32();
}
extendedDSiHeader.GlobalMBK9Setting = data.ReadUInt32();
extendedDSiHeader.RegionFlags = data.ReadUInt32();
extendedDSiHeader.AccessControl = data.ReadUInt32();
extendedDSiHeader.ARM7SCFGEXTMask = data.ReadUInt32();
extendedDSiHeader.ReservedFlags = data.ReadUInt32();
extendedDSiHeader.ARM9iRomOffset = data.ReadUInt32();
extendedDSiHeader.Reserved3 = data.ReadUInt32();
extendedDSiHeader.ARM9iLoadAddress = data.ReadUInt32();
extendedDSiHeader.ARM9iSize = data.ReadUInt32();
extendedDSiHeader.ARM7iRomOffset = data.ReadUInt32();
extendedDSiHeader.Reserved4 = data.ReadUInt32();
extendedDSiHeader.ARM7iLoadAddress = data.ReadUInt32();
extendedDSiHeader.ARM7iSize = data.ReadUInt32();
extendedDSiHeader.DigestNTRRegionOffset = data.ReadUInt32();
extendedDSiHeader.DigestNTRRegionLength = data.ReadUInt32();
extendedDSiHeader.DigestTWLRegionOffset = data.ReadUInt32();
extendedDSiHeader.DigestTWLRegionLength = data.ReadUInt32();
extendedDSiHeader.DigestSectorHashtableRegionOffset = data.ReadUInt32();
extendedDSiHeader.DigestSectorHashtableRegionLength = data.ReadUInt32();
extendedDSiHeader.DigestBlockHashtableRegionOffset = data.ReadUInt32();
extendedDSiHeader.DigestBlockHashtableRegionLength = data.ReadUInt32();
extendedDSiHeader.DigestSectorSize = data.ReadUInt32();
extendedDSiHeader.DigestBlockSectorCount = data.ReadUInt32();
extendedDSiHeader.IconBannerSize = data.ReadUInt32();
extendedDSiHeader.Unknown1 = data.ReadUInt32();
extendedDSiHeader.ModcryptArea1Offset = data.ReadUInt32();
extendedDSiHeader.ModcryptArea1Size = data.ReadUInt32();
extendedDSiHeader.ModcryptArea2Offset = data.ReadUInt32();
extendedDSiHeader.ModcryptArea2Size = data.ReadUInt32();
extendedDSiHeader.TitleID = data.ReadBytes(8);
extendedDSiHeader.DSiWarePublicSavSize = data.ReadUInt32();
extendedDSiHeader.DSiWarePrivateSavSize = data.ReadUInt32();
extendedDSiHeader.ReservedZero = data.ReadBytes(176);
extendedDSiHeader.Unknown2 = data.ReadBytes(0x10);
extendedDSiHeader.ARM9WithSecureAreaSHA1HMACHash = data.ReadBytes(20);
extendedDSiHeader.ARM7SHA1HMACHash = data.ReadBytes(20);
extendedDSiHeader.DigestMasterSHA1HMACHash = data.ReadBytes(20);
extendedDSiHeader.BannerSHA1HMACHash = data.ReadBytes(20);
extendedDSiHeader.ARM9iDecryptedSHA1HMACHash = data.ReadBytes(20);
extendedDSiHeader.ARM7iDecryptedSHA1HMACHash = data.ReadBytes(20);
extendedDSiHeader.Reserved5 = data.ReadBytes(40);
extendedDSiHeader.ARM9NoSecureAreaSHA1HMACHash = data.ReadBytes(20);
extendedDSiHeader.Reserved6 = data.ReadBytes(2636);
extendedDSiHeader.ReservedAndUnchecked = data.ReadBytes(0x180);
extendedDSiHeader.RSASignature = data.ReadBytes(0x80);
return extendedDSiHeader;
}
/// <summary>
/// Parse a Stream into a name table
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled name table on success, null on error</returns>
private static NameTable ParseNameTable(Stream data)
{
// TODO: Use marshalling here instead of building
NameTable nameTable = new NameTable();
// Create a variable-length table
var folderAllocationTable = new List<FolderAllocationTableEntry>();
int entryCount = int.MaxValue;
while (entryCount > 0)
{
var entry = ParseFolderAllocationTableEntry(data);
folderAllocationTable.Add(entry);
// If we have the root entry
if (entryCount == int.MaxValue)
entryCount = (entry.Unknown << 8) | entry.ParentFolderIndex;
// Decrement the entry count
entryCount--;
}
// Assign the folder allocation table
nameTable.FolderAllocationTable = folderAllocationTable.ToArray();
// Create a variable-length table
var nameList = new List<NameListEntry>();
while (true)
{
var entry = ParseNameListEntry(data);
if (entry == null)
break;
nameList.Add(entry);
}
// Assign the name list
nameTable.NameList = nameList.ToArray();
return nameTable;
}
/// <summary>
/// Parse a Stream into a folder allocation table entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled folder allocation table entry on success, null on error</returns>
private static FolderAllocationTableEntry ParseFolderAllocationTableEntry(Stream data)
{
// TODO: Use marshalling here instead of building
FolderAllocationTableEntry entry = new FolderAllocationTableEntry();
entry.StartOffset = data.ReadUInt32();
entry.FirstFileIndex = data.ReadUInt16();
entry.ParentFolderIndex = data.ReadByteValue();
entry.Unknown = data.ReadByteValue();
return entry;
}
/// <summary>
/// Parse a Stream into a name list entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled name list entry on success, null on error</returns>
private static NameListEntry ParseNameListEntry(Stream data)
{
// TODO: Use marshalling here instead of building
NameListEntry entry = new NameListEntry();
byte flagAndSize = data.ReadByteValue();
// A type/length byte of 0x00 ends the name list; 0xFF is also treated as a terminator
if (flagAndSize == 0x00 || flagAndSize == 0xFF)
return null;
entry.Folder = (flagAndSize & 0x80) != 0;
byte size = (byte)(flagAndSize & ~0x80);
if (size > 0)
{
byte[] name = data.ReadBytes(size);
entry.Name = Encoding.UTF8.GetString(name);
}
if (entry.Folder)
entry.Index = data.ReadUInt16();
return entry;
}
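
A worked example of the type/length byte decoding above (values illustrative):

// 0x03 -> file entry, 3-byte UTF-8 name follows
// 0x83 -> folder entry (high bit set), 3-byte name follows, then a ushort folder index
// 0x00 -> end of the name list (see the terminator check above)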
/// <summary>
/// Parse a Stream into a name list entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled name list entry on success, null on error</returns>
private static FileAllocationTableEntry ParseFileAllocationTableEntry(Stream data)
{
// TODO: Use marshalling here instead of building
FileAllocationTableEntry entry = new FileAllocationTableEntry();
entry.StartOffset = data.ReadUInt32();
entry.EndOffset = data.ReadUInt32();
return entry;
}
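
Note that a FAT entry stores absolute start and end offsets rather than a length; the end offset is exclusive, so a file's size is recovered by subtraction:

// Derived, not stored: EndOffset points just past the file's last byte
uint fileSize = entry.EndOffset - entry.StartOffset;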
#endregion
}
}


@@ -1,137 +0,0 @@
using System.IO;
using System.Text;
using BinaryObjectScanner.Models.PAK;
using BinaryObjectScanner.Utilities;
using static BinaryObjectScanner.Models.PAK.Constants;
namespace BinaryObjectScanner.Builders
{
public static class PAK
{
#region Byte Data
/// <summary>
/// Parse a byte array into a Half-Life Package
/// </summary>
/// <param name="data">Byte array to parse</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>Filled Half-Life Package on success, null on error</returns>
public static Models.PAK.File ParseFile(byte[] data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return ParseFile(dataStream);
}
#endregion
#region Stream Data
/// <summary>
/// Parse a Stream into a Half-Life Package
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Package on success, null on error</returns>
public static Models.PAK.File ParseFile(Stream data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
return null;
// If the offset is out of bounds
if (data.Position < 0 || data.Position >= data.Length)
return null;
// Cache the current offset
long initialOffset = data.Position;
// Create a new Half-Life Package to fill
var file = new Models.PAK.File();
#region Header
// Try to parse the header
var header = ParseHeader(data);
if (header == null)
return null;
// Set the package header
file.Header = header;
#endregion
#region Directory Items
// Get the directory items offset
uint directoryItemsOffset = header.DirectoryOffset;
if (directoryItemsOffset < 0 || directoryItemsOffset >= data.Length)
return null;
// Seek to the directory items
data.Seek(directoryItemsOffset, SeekOrigin.Begin);
// Create the directory item array
file.DirectoryItems = new DirectoryItem[header.DirectoryLength / 64];
// Try to parse the directory items
for (int i = 0; i < file.DirectoryItems.Length; i++)
{
var directoryItem = ParseDirectoryItem(data);
file.DirectoryItems[i] = directoryItem;
}
#endregion
return file;
}
/// <summary>
/// Parse a Stream into a Half-Life Package header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Package header on success, null on error</returns>
private static Header ParseHeader(Stream data)
{
// TODO: Use marshalling here instead of building
Header header = new Header();
byte[] signature = data.ReadBytes(4);
header.Signature = Encoding.ASCII.GetString(signature);
if (header.Signature != SignatureString)
return null;
header.DirectoryOffset = data.ReadUInt32();
header.DirectoryLength = data.ReadUInt32();
return header;
}
/// <summary>
/// Parse a Stream into a Half-Life Package directory item
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Package directory item on success, null on error</returns>
private static DirectoryItem ParseDirectoryItem(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryItem directoryItem = new DirectoryItem();
byte[] itemName = data.ReadBytes(56);
directoryItem.ItemName = Encoding.ASCII.GetString(itemName).TrimEnd('\0');
directoryItem.ItemOffset = data.ReadUInt32();
directoryItem.ItemLength = data.ReadUInt32();
return directoryItem;
}
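
Each directory item is a fixed 64 bytes (a 56-byte name plus two uint32 fields), which is why ParseFile sizes the array as DirectoryLength / 64. Extracting an item's payload is then a seek-and-read; a minimal sketch against the same stream ParseFile consumed:

// Hypothetical extraction of a single item's raw data
data.Seek(directoryItem.ItemOffset, SeekOrigin.Begin);
byte[] payload = data.ReadBytes((int)directoryItem.ItemLength);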
#endregion
}
}


@@ -1,211 +0,0 @@
using System.IO;
using System.Text;
using BinaryObjectScanner.Models.PFF;
using BinaryObjectScanner.Utilities;
using static BinaryObjectScanner.Models.PFF.Constants;
namespace BinaryObjectScanner.Builders
{
public class PFF
{
#region Byte Data
/// <summary>
/// Parse a byte array into a PFF archive
/// </summary>
/// <param name="data">Byte array to parse</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>Filled archive on success, null on error</returns>
public static Archive ParseArchive(byte[] data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return ParseArchive(dataStream);
}
#endregion
#region Stream Data
/// <summary>
/// Parse a Stream into a PFF archive
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled archive on success, null on error</returns>
public static Archive ParseArchive(Stream data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
return null;
// If the offset is out of bounds
if (data.Position < 0 || data.Position >= data.Length)
return null;
// Cache the current offset
int initialOffset = (int)data.Position;
// Create a new archive to fill
var archive = new Archive();
#region Header
// Try to parse the header
var header = ParseHeader(data);
if (header == null)
return null;
// Set the archive header
archive.Header = header;
#endregion
#region Segments
// Get the segments
long offset = header.FileListOffset;
if (offset < 0 || offset >= data.Length)
return null;
// Seek to the segments
data.Seek(offset, SeekOrigin.Begin);
// Create the segments array
archive.Segments = new Segment[header.NumberOfFiles];
// Read all segments in turn
for (int i = 0; i < header.NumberOfFiles; i++)
{
var file = ParseSegment(data, header.FileSegmentSize);
if (file == null)
return null;
archive.Segments[i] = file;
}
#endregion
#region Footer
// Get the footer offset
offset = header.FileListOffset + (header.FileSegmentSize * header.NumberOfFiles);
if (offset < 0 || offset >= data.Length)
return null;
// Seek to the footer
data.Seek(offset, SeekOrigin.Begin);
// Try to parse the footer
var footer = ParseFooter(data);
if (footer == null)
return null;
// Set the archive footer
archive.Footer = footer;
#endregion
return archive;
}
/// <summary>
/// Parse a Stream into a header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled header on success, null on error</returns>
private static Header ParseHeader(Stream data)
{
// TODO: Use marshalling here instead of building
Header header = new Header();
header.HeaderSize = data.ReadUInt32();
byte[] signature = data.ReadBytes(4);
header.Signature = Encoding.ASCII.GetString(signature);
header.NumberOfFiles = data.ReadUInt32();
header.FileSegmentSize = data.ReadUInt32();
switch (header.Signature)
{
case Version0SignatureString:
if (header.FileSegmentSize != Version0SegmentSize)
return null;
break;
case Version2SignatureString:
if (header.FileSegmentSize != Version2SegmentSize)
return null;
break;
// Version 3 can sometimes have Version 2 segment sizes
case Version3SignatureString:
if (header.FileSegmentSize != Version2SegmentSize && header.FileSegmentSize != Version3SegmentSize)
return null;
break;
case Version4SignatureString:
if (header.FileSegmentSize != Version4SegmentSize)
return null;
break;
default:
return null;
}
header.FileListOffset = data.ReadUInt32();
return header;
}
/// <summary>
/// Parse a Stream into a footer
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled footer on success, null on error</returns>
private static Footer ParseFooter(Stream data)
{
// TODO: Use marshalling here instead of building
Footer footer = new Footer();
footer.SystemIP = data.ReadUInt32();
footer.Reserved = data.ReadUInt32();
byte[] kingTag = data.ReadBytes(4);
footer.KingTag = Encoding.ASCII.GetString(kingTag);
return footer;
}
/// <summary>
/// Parse a Stream into a file entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="segmentSize">PFF segment size</param>
/// <returns>Filled file entry on success, null on error</returns>
private static Segment ParseSegment(Stream data, uint segmentSize)
{
// TODO: Use marshalling here instead of building
Segment segment = new Segment();
segment.Deleted = data.ReadUInt32();
segment.FileLocation = data.ReadUInt32();
segment.FileSize = data.ReadUInt32();
segment.PackedDate = data.ReadUInt32();
byte[] fileName = data.ReadBytes(0x10);
segment.FileName = Encoding.ASCII.GetString(fileName).TrimEnd('\0');
if (segmentSize > Version2SegmentSize)
segment.ModifiedDate = data.ReadUInt32();
if (segmentSize > Version3SegmentSize)
segment.CompressionLevel = data.ReadUInt32();
return segment;
}
#endregion
}
}
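
For orientation, a minimal hypothetical caller (the file path and the Deleted == 0 liveness test are assumptions, not part of this source):

using (var stream = System.IO.File.OpenRead("example.pff"))
{
var archive = PFF.ParseArchive(stream);
if (archive?.Segments != null)
{
foreach (var segment in archive.Segments)
{
// Assumption: a zero Deleted field marks a live entry
if (segment.Deleted == 0)
System.Console.WriteLine($"{segment.FileName} ({segment.FileSize} bytes)");
}
}
}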


@@ -1,463 +0,0 @@
using System.IO;
using System.Text;
using BinaryObjectScanner.Models.PlayJ;
using BinaryObjectScanner.Utilities;
using static BinaryObjectScanner.Models.PlayJ.Constants;
namespace BinaryObjectScanner.Builders
{
public class PlayJ
{
#region Byte Data
/// <summary>
/// Parse a byte array into a PlayJ playlist
/// </summary>
/// <param name="data">Byte array to parse</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>Filled playlist on success, null on error</returns>
public static Playlist ParsePlaylist(byte[] data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return ParsePlaylist(dataStream);
}
/// <summary>
/// Parse a byte array into a PlayJ audio file
/// </summary>
/// <param name="data">Byte array to parse</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>Filled audio file on success, null on error</returns>
public static AudioFile ParseAudioFile(byte[] data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return ParseAudioFile(dataStream);
}
#endregion
#region Stream Data
/// <summary>
/// Parse a Stream into a PlayJ playlist
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled playlist on success, null on error</returns>
public static Playlist ParsePlaylist(Stream data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
return null;
// If the offset is out of bounds
if (data.Position < 0 || data.Position >= data.Length)
return null;
// Cache the current offset
int initialOffset = (int)data.Position;
// Create a new playlist to fill
var playlist = new Playlist();
#region Playlist Header
// Try to parse the playlist header
var playlistHeader = ParsePlaylistHeader(data);
if (playlistHeader == null)
return null;
// Set the playlist header
playlist.Header = playlistHeader;
#endregion
#region Audio Files
// Create the audio files array
playlist.AudioFiles = new AudioFile[playlistHeader.TrackCount];
// Try to parse the audio files
for (int i = 0; i < playlist.AudioFiles.Length; i++)
{
long currentOffset = data.Position;
var entryHeader = ParseAudioFile(data, currentOffset);
if (entryHeader == null)
return null;
playlist.AudioFiles[i] = entryHeader;
}
#endregion
return playlist;
}
/// <summary>
/// Parse a Stream into a PlayJ audio file
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="adjust">Offset to adjust all seeking by</param>
/// <returns>Filled audio file on success, null on error</returns>
public static AudioFile ParseAudioFile(Stream data, long adjust = 0)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
return null;
// If the offset is out of bounds
if (data.Position < 0 || data.Position >= data.Length)
return null;
// Cache the current offset
int initialOffset = (int)data.Position;
// Create a new audio file to fill
var audioFile = new AudioFile();
#region Audio Header
// Try to parse the audio header
var audioHeader = ParseAudioHeader(data);
if (audioHeader == null)
return null;
// Set the audio header
audioFile.Header = audioHeader;
#endregion
#region Unknown Block 1
uint unknownOffset1 = (audioHeader.Version == 0x00000000)
? (audioHeader as AudioHeaderV1).UnknownOffset1
: (audioHeader as AudioHeaderV2).UnknownOffset1 + 0x54;
// If we have an unknown block 1 offset
if (unknownOffset1 > 0)
{
// Get the unknown block 1 offset
long offset = unknownOffset1 + adjust;
if (offset < 0 || offset >= data.Length)
return null;
// Seek to the unknown block 1
data.Seek(offset, SeekOrigin.Begin);
}
// Try to parse the unknown block 1
var unknownBlock1 = ParseUnknownBlock1(data);
if (unknownBlock1 == null)
return null;
// Set the unknown block 1
audioFile.UnknownBlock1 = unknownBlock1;
#endregion
#region V1 Only
// If we have a V1 file
if (audioHeader.Version == 0x00000000)
{
#region Unknown Value 2
// Get the V1 unknown offset 2
uint? unknownOffset2 = (audioHeader as AudioHeaderV1)?.UnknownOffset2;
// If we have an unknown value 2 offset
if (unknownOffset2 != null && unknownOffset2 > 0)
{
// Get the unknown value 2 offset
long offset = unknownOffset2.Value + adjust;
if (offset < 0 || offset >= data.Length)
return null;
// Seek to the unknown value 2
data.Seek(offset, SeekOrigin.Begin);
}
// Set the unknown value 2
audioFile.UnknownValue2 = data.ReadUInt32();
#endregion
#region Unknown Block 3
// Get the V1 unknown offset 3
uint? unknownOffset3 = (audioHeader as AudioHeaderV1)?.UnknownOffset3;
// If we have an unknown block 3 offset
if (unknownOffset3 != null && unknownOffset3 > 0)
{
// Get the unknown block 3 offset
long offset = unknownOffset3.Value + adjust;
if (offset < 0 || offset >= data.Length)
return null;
// Seek to the unknown block 3
data.Seek(offset, SeekOrigin.Begin);
}
// Try to parse the unknown block 3
var unknownBlock3 = ParseUnknownBlock3(data);
if (unknownBlock3 == null)
return null;
// Set the unknown block 3
audioFile.UnknownBlock3 = unknownBlock3;
#endregion
}
#endregion
#region V2 Only
// If we have a V2 file
if (audioHeader.Version == 0x0000000A)
{
#region Data Files Count
// Set the data files count
audioFile.DataFilesCount = data.ReadUInt32();
#endregion
#region Data Files
// Create the data files array
audioFile.DataFiles = new DataFile[audioFile.DataFilesCount];
// Try to parse the data files
for (int i = 0; i < audioFile.DataFiles.Length; i++)
{
var dataFile = ParseDataFile(data);
if (dataFile == null)
return null;
audioFile.DataFiles[i] = dataFile;
}
#endregion
}
#endregion
return audioFile;
}
/// <summary>
/// Parse a Stream into a playlist header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled playlist header on success, null on error</returns>
private static PlaylistHeader ParsePlaylistHeader(Stream data)
{
// TODO: Use marshalling here instead of building
PlaylistHeader playlistHeader = new PlaylistHeader();
playlistHeader.TrackCount = data.ReadUInt32();
playlistHeader.Data = data.ReadBytes(52);
return playlistHeader;
}
/// <summary>
/// Parse a Stream into an audio header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled audio header on success, null on error</returns>
private static AudioHeader ParseAudioHeader(Stream data)
{
// Cache the current offset
long initialOffset = data.Position;
// TODO: Use marshalling here instead of building
AudioHeader audioHeader;
// Get the common header pieces
uint signature = data.ReadUInt32();
if (signature != SignatureUInt32)
return null;
uint version = data.ReadUInt32();
// Build the header according to version
uint unknownOffset1;
switch (version)
{
// Version 1
case 0x00000000:
AudioHeaderV1 v1 = new AudioHeaderV1();
v1.Signature = signature;
v1.Version = version;
v1.TrackID = data.ReadUInt32();
v1.UnknownOffset1 = data.ReadUInt32();
v1.UnknownOffset2 = data.ReadUInt32();
v1.UnknownOffset3 = data.ReadUInt32();
v1.Unknown1 = data.ReadUInt32();
v1.Unknown2 = data.ReadUInt32();
v1.Year = data.ReadUInt32();
v1.TrackNumber = data.ReadByteValue();
v1.Subgenre = (Subgenre)data.ReadByteValue();
v1.Duration = data.ReadUInt32();
audioHeader = v1;
unknownOffset1 = v1.UnknownOffset1;
break;
// Version 2
case 0x0000000A:
AudioHeaderV2 v2 = new AudioHeaderV2();
v2.Signature = signature;
v2.Version = version;
v2.Unknown1 = data.ReadUInt32();
v2.Unknown2 = data.ReadUInt32();
v2.Unknown3 = data.ReadUInt32();
v2.Unknown4 = data.ReadUInt32();
v2.Unknown5 = data.ReadUInt32();
v2.Unknown6 = data.ReadUInt32();
v2.UnknownOffset1 = data.ReadUInt32();
v2.Unknown7 = data.ReadUInt32();
v2.Unknown8 = data.ReadUInt32();
v2.Unknown9 = data.ReadUInt32();
v2.UnknownOffset2 = data.ReadUInt32();
v2.Unknown10 = data.ReadUInt32();
v2.Unknown11 = data.ReadUInt32();
v2.Unknown12 = data.ReadUInt32();
v2.Unknown13 = data.ReadUInt32();
v2.Unknown14 = data.ReadUInt32();
v2.Unknown15 = data.ReadUInt32();
v2.Unknown16 = data.ReadUInt32();
v2.Unknown17 = data.ReadUInt32();
v2.TrackID = data.ReadUInt32();
v2.Year = data.ReadUInt32();
v2.TrackNumber = data.ReadUInt32();
v2.Unknown18 = data.ReadUInt32();
audioHeader = v2;
unknownOffset1 = v2.UnknownOffset1 + 0x54;
break;
// No other versions are recognized
default:
return null;
}
audioHeader.TrackLength = data.ReadUInt16();
byte[] track = data.ReadBytes(audioHeader.TrackLength);
if (track != null)
audioHeader.Track = Encoding.ASCII.GetString(track);
audioHeader.ArtistLength = data.ReadUInt16();
byte[] artist = data.ReadBytes(audioHeader.ArtistLength);
if (artist != null)
audioHeader.Artist = Encoding.ASCII.GetString(artist);
audioHeader.AlbumLength = data.ReadUInt16();
byte[] album = data.ReadBytes(audioHeader.AlbumLength);
if (album != null)
audioHeader.Album = Encoding.ASCII.GetString(album);
audioHeader.WriterLength = data.ReadUInt16();
byte[] writer = data.ReadBytes(audioHeader.WriterLength);
if (writer != null)
audioHeader.Writer = Encoding.ASCII.GetString(writer);
audioHeader.PublisherLength = data.ReadUInt16();
byte[] publisher = data.ReadBytes(audioHeader.PublisherLength);
if (publisher != null)
audioHeader.Publisher = Encoding.ASCII.GetString(publisher);
audioHeader.LabelLength = data.ReadUInt16();
byte[] label = data.ReadBytes(audioHeader.LabelLength);
if (label != null)
audioHeader.Label = Encoding.ASCII.GetString(label);
if (data.Position - initialOffset < unknownOffset1)
{
audioHeader.CommentsLength = data.ReadUInt16();
byte[] comments = data.ReadBytes(audioHeader.CommentsLength);
if (comments != null)
audioHeader.Comments = Encoding.ASCII.GetString(comments);
}
return audioHeader;
}
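
The six string reads above share one shape: a ushort length followed by that many ASCII bytes. A hypothetical helper that factors the pattern out (not part of the original source; it relies on the same ReadUInt16/ReadBytes extensions this file already uses):

private static string ReadLengthPrefixedString(Stream data, out ushort length)
{
length = data.ReadUInt16();
byte[] bytes = data.ReadBytes(length);
return bytes != null ? Encoding.ASCII.GetString(bytes) : null;
}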
/// <summary>
/// Parse a Stream into an unknown block 1
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled unknown block 1 on success, null on error</returns>
private static UnknownBlock1 ParseUnknownBlock1(Stream data)
{
// TODO: Use marshalling here instead of building
UnknownBlock1 unknownBlock1 = new UnknownBlock1();
unknownBlock1.Length = data.ReadUInt32();
unknownBlock1.Data = data.ReadBytes((int)unknownBlock1.Length);
return unknownBlock1;
}
/// <summary>
/// Parse a Stream into an unknown block 3
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled unknown block 3 on success, null on error</returns>
private static UnknownBlock3 ParseUnknownBlock3(Stream data)
{
// TODO: Use marshalling here instead of building
UnknownBlock3 unknownBlock3 = new UnknownBlock3();
// No-op because we don't even know the length
return unknownBlock3;
}
/// <summary>
/// Parse a Stream into a data file
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled data file on success, null on error</returns>
private static DataFile ParseDataFile(Stream data)
{
// TODO: Use marshalling here instead of building
DataFile dataFile = new DataFile();
dataFile.FileNameLength = data.ReadUInt16();
byte[] fileName = data.ReadBytes(dataFile.FileNameLength);
if (fileName != null)
dataFile.FileName = Encoding.ASCII.GetString(fileName);
dataFile.DataLength = data.ReadUInt32();
dataFile.Data = data.ReadBytes((int)dataFile.DataLength);
return dataFile;
}
#endregion
}
}

File diff suppressed because it is too large


@@ -1,184 +0,0 @@
using System.IO;
using System.Text;
using BinaryObjectScanner.Models.Quantum;
using BinaryObjectScanner.Utilities;
using static BinaryObjectScanner.Models.Quantum.Constants;
namespace BinaryObjectScanner.Builders
{
public class Quantum
{
#region Byte Data
/// <summary>
/// Parse a byte array into a Quantum archive
/// </summary>
/// <param name="data">Byte array to parse</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>Filled archive on success, null on error</returns>
public static Archive ParseArchive(byte[] data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return ParseArchive(dataStream);
}
#endregion
#region Stream Data
/// <summary>
/// Parse a Stream into a Quantum archive
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled archive on success, null on error</returns>
public static Archive ParseArchive(Stream data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
return null;
// If the offset is out of bounds
if (data.Position < 0 || data.Position >= data.Length)
return null;
// Cache the current offset
int initialOffset = (int)data.Position;
// Create a new archive to fill
var archive = new Archive();
#region Header
// Try to parse the header
var header = ParseHeader(data);
if (header == null)
return null;
// Set the archive header
archive.Header = header;
#endregion
#region File List
// If we have any files
if (header.FileCount > 0)
{
var fileDescriptors = new FileDescriptor[header.FileCount];
// Read all entries in turn
for (int i = 0; i < header.FileCount; i++)
{
var file = ParseFileDescriptor(data, header.MinorVersion);
if (file == null)
return null;
fileDescriptors[i] = file;
}
// Set the file list
archive.FileList = fileDescriptors;
}
#endregion
// Cache the compressed data offset
archive.CompressedDataOffset = data.Position;
return archive;
}
/// <summary>
/// Parse a Stream into a header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled header on success, null on error</returns>
private static Header ParseHeader(Stream data)
{
// TODO: Use marshalling here instead of building
Header header = new Header();
byte[] signature = data.ReadBytes(2);
header.Signature = Encoding.ASCII.GetString(signature);
if (header.Signature != SignatureString)
return null;
header.MajorVersion = data.ReadByteValue();
header.MinorVersion = data.ReadByteValue();
header.FileCount = data.ReadUInt16();
header.TableSize = data.ReadByteValue();
header.CompressionFlags = data.ReadByteValue();
return header;
}
/// <summary>
/// Parse a Stream into a file descriptor
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="minorVersion">Minor version of the archive</param>
/// <returns>Filled file descriptor on success, null on error</returns>
private static FileDescriptor ParseFileDescriptor(Stream data, byte minorVersion)
{
// TODO: Use marshalling here instead of building
FileDescriptor fileDescriptor = new FileDescriptor();
fileDescriptor.FileNameSize = ReadVariableLength(data);
if (fileDescriptor.FileNameSize > 0)
{
byte[] fileName = data.ReadBytes(fileDescriptor.FileNameSize);
fileDescriptor.FileName = Encoding.ASCII.GetString(fileName);
}
fileDescriptor.CommentFieldSize = ReadVariableLength(data);
if (fileDescriptor.CommentFieldSize > 0)
{
byte[] commentField = data.ReadBytes(fileDescriptor.CommentFieldSize);
fileDescriptor.CommentField = Encoding.ASCII.GetString(commentField);
}
fileDescriptor.ExpandedFileSize = data.ReadUInt32();
fileDescriptor.FileTime = data.ReadUInt16();
fileDescriptor.FileDate = data.ReadUInt16();
// Hack for unknown format data
if (minorVersion == 22)
fileDescriptor.Unknown = data.ReadUInt16();
return fileDescriptor;
}
/// <summary>
/// Parse a Stream into a variable-length size prefix
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Variable-length size prefix</returns>
/// <remarks>
/// Strings are prefixed with their length. If the length is less than 128
/// then it is stored directly in one byte. If it is greater than 127 then
/// the high bit of the first byte is set to 1 and the remaining fifteen bits
/// contain the actual length in big-endian format.
/// </remarks>
private static int ReadVariableLength(Stream data)
{
byte b0 = data.ReadByteValue();
if (b0 < 0x80) // high bit clear: length stored directly (see remarks)
return b0;
b0 &= 0x7F;
byte b1 = data.ReadByteValue();
return (b0 << 8) | b1;
}
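
Two worked decodes under this scheme:

// 0x05 -> 5 (high bit clear: length stored directly in one byte)
// 0x81 0x00 -> ((0x81 & 0x7F) << 8) | 0x00 = 0x0100 = 256 (two bytes, big-endian)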
#endregion
}
}


@@ -1,732 +0,0 @@
using System.Collections.Generic;
using System.IO;
using System.Text;
using BinaryObjectScanner.Models.SGA;
using BinaryObjectScanner.Utilities;
using static BinaryObjectScanner.Models.SGA.Constants;
namespace BinaryObjectScanner.Builders
{
public static class SGA
{
#region Byte Data
/// <summary>
/// Parse a byte array into an SGA
/// </summary>
/// <param name="data">Byte array to parse</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>Filled SGA on success, null on error</returns>
public static Models.SGA.File ParseFile(byte[] data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return ParseFile(dataStream);
}
#endregion
#region Stream Data
/// <summary>
/// Parse a Stream into an SGA
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled SGA on success, null on error</returns>
public static Models.SGA.File ParseFile(Stream data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
return null;
// If the offset is out of bounds
if (data.Position < 0 || data.Position >= data.Length)
return null;
// Cache the current offset
long initialOffset = data.Position;
// Create a new SGA to fill
var file = new Models.SGA.File();
#region Header
// Try to parse the header
var header = ParseHeader(data);
if (header == null)
return null;
// Set the SGA header
file.Header = header;
#endregion
#region Directory
// Try to parse the directory
var directory = ParseDirectory(data, header.MajorVersion);
if (directory == null)
return null;
// Set the SGA directory
file.Directory = directory;
#endregion
return file;
}
/// <summary>
/// Parse a Stream into an SGA header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled SGA header on success, null on error</returns>
private static Header ParseHeader(Stream data)
{
// TODO: Use marshalling here instead of building
byte[] signatureBytes = data.ReadBytes(8);
string signature = Encoding.ASCII.GetString(signatureBytes);
if (signature != SignatureString)
return null;
ushort majorVersion = data.ReadUInt16();
ushort minorVersion = data.ReadUInt16();
if (minorVersion != 0)
return null;
switch (majorVersion)
{
// Versions 4 and 5 share the same header
case 4:
case 5:
Header4 header4 = new Header4();
header4.Signature = signature;
header4.MajorVersion = majorVersion;
header4.MinorVersion = minorVersion;
header4.FileMD5 = data.ReadBytes(0x10);
byte[] header4Name = data.ReadBytes(count: 128);
header4.Name = Encoding.Unicode.GetString(header4Name).TrimEnd('\0');
header4.HeaderMD5 = data.ReadBytes(0x10);
header4.HeaderLength = data.ReadUInt32();
header4.FileDataOffset = data.ReadUInt32();
header4.Dummy0 = data.ReadUInt32();
return header4;
// Versions 6 and 7 share the same header
case 6:
case 7:
Header6 header6 = new Header6();
header6.Signature = signature;
header6.MajorVersion = majorVersion;
header6.MinorVersion = minorVersion;
byte[] header6Name = data.ReadBytes(count: 128);
header6.Name = Encoding.Unicode.GetString(header6Name).TrimEnd('\0');
header6.HeaderLength = data.ReadUInt32();
header6.FileDataOffset = data.ReadUInt32();
header6.Dummy0 = data.ReadUInt32();
return header6;
// No other major versions are recognized
default:
return null;
}
}
/// <summary>
/// Parse a Stream into an SGA directory
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="majorVersion">SGA major version</param>
/// <returns>Filled SGA directory on success, null on error</returns>
private static Models.SGA.Directory ParseDirectory(Stream data, ushort majorVersion)
{
#region Directory
// Create the appropriate type of directory
Models.SGA.Directory directory;
switch (majorVersion)
{
case 4: directory = new Directory4(); break;
case 5: directory = new Directory5(); break;
case 6: directory = new Directory6(); break;
case 7: directory = new Directory7(); break;
default: return null;
}
#endregion
// Cache the current offset
long currentOffset = data.Position;
#region Directory Header
// Try to parse the directory header
var directoryHeader = ParseDirectoryHeader(data, majorVersion);
if (directoryHeader == null)
return null;
// Set the directory header
switch (majorVersion)
{
case 4: (directory as Directory4).DirectoryHeader = directoryHeader as DirectoryHeader4; break;
case 5: (directory as Directory5).DirectoryHeader = directoryHeader as DirectoryHeader5; break;
case 6: (directory as Directory6).DirectoryHeader = directoryHeader as DirectoryHeader5; break;
case 7: (directory as Directory7).DirectoryHeader = directoryHeader as DirectoryHeader7; break;
default: return null;
}
#endregion
#region Sections
// Get the sections offset
long sectionOffset;
switch (majorVersion)
{
case 4: sectionOffset = (directoryHeader as DirectoryHeader4).SectionOffset; break;
case 5:
case 6: sectionOffset = (directoryHeader as DirectoryHeader5).SectionOffset; break;
case 7: sectionOffset = (directoryHeader as DirectoryHeader7).SectionOffset; break;
default: return null;
}
// Adjust the sections offset based on the directory
sectionOffset += currentOffset;
// Validate the offset
if (sectionOffset < 0 || sectionOffset >= data.Length)
return null;
// Seek to the sections
data.Seek(sectionOffset, SeekOrigin.Begin);
// Get the section count
uint sectionCount;
switch (majorVersion)
{
case 4: sectionCount = (directoryHeader as DirectoryHeader4).SectionCount; break;
case 5:
case 6: sectionCount = (directoryHeader as DirectoryHeader5).SectionCount; break;
case 7: sectionCount = (directoryHeader as DirectoryHeader7).SectionCount; break;
default: return null;
}
// Create the sections array
object[] sections;
switch (majorVersion)
{
case 4: sections = new Section4[sectionCount]; break;
case 5:
case 6:
case 7: sections = new Section5[sectionCount]; break;
default: return null;
}
// Try to parse the sections
for (int i = 0; i < sections.Length; i++)
{
switch (majorVersion)
{
case 4: sections[i] = ParseSection4(data); break;
case 5:
case 6:
case 7: sections[i] = ParseSection5(data); break;
default: return null;
}
}
// Assign the sections
switch (majorVersion)
{
case 4: (directory as Directory4).Sections = sections as Section4[]; break;
case 5: (directory as Directory5).Sections = sections as Section5[]; break;
case 6: (directory as Directory6).Sections = sections as Section5[]; break;
case 7: (directory as Directory7).Sections = sections as Section5[]; break;
default: return null;
}
#endregion
#region Folders
// Get the folders offset
long folderOffset;
switch (majorVersion)
{
case 4: folderOffset = (directoryHeader as DirectoryHeader4).FolderOffset; break;
case 5: folderOffset = (directoryHeader as DirectoryHeader5).FolderOffset; break;
case 6: folderOffset = (directoryHeader as DirectoryHeader5).FolderOffset; break;
case 7: folderOffset = (directoryHeader as DirectoryHeader7).FolderOffset; break;
default: return null;
}
// Adjust the folders offset based on the directory
folderOffset += currentOffset;
// Validate the offset
if (folderOffset < 0 || folderOffset >= data.Length)
return null;
// Seek to the folders
data.Seek(folderOffset, SeekOrigin.Begin);
// Get the folder count
uint folderCount;
switch (majorVersion)
{
case 4: folderCount = (directoryHeader as DirectoryHeader4).FolderCount; break;
case 5: folderCount = (directoryHeader as DirectoryHeader5).FolderCount; break;
case 6: folderCount = (directoryHeader as DirectoryHeader5).FolderCount; break;
case 7: folderCount = (directoryHeader as DirectoryHeader7).FolderCount; break;
default: return null;
}
// Create the folders array
object[] folders;
switch (majorVersion)
{
case 4: folders = new Folder4[folderCount]; break;
case 5: folders = new Folder5[folderCount]; break;
case 6: folders = new Folder5[folderCount]; break;
case 7: folders = new Folder5[folderCount]; break;
default: return null;
}
// Try to parse the folders
for (int i = 0; i < folders.Length; i++)
{
switch (majorVersion)
{
case 4: folders[i] = ParseFolder4(data); break;
case 5: folders[i] = ParseFolder5(data); break;
case 6: folders[i] = ParseFolder5(data); break;
case 7: folders[i] = ParseFolder5(data); break;
default: return null;
}
}
// Assign the folders
switch (majorVersion)
{
case 4: (directory as Directory4).Folders = folders as Folder4[]; break;
case 5: (directory as Directory5).Folders = folders as Folder5[]; break;
case 6: (directory as Directory6).Folders = folders as Folder5[]; break;
case 7: (directory as Directory7).Folders = folders as Folder5[]; break;
default: return null;
}
#endregion
#region Files
// Get the files offset
long fileOffset;
switch (majorVersion)
{
case 4: fileOffset = (directoryHeader as DirectoryHeader4).FileOffset; break;
case 5: fileOffset = (directoryHeader as DirectoryHeader5).FileOffset; break;
case 6: fileOffset = (directoryHeader as DirectoryHeader5).FileOffset; break;
case 7: fileOffset = (directoryHeader as DirectoryHeader7).FileOffset; break;
default: return null;
}
// Adjust the files offset based on the directory
fileOffset += currentOffset;
// Validate the offset
if (fileOffset < 0 || fileOffset >= data.Length)
return null;
// Seek to the files
data.Seek(fileOffset, SeekOrigin.Begin);
// Get the file count
uint fileCount;
switch (majorVersion)
{
case 4: fileCount = (directoryHeader as DirectoryHeader4).FileCount; break;
case 5: fileCount = (directoryHeader as DirectoryHeader5).FileCount; break;
case 6: fileCount = (directoryHeader as DirectoryHeader5).FileCount; break;
case 7: fileCount = (directoryHeader as DirectoryHeader7).FileCount; break;
default: return null;
}
// Create the files array
object[] files;
switch (majorVersion)
{
case 4: files = new File4[fileCount]; break;
case 5: files = new File4[fileCount]; break;
case 6: files = new File6[fileCount]; break;
case 7: files = new File7[fileCount]; break;
default: return null;
}
// Try to parse the files
for (int i = 0; i < files.Length; i++)
{
switch (majorVersion)
{
case 4: files[i] = ParseFile4(data); break;
case 5: files[i] = ParseFile4(data); break;
case 6: files[i] = ParseFile6(data); break;
case 7: files[i] = ParseFile7(data); break;
default: return null;
}
}
// Assign the files
switch (majorVersion)
{
case 4: (directory as Directory4).Files = files as File4[]; break;
case 5: (directory as Directory5).Files = files as File4[]; break;
case 6: (directory as Directory6).Files = files as File6[]; break;
case 7: (directory as Directory7).Files = files as File7[]; break;
default: return null;
}
#endregion
#region String Table
// Get the string table offset
long stringTableOffset;
switch (majorVersion)
{
case 4: stringTableOffset = (directoryHeader as DirectoryHeader4).StringTableOffset; break;
case 5: stringTableOffset = (directoryHeader as DirectoryHeader5).StringTableOffset; break;
case 6: stringTableOffset = (directoryHeader as DirectoryHeader5).StringTableOffset; break;
case 7: stringTableOffset = (directoryHeader as DirectoryHeader7).StringTableOffset; break;
default: return null;
}
// Adjust the string table offset based on the directory
stringTableOffset += currentOffset;
// Validate the offset
if (stringTableOffset < 0 || stringTableOffset >= data.Length)
return null;
// Seek to the string table
data.Seek(stringTableOffset, SeekOrigin.Begin);
// Get the string table count
uint stringCount;
switch (majorVersion)
{
case 4: stringCount = (directoryHeader as DirectoryHeader4).StringTableCount; break;
case 5: stringCount = (directoryHeader as DirectoryHeader5).StringTableCount; break;
case 6: stringCount = (directoryHeader as DirectoryHeader5).StringTableCount; break;
case 7: stringCount = (directoryHeader as DirectoryHeader7).StringTableCount; break;
default: return null;
}
// TODO: Are these strings actually indexed by number and not position?
// TODO: If indexed by position, I think it needs to be adjusted by start of table
// Create the strings dictionary
Dictionary<long, string> strings = new Dictionary<long, string>((int)stringCount);
// Get the current position to adjust the offsets
long stringTableStart = data.Position;
// Try to parse the strings
for (int i = 0; i < stringCount; i++)
{
long currentPosition = data.Position - stringTableStart;
strings[currentPosition] = data.ReadString(Encoding.ASCII);
}
// Assign the files
switch (majorVersion)
{
case 4: (directory as Directory4).StringTable = strings; break;
case 5: (directory as Directory5).StringTable = strings; break;
case 6: (directory as Directory6).StringTable = strings; break;
case 7: (directory as Directory7).StringTable = strings; break;
default: return null;
}
// Loop through all folders to assign names
for (int i = 0; i < folderCount; i++)
{
switch (majorVersion)
{
case 4: (directory as Directory4).Folders[i].Name = strings[(directory as Directory4).Folders[i].NameOffset]; break;
case 5: (directory as Directory5).Folders[i].Name = strings[(directory as Directory5).Folders[i].NameOffset]; break;
case 6: (directory as Directory6).Folders[i].Name = strings[(directory as Directory6).Folders[i].NameOffset]; break;
case 7: (directory as Directory7).Folders[i].Name = strings[(directory as Directory7).Folders[i].NameOffset]; break;
default: return null;
}
}
// Loop through all files to assign names
for (int i = 0; i < fileCount; i++)
{
switch (majorVersion)
{
case 4: (directory as Directory4).Files[i].Name = strings[(directory as Directory4).Files[i].NameOffset]; break;
case 5: (directory as Directory5).Files[i].Name = strings[(directory as Directory5).Files[i].NameOffset]; break;
case 6: (directory as Directory6).Files[i].Name = strings[(directory as Directory6).Files[i].NameOffset]; break;
case 7: (directory as Directory7).Files[i].Name = strings[(directory as Directory7).Files[i].NameOffset]; break;
default: return null;
}
}
#endregion
return directory;
}
/// <summary>
/// Parse a Stream into an SGA directory header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="majorVersion">SGA major version</param>
/// <returns>Filled SGA directory header on success, null on error</returns>
private static object ParseDirectoryHeader(Stream data, ushort majorVersion)
{
switch (majorVersion)
{
case 4: return ParseDirectory4Header(data);
case 5: return ParseDirectory5Header(data);
case 6: return ParseDirectory5Header(data);
case 7: return ParseDirectory7Header(data);
default: return null;
}
}
/// <summary>
/// Parse a Stream into an SGA directory header version 4
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled SGA directory header version 4 on success, null on error</returns>
private static DirectoryHeader4 ParseDirectory4Header(Stream data)
{
DirectoryHeader4 directoryHeader4 = new DirectoryHeader4();
directoryHeader4.SectionOffset = data.ReadUInt32();
directoryHeader4.SectionCount = data.ReadUInt16();
directoryHeader4.FolderOffset = data.ReadUInt32();
directoryHeader4.FolderCount = data.ReadUInt16();
directoryHeader4.FileOffset = data.ReadUInt32();
directoryHeader4.FileCount = data.ReadUInt16();
directoryHeader4.StringTableOffset = data.ReadUInt32();
directoryHeader4.StringTableCount = data.ReadUInt16();
return directoryHeader4;
}
/// <summary>
/// Parse a Stream into an SGA directory header version 5
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled SGA directory header version 5 on success, null on error</returns>
private static DirectoryHeader5 ParseDirectory5Header(Stream data)
{
DirectoryHeader5 directoryHeader5 = new DirectoryHeader5();
directoryHeader5.SectionOffset = data.ReadUInt32();
directoryHeader5.SectionCount = data.ReadUInt32();
directoryHeader5.FolderOffset = data.ReadUInt32();
directoryHeader5.FolderCount = data.ReadUInt32();
directoryHeader5.FileOffset = data.ReadUInt32();
directoryHeader5.FileCount = data.ReadUInt32();
directoryHeader5.StringTableOffset = data.ReadUInt32();
directoryHeader5.StringTableCount = data.ReadUInt32();
return directoryHeader5;
}
/// <summary>
/// Parse a Stream into an SGA directory header version 7
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled SGA directory header version 7 on success, null on error</returns>
private static DirectoryHeader7 ParseDirectory7Header(Stream data)
{
DirectoryHeader7 directoryHeader7 = new DirectoryHeader7();
directoryHeader7.SectionOffset = data.ReadUInt32();
directoryHeader7.SectionCount = data.ReadUInt32();
directoryHeader7.FolderOffset = data.ReadUInt32();
directoryHeader7.FolderCount = data.ReadUInt32();
directoryHeader7.FileOffset = data.ReadUInt32();
directoryHeader7.FileCount = data.ReadUInt32();
directoryHeader7.StringTableOffset = data.ReadUInt32();
directoryHeader7.StringTableCount = data.ReadUInt32();
directoryHeader7.HashTableOffset = data.ReadUInt32();
directoryHeader7.BlockSize = data.ReadUInt32();
return directoryHeader7;
}
/// <summary>
/// Parse a Stream into an SGA section version 4
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="majorVersion">SGA major version</param>
/// <returns>Filled SGA section version 4 on success, null on error</returns>
private static Section4 ParseSection4(Stream data)
{
Section4 section4 = new Section4();
byte[] section4Alias = data.ReadBytes(count: 64);
section4.Alias = Encoding.ASCII.GetString(section4Alias).TrimEnd('\0');
byte[] section4Name = data.ReadBytes(64);
section4.Name = Encoding.ASCII.GetString(section4Name).TrimEnd('\0');
section4.FolderStartIndex = data.ReadUInt16();
section4.FolderEndIndex = data.ReadUInt16();
section4.FileStartIndex = data.ReadUInt16();
section4.FileEndIndex = data.ReadUInt16();
section4.FolderRootIndex = data.ReadUInt16();
return section4;
}
/// <summary>
/// Parse a Stream into an SGA section version 5
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="majorVersion">SGA major version</param>
/// <returns>Filled SGA section version 5 on success, null on error</returns>
private static Section5 ParseSection5(Stream data)
{
Section5 section5 = new Section5();
byte[] section5Alias = data.ReadBytes(count: 64);
section5.Alias = Encoding.ASCII.GetString(section5Alias).TrimEnd('\0');
byte[] section5Name = data.ReadBytes(64);
section5.Name = Encoding.ASCII.GetString(section5Name).TrimEnd('\0');
section5.FolderStartIndex = data.ReadUInt32();
section5.FolderEndIndex = data.ReadUInt32();
section5.FileStartIndex = data.ReadUInt32();
section5.FileEndIndex = data.ReadUInt32();
section5.FolderRootIndex = data.ReadUInt32();
return section5;
}
/// <summary>
/// Parse a Stream into an SGA folder version 4
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="majorVersion">SGA major version</param>
/// <returns>Filled SGA folder version 4 on success, null on error</returns>
private static Folder4 ParseFolder4(Stream data)
{
Folder4 folder4 = new Folder4();
folder4.NameOffset = data.ReadUInt32();
folder4.Name = null; // Read from string table
folder4.FolderStartIndex = data.ReadUInt16();
folder4.FolderEndIndex = data.ReadUInt16();
folder4.FileStartIndex = data.ReadUInt16();
folder4.FileEndIndex = data.ReadUInt16();
return folder4;
}
/// <summary>
/// Parse a Stream into an SGA folder version 5
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="majorVersion">SGA major version</param>
/// <returns>Filled SGA folder version 5 on success, null on error</returns>
private static Folder5 ParseFolder5(Stream data)
{
Folder5 folder5 = new Folder5();
folder5.NameOffset = data.ReadUInt32();
folder5.Name = null; // Read from string table
folder5.FolderStartIndex = data.ReadUInt32();
folder5.FolderEndIndex = data.ReadUInt32();
folder5.FileStartIndex = data.ReadUInt32();
folder5.FileEndIndex = data.ReadUInt32();
return folder5;
}
/// <summary>
/// Parse a Stream into an SGA file version 4
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="majorVersion">SGA major version</param>
/// <returns>Filled SGA file version 4 on success, null on error</returns>
private static File4 ParseFile4(Stream data)
{
File4 file4 = new File4();
file4.NameOffset = data.ReadUInt32();
file4.Name = null; // Read from string table
file4.Offset = data.ReadUInt32();
file4.SizeOnDisk = data.ReadUInt32();
file4.Size = data.ReadUInt32();
file4.TimeModified = data.ReadUInt32();
file4.Dummy0 = data.ReadByteValue();
file4.Type = data.ReadByteValue();
return file4;
}
/// <summary>
/// Parse a Stream into an SGA file version 6
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="majorVersion">SGA major version</param>
/// <returns>Filled SGA file version 6 on success, null on error</returns>
private static File6 ParseFile6(Stream data)
{
File6 file6 = new File6();
file6.NameOffset = data.ReadUInt32();
file6.Name = null; // Read from string table
file6.Offset = data.ReadUInt32();
file6.SizeOnDisk = data.ReadUInt32();
file6.Size = data.ReadUInt32();
file6.TimeModified = data.ReadUInt32();
file6.Dummy0 = data.ReadByteValue();
file6.Type = data.ReadByteValue();
file6.CRC32 = data.ReadUInt32();
return file6;
}
/// <summary>
/// Parse a Stream into an SGA file version 7
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="majorVersion">SGA major version</param>
/// <returns>Filled SGA file version 7 on success, null on error</returns>
private static File7 ParseFile7(Stream data)
{
File7 file7 = new File7();
file7.NameOffset = data.ReadUInt32();
file7.Name = null; // Read from string table
file7.Offset = data.ReadUInt32();
file7.SizeOnDisk = data.ReadUInt32();
file7.Size = data.ReadUInt32();
file7.TimeModified = data.ReadUInt32();
file7.Dummy0 = data.ReadByteValue();
file7.Type = data.ReadByteValue();
file7.CRC32 = data.ReadUInt32();
file7.HashOffset = data.ReadUInt32();
return file7;
}
#endregion
}
}

View File

@@ -1,141 +0,0 @@
using System.IO;
using System.Text;
using BinaryObjectScanner.Models.VBSP;
using BinaryObjectScanner.Utilities;
using static BinaryObjectScanner.Models.VBSP.Constants;
namespace BinaryObjectScanner.Builders
{
public static class VBSP
{
#region Byte Data
/// <summary>
/// Parse a byte array into a Half-Life 2 Level
/// </summary>
/// <param name="data">Byte array to parse</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>Filled Half-Life 2 Level on success, null on error</returns>
public static Models.VBSP.File ParseFile(byte[] data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return ParseFile(dataStream);
}
#endregion
#region Stream Data
/// <summary>
/// Parse a Stream into a Half-Life 2 Level
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life 2 Level on success, null on error</returns>
public static Models.VBSP.File ParseFile(Stream data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
return null;
// If the offset is out of bounds
if (data.Position < 0 || data.Position >= data.Length)
return null;
// Cache the current offset
long initialOffset = data.Position;
// Create a new Half-Life 2 Level to fill
var file = new Models.VBSP.File();
#region Header
// Try to parse the header
var header = ParseHeader(data);
if (header == null)
return null;
// Set the package header
file.Header = header;
#endregion
return file;
}
/// <summary>
/// Parse a Stream into a Half-Life 2 Level header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life 2 Level header on success, null on error</returns>
private static Header ParseHeader(Stream data)
{
// TODO: Use marshalling here instead of building
Header header = new Header();
byte[] signature = data.ReadBytes(4);
header.Signature = Encoding.ASCII.GetString(signature);
if (header.Signature != SignatureString)
return null;
header.Version = data.ReadInt32();
if ((header.Version < 19 || header.Version > 22) && header.Version != 0x00040014)
return null;
header.Lumps = new Lump[HL_VBSP_LUMP_COUNT];
for (int i = 0; i < HL_VBSP_LUMP_COUNT; i++)
{
header.Lumps[i] = ParseLump(data, header.Version);
}
header.MapRevision = data.ReadInt32();
return header;
}
/// <summary>
/// Parse a Stream into a Half-Life 2 Level lump
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="version">VBSP version</param>
/// <returns>Filled Half-Life 2 Level lump on success, null on error</returns>
private static Lump ParseLump(Stream data, int version)
{
// TODO: Use marshalling here instead of building
Lump lump = new Lump();
lump.Offset = data.ReadUInt32();
lump.Length = data.ReadUInt32();
lump.Version = data.ReadUInt32();
lump.FourCC = new char[4];
for (int i = 0; i < 4; i++)
{
lump.FourCC[i] = (char)data.ReadByte();
}
// This block is commented out because test VBSPs with header
// version 21 already had the values in the expected order, and
// applying the swap caused decompression issues
//if (version >= 21 && version != 0x00040014)
//{
// uint temp = lump.Version;
// lump.Version = lump.Offset;
// lump.Offset = lump.Length;
// lump.Length = temp;
//}
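// Note (assumption based on community documentation): some version 21 VBSPs,
// reportedly console builds of Left 4 Dead 2, store lump fields as
// (Version, Offset, Length) instead, which is what the swap above compensated
// for; the PC samples tested here already used (Offset, Length, Version)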
return lump;
}
#endregion
}
}

View File

@@ -1,318 +0,0 @@
using System.Collections.Generic;
using System.IO;
using System.Text;
using BinaryObjectScanner.Models.VPK;
using BinaryObjectScanner.Utilities;
using static BinaryObjectScanner.Models.VPK.Constants;
namespace BinaryObjectScanner.Builders
{
public static class VPK
{
#region Byte Data
/// <summary>
/// Parse a byte array into a Valve Package
/// </summary>
/// <param name="data">Byte array to parse</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>Filled Valve Package on success, null on error</returns>
public static Models.VPK.File ParseFile(byte[] data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return ParseFile(dataStream);
}
#endregion
#region Stream Data
/// <summary>
/// Parse a Stream into a Valve Package
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Valve Package on success, null on error</returns>
public static Models.VPK.File ParseFile(Stream data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
return null;
// If the offset is out of bounds
if (data.Position < 0 || data.Position >= data.Length)
return null;
// Cache the current offset
long initialOffset = data.Position;
// Create a new Valve Package to fill
var file = new Models.VPK.File();
#region Header
// Try to parse the header
// The original VPK format had no signature, so a missing header is allowed here
var header = ParseHeader(data);
// Set the package header
file.Header = header;
#endregion
#region Extended Header
if (header?.Version == 2)
{
// Try to parse the extended header
var extendedHeader = ParseExtendedHeader(data);
if (extendedHeader == null)
return null;
// Set the package extended header
file.ExtendedHeader = extendedHeader;
}
#endregion
#region Directory Items
// Create the directory items tree
var directoryItems = ParseDirectoryItemTree(data);
// Set the directory items
file.DirectoryItems = directoryItems;
#endregion
#region Archive Hashes
if (header?.Version == 2 && file.ExtendedHeader != null && file.ExtendedHeader.ArchiveHashLength > 0)
{
// Create the archive hashes list
var archiveHashes = new List<ArchiveHash>();
// Cache the current offset
initialOffset = data.Position;
// Try to parse the directory items
while (data.Position < initialOffset + file.ExtendedHeader.ArchiveHashLength)
{
var archiveHash = ParseArchiveHash(data);
archiveHashes.Add(archiveHash);
}
file.ArchiveHashes = archiveHashes.ToArray();
}
#endregion
return file;
}
/// <summary>
/// Parse a Stream into a Valve Package header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Valve Package header on success, null on error</returns>
private static Header ParseHeader(Stream data)
{
// TODO: Use marshalling here instead of building
Header header = new Header();
header.Signature = data.ReadUInt32();
if (header.Signature != SignatureUInt32)
return null;
header.Version = data.ReadUInt32();
if (header.Version > 2)
return null;
header.DirectoryLength = data.ReadUInt32();
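// A version 1 header ends here (12 bytes total); version 2 packages follow
// it with the 16-byte extended header parsed in ParseExtendedHeader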
return header;
}
/// <summary>
/// Parse a Stream into a Valve Package extended header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Valve Package extended header on success, null on error</returns>
private static ExtendedHeader ParseExtendedHeader(Stream data)
{
// TODO: Use marshalling here instead of building
ExtendedHeader extendedHeader = new ExtendedHeader();
extendedHeader.Dummy0 = data.ReadUInt32();
extendedHeader.ArchiveHashLength = data.ReadUInt32();
extendedHeader.ExtraLength = data.ReadUInt32();
extendedHeader.Dummy1 = data.ReadUInt32();
return extendedHeader;
}
/// <summary>
/// Parse a Stream into a Valve Package archive hash
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Valve Package archive hash on success, null on error</returns>
private static ArchiveHash ParseArchiveHash(Stream data)
{
// TODO: Use marshalling here instead of building
ArchiveHash archiveHash = new ArchiveHash();
archiveHash.ArchiveIndex = data.ReadUInt32();
archiveHash.ArchiveOffset = data.ReadUInt32();
archiveHash.Length = data.ReadUInt32();
archiveHash.Hash = data.ReadBytes(0x10);
return archiveHash;
}
/// <summary>
/// Parse a Stream into a Valve Package directory item tree
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Valve Package directory item tree on success, null on error</returns>
private static DirectoryItem[] ParseDirectoryItemTree(Stream data)
{
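// On-disk layout, as consumed below: a run of null-terminated extension
// strings; under each extension, a run of null-terminated path strings;
// under each path, a run of null-terminated file names, each followed by
// its directory entry. An empty string terminates each level of the tree.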
// Create the directory items list
var directoryItems = new List<DirectoryItem>();
while (true)
{
// Get the extension
string extensionString = data.ReadString(Encoding.ASCII);
if (string.IsNullOrEmpty(extensionString))
break;
// Sanitize the extension
for (int i = 0; i < 0x20; i++)
{
extensionString = extensionString.Replace($"{(char)i}", string.Empty);
}
while (true)
{
// Get the path
string pathString = data.ReadString(Encoding.ASCII);
if (string.IsNullOrEmpty(pathString))
break;
// Sanitize the path
for (int i = 0; i < 0x20; i++)
{
pathString = pathString.Replace($"{(char)i}", string.Empty);
}
while (true)
{
// Get the name
string nameString = data.ReadString(Encoding.ASCII);
if (string.IsNullOrEmpty(nameString))
break;
// Sanitize the name
for (int i = 0; i < 0x20; i++)
{
nameString = nameString.Replace($"{(char)i}", string.Empty);
}
// Get the directory item
var directoryItem = ParseDirectoryItem(data, extensionString, pathString, nameString);
// Add the directory item
directoryItems.Add(directoryItem);
}
}
}
return directoryItems.ToArray();
}
/// <summary>
/// Parse a Stream into a Valve Package directory item
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Valve Package directory item on success, null on error</returns>
private static DirectoryItem ParseDirectoryItem(Stream data, string extension, string path, string name)
{
DirectoryItem directoryItem = new DirectoryItem();
directoryItem.Extension = extension;
directoryItem.Path = path;
directoryItem.Name = name;
// Get the directory entry
var directoryEntry = ParseDirectoryEntry(data);
// Set the directory entry
directoryItem.DirectoryEntry = directoryEntry;
// Get the preload data pointer
long preloadDataPointer = -1;
int preloadDataLength = -1;
if (directoryEntry.ArchiveIndex == HL_VPK_NO_ARCHIVE && directoryEntry.EntryLength > 0)
{
preloadDataPointer = directoryEntry.EntryOffset;
preloadDataLength = (int)directoryEntry.EntryLength;
}
else if (directoryEntry.PreloadBytes > 0)
{
preloadDataPointer = data.Position;
preloadDataLength = directoryEntry.PreloadBytes;
}
// If we had a valid preload data pointer
byte[] preloadData = null;
if (preloadDataPointer >= 0 && preloadDataLength > 0)
{
// Cache the current offset
long initialOffset = data.Position;
// Seek to the preload data offset
data.Seek(preloadDataPointer, SeekOrigin.Begin);
// Read the preload data
preloadData = data.ReadBytes(preloadDataLength);
// Seek back to the original offset
data.Seek(initialOffset, SeekOrigin.Begin);
}
// Set the preload data
directoryItem.PreloadData = preloadData;
return directoryItem;
}
/// <summary>
/// Parse a Stream into a Valve Package directory entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Valve Package directory entry on success, null on error</returns>
private static DirectoryEntry ParseDirectoryEntry(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryEntry directoryEntry = new DirectoryEntry();
directoryEntry.CRC = data.ReadUInt32();
directoryEntry.PreloadBytes = data.ReadUInt16();
directoryEntry.ArchiveIndex = data.ReadUInt16();
directoryEntry.EntryOffset = data.ReadUInt32();
directoryEntry.EntryLength = data.ReadUInt32();
directoryEntry.Dummy0 = data.ReadUInt16();
return directoryEntry;
}
#endregion
}
}

View File

@@ -1,266 +0,0 @@
using System.IO;
using System.Text;
using BinaryObjectScanner.Models.WAD;
using BinaryObjectScanner.Utilities;
using static BinaryObjectScanner.Models.WAD.Constants;
namespace BinaryObjectScanner.Builders
{
public static class WAD
{
#region Byte Data
/// <summary>
/// Parse a byte array into a Half-Life Texture Package
/// </summary>
/// <param name="data">Byte array to parse</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>Filled Half-Life Texture Package on success, null on error</returns>
public static Models.WAD.File ParseFile(byte[] data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return ParseFile(dataStream);
}
#endregion
#region Stream Data
/// <summary>
/// Parse a Stream into a Half-Life Texture Package
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Texture Package on success, null on error</returns>
public static Models.WAD.File ParseFile(Stream data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
return null;
// If the offset is out of bounds
if (data.Position < 0 || data.Position >= data.Length)
return null;
// Cache the current offset
long initialOffset = data.Position;
// Create a new Half-Life Texture Package to fill
var file = new Models.WAD.File();
#region Header
// Try to parse the header
var header = ParseHeader(data);
if (header == null)
return null;
// Set the package header
file.Header = header;
#endregion
#region Lumps
// Get the lump offset
uint lumpOffset = header.LumpOffset;
if (lumpOffset >= data.Length)
return null;
// Seek to the lump offset
data.Seek(lumpOffset, SeekOrigin.Begin);
// Create the lump array
file.Lumps = new Lump[header.LumpCount];
for (int i = 0; i < header.LumpCount; i++)
{
var lump = ParseLump(data);
file.Lumps[i] = lump;
}
#endregion
#region Lump Infos
// Create the lump info array
file.LumpInfos = new LumpInfo[header.LumpCount];
for (int i = 0; i < header.LumpCount; i++)
{
var lump = file.Lumps[i];
if (lump.Compression != 0)
{
file.LumpInfos[i] = null;
continue;
}
// Get the lump info offset
uint lumpInfoOffset = lump.Offset;
if (lumpInfoOffset >= data.Length)
{
file.LumpInfos[i] = null;
continue;
}
// Seek to the lump info offset
data.Seek(lumpInfoOffset, SeekOrigin.Begin);
// Try to parse the lump info -- TODO: Do we ever set the mipmap level?
var lumpInfo = ParseLumpInfo(data, lump.Type);
file.LumpInfos[i] = lumpInfo;
}
#endregion
return file;
}
/// <summary>
/// Parse a Stream into a Half-Life Texture Package header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Texture Package header on success, null on error</returns>
private static Header ParseHeader(Stream data)
{
// TODO: Use marshalling here instead of building
Header header = new Header();
byte[] signature = data.ReadBytes(4);
header.Signature = Encoding.ASCII.GetString(signature);
if (header.Signature != SignatureString)
return null;
header.LumpCount = data.ReadUInt32();
header.LumpOffset = data.ReadUInt32();
return header;
}
/// <summary>
/// Parse a Stream into a Half-Life Texture Package lump
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Texture Package lump on success, null on error</returns>
private static Lump ParseLump(Stream data)
{
// TODO: Use marshalling here instead of building
Lump lump = new Lump();
lump.Offset = data.ReadUInt32();
lump.DiskLength = data.ReadUInt32();
lump.Length = data.ReadUInt32();
lump.Type = data.ReadByteValue();
lump.Compression = data.ReadByteValue();
lump.Padding0 = data.ReadByteValue();
lump.Padding1 = data.ReadByteValue();
byte[] name = data.ReadBytes(16);
lump.Name = Encoding.ASCII.GetString(name).TrimEnd('\0');
return lump;
}
/// <summary>
/// Parse a Stream into a Half-Life Texture Package lump info
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="type">Lump type</param>
/// <param name="mipmap">Mipmap level</param>
/// <returns>Filled Half-Life Texture Package lump info on success, null on error</returns>
private static LumpInfo ParseLumpInfo(Stream data, byte type, uint mipmap = 0)
{
// TODO: Use marshalling here instead of building
LumpInfo lumpInfo = new LumpInfo();
// Cache the initial offset
long initialOffset = data.Position;
// Type 0x42 has no name, type 0x43 does. Are these flags?
if (type == 0x42)
{
if (mipmap > 0)
return null;
lumpInfo.Width = data.ReadUInt32();
lumpInfo.Height = data.ReadUInt32();
lumpInfo.PixelData = data.ReadBytes((int)(lumpInfo.Width * lumpInfo.Height));
lumpInfo.PaletteSize = data.ReadUInt16();
}
else if (type == 0x43)
{
if (mipmap > 3)
return null;
byte[] name = data.ReadBytes(16);
lumpInfo.Name = Encoding.ASCII.GetString(name).TrimEnd('\0');
lumpInfo.Width = data.ReadUInt32();
lumpInfo.Height = data.ReadUInt32();
lumpInfo.PixelOffset = data.ReadUInt32();
_ = data.ReadBytes(12); // Unknown data
// Cache the current offset
long currentOffset = data.Position;
// Seek to the pixel data
data.Seek(initialOffset + lumpInfo.PixelOffset, SeekOrigin.Begin);
// Read the pixel data
lumpInfo.PixelData = data.ReadBytes((int)(lumpInfo.Width * lumpInfo.Height));
// Seek back to the offset
data.Seek(currentOffset, SeekOrigin.Begin);
uint pixelSize = lumpInfo.Width * lumpInfo.Height;
// Mipmap data -- TODO: How do we determine this during initial parsing?
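// Each mip level halves the width and height, so level n occupies
// pixelSize >> (2 * n) bytes; the reads below skip the cumulative size of
// the levels that precede the requested one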
switch (mipmap)
{
case 0: break;
case 1: _ = data.ReadBytes((int)pixelSize); break;
case 2: _ = data.ReadBytes((int)(pixelSize + (pixelSize / 4))); break;
case 3: _ = data.ReadBytes((int)(pixelSize + (pixelSize / 4) + (pixelSize / 16))); break;
default: return null;
}
_ = data.ReadBytes((int)(pixelSize + (pixelSize / 4) + (pixelSize / 16) + (pixelSize / 64))); // Pixel data
lumpInfo.PaletteSize = data.ReadUInt16();
lumpInfo.PaletteData = data.ReadBytes((int)lumpInfo.PaletteSize * 3);
}
else
{
return null;
}
// Adjust based on mipmap level
switch (mipmap)
{
case 0:
break;
case 1:
lumpInfo.Width /= 2;
lumpInfo.Height /= 2;
break;
case 2:
lumpInfo.Width /= 4;
lumpInfo.Height /= 4;
break;
case 3:
lumpInfo.Width /= 8;
lumpInfo.Height /= 8;
break;
default:
return null;
}
return lumpInfo;
}
#endregion
}
}

View File

@@ -1,274 +0,0 @@
using System.IO;
using System.Text;
using BinaryObjectScanner.Models.XZP;
using BinaryObjectScanner.Utilities;
using static BinaryObjectScanner.Models.XZP.Constants;
namespace BinaryObjectScanner.Builders
{
public static class XZP
{
#region Byte Data
/// <summary>
/// Parse a byte array into an XBox Package File
/// </summary>
/// <param name="data">Byte array to parse</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>Filled XBox Package File on success, null on error</returns>
public static Models.XZP.File ParseFile(byte[] data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return ParseFile(dataStream);
}
#endregion
#region Stream Data
/// <summary>
/// Parse a Stream into an XBox Package File
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled XBox Package File on success, null on error</returns>
public static Models.XZP.File ParseFile(Stream data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
return null;
// If the offset is out of bounds
if (data.Position < 0 || data.Position >= data.Length)
return null;
// Cache the current offset
long initialOffset = data.Position;
// Create a new XBox Package File to fill
var file = new Models.XZP.File();
#region Header
// Try to parse the header
var header = ParseHeader(data);
if (header == null)
return null;
// Set the package header
file.Header = header;
#endregion
#region Directory Entries
// Create the directory entry array
file.DirectoryEntries = new DirectoryEntry[header.DirectoryEntryCount];
// Try to parse the directory entries
for (int i = 0; i < header.DirectoryEntryCount; i++)
{
var directoryEntry = ParseDirectoryEntry(data);
file.DirectoryEntries[i] = directoryEntry;
}
#endregion
#region Preload Directory Entries
if (header.PreloadBytes > 0)
{
// Create the preload directory entry array
file.PreloadDirectoryEntries = new DirectoryEntry[header.PreloadDirectoryEntryCount];
// Try to parse the preload directory entries
for (int i = 0; i < header.PreloadDirectoryEntryCount; i++)
{
var directoryEntry = ParseDirectoryEntry(data);
file.PreloadDirectoryEntries[i] = directoryEntry;
}
}
#endregion
#region Preload Directory Mappings
if (header.PreloadBytes > 0)
{
// Create the preload directory mapping array
file.PreloadDirectoryMappings = new DirectoryMapping[header.PreloadDirectoryEntryCount];
// Try to parse the preload directory mappings
for (int i = 0; i < header.PreloadDirectoryEntryCount; i++)
{
var directoryMapping = ParseDirectoryMapping(data);
file.PreloadDirectoryMappings[i] = directoryMapping;
}
}
#endregion
#region Directory Items
if (header.DirectoryItemCount > 0)
{
// Get the directory item offset
uint directoryItemOffset = header.DirectoryItemOffset;
if (directoryItemOffset >= data.Length)
return null;
// Seek to the directory items
data.Seek(directoryItemOffset, SeekOrigin.Begin);
// Create the directory item array
file.DirectoryItems = new DirectoryItem[header.DirectoryItemCount];
// Try to parse the directory items
for (int i = 0; i < header.DirectoryItemCount; i++)
{
var directoryItem = ParseDirectoryItem(data);
file.DirectoryItems[i] = directoryItem;
}
}
#endregion
#region Footer
// Seek to the footer
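// The footer occupies the last 8 bytes: a UInt32 file length followed by
// a 4-byte signature (see ParseFooter)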
data.Seek(-8, SeekOrigin.End);
// Try to parse the footer
var footer = ParseFooter(data);
if (footer == null)
return null;
// Set the package footer
file.Footer = footer;
#endregion
return file;
}
/// <summary>
/// Parse a Stream into an XBox Package File header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled XBox Package File header on success, null on error</returns>
private static Header ParseHeader(Stream data)
{
// TODO: Use marshalling here instead of building
Header header = new Header();
byte[] signature = data.ReadBytes(4);
header.Signature = Encoding.ASCII.GetString(signature);
if (header.Signature != HeaderSignatureString)
return null;
header.Version = data.ReadUInt32();
if (header.Version != 6)
return null;
header.PreloadDirectoryEntryCount = data.ReadUInt32();
header.DirectoryEntryCount = data.ReadUInt32();
header.PreloadBytes = data.ReadUInt32();
header.HeaderLength = data.ReadUInt32();
header.DirectoryItemCount = data.ReadUInt32();
header.DirectoryItemOffset = data.ReadUInt32();
header.DirectoryItemLength = data.ReadUInt32();
return header;
}
/// <summary>
/// Parse a Stream into an XBox Package File directory entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled XBox Package File directory entry on success, null on error</returns>
private static DirectoryEntry ParseDirectoryEntry(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryEntry directoryEntry = new DirectoryEntry();
directoryEntry.FileNameCRC = data.ReadUInt32();
directoryEntry.EntryLength = data.ReadUInt32();
directoryEntry.EntryOffset = data.ReadUInt32();
return directoryEntry;
}
/// <summary>
/// Parse a Stream into an XBox Package File directory mapping
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled XBox Package File directory mapping on success, null on error</returns>
private static DirectoryMapping ParseDirectoryMapping(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryMapping directoryMapping = new DirectoryMapping();
directoryMapping.PreloadDirectoryEntryIndex = data.ReadUInt16();
return directoryMapping;
}
/// <summary>
/// Parse a Stream into an XBox Package File directory item
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled XBox Package File directory item on success, null on error</returns>
private static DirectoryItem ParseDirectoryItem(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryItem directoryItem = new DirectoryItem();
directoryItem.FileNameCRC = data.ReadUInt32();
directoryItem.NameOffset = data.ReadUInt32();
directoryItem.TimeCreated = data.ReadUInt32();
// Cache the current offset
long currentPosition = data.Position;
// Seek to the name offset
data.Seek(directoryItem.NameOffset, SeekOrigin.Begin);
// Read the name
directoryItem.Name = data.ReadString(Encoding.ASCII);
// Seek back to the right position
data.Seek(currentPosition, SeekOrigin.Begin);
return directoryItem;
}
/// <summary>
/// Parse a Stream into an XBox Package File footer
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled XBox Package File footer on success, null on error</returns>
private static Footer ParseFooter(Stream data)
{
// TODO: Use marshalling here instead of building
Footer footer = new Footer();
footer.FileLength = data.ReadUInt32();
byte[] signature = data.ReadBytes(4);
footer.Signature = Encoding.ASCII.GetString(signature);
if (footer.Signature != FooterSignatureString)
return null;
return footer;
}
#endregion
}
}

View File

@@ -1,12 +0,0 @@
namespace BinaryObjectScanner.Compression.ADPCM
{
/// <see href="https://github.com/ladislav-zezula/StormLib/blob/master/src/adpcm/adpcm.cpp"/>
public unsafe struct ADPCM_DATA
{
public uint[] pValues;
public int BitCount;
public int field_8;
public int field_C;
public int field_10;
}
}

View File

@@ -1,131 +0,0 @@
using static BinaryObjectScanner.Compression.ADPCM.Constants;
using static BinaryObjectScanner.Compression.ADPCM.Helper;
namespace BinaryObjectScanner.Compression.ADPCM
{
public unsafe class Compressor
{
/// <summary>
/// Compression routine
/// </summary>
/// <see href="https://github.com/ladislav-zezula/StormLib/blob/master/src/adpcm/adpcm.cpp"/>
public int CompressADPCM(void* pvOutBuffer, int cbOutBuffer, void* pvInBuffer, int cbInBuffer, int ChannelCount, int CompressionLevel)
{
TADPCMStream os = new TADPCMStream(pvOutBuffer, cbOutBuffer); // The output stream
TADPCMStream @is = new TADPCMStream(pvInBuffer, cbInBuffer); // The input stream
byte BitShift = (byte)(CompressionLevel - 1);
short[] PredictedSamples = new short[MAX_ADPCM_CHANNEL_COUNT]; // Predicted samples for each channel
short[] StepIndexes = new short[MAX_ADPCM_CHANNEL_COUNT]; // Step indexes for each channel
short InputSample = 0; // Input sample for the current channel
int TotalStepSize;
int ChannelIndex;
int AbsDifference;
int Difference;
int MaxBitMask;
int StepSize;
// The first byte in the output stream is zero; the second contains the bit shift (compression level - 1)
os.WriteByteSample(0);
if (!os.WriteByteSample(BitShift))
return 2;
// Set the initial step index for each channel
PredictedSamples[0] = PredictedSamples[1] = 0;
StepIndexes[0] = StepIndexes[1] = INITIAL_ADPCM_STEP_INDEX;
// Next, InitialSample value for each channel follows
for (int i = 0; i < ChannelCount; i++)
{
// Get the initial sample from the input stream
if (!@is.ReadWordSample(ref InputSample))
return os.LengthProcessed(pvOutBuffer);
// Store the initial sample to our sample array
PredictedSamples[i] = InputSample;
// Also store the loaded sample to the output stream
if (!os.WriteWordSample(InputSample))
return os.LengthProcessed(pvOutBuffer);
}
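// At this point the output stream header is complete: 0x00, the bit shift,
// then one little-endian Int16 initial sample per channel; everything
// written after this is encoded sample data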
// Get the initial index
ChannelIndex = ChannelCount - 1;
// Now keep reading the input data as long as there is something in the input buffer
while (@is.ReadWordSample(ref InputSample))
{
int EncodedSample = 0;
// If we have two channels, we need to flip the channel index
ChannelIndex = (ChannelIndex + 1) % ChannelCount;
// Get the difference from the previous sample.
// If the difference is negative, set the sign bit to the encoded sample
AbsDifference = InputSample - PredictedSamples[ChannelIndex];
if (AbsDifference < 0)
{
AbsDifference = -AbsDifference;
EncodedSample |= 0x40;
}
// If the difference is too low (below the difference threshold),
// write a step index modifier marker
StepSize = StepSizeTable[StepIndexes[ChannelIndex]];
if (AbsDifference < (StepSize >> CompressionLevel))
{
if (StepIndexes[ChannelIndex] != 0)
StepIndexes[ChannelIndex]--;
os.WriteByteSample(0x80);
}
else
{
// If the difference is too high, write a marker that
// indicates an increase in step size
while (AbsDifference > (StepSize << 1))
{
if (StepIndexes[ChannelIndex] >= 0x58)
break;
// Modify the step index
StepIndexes[ChannelIndex] += 8;
if (StepIndexes[ChannelIndex] > 0x58)
StepIndexes[ChannelIndex] = 0x58;
// Write the "modify step index" marker
StepSize = StepSizeTable[StepIndexes[ChannelIndex]];
os.WriteByteSample(0x81);
}
// Get the limit bit value
MaxBitMask = (1 << (BitShift - 1));
MaxBitMask = (MaxBitMask > 0x20) ? 0x20 : MaxBitMask;
Difference = StepSize >> BitShift;
TotalStepSize = 0;
for (int BitVal = 0x01; BitVal <= MaxBitMask; BitVal <<= 1)
{
if ((TotalStepSize + StepSize) <= AbsDifference)
{
TotalStepSize += StepSize;
EncodedSample |= BitVal;
}
StepSize >>= 1;
}
PredictedSamples[ChannelIndex] = (short)UpdatePredictedSample(PredictedSamples[ChannelIndex],
EncodedSample,
Difference + TotalStepSize);
// Write the encoded sample to the output stream
if (!os.WriteByteSample((byte)EncodedSample))
break;
// Calculate the step index to use for the next encode
StepIndexes[ChannelIndex] = GetNextStepIndex(StepIndexes[ChannelIndex], (uint)EncodedSample);
}
}
return os.LengthProcessed(pvOutBuffer);
}
}
}

View File

@@ -1,51 +0,0 @@
namespace BinaryObjectScanner.Compression.ADPCM
{
/// <see href="https://github.com/ladislav-zezula/StormLib/blob/master/src/adpcm/adpcm.h"/>
public static class Constants
{
public const int MAX_ADPCM_CHANNEL_COUNT = 2;
public const byte INITIAL_ADPCM_STEP_INDEX = 0x2C;
#region Tables necessary for decompression
public static readonly int[] NextStepTable =
{
-1, 0, -1, 4, -1, 2, -1, 6,
-1, 1, -1, 5, -1, 3, -1, 7,
-1, 1, -1, 5, -1, 3, -1, 7,
-1, 2, -1, 4, -1, 6, -1, 8
};
public static readonly int[] StepSizeTable =
{
7, 8, 9, 10, 11, 12, 13, 14,
16, 17, 19, 21, 23, 25, 28, 31,
34, 37, 41, 45, 50, 55, 60, 66,
73, 80, 88, 97, 107, 118, 130, 143,
157, 173, 190, 209, 230, 253, 279, 307,
337, 371, 408, 449, 494, 544, 598, 658,
724, 796, 876, 963, 1060, 1166, 1282, 1411,
1552, 1707, 1878, 2066, 2272, 2499, 2749, 3024,
3327, 3660, 4026, 4428, 4871, 5358, 5894, 6484,
7132, 7845, 8630, 9493, 10442, 11487, 12635, 13899,
15289, 16818, 18500, 20350, 22385, 24623, 27086, 29794,
32767
};
#endregion
#region ADPCM decompression present in Starcraft I BETA
public static readonly uint[] adpcm_values_2 = { 0x33, 0x66 };
public static readonly uint[] adpcm_values_3 = { 0x3A, 0x3A, 0x50, 0x70 };
public static readonly uint[] adpcm_values_4 = { 0x3A, 0x3A, 0x3A, 0x3A, 0x4D, 0x66, 0x80, 0x9A };
public static readonly uint[] adpcm_values_6 =
{
0x3A, 0x3A, 0x3A, 0x3A, 0x3A, 0x3A, 0x3A, 0x3A, 0x3A, 0x3A, 0x3A, 0x3A, 0x3A, 0x3A, 0x3A, 0x3A,
0x46, 0x53, 0x60, 0x6D, 0x7A, 0x86, 0x93, 0xA0, 0xAD, 0xBA, 0xC6, 0xD3, 0xE0, 0xED, 0xFA, 0x106
};
#endregion
}
}

View File

@@ -1,205 +0,0 @@
using static BinaryObjectScanner.Compression.ADPCM.Constants;
using static BinaryObjectScanner.Compression.ADPCM.Helper;
namespace BinaryObjectScanner.Compression.ADPCM
{
public unsafe class Decompressor
{
/// <summary>
/// Decompression routine
/// </summary>
/// <see href="https://github.com/ladislav-zezula/StormLib/blob/master/src/adpcm/adpcm.cpp"/>
public int DecompressADPCM(void* pvOutBuffer, int cbOutBuffer, void* pvInBuffer, int cbInBuffer, int ChannelCount)
{
TADPCMStream os = new TADPCMStream(pvOutBuffer, cbOutBuffer); // Output stream
TADPCMStream @is = new TADPCMStream(pvInBuffer, cbInBuffer); // Input stream
byte EncodedSample = 0;
byte BitShift = 0;
short[] PredictedSamples = new short[MAX_ADPCM_CHANNEL_COUNT]; // Predicted sample for each channel
short[] StepIndexes = new short[MAX_ADPCM_CHANNEL_COUNT]; // Predicted step index for each channel
int ChannelIndex; // Current channel index
// Initialize the StepIndex for each channel
PredictedSamples[0] = PredictedSamples[1] = 0;
StepIndexes[0] = StepIndexes[1] = INITIAL_ADPCM_STEP_INDEX;
// The first byte is always zero and is discarded; the second contains the bit shift (compression level - 1)
@is.ReadByteSample(ref BitShift);
@is.ReadByteSample(ref BitShift);
// Next, InitialSample value for each channel follows
for (int i = 0; i < ChannelCount; i++)
{
// Get the initial sample from the input stream
short InitialSample = 0;
// Attempt to read the initial sample
if (!@is.ReadWordSample(ref InitialSample))
return os.LengthProcessed(pvOutBuffer);
// Store the initial sample to our sample array
PredictedSamples[i] = InitialSample;
// Also store the loaded sample to the output stream
if (!os.WriteWordSample(InitialSample))
return os.LengthProcessed(pvOutBuffer);
}
// Get the initial index
ChannelIndex = ChannelCount - 1;
// Keep reading as long as there is something in the input buffer
while (@is.ReadByteSample(ref EncodedSample))
{
// If we have two channels, we need to flip the channel index
ChannelIndex = (ChannelIndex + 1) % ChannelCount;
if (EncodedSample == 0x80)
{
if (StepIndexes[ChannelIndex] != 0)
StepIndexes[ChannelIndex]--;
if (!os.WriteWordSample(PredictedSamples[ChannelIndex]))
return os.LengthProcessed(pvOutBuffer);
}
else if (EncodedSample == 0x81)
{
// Modify the step index
StepIndexes[ChannelIndex] += 8;
if (StepIndexes[ChannelIndex] > 0x58)
StepIndexes[ChannelIndex] = 0x58;
// Next pass, keep going on the same channel
ChannelIndex = (ChannelIndex + 1) % ChannelCount;
}
else
{
int StepIndex = StepIndexes[ChannelIndex];
int StepSize = StepSizeTable[StepIndex];
// Encode one sample
PredictedSamples[ChannelIndex] = (short)DecodeSample(PredictedSamples[ChannelIndex],
EncodedSample,
StepSize,
StepSize >> BitShift);
// Write the decoded sample to the output stream
if (!os.WriteWordSample(PredictedSamples[ChannelIndex]))
break;
// Calculate the step index to use for the next encode
StepIndexes[ChannelIndex] = GetNextStepIndex(StepIndex, EncodedSample);
}
}
// Return total bytes written since beginning of the output buffer
return os.LengthProcessed(pvOutBuffer);
}
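// Usage sketch (hypothetical buffers; assumes StormLib-style ADPCM input and
// an output buffer sized for the decoded samples):
//   fixed (byte* pIn = compressedData)
//   fixed (byte* pOut = decodedBuffer)
//   {
//       int written = new Decompressor()
//           .DecompressADPCM(pOut, decodedBuffer.Length, pIn, compressedData.Length, ChannelCount: 2);
//   }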
/// <summary>
/// ADPCM decompression present in Starcraft I BETA
/// </summary>
/// <see href="https://github.com/ladislav-zezula/StormLib/blob/master/src/adpcm/adpcm.cpp"/>
public int DecompressADPCM_SC1B(void* pvOutBuffer, int cbOutBuffer, void* pvInBuffer, int cbInBuffer, int ChannelCount)
{
TADPCMStream os = new TADPCMStream(pvOutBuffer, cbOutBuffer); // Output stream
TADPCMStream @is = new TADPCMStream(pvInBuffer, cbInBuffer); // Input stream
ADPCM_DATA AdpcmData = new ADPCM_DATA();
int[] LowBitValues = new int[MAX_ADPCM_CHANNEL_COUNT];
int[] UpperBits = new int[MAX_ADPCM_CHANNEL_COUNT];
int[] BitMasks = new int[MAX_ADPCM_CHANNEL_COUNT];
int[] PredictedSamples = new int[MAX_ADPCM_CHANNEL_COUNT];
int ChannelIndex;
int ChannelIndexMax;
int OutputSample;
byte BitCount = 0;
byte EncodedSample = 0;
short InputValue16 = 0;
int reg_eax;
int Difference;
// The first byte contains number of bits
if (!@is.ReadByteSample(ref BitCount))
return os.LengthProcessed(pvOutBuffer);
if (InitAdpcmData(ref AdpcmData, BitCount) == null)
return os.LengthProcessed(pvOutBuffer);
//assert(AdpcmData.pValues != NULL);
// Init bit values
for (int i = 0; i < ChannelCount; i++)
{
byte OneByte = 0;
if (!@is.ReadByteSample(ref OneByte))
return os.LengthProcessed(pvOutBuffer);
LowBitValues[i] = OneByte & 0x01;
UpperBits[i] = OneByte >> 1;
}
// Next, the initial bit mask for each channel follows
for (int i = 0; i < ChannelCount; i++)
{
if (!@is.ReadWordSample(ref InputValue16))
return os.LengthProcessed(pvOutBuffer);
BitMasks[i] = InputValue16 << AdpcmData.BitCount;
}
// Next, InitialSample value for each channel follows
for (int i = 0; i < ChannelCount; i++)
{
if (!@is.ReadWordSample(ref InputValue16))
return os.LengthProcessed(pvOutBuffer);
PredictedSamples[i] = InputValue16;
os.WriteWordSample(InputValue16);
}
// Get the initial index
ChannelIndexMax = ChannelCount - 1;
ChannelIndex = 0;
// Keep reading as long as there is something in the input buffer
while (@is.ReadByteSample(ref EncodedSample))
{
reg_eax = ((PredictedSamples[ChannelIndex] * 3) << 3) - PredictedSamples[ChannelIndex];
PredictedSamples[ChannelIndex] = ((reg_eax * 10) + 0x80) >> 8;
Difference = (((EncodedSample >> 1) + 1) * BitMasks[ChannelIndex] + AdpcmData.field_10) >> AdpcmData.BitCount;
PredictedSamples[ChannelIndex] = UpdatePredictedSample(PredictedSamples[ChannelIndex], EncodedSample, Difference, 0x01);
BitMasks[ChannelIndex] = (int)((AdpcmData.pValues[EncodedSample >> 1] * BitMasks[ChannelIndex] + 0x80) >> 6);
if (BitMasks[ChannelIndex] < AdpcmData.field_8)
BitMasks[ChannelIndex] = AdpcmData.field_8;
if (BitMasks[ChannelIndex] > AdpcmData.field_C)
BitMasks[ChannelIndex] = AdpcmData.field_C;
reg_eax = (cbInBuffer - @is.LengthProcessed(pvInBuffer)) >> ChannelIndexMax;
OutputSample = PredictedSamples[ChannelIndex];
if (reg_eax < UpperBits[ChannelIndex])
{
if (LowBitValues[ChannelIndex] != 0)
{
OutputSample += (UpperBits[ChannelIndex] - reg_eax);
if (OutputSample > 32767)
OutputSample = 32767;
}
else
{
OutputSample += (reg_eax - UpperBits[ChannelIndex]);
if (OutputSample < -32768)
OutputSample = -32768;
}
}
// Write the word sample and swap channel
os.WriteWordSample((short)(OutputSample));
ChannelIndex = (ChannelIndex + 1) % ChannelCount;
}
return os.LengthProcessed(pvOutBuffer);
}
}
}

View File

@@ -1,104 +0,0 @@
using static BinaryObjectScanner.Compression.ADPCM.Constants;
namespace BinaryObjectScanner.Compression.ADPCM
{
/// <see href="https://github.com/ladislav-zezula/StormLib/blob/master/src/adpcm/adpcm.cpp"/>
internal static unsafe class Helper
{
#region Local functions
public static short GetNextStepIndex(int StepIndex, uint EncodedSample)
{
// Get the next step index
StepIndex = StepIndex + NextStepTable[EncodedSample & 0x1F];
// Don't make the step index overflow
if (StepIndex < 0)
StepIndex = 0;
else if (StepIndex > 88)
StepIndex = 88;
return (short)StepIndex;
}
public static int UpdatePredictedSample(int PredictedSample, int EncodedSample, int Difference, int BitMask = 0x40)
{
// Is the sign bit set?
if ((EncodedSample & BitMask) != 0)
{
PredictedSample -= Difference;
if (PredictedSample <= -32768)
PredictedSample = -32768;
}
else
{
PredictedSample += Difference;
if (PredictedSample >= 32767)
PredictedSample = 32767;
}
return PredictedSample;
}
public static int DecodeSample(int PredictedSample, int EncodedSample, int StepSize, int Difference)
{
if ((EncodedSample & 0x01) != 0)
Difference += (StepSize >> 0);
if ((EncodedSample & 0x02) != 0)
Difference += (StepSize >> 1);
if ((EncodedSample & 0x04) != 0)
Difference += (StepSize >> 2);
if ((EncodedSample & 0x08) != 0)
Difference += (StepSize >> 3);
if ((EncodedSample & 0x10) != 0)
Difference += (StepSize >> 4);
if ((EncodedSample & 0x20) != 0)
Difference += (StepSize >> 5);
return UpdatePredictedSample(PredictedSample, EncodedSample, Difference);
}
#endregion
#region ADPCM decompression present in Starcraft I BETA
public static uint[] InitAdpcmData(ref ADPCM_DATA pData, byte BitCount)
{
switch (BitCount)
{
case 2:
pData.pValues = adpcm_values_2;
break;
case 3:
pData.pValues = adpcm_values_3;
break;
case 4:
pData.pValues = adpcm_values_4;
break;
case 6:
pData.pValues = adpcm_values_6;
break;
default:
pData.pValues = null;
break;
}
pData.BitCount = BitCount;
pData.field_C = 0x20000;
pData.field_8 = 1 << BitCount;
pData.field_10 = (1 << BitCount) / 2;
return pData.pValues;
}
#endregion
}
}

View File

@@ -1,67 +0,0 @@
namespace BinaryObjectScanner.Compression.ADPCM
{
/// <summary>
/// Helper class for writing output ADPCM data
/// </summary>
/// <see href="https://github.com/ladislav-zezula/StormLib/blob/master/src/adpcm/adpcm.cpp"/>
public unsafe class TADPCMStream
{
private byte* pbBufferEnd;
private byte* pbBuffer;
public TADPCMStream(void* pvBuffer, int cbBuffer)
{
pbBufferEnd = (byte*)pvBuffer + cbBuffer;
pbBuffer = (byte*)pvBuffer;
}
public bool ReadByteSample(ref byte ByteSample)
{
// Check if there is enough space in the buffer
if (pbBuffer >= pbBufferEnd)
return false;
ByteSample = *pbBuffer++;
return true;
}
public bool WriteByteSample(byte ByteSample)
{
// Check if there is enough space in the buffer
if (pbBuffer >= pbBufferEnd)
return false;
*pbBuffer++ = ByteSample;
return true;
}
public bool ReadWordSample(ref short OneSample)
{
// Check if we have enough space in the output buffer
if ((int)(pbBufferEnd - pbBuffer) < sizeof(short))
return false;
// Write the sample
OneSample = (short)(pbBuffer[0] + ((pbBuffer[1]) << 0x08));
pbBuffer += sizeof(short);
return true;
}
public bool WriteWordSample(short OneSample)
{
// Check if we have enough space in the output buffer
if ((int)(pbBufferEnd - pbBuffer) < sizeof(short))
return false;
// Write the sample
*pbBuffer++ = (byte)(OneSample & 0xFF);
*pbBuffer++ = (byte)(OneSample >> 0x08);
return true;
}
public int LengthProcessed(void* pvOutBuffer)
{
return (int)((byte*)pbBuffer - (byte*)pvOutBuffer);
}
}
}

View File

@@ -1,43 +0,0 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFrameworks>net48;net6.0;net7.0</TargetFrameworks>
<RuntimeIdentifiers>win-x86;win-x64;linux-x64;osx-x64</RuntimeIdentifiers>
<Title>BinaryObjectScanner.Compression</Title>
<AssemblyName>BinaryObjectScanner.Compression</AssemblyName>
<Authors>Matt Nadareski</Authors>
<Product>BurnOutSharp</Product>
<Copyright>Copyright (c)2022 Matt Nadareski</Copyright>
<RepositoryUrl>https://github.com/mnadareski/BurnOutSharp</RepositoryUrl>
<Version>2.8</Version>
<AssemblyVersion>2.8</AssemblyVersion>
<FileVersion>2.8</FileVersion>
<IncludeSource>true</IncludeSource>
<IncludeSymbols>true</IncludeSymbols>
</PropertyGroup>
<PropertyGroup>
<AllowUnsafeBlocks>true</AllowUnsafeBlocks>
</PropertyGroup>
<!-- These are needed for dealing with submodules -->
<PropertyGroup>
<DefaultItemExcludes>
$(DefaultItemExcludes);
**\AssemblyInfo.cs;
External\stormlibsharp\lib\**;
External\stormlibsharp\TestConsole\**
</DefaultItemExcludes>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="SharpCompress" Version="0.32.2" />
<PackageReference Include="SharpZipLib" Version="1.4.1" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\BinaryObjectScanner.Models\BinaryObjectScanner.Models.csproj" />
<ProjectReference Include="..\BinaryObjectScanner.Utilities\BinaryObjectScanner.Utilities.csproj" />
</ItemGroup>
</Project>

View File

@@ -1,525 +0,0 @@
using System.IO;
using System.Linq;
using System.Text;
using BinaryObjectScanner.Models.Compression.LZ;
using BinaryObjectScanner.Utilities;
using static BinaryObjectScanner.Models.Compression.LZ.Constants;
namespace BinaryObjectScanner.Compression
{
/// <see href="https://github.com/wine-mirror/wine/blob/master/dlls/kernel32/lzexpand.c"/>
public class LZ
{
#region Constructors
/// <summary>
/// Constructor
/// </summary>
public LZ() { }
#endregion
#region Static Methods
/// <summary>
/// Decompress LZ-compressed data
/// </summary>
/// <param name="compressed">Byte array representing the compressed data</param>
/// <returns>Decompressed data as a byte array, null on error</returns>
public static byte[] Decompress(byte[] compressed)
{
// If we have an invalid input
if (compressed == null || compressed.Length == 0)
return null;
// Create a memory stream for the input and decompress that
var compressedStream = new MemoryStream(compressed);
return Decompress(compressedStream);
}
/// <summary>
/// Decompress LZ-compressed data
/// </summary>
/// <param name="compressed">Stream representing the compressed data</param>
/// <returns>Decompressed data as a byte array, null on error</returns>
public static byte[] Decompress(Stream compressed)
{
// If we have an invalid input
if (compressed == null || compressed.Length == 0)
return null;
// Create a new LZ for decompression
var lz = new LZ();
// Open the input data
var sourceState = lz.Open(compressed, out _);
if (sourceState?.Window == null)
return null;
// Create the output data and open it
var decompressedStream = new MemoryStream();
var destState = lz.Open(decompressedStream, out _);
if (destState == null)
return null;
// Decompress the data by copying
long read = lz.CopyTo(sourceState, destState, out LZERROR error);
// Copy the data to the buffer
byte[] decompressed;
if (read == 0 || (error != LZERROR.LZERROR_OK && error != LZERROR.LZERROR_NOT_LZ))
{
decompressed = null;
}
else
{
int dataEnd = (int)decompressedStream.Position;
decompressedStream.Seek(0, SeekOrigin.Begin);
decompressed = decompressedStream.ReadBytes(dataEnd);
}
// Close the streams
lz.Close(sourceState);
lz.Close(destState);
return decompressed;
}
/// <summary>
/// Reconstructs the full filename of the compressed file
/// </summary>
public static string GetExpandedName(string input, out LZERROR error)
{
// Try to open the file as a compressed stream
var fileStream = File.Open(input, FileMode.Open, FileAccess.Read, FileShare.ReadWrite);
var lz = new LZ();
var state = lz.Open(fileStream, out error);
// Only the header is needed, so the stream can be closed immediately
lz.Close(state);
fileStream.Close();
if (state?.Window == null)
return null;
// Get the extension for modification
string inputExtension = Path.GetExtension(input).TrimStart('.');
// If we have no extension
if (string.IsNullOrWhiteSpace(inputExtension))
return Path.GetFileNameWithoutExtension(input);
// If we have an extension of length 1
if (inputExtension.Length == 1)
{
if (inputExtension == "_")
return $"{Path.GetFileNameWithoutExtension(input)}.{char.ToLower(state.LastChar)}";
else
return Path.GetFileNameWithoutExtension(input);
}
// If we have an extension that doesn't end in an underscore
if (!inputExtension.EndsWith("_"))
return Path.GetFileNameWithoutExtension(input);
// Build the new filename
bool isUpperCase = char.IsUpper(input[0]);
char replacementChar = isUpperCase ? char.ToUpper(state.LastChar) : char.ToLower(state.LastChar);
string outputExtension = inputExtension.Substring(0, inputExtension.Length - 1) + replacementChar;
return $"{Path.GetFileNameWithoutExtension(input)}.{outputExtension}";
}
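// Usage sketch (hypothetical file name; assumes an SZDD/LZ-compressed input,
// e.g. a file produced by COMPRESS.EXE):
//   byte[] expanded = Decompress(System.IO.File.ReadAllBytes("SETUP.EX_"));
//   string realName = GetExpandedName("SETUP.EX_", out LZERROR nameError);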
#endregion
#region State Management
/// <summary>
/// Opens a stream and creates a state from it
/// </summary>
/// <param name="stream">Source stream to create a state from</stream>
/// <param name="error">Output representing the last error</param>
/// <returns>An initialized State, null on error</returns>
/// <remarks>Uncompressed streams are represented by a State with no buffer</remarks>
public State Open(Stream stream, out LZERROR error)
{
State lzs = Init(stream, out error);
if (error == LZERROR.LZERROR_OK || error == LZERROR.LZERROR_NOT_LZ)
return lzs;
return null;
}
/// <summary>
/// Closes a state by invalidating the source
/// </summary>
/// <param name="stream">State object to close</stream>
public void Close(State state)
{
try
{
state?.Source?.Close();
}
catch { }
}
/// <summary>
/// Initializes internal decompression buffers
/// </summary>
/// <param name="source">Input stream to create a state from</param>
/// <param name="error">Output representing the last error</param>
/// <returns>An initialized State, null on error</returns>
/// <remarks>Uncompressed streams are represented by a State with no buffer</remarks>
public State Init(Stream source, out LZERROR error)
{
// If we have an invalid source
if (source == null)
{
error = LZERROR.LZERROR_BADVALUE;
return null;
}
// Attempt to read the header
var fileHeader = ParseFileHeader(source, out error);
// If we had a valid but uncompressed stream
if (error == LZERROR.LZERROR_NOT_LZ)
{
source.Seek(0, SeekOrigin.Begin);
return new State { Source = source };
}
// If we had any error
else if (error != LZERROR.LZERROR_OK)
{
source.Seek(0, SeekOrigin.Begin);
return null;
}
// Initialize the table with all spaces
byte[] table = Enumerable.Repeat((byte)' ', LZ_TABLE_SIZE).ToArray();
// Build the state
var state = new State
{
Source = source,
LastChar = fileHeader.LastChar,
RealLength = fileHeader.RealLength,
Window = new byte[GETLEN],
WindowLength = 0,
WindowCurrent = 0,
Table = table,
CurrentTableEntry = 0xff0,
};
// Return the state
return state;
}
#endregion
#region Stream Functionality
/// <summary>
/// Attempt to read the specified number of bytes from the State
/// </summary>
/// <param name="source">Source State to read from</param>
/// <param name="buffer">Byte buffer to read into</param>
/// <param name="offset">Offset within the buffer to read</param>
/// <param name="count">Number of bytes to read</param>
/// <param name="error">Output representing the last error</param>
/// <returns>The number of bytes read, if possible</returns>
/// <remarks>
/// If the source data is compressed, this will decompress the data.
/// If the source data is uncompressed, it is copied directly
/// </remarks>
public int Read(State source, byte[] buffer, int offset, int count, out LZERROR error)
{
// If we have an uncompressed input
if (source.Window == null)
{
error = LZERROR.LZERROR_NOT_LZ;
return source.Source.Read(buffer, offset, count);
}
// If seeking has occurred, we need to perform the seek
if (source.RealCurrent != source.RealWanted)
{
// If the requested position is before the current, we need to reset
if (source.RealCurrent > source.RealWanted)
{
// Reset the decompressor state
source.Source.Seek(LZ_HEADER_LEN, SeekOrigin.Begin);
FlushWindow(source);
source.RealCurrent = 0;
source.ByteType = 0;
source.StringLength = 0;
source.Table = Enumerable.Repeat((byte)' ', LZ_TABLE_SIZE).ToArray();
source.CurrentTableEntry = 0xFF0;
}
// While we are not at the right offset
while (source.RealCurrent < source.RealWanted)
{
_ = DecompressByte(source, out error);
if (error != LZERROR.LZERROR_OK)
return 0;
}
}
int bytesRemaining = count;
while (bytesRemaining > 0)
{
byte b = DecompressByte(source, out error);
if (error != LZERROR.LZERROR_OK)
return count - bytesRemaining;
source.RealWanted++;
buffer[offset++] = b;
bytesRemaining--;
}
error = LZERROR.LZERROR_OK;
return count;
}
/// <summary>
/// Perform a seek on the source data
/// </summary>
/// <param name="state">State to seek within</param>
/// <param name="offset">Data offset to seek to</state>
/// <param name="seekOrigin">SeekOrigin representing how to seek</state>
/// <param name="error">Output representing the last error</param>
/// <returns>The position that was seeked to, -1 on error</returns>
public long Seek(State state, long offset, SeekOrigin seekOrigin, out LZERROR error)
{
// If we have an invalid state
if (state == null)
{
error = LZERROR.LZERROR_BADVALUE;
return -1;
}
// If we have an uncompressed input
if (state.Window == null)
{
error = LZERROR.LZERROR_NOT_LZ;
return state.Source.Seek(offset, seekOrigin);
}
// Otherwise, generate the new offset
long newWanted = state.RealWanted;
switch (seekOrigin)
{
case SeekOrigin.Current:
newWanted += offset;
break;
case SeekOrigin.End:
newWanted = state.RealLength - offset;
break;
default:
newWanted = offset;
break;
}
// If we have an invalid new offset
if (newWanted < 0 || newWanted > state.RealLength)
{
error = LZERROR.LZERROR_BADVALUE;
return -1;
}
error = LZERROR.LZERROR_OK;
state.RealWanted = (uint)newWanted;
return newWanted;
}
/// <summary>
/// Copies all data from the source to the destination
/// </summary>
/// <param name="source">Source State to read from</param>
/// <param name="dest">Destination state to write to</param>
/// <param name="error">Output representing the last error</param>
/// <returns>The number of bytes written, -1 on error</returns>
/// <remarks>
/// If the source data is compressed, this will decompress the data.
/// If the source data is uncompressed, it is copied directly
/// </remarks>
public long CopyTo(State source, State dest, out LZERROR error)
{
error = LZERROR.LZERROR_OK;
// If we have an uncompressed input
if (source.Window == null)
{
source.Source.CopyTo(dest.Source);
return source.Source.Length;
}
// Loop until we have read everything
long length = 0;
byte[] buf = new byte[1000];
while (true)
{
// Read at most 1000 bytes at a time
int read = Read(source, buf, 0, buf.Length, out error);
// If we had an error
if (read == 0)
{
if (error == LZERROR.LZERROR_NOT_LZ)
{
error = LZERROR.LZERROR_OK;
break;
}
else if (error != LZERROR.LZERROR_OK)
{
error = LZERROR.LZERROR_READ;
return 0;
}
}
// Otherwise, append the length read and write the data
length += read;
dest.Source.Write(buf, 0, read);
}
return length;
}
/// <summary>
/// Decompress a single byte of data from the source State
/// </summary>
/// <param name="source">Source State to read from</param>
/// <param name="error">Output representing the last error</param>
/// <returns>The read byte, if possible</returns>
private byte DecompressByte(State source, out LZERROR error)
{
byte b;
if (source.StringLength != 0)
{
b = source.Table[source.StringPosition];
source.StringPosition = (source.StringPosition + 1) & 0xFFF;
source.StringLength--;
}
else
{
if ((source.ByteType & 0x100) == 0)
{
b = ReadByte(source, out error);
if (error != LZERROR.LZERROR_OK)
return 0;
source.ByteType = (ushort)(b | 0xFF00);
}
if ((source.ByteType & 1) != 0)
{
b = ReadByte(source, out error);
if (error != LZERROR.LZERROR_OK)
return 0;
}
else
{
byte b1 = ReadByte(source, out error);
if (error != LZERROR.LZERROR_OK)
return 0;
byte b2 = ReadByte(source, out error);
if (error != LZERROR.LZERROR_OK)
return 0;
// Format:
// b1 b2
// AB CD
// where CAB is the string offset in the table
// and D+3 is the length of the string
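// Worked example (hypothetical values): b1 = 0x34, b2 = 0xA7 gives
// StringPosition = 0x34 | ((0xA7 & 0xF0) << 4) = 0xA34 and a total
// string length of (0xA7 & 0x0F) + 3 = 10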
source.StringPosition = (uint)(b1 | ((b2 & 0xf0) << 4));
source.StringLength = (byte)((b2 & 0xf) + 2);
// The length is really D + 3, but one byte is emitted immediately below
b = source.Table[source.StringPosition];
source.StringPosition = (source.StringPosition + 1) & 0xFFF;
}
source.ByteType >>= 1;
}
// Store b in table
source.Table[source.CurrentTableEntry++] = b;
source.CurrentTableEntry &= 0xFFF;
source.RealCurrent++;
error = LZERROR.LZERROR_OK;
return b;
}
/// <summary>
/// Reads one compressed byte, including buffering
/// </summary>
/// <param name="state">State to read using</param>
/// <param name="error">Output representing the last error</param>
/// <returns>Byte value that was read, if possible</returns>
private byte ReadByte(State state, out LZERROR error)
{
// If we have enough data in the buffer
if (state.WindowCurrent < state.WindowLength)
{
error = LZERROR.LZERROR_OK;
return state.Window[state.WindowCurrent++];
}
// Otherwise, read from the source
int ret = state.Source.Read(state.Window, 0, GETLEN);
if (ret == 0)
{
error = LZERROR.LZERROR_NOT_LZ;
return 0;
}
// Reset the window state
state.WindowLength = (uint)ret;
state.WindowCurrent = 1;
error = LZERROR.LZERROR_OK;
return state.Window[0];
}
/// <summary>
/// Reset the current window position to the length
/// </summary>
/// <param name="state">State to flush</param>
private void FlushWindow(State state)
{
state.WindowCurrent = state.WindowLength;
}
/// <summary>
/// Parse a Stream into a file header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="error">Output representing the last error</param>
/// <returns>Filled file header on success, null on error</returns>
private FileHeaader ParseFileHeader(Stream data, out LZERROR error)
{
error = LZERROR.LZERROR_OK;
FileHeaader fileHeader = new FileHeaader();
byte[] magic = data.ReadBytes(LZ_MAGIC_LEN);
fileHeader.Magic = Encoding.ASCII.GetString(magic);
if (fileHeader.Magic != MagicString)
{
error = LZERROR.LZERROR_NOT_LZ;
return null;
}
fileHeader.CompressionType = data.ReadByteValue();
if (fileHeader.CompressionType != (byte)'A')
{
error = LZERROR.LZERROR_UNKNOWNALG;
return null;
}
fileHeader.LastChar = (char)data.ReadByteValue();
fileHeader.RealLength = data.ReadUInt32();
return fileHeader;
}
#endregion
}
}

View File

@@ -1,12 +0,0 @@
namespace BinaryObjectScanner.Compression.LZX
{
/// <see href="https://github.com/wine-mirror/wine/blob/master/dlls/cabinet/cabinet.h"/>
public class Bits
{
public uint BitBuffer;
public int BitsLeft;
public int InputPosition; //byte*
}
}

View File

@@ -1,759 +0,0 @@
using System;
using BinaryObjectScanner.Compression.LZX;
using static BinaryObjectScanner.Models.Compression.LZX.Constants;
using static BinaryObjectScanner.Models.MicrosoftCabinet.Constants;
namespace BinaryObjectScanner.Compression.LZX
{
/// <see href="https://github.com/wine-mirror/wine/blob/master/dlls/cabinet/fdi.c"/>
public class Decompressor
{
/// <summary>
/// Initialize an LZX decompressor state
/// </summary>
public static bool Init(int window, State state)
{
uint wndsize = (uint)(1 << window);
int posn_slots;
/* LZX supports window sizes of 2^15 (32Kb) through 2^21 (2Mb) */
/* if a previously allocated window is big enough, keep it */
if (window < 15 || window > 21)
return false;
if (state.actual_size < wndsize)
state.window = null;
if (state.window == null)
{
state.window = new byte[wndsize];
state.actual_size = wndsize;
}
state.window_size = wndsize;
/* calculate required position slots */
if (window == 20) posn_slots = 42;
else if (window == 21) posn_slots = 50;
else posn_slots = window << 1;
/*posn_slots=i=0; while (i < wndsize) i += 1 << CAB(extra_bits)[posn_slots++]; */
state.R0 = state.R1 = state.R2 = 1;
state.main_elements = (ushort)(LZX_NUM_CHARS + (posn_slots << 3));
state.header_read = 0;
state.frames_read = 0;
state.block_remaining = 0;
state.block_type = LZX_BLOCKTYPE_INVALID;
state.intel_curpos = 0;
state.intel_started = 0;
state.window_posn = 0;
/* initialize tables to 0 (because deltas will be applied to them) */
// Note: the Wine memsets are unnecessary for a freshly constructed State,
// since C# zero-initializes arrays on allocation
return true;
}
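// Sizing sketch (hypothetical value): for window = 16, Init allocates a
// 64 KiB window, computes posn_slots = 16 << 1 = 32, and therefore
// main_elements = LZX_NUM_CHARS (256) + (32 << 3) = 512 tree elements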
/// <summary>
/// Decompress a byte array using a given State
/// </summary>
public static bool Decompress(State state, int inlen, byte[] inbuf, int outlen, byte[] outbuf)
{
int inpos = 0; // inbuf[0];
int endinp = inpos + inlen;
int window = 0; // state.window[0];
int runsrc, rundest; // byte*
uint window_posn = state.window_posn;
uint window_size = state.window_size;
uint R0 = state.R0;
uint R1 = state.R1;
uint R2 = state.R2;
uint match_offset, i, j, k; /* ijk used in READ_HUFFSYM macro */
Bits lb = new Bits(); /* used in READ_LENGTHS macro */
int togo = outlen, this_run, main_element, aligned_bits;
int match_length, copy_length, length_footer, extra, verbatim_bits;
INIT_BITSTREAM(out int bitsleft, out uint bitbuf);
/* read header if necessary */
if (state.header_read == 0)
{
i = j = 0;
k = READ_BITS(1, inbuf, ref inpos, ref bitsleft, ref bitbuf);
if (k != 0)
{
i = READ_BITS(16, inbuf, ref inpos, ref bitsleft, ref bitbuf);
j = READ_BITS(16, inbuf, ref inpos, ref bitsleft, ref bitbuf);
}
state.intel_filesize = (int)((i << 16) | j); /* or 0 if not encoded */
state.header_read = 1;
}
/* main decoding loop */
while (togo > 0)
{
/* last block finished, new block expected */
if (state.block_remaining == 0)
{
if (state.block_type == LZX_BLOCKTYPE_UNCOMPRESSED)
{
if ((state.block_length & 1) != 0)
inpos++; /* realign bitstream to word */
INIT_BITSTREAM(out bitsleft, out bitbuf);
}
state.block_type = (ushort)READ_BITS(3, inbuf, ref inpos, ref bitsleft, ref bitbuf);
i = READ_BITS(16, inbuf, ref inpos, ref bitsleft, ref bitbuf);
j = READ_BITS(8, inbuf, ref inpos, ref bitsleft, ref bitbuf);
state.block_remaining = state.block_length = (i << 8) | j;
switch (state.block_type)
{
case LZX_BLOCKTYPE_ALIGNED:
for (i = 0; i < 8; i++)
{
j = READ_BITS(3, inbuf, ref inpos, ref bitsleft, ref bitbuf);
state.tblALIGNED_len[i] = (byte)j;
}
make_decode_table(LZX_ALIGNED_MAXSYMBOLS, LZX_ALIGNED_TABLEBITS, state.tblALIGNED_len, state.tblALIGNED_table);
/* rest of aligned header is same as verbatim */
goto case LZX_BLOCKTYPE_VERBATIM;
case LZX_BLOCKTYPE_VERBATIM:
READ_LENGTHS(state.tblMAINTREE_len, 0, 256, lb, state, inbuf, ref inpos, ref bitsleft, ref bitbuf);
READ_LENGTHS(state.tblMAINTREE_len, 256, state.main_elements, lb, state, inbuf, ref inpos, ref bitsleft, ref bitbuf);
make_decode_table(LZX_MAINTREE_MAXSYMBOLS, LZX_MAINTREE_TABLEBITS, state.tblMAINTREE_len, state.tblMAINTREE_table);
if (state.tblMAINTREE_len[0xE8] != 0)
state.intel_started = 1;
READ_LENGTHS(state.tblLENGTH_len, 0, LZX_NUM_SECONDARY_LENGTHS, lb, state, inbuf, ref inpos, ref bitsleft, ref bitbuf);
make_decode_table(LZX_LENGTH_MAXSYMBOLS, LZX_LENGTH_TABLEBITS, state.tblLENGTH_len, state.tblLENGTH_table);
break;
case LZX_BLOCKTYPE_UNCOMPRESSED:
state.intel_started = 1; /* because we can't assume otherwise */
ENSURE_BITS(16, inbuf, ref inpos, ref bitsleft, ref bitbuf); /* get up to 16 pad bits into the buffer */
/* and align the bitstream! */
if (bitsleft > 16)
inpos -= 2;
R0 = (uint)(inbuf[inpos + 0] | (inbuf[inpos + 1] << 8) | (inbuf[inpos + 2] << 16) | (inbuf[inpos + 3] << 24)); inpos += 4;
R1 = (uint)(inbuf[inpos + 0] | (inbuf[inpos + 1] << 8) | (inbuf[inpos + 2] << 16) | (inbuf[inpos + 3] << 24)); inpos += 4;
R2 = (uint)(inbuf[inpos + 0] | (inbuf[inpos + 1] << 8) | (inbuf[inpos + 2] << 16) | (inbuf[inpos + 3] << 24)); inpos += 4;
break;
default:
return false;
}
}
/* buffer exhaustion check */
if (inpos > endinp)
{
/* it's possible to have a file where the next run is less than
* 16 bits in size. In this case, the READ_HUFFSYM() macro used
* in building the tables will exhaust the buffer, so we should
* allow for this, but not allow those accidentally read bits to
* be used (so we check that there are at least 16 bits
* remaining - in this boundary case they aren't really part of
* the compressed data)
*/
if (inpos > (endinp + 2) || bitsleft < 16)
return false;
}
while ((this_run = (int)state.block_remaining) > 0 && togo > 0)
{
if (this_run > togo) this_run = togo;
togo -= this_run;
state.block_remaining -= (uint)this_run;
/* apply 2^x-1 mask */
window_posn &= window_size - 1;
/* runs can't straddle the window wraparound */
if ((window_posn + this_run) > window_size)
return false;
switch (state.block_type)
{
case LZX_BLOCKTYPE_VERBATIM:
while (this_run > 0)
{
main_element = READ_HUFFSYM(state.tblMAINTREE_table, state.tblMAINTREE_len, LZX_MAINTREE_TABLEBITS, LZX_MAINTREE_MAXSYMBOLS, inbuf, ref inpos, ref bitsleft, ref bitbuf);
if (main_element < LZX_NUM_CHARS)
{
/* literal: 0 to LZX_NUM_CHARS-1 */
state.window[window + window_posn++] = (byte)main_element;
this_run--;
}
else
{
/* match: LZX_NUM_CHARS + ((slot<<3) | length_header (3 bits)) */
main_element -= LZX_NUM_CHARS;
match_length = main_element & LZX_NUM_PRIMARY_LENGTHS;
if (match_length == LZX_NUM_PRIMARY_LENGTHS)
{
length_footer = READ_HUFFSYM(state.tblLENGTH_table, state.tblLENGTH_len, LZX_LENGTH_TABLEBITS, LZX_LENGTH_MAXSYMBOLS, inbuf, ref inpos, ref bitsleft, ref bitbuf);
match_length += length_footer;
}
match_length += LZX_MIN_MATCH;
match_offset = (uint)(main_element >> 3);
if (match_offset > 2)
{
/* not repeated offset */
if (match_offset != 3)
{
extra = state.ExtraBits[match_offset];
verbatim_bits = (int)READ_BITS(extra, inbuf, ref inpos, ref bitsleft, ref bitbuf);
match_offset = (uint)(state.PositionSlotBases[match_offset] - 2 + verbatim_bits);
}
else
{
match_offset = 1;
}
/* update repeated offset LRU queue */
R2 = R1; R1 = R0; R0 = match_offset;
}
else if (match_offset == 0)
{
match_offset = R0;
}
else if (match_offset == 1)
{
match_offset = R1;
R1 = R0; R0 = match_offset;
}
else /* match_offset == 2 */
{
match_offset = R2;
R2 = R0; R0 = match_offset;
}
rundest = (int)(window + window_posn);
this_run -= match_length;
/* copy any wrapped around source data */
if (window_posn >= match_offset)
{
/* no wrap */
runsrc = (int)(rundest - match_offset);
}
else
{
runsrc = (int)(rundest + (window_size - match_offset));
copy_length = (int)(match_offset - window_posn);
if (copy_length < match_length)
{
match_length -= copy_length;
window_posn += (uint)copy_length;
while (copy_length-- > 0)
{
state.window[rundest++] = state.window[runsrc++];
}
runsrc = window;
}
}
window_posn += (uint)match_length;
/* copy match data - no worries about destination wraps */
while (match_length-- > 0)
{
state.window[rundest++] = state.window[runsrc++];
}
}
}
break;
case LZX_BLOCKTYPE_ALIGNED:
while (this_run > 0)
{
main_element = READ_HUFFSYM(state.tblMAINTREE_table, state.tblMAINTREE_len, LZX_MAINTREE_TABLEBITS, LZX_MAINTREE_MAXSYMBOLS, inbuf, ref inpos, ref bitsleft, ref bitbuf);
if (main_element < LZX_NUM_CHARS)
{
/* literal: 0 to LZX_NUM_CHARS-1 */
state.window[window + window_posn++] = (byte)main_element;
this_run--;
}
else
{
/* match: LZX_NUM_CHARS + ((slot<<3) | length_header (3 bits)) */
main_element -= LZX_NUM_CHARS;
match_length = main_element & LZX_NUM_PRIMARY_LENGTHS;
if (match_length == LZX_NUM_PRIMARY_LENGTHS)
{
length_footer = READ_HUFFSYM(state.tblLENGTH_table, state.tblLENGTH_len, LZX_LENGTH_TABLEBITS, LZX_LENGTH_MAXSYMBOLS, inbuf, ref inpos, ref bitsleft, ref bitbuf);
match_length += length_footer;
}
match_length += LZX_MIN_MATCH;
match_offset = (uint)(main_element >> 3);
if (match_offset > 2)
{
/* not repeated offset */
extra = state.ExtraBits[match_offset];
match_offset = state.PositionSlotBases[match_offset] - 2;
if (extra > 3)
{
/* verbatim and aligned bits */
extra -= 3;
verbatim_bits = (int)READ_BITS(extra, inbuf, ref inpos, ref bitsleft, ref bitbuf);
match_offset += (uint)(verbatim_bits << 3);
aligned_bits = READ_HUFFSYM(state.tblALIGNED_table, state.tblALIGNED_len, LZX_ALIGNED_TABLEBITS, LZX_ALIGNED_MAXSYMBOLS, inbuf, ref inpos, ref bitsleft, ref bitbuf);
match_offset += (uint)aligned_bits;
}
else if (extra == 3)
{
/* aligned bits only */
aligned_bits = READ_HUFFSYM(state.tblALIGNED_table, state.tblALIGNED_len, LZX_ALIGNED_TABLEBITS, LZX_ALIGNED_MAXSYMBOLS, inbuf, ref inpos, ref bitsleft, ref bitbuf);
match_offset += (uint)aligned_bits;
}
else if (extra > 0)
{
/* extra==1, extra==2 */
/* verbatim bits only */
verbatim_bits = (int)READ_BITS(extra, inbuf, ref inpos, ref bitsleft, ref bitbuf);
match_offset += (uint)verbatim_bits;
}
else /* extra == 0 */
{
/* ??? */
match_offset = 1;
}
/* update repeated offset LRU queue */
R2 = R1; R1 = R0; R0 = match_offset;
}
else if (match_offset == 0)
{
match_offset = R0;
}
else if (match_offset == 1)
{
match_offset = R1;
R1 = R0; R0 = match_offset;
}
else /* match_offset == 2 */
{
match_offset = R2;
R2 = R0; R0 = match_offset;
}
rundest = (int)(window + window_posn);
this_run -= match_length;
/* copy any wrapped around source data */
if (window_posn >= match_offset)
{
/* no wrap */
runsrc = (int)(rundest - match_offset);
}
else
{
runsrc = (int)(rundest + (window_size - match_offset));
copy_length = (int)(match_offset - window_posn);
if (copy_length < match_length)
{
match_length -= copy_length;
window_posn += (uint)copy_length;
while (copy_length-- > 0)
{
state.window[rundest++] = state.window[runsrc++];
}
runsrc = window;
}
}
window_posn += (uint)match_length;
/* copy match data - no worries about destination wraps */
while (match_length-- > 0)
{
state.window[rundest++] = state.window[runsrc++];
}
}
}
break;
case LZX_BLOCKTYPE_UNCOMPRESSED:
if ((inpos + this_run) > endinp)
return false;
Array.Copy(inbuf, inpos, state.window, window + window_posn, this_run);
inpos += this_run;
window_posn += (uint)this_run;
break;
default:
return false; /* might as well */
}
}
}
if (togo != 0)
return false;
Array.Copy(state.window, window + ((window_posn == 0) ? window_size : window_posn) - outlen, outbuf, 0, outlen);
state.window_posn = window_posn;
state.R0 = R0;
state.R1 = R1;
state.R2 = R2;
/* intel E8 decoding */
if ((state.frames_read++ < 32768) && state.intel_filesize != 0)
{
if (outlen <= 6 || state.intel_started == 0)
{
state.intel_curpos += outlen;
}
else
{
int data = 0; // outbuf[0];
int dataend = data + outlen - 10;
int curpos = state.intel_curpos;
int filesize = state.intel_filesize;
int abs_off, rel_off;
state.intel_curpos = curpos + outlen;
while (data < dataend)
{
if (outbuf[data++] != 0xE8)
{
curpos++;
continue;
}
abs_off = outbuf[data + 0] | (outbuf[data + 1] << 8) | (outbuf[data + 2] << 16) | (outbuf[data + 3] << 24);
if ((abs_off >= -curpos) && (abs_off < filesize))
{
rel_off = (abs_off >= 0) ? abs_off - curpos : abs_off + filesize;
outbuf[data + 0] = (byte)rel_off;
outbuf[data + 1] = (byte)(rel_off >> 8);
outbuf[data + 2] = (byte)(rel_off >> 16);
outbuf[data + 3] = (byte)(rel_off >> 24);
}
data += 4;
curpos += 5;
}
}
}
return true;
}
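// E8 transform sketch (hypothetical values): if the output holds the call
// instruction E8 34 12 00 00 (absolute target 0x1234) at translation
// offset curpos = 0x100, the loop above rewrites the operand to the
// relative offset 0x1234 - 0x100 = 0x1134, undoing the encoder's
// relative-to-absolute conversion of x86 call targets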
/// <summary>
/// Read and build the Huffman tree from the lengths
/// </summary>
private static int ReadLengths(byte[] lengths, uint first, uint last, Bits lb, State state, byte[] inbuf)
{
uint x, y;
uint bitbuf = lb.BitBuffer;
int bitsleft = lb.BitsLeft;
int inpos = lb.InputPosition;
for (x = 0; x < 20; x++)
{
y = READ_BITS(4, inbuf, ref inpos, ref bitsleft, ref bitbuf);
state.tblPRETREE_len[x] = (byte)y;
}
make_decode_table(LZX_PRETREE_MAXSYMBOLS, LZX_PRETREE_TABLEBITS, state.tblPRETREE_len, state.tblPRETREE_table);
for (x = first; x < last;)
{
int z = READ_HUFFSYM(state.tblPRETREE_table, state.tblPRETREE_len, LZX_PRETREE_TABLEBITS, LZX_PRETREE_MAXSYMBOLS, inbuf, ref inpos, ref bitsleft, ref bitbuf);
if (z == 17)
{
y = READ_BITS(4, inbuf, ref inpos, ref bitsleft, ref bitbuf);
y += 4;
while (y-- > 0)
{
lengths[x++] = 0;
}
}
else if (z == 18)
{
y = READ_BITS(5, inbuf, ref inpos, ref bitsleft, ref bitbuf);
y += 20;
while (y-- > 0)
{
lengths[x++] = 0;
}
}
else if (z == 19)
{
y = READ_BITS(1, inbuf, ref inpos, ref bitsleft, ref bitbuf);
y += 4;
z = READ_HUFFSYM(state.tblPRETREE_table, state.tblPRETREE_len, LZX_PRETREE_TABLEBITS, LZX_PRETREE_MAXSYMBOLS, inbuf, ref inpos, ref bitsleft, ref bitbuf);
z = lengths[x] - z;
if (z < 0)
z += 17;
while (y-- > 0)
{
lengths[x++] = (byte)z;
}
}
else
{
z = lengths[x] - z;
if (z < 0)
z += 17;
lengths[x++] = (byte)z;
}
}
lb.BitBuffer = bitbuf;
lb.BitsLeft = bitsleft;
lb.InputPosition = inpos;
return 0;
}
// Bitstream reading macros (LZX / intel little-endian byte order)
#region Bitstream Reading Macros
/*
* These bit access routines work by using the area beyond the MSB and the
* LSB as a free source of zeroes. This avoids having to mask any bits.
* So we have to know the bit width of the bitbuffer variable.
*/
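// Example of the technique (values hypothetical): with the 32-bit buffer
// holding 0xABCD0000, the live bits sit at the top and the rest is zero
// fill, so PEEK_BITS(3) is just 0xABCD0000 >> 29 = 0b101 = 5, no masking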
/// <summary>
/// Should be used first to set up the system
/// </summary>
private static void INIT_BITSTREAM(out int bitsleft, out uint bitbuf)
{
bitsleft = 0;
bitbuf = 0;
}
/// <summary>
/// Ensures there are at least N bits in the bit buffer. It can guarantee
/// up to 17 bits (i.e. it can read in 16 new bits when there is down to
/// 1 bit in the buffer, and it can read 32 bits when there are 0 bits in
/// the buffer).
/// </summary>
/// <remarks>Quantum reads bytes in normal order; LZX is little-endian order</remarks>
private static void ENSURE_BITS(int n, byte[] inbuf, ref int inpos, ref int bitsleft, ref uint bitbuf)
{
while (bitsleft < n)
{
byte b0 = inpos + 0 < inbuf.Length ? inbuf[inpos + 0] : (byte)0;
byte b1 = inpos + 1 < inbuf.Length ? inbuf[inpos + 1] : (byte)0;
bitbuf |= (uint)(((b1 << 8) | b0) << (16 - bitsleft));
bitsleft += 16;
inpos += 2;
}
}
/// <summary>
/// Extracts (without removing) N bits from the bit buffer
/// </summary>
private static uint PEEK_BITS(int n, uint bitbuf)
{
return bitbuf >> (32 - n);
}
/// <summary>
/// Removes N bits from the bit buffer
/// </summary>
private static void REMOVE_BITS(int n, ref int bitsleft, ref uint bitbuf)
{
bitbuf <<= n;
bitsleft -= n;
}
/// <summary>
/// Takes N bits from the buffer and puts them in v.
/// </summary>
private static uint READ_BITS(int n, byte[] inbuf, ref int inpos, ref int bitsleft, ref uint bitbuf)
{
uint v = 0;
if (n > 0)
{
ENSURE_BITS(n, inbuf, ref inpos, ref bitsleft, ref bitbuf);
v = PEEK_BITS(n, bitbuf);
REMOVE_BITS(n, ref bitsleft, ref bitbuf);
}
return v;
}
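// Worked example (hypothetical input): starting empty (bitsleft = 0) with
// inbuf = { 0xCD, 0xAB, ... }, ENSURE_BITS(3) loads the little-endian
// pair as bitbuf = 0xABCD0000; READ_BITS(3) then peeks the top bits
// (0xABCD0000 >> 29 = 5) and shifts them out, leaving bitsleft = 13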
#endregion
#region Huffman Methods
/// <summary>
/// This function was coded by David Tritscher. It builds a fast huffman
/// decoding table out of just a canonical huffman code lengths table.
/// </summary>
/// <param name="nsyms">Total number of symbols in this huffman tree.</param>
/// <param name="nbits">
/// Any symbols with a code length of nbits or less can be decoded
/// in one lookup of the table.
/// </param>
/// <param name="length">A table to get code lengths from [0 to syms-1]</param>
/// <param name="table">The table to fill up with decoded symbols and pointers.</param>
/// <returns>
/// OK: 0
/// error: 1
/// </returns>
private static int make_decode_table(uint nsyms, uint nbits, byte[] length, ushort[] table)
{
ushort sym;
uint leaf;
byte bit_num = 1;
uint fill;
uint pos = 0; /* the current position in the decode table */
uint table_mask = (uint)(1 << (int)nbits);
uint bit_mask = table_mask >> 1; /* don't do 0 length codes */
uint next_symbol = bit_mask; /* base of allocation for long codes */
/* fill entries for codes short enough for a direct mapping */
while (bit_num <= nbits)
{
for (sym = 0; sym < nsyms; sym++)
{
if (length[sym] == bit_num)
{
leaf = pos;
if ((pos += bit_mask) > table_mask) return 1; /* table overrun */
/* fill all possible lookups of this symbol with the symbol itself */
fill = bit_mask;
while (fill-- > 0) table[leaf++] = sym;
}
}
bit_mask >>= 1;
bit_num++;
}
/* if there are any codes longer than nbits */
if (pos != table_mask)
{
/* clear the remainder of the table */
for (sym = (ushort)pos; sym < table_mask; sym++) table[sym] = 0;
/* give ourselves room for codes to grow by up to 16 more bits */
pos <<= 16;
table_mask <<= 16;
bit_mask = 1 << 15;
while (bit_num <= 16)
{
for (sym = 0; sym < nsyms; sym++)
{
if (length[sym] == bit_num)
{
leaf = pos >> 16;
for (fill = 0; fill < bit_num - nbits; fill++)
{
/* if this path hasn't been taken yet, 'allocate' two entries */
if (table[leaf] == 0)
{
table[(next_symbol << 1)] = 0;
table[(next_symbol << 1) + 1] = 0;
table[leaf] = (ushort)next_symbol++;
}
/* follow the path and select either left or right for next bit */
leaf = (uint)(table[leaf] << 1);
if (((pos >> (int)(15 - fill)) & 1) != 0) leaf++;
}
table[leaf] = sym;
if ((pos += bit_mask) > table_mask) return 1; /* table overflow */
}
}
bit_mask >>= 1;
bit_num++;
}
}
/* full table? */
if (pos == table_mask) return 0;
/* either erroneous table, or all elements are 0 - let's find out. */
for (sym = 0; sym < nsyms; sym++) if (length[sym] != 0) return 1;
return 0;
}
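// Worked example (hypothetical lengths): for nsyms = 4, nbits = 3 and
// canonical code lengths { 1, 2, 3, 3 }, the direct-mapped pass fills the
// eight-entry table as [0, 0, 0, 0, 1, 1, 2, 3]: symbol 0 covers the four
// slots whose top bit is 0, symbol 1 the two slots starting 10, and
// symbols 2 and 3 one slot each, so pos == table_mask and 0 is returned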
#endregion
// Huffman macros
#region Huffman Macros
/// <summary>
/// Decodes one huffman symbol from the bitstream using the stated table and
/// puts it in v.
/// </summary>
private static int READ_HUFFSYM(ushort[] hufftbl, byte[] lentable, int tablebits, int maxsymbols, byte[] inbuf, ref int inpos, ref int bitsleft, ref uint bitbuf)
{
int v = 0, i, j = 0;
ENSURE_BITS(16, inbuf, ref inpos, ref bitsleft, ref bitbuf);
if ((i = hufftbl[PEEK_BITS(tablebits, bitbuf)]) >= maxsymbols)
{
j = 1 << (32 - tablebits);
do
{
j >>= 1;
i <<= 1;
i |= (bitbuf & j) != 0 ? 1 : 0;
if (j == 0)
throw new System.Exception();
} while ((i = hufftbl[i]) >= maxsymbols);
}
j = lentable[v = i];
REMOVE_BITS(j, ref bitsleft, ref bitbuf);
return v;
}
/// <summary>
/// Reads in code lengths for symbols first to last in the given table. The
/// code lengths are stored in their own special LZX way.
/// </summary>
private static bool READ_LENGTHS(byte[] lentable, uint first, uint last, Bits lb, State state, byte[] inbuf, ref int inpos, ref int bitsleft, ref uint bitbuf)
{
lb.BitBuffer = bitbuf;
lb.BitsLeft = bitsleft;
lb.InputPosition = inpos;
if (ReadLengths(lentable, first, last, lb, state, inbuf) != 0)
return false;
bitbuf = lb.BitBuffer;
bitsleft = lb.BitsLeft;
inpos = lb.InputPosition;
return true;
}
#endregion
}
}

View File

@@ -1,119 +0,0 @@
using static BinaryObjectScanner.Models.Compression.LZX.Constants;
namespace BinaryObjectScanner.Compression.LZX
{
/// <see href="https://github.com/wine-mirror/wine/blob/master/dlls/cabinet/cabinet.h"/>
public class State
{
/// <summary>
/// the actual decoding window
/// </summary>
public byte[] window;
/// <summary>
/// window size (32Kb through 2Mb)
/// </summary>
public uint window_size;
/// <summary>
/// window size when it was first allocated
/// </summary>
public uint actual_size;
/// <summary>
/// current offset within the window
/// </summary>
public uint window_posn;
/// <summary>
/// for the LRU offset system
/// </summary>
public uint R0, R1, R2;
/// <summary>
/// number of main tree elements
/// </summary>
public ushort main_elements;
/// <summary>
/// have we started decoding at all yet?
/// </summary>
public int header_read;
/// <summary>
/// type of this block
/// </summary>
public ushort block_type;
/// <summary>
/// uncompressed length of this block
/// </summary>
public uint block_length;
/// <summary>
/// uncompressed bytes still left to decode
/// </summary>
public uint block_remaining;
/// <summary>
/// the number of CFDATA blocks processed
/// </summary>
public uint frames_read;
/// <summary>
/// magic header value used for transform
/// </summary>
public int intel_filesize;
/// <summary>
/// current offset in transform space
/// </summary>
public int intel_curpos;
/// <summary>
/// have we seen any translatable data yet?
/// </summary>
public int intel_started;
public ushort[] tblPRETREE_table = new ushort[(1 << LZX_PRETREE_TABLEBITS) + (LZX_PRETREE_MAXSYMBOLS << 1)];
public byte[] tblPRETREE_len = new byte[LZX_PRETREE_MAXSYMBOLS + LZX_LENTABLE_SAFETY];
public ushort[] tblMAINTREE_table = new ushort[(1 << LZX_MAINTREE_TABLEBITS) + (LZX_MAINTREE_MAXSYMBOLS << 1)];
public byte[] tblMAINTREE_len = new byte[LZX_MAINTREE_MAXSYMBOLS + LZX_LENTABLE_SAFETY];
public ushort[] tblLENGTH_table = new ushort[(1 << LZX_LENGTH_TABLEBITS) + (LZX_LENGTH_MAXSYMBOLS << 1)];
public byte[] tblLENGTH_len = new byte[LZX_LENGTH_MAXSYMBOLS + LZX_LENTABLE_SAFETY];
public ushort[] tblALIGNED_table = new ushort[(1 << LZX_ALIGNED_TABLEBITS) + (LZX_ALIGNED_MAXSYMBOLS << 1)];
public byte[] tblALIGNED_len = new byte[LZX_ALIGNED_MAXSYMBOLS + LZX_LENTABLE_SAFETY];
#region Decompression Tables
/// <summary>
/// An index to the position slot bases
/// </summary>
public uint[] PositionSlotBases = new uint[]
{
0, 1, 2, 3, 4, 6, 8, 12,
16, 24, 32, 48, 64, 96, 128, 192,
256, 384, 512, 768, 1024, 1536, 2048, 3072,
4096, 6144, 8192, 12288, 16384, 24576, 32768, 49152,
65536, 98304, 131072, 196608, 262144, 393216, 524288, 655360,
786432, 917504, 1048576, 1179648, 1310720, 1441792, 1572864, 1703936,
1835008, 1966080, 2097152
};
/// <summary>
/// How many bits of offset-from-base data is needed
/// </summary>
public byte[] ExtraBits = new byte[]
{
0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6,
7, 7, 8, 8, 9, 9, 10, 10, 11, 11, 12, 12, 13, 13, 14, 14,
15, 15, 16, 16, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17,
17, 17, 17
};
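// The two tables satisfy PositionSlotBases[i + 1] == PositionSlotBases[i]
// + (1 << ExtraBits[i]) across the growing slots; for example, slot 4 has
// base 4 and 1 extra bit, so slot 5 starts at 4 + 2 = 6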
#endregion
}
}

View File

@@ -1,637 +0,0 @@
using System;
using System.Runtime.InteropServices;
using BinaryObjectScanner.Models.Compression.MSZIP;
using static BinaryObjectScanner.Models.Compression.MSZIP.Constants;
namespace BinaryObjectScanner.Compression.MSZIP
{
/// <see href="https://github.com/wine-mirror/wine/blob/master/dlls/cabinet/fdi.c"/>
public unsafe class Decompressor
{
/// <summary>
/// Decompress a byte array using a given State
/// </summary>
public static bool Decompress(State state, int inlen, byte[] inbuf, int outlen, byte[] outbuf)
{
fixed (byte* inpos = inbuf)
{
state.inpos = inpos;
state.bb = state.bk = state.window_posn = 0;
if (outlen > ZIPWSIZE)
return false;
// CK = Chris Kirmse, official Microsoft purloiner
if (state.inpos[0] != 0x43 || state.inpos[1] != 0x4B)
return false;
state.inpos += 2;
int lastBlockFlag = 0;
do
{
if (InflateBlock(&lastBlockFlag, state, inbuf, outbuf) != 0)
return false;
} while (lastBlockFlag == 0);
// Return success
return true;
}
}
/// <summary>
/// Decompress a deflated block
/// </summary>
private static uint InflateBlock(int* e, State state, byte[] inbuf, byte[] outbuf)
{
// Make local bit buffer
uint b = state.bb;
uint k = state.bk;
// Read the deflate block header
var header = new DeflateBlockHeader();
// Read in last block bit
ZIPNEEDBITS(1, state, ref b, ref k);
header.BFINAL = (*e = (int)b & 1) != 0;
ZIPDUMPBITS(1, ref b, ref k);
// Read in block type
ZIPNEEDBITS(2, state, ref b, ref k);
header.BTYPE = (CompressionType)(b & 3);
ZIPDUMPBITS(2, ref b, ref k);
// Restore the global bit buffer
state.bb = b;
state.bk = k;
// Inflate that block type
switch (header.BTYPE)
{
case CompressionType.NoCompression:
return (uint)DecompressStored(state, inbuf, outbuf);
case CompressionType.FixedHuffman:
return (uint)DecompressFixed(state, inbuf, outbuf);
case CompressionType.DynamicHuffman:
return (uint)DecompressDynamic(state, inbuf, outbuf);
// Bad block type
case CompressionType.Reserved:
default:
return 2;
}
}
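// Header sketch (hypothetical stream): if the next three bits decode as
// BFINAL = 1 and BTYPE = 01 (FixedHuffman), this is the final block and
// a single DecompressFixed call handles the rest of the data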
/// <summary>
/// "Decompress" a stored block
/// </summary>
private static int DecompressStored(State state, byte[] inbuf, byte[] outbuf)
{
// Make local copies of globals
uint b = state.bb;
uint k = state.bk;
uint w = state.window_posn;
// Go to byte boundary
int n = (int)(k & 7);
ZIPDUMPBITS(n, ref b, ref k);
// Read the stored block header
var header = new NonCompressedBlockHeader();
// Get the length and its complement
ZIPNEEDBITS(16, state, ref b, ref k);
header.LEN = (ushort)(b & 0xffff);
ZIPDUMPBITS(16, ref b, ref k);
ZIPNEEDBITS(16, state, ref b, ref k);
header.NLEN = (ushort)(b & 0xffff);
if (header.LEN != (~header.NLEN & 0xffff))
return 1; // Error in compressed data
ZIPDUMPBITS(16, ref b, ref k);
// Read and output the stored data, LEN bytes in total
n = header.LEN;
while (n-- > 0)
{
ZIPNEEDBITS(8, state, ref b, ref k);
outbuf[w++] = (byte)b;
ZIPDUMPBITS(8, ref b, ref k);
}
// Restore the globals from the locals
state.window_posn = w;
state.bb = b;
state.bk = k;
return 0;
}
/// <summary>
/// Decompress a block originally compressed with fixed Huffman codes
/// </summary>
private static int DecompressFixed(State state, byte[] inbuf, byte[] outbuf)
{
// Create the block header
FixedHuffmanCompressedBlockHeader header = new FixedHuffmanCompressedBlockHeader();
// Pin the fixed Huffman code lengths directly so the tree builders read
// the intended tables (re-pointing state.ll after pinning it would leave
// the pinned pointer aimed at the stale array)
fixed (uint* literalLengths = header.LiteralLengths)
fixed (uint* distanceCodes = header.DistanceCodes)
fixed (ushort* Zipcplens = CopyLengths)
fixed (ushort* Zipcplext = LiteralExtraBits)
fixed (ushort* Zipcpdist = CopyOffsets)
fixed (ushort* Zipcpdext = DistanceExtraBits)
{
HuffmanNode* fixed_tl;
int fixed_bl = 7;
// Build the literal length tree
int i = BuildHuffmanTree(literalLengths, 288, 257, Zipcplens, Zipcplext, &fixed_tl, &fixed_bl, state);
if (i != 0)
return i;
HuffmanNode* fixed_td;
int fixed_bd = 5;
// Build the distance code tree
i = BuildHuffmanTree(distanceCodes, 30, 0, Zipcpdist, Zipcpdext, &fixed_td, &fixed_bd, state);
if (i != 0)
return i;
// Decompress until an end-of-block code
return InflateCodes(fixed_tl, fixed_td, fixed_bl, fixed_bd, state, inbuf, outbuf);
}
}
}
/// <summary>
/// Decompress a block originally compressed with dynamic Huffman codes
/// </summary>
private static int DecompressDynamic(State state, byte[] inbuf, byte[] outbuf)
{
int i; /* temporary variables */
uint j;
uint l; /* last length */
uint m; /* mask for bit lengths table */
uint n; /* number of lengths to get */
HuffmanNode* tl; /* literal/length code table */
HuffmanNode* td; /* distance code table */
int bl; /* lookup bits for tl */
int bd; /* lookup bits for td */
uint nb; /* number of bit length codes */
uint nl; /* number of literal/length codes */
uint nd; /* number of distance codes */
uint b; /* bit buffer */
uint k; /* number of bits in bit buffer */
/* make local bit buffer */
b = state.bb;
k = state.bk;
state.ll = new uint[288 + 32];
fixed (uint* ll = state.ll)
{
/* read in table lengths */
ZIPNEEDBITS(5, state, ref b, ref k);
nl = 257 + (b & 0x1f); /* number of literal/length codes */
ZIPDUMPBITS(5, ref b, ref k);
ZIPNEEDBITS(5, state, ref b, ref k);
nd = 1 + (b & 0x1f); /* number of distance codes */
ZIPDUMPBITS(5, ref b, ref k);
ZIPNEEDBITS(4, state, ref b, ref k);
nb = 4 + (b & 0xf); /* number of bit length codes */
ZIPDUMPBITS(4, ref b, ref k);
if (nl > 288 || nd > 32)
return 1; /* bad lengths */
/* read in bit-length-code lengths */
for (j = 0; j < nb; j++)
{
ZIPNEEDBITS(3, state, ref b, ref k);
state.ll[BitLengthOrder[j]] = b & 7;
ZIPDUMPBITS(3, ref b, ref k);
}
for (; j < 19; j++)
state.ll[BitLengthOrder[j]] = 0;
/* build decoding table for trees--single level, 7 bit lookup */
bl = 7;
if ((i = BuildHuffmanTree(ll, 19, 19, null, null, &tl, &bl, state)) != 0)
return i; /* incomplete code set */
/* read in literal and distance code lengths */
n = nl + nd;
m = BitMasks[bl];
i = (int)(l = 0);
while ((uint)i < n)
{
ZIPNEEDBITS(bl, state, ref b, ref k);
j = (td = tl + (b & m))->b;
ZIPDUMPBITS((int)j, ref b, ref k);
j = td->n;
if (j < 16) /* length of code in bits (0..15) */
{
state.ll[i++] = l = j; /* save last length in l */
}
else if (j == 16) /* repeat last length 3 to 6 times */
{
ZIPNEEDBITS(2, state, ref b, ref k);
j = 3 + (b & 3);
ZIPDUMPBITS(2, ref b, ref k);
if ((uint)i + j > n)
return 1;
while (j-- > 0)
{
state.ll[i++] = l;
}
}
else if (j == 17) /* 3 to 10 zero length codes */
{
ZIPNEEDBITS(3, state, ref b, ref k);
j = 3 + (b & 7);
ZIPDUMPBITS(3, ref b, ref k);
if ((uint)i + j > n)
return 1;
while (j-- > 0)
state.ll[i++] = 0;
l = 0;
}
else /* j == 18: 11 to 138 zero length codes */
{
ZIPNEEDBITS(7, state, ref b, ref k);
j = 11 + (b & 0x7f);
ZIPDUMPBITS(7, ref b, ref k);
if ((uint)i + j > n)
return 1;
while (j-- > 0)
state.ll[i++] = 0;
l = 0;
}
}
/* restore the global bit buffer */
state.bb = b;
state.bk = k;
fixed (ushort* Zipcplens = CopyLengths)
fixed (ushort* Zipcplext = LiteralExtraBits)
fixed (ushort* Zipcpdist = CopyOffsets)
fixed (ushort* Zipcpdext = DistanceExtraBits)
{
/* build the decoding tables for literal/length and distance codes */
bl = ZIPLBITS;
if ((i = BuildHuffmanTree(ll, nl, 257, Zipcplens, Zipcplext, &tl, &bl, state)) != 0)
{
return i; /* incomplete code set */
}
bd = ZIPDBITS;
BuildHuffmanTree(ll + nl, nd, 0, Zipcpdist, Zipcpdext, &td, &bd, state);
/* decompress until an end-of-block code */
if (InflateCodes(tl, td, bl, bd, state, inbuf, outbuf) != 0)
return 1;
return 0;
}
}
}
/// <summary>
/// Build a Huffman tree from a set of lengths
/// </summary>
private static int BuildHuffmanTree(uint* b, uint n, uint s, ushort* d, ushort* e, HuffmanNode** t, int* m, State state)
{
uint a; /* counter for codes of length k */
uint el; /* length of EOB code (value 256) */
uint f; /* i repeats in table every f entries */
int g; /* maximum code length */
int h; /* table level */
uint i; /* counter, current code */
uint j; /* counter */
int k; /* number of bits in current code */
int* l; /* stack of bits per table */
uint* p; /* pointer into state.c[],state.b[],state.v[] */
HuffmanNode* q; /* points to current table */
HuffmanNode r = new HuffmanNode(); /* table entry for structure assignment */
int w; /* bits before this table == (l * h) */
uint* xp; /* pointer into x */
int y; /* number of dummy codes added */
uint z; /* number of entries in current table */
fixed (int* state_lx_ptr = state.lx)
{
l = state_lx_ptr + 1;
/* Generate counts for each bit length */
el = n > 256 ? b[256] : ZIPBMAX; /* set length of EOB code, if any */
for (i = 0; i < ZIPBMAX + 1; ++i)
state.c[i] = 0;
p = b; i = n;
do
{
state.c[*p]++; p++; /* assume all entries <= ZIPBMAX */
} while (--i > 0);
if (state.c[0] == n) /* null input--all zero length codes */
{
*t = null;
*m = 0;
return 0;
}
/* Find minimum and maximum length, bound *m by those */
for (j = 1; j <= ZIPBMAX; j++)
{
if (state.c[j] > 0)
break;
}
k = (int)j; /* minimum code length */
if ((uint)*m < j)
*m = (int)j;
for (i = ZIPBMAX; i > 0; i--)
{
if (state.c[i] > 0)
break;
}
g = (int)i; /* maximum code length */
if ((uint)*m > i)
*m = (int)i;
/* Adjust last length count to fill out codes, if needed */
for (y = 1 << (int)j; j < i; j++, y <<= 1)
{
if ((y -= (int)state.c[j]) < 0)
return 2; /* bad input: more codes than bits */
}
if ((y -= (int)state.c[i]) < 0)
return 2;
state.c[i] += (uint)y;
/* Generate starting offsets into the value table for each length */
state.x[1] = j = 0;
fixed (uint* state_c_ptr = state.c)
fixed (uint* state_x_ptr = state.x)
{
p = state_c_ptr + 1;
xp = state_x_ptr + 2;
while (--i > 0)
{
/* note that i == g from above */
*xp++ = (j += *p++);
}
}
/* Make a table of values in order of bit lengths */
p = b; i = 0;
do
{
if ((j = *p++) != 0)
state.v[state.x[j]++] = i;
} while (++i < n);
/* Generate the Huffman codes and for each, make the table entries */
state.x[0] = i = 0; /* first Huffman code is zero */
fixed (uint* state_v_ptr = state.v)
{
p = state_v_ptr; /* grab values in bit order */
h = -1; /* no tables yet--level -1 */
w = l[-1] = 0; /* no bits decoded yet */
state.u[0] = default; /* just to keep compilers happy */
q = null; /* ditto */
z = 0; /* ditto */
/* go through the bit lengths (k already is bits in shortest code) */
for (; k <= g; k++)
{
a = state.c[k];
while (a-- > 0)
{
/* here i is the Huffman code of length k bits for value *p */
/* make tables up to required level */
while (k > w + l[h])
{
w += l[h++]; /* add bits already decoded */
/* compute minimum size table less than or equal to *m bits */
if ((z = (uint)(g - w)) > (uint)*m) /* upper limit */
z = (uint)*m;
if ((f = (uint)(1 << (int)(j = (uint)(k - w)))) > a + 1) /* try a k-w bit table */
{ /* too few codes for k-w bit table */
f -= a + 1; /* deduct codes from patterns left */
fixed (uint* state_c_ptr = state.c)
{
xp = state_c_ptr + k;
while (++j < z) /* try smaller tables up to z bits */
{
if ((f <<= 1) <= *++xp)
break; /* enough codes to use up j bits */
f -= *xp; /* else deduct codes from patterns */
}
}
}
if ((uint)w + j > el && (uint)w < el)
j = (uint)(el - w); /* make EOB code end at table */
z = (uint)(1 << (int)j); /* table entries for j-bit table */
l[h] = (int)j; /* set table size in stack */
/* allocate and link in new table */
q = (HuffmanNode*)Marshal.AllocHGlobal((int)((z + 1) * sizeof(HuffmanNode)));
*t = q + 1; /* link to list for HuffmanNode_free() */
*(t = &(*q).t) = null;
state.u[h] = ++q; /* table starts after link */
/* connect to last table, if there is one */
if (h > 0)
{
state.x[h] = i; /* save pattern for backing up */
r.b = (byte)l[h - 1]; /* bits to dump before this table */
r.e = (byte)(16 + j); /* bits in this table */
r.t = q; /* pointer to this table */
j = (uint)((i & ((1 << w) - 1)) >> (w - l[h - 1]));
state.u[h - 1][j] = r; /* connect to last table */
}
}
/* set up table entry in r */
r.b = (byte)(k - w);
fixed (uint* state_v_ptr_comp = state.v)
{
if (p >= state_v_ptr_comp + n)
{
r.e = 99; /* out of values--invalid code */
}
else if (*p < s)
{
r.e = (byte)(*p < 256 ? 16 : 15); /* 256 is end-of-block code */
r.n = (ushort)*p++; /* simple code is just the value */
}
else
{
r.e = (byte)e[*p - s]; /* non-simple--look up in lists */
r.n = d[*p++ - s];
}
}
/* fill code-like entries with r */
f = (uint)(1 << (k - w));
for (j = i >> w; j < z; j += f)
{
q[j] = r;
}
/* backwards increment the k-bit code i */
for (j = (uint)(1 << (k - 1)); (i & j) != 0; j >>= 1)
{
i ^= j;
}
i ^= j;
/* backup over finished tables */
while ((i & ((1 << w) - 1)) != state.x[h])
w -= l[--h]; /* don't need to update q */
}
}
}
/* return actual size of base table */
*m = l[0];
}
/* Return true (1) if we were given an incomplete table */
return y != 0 && g != 1 ? 1 : 0;
}
/// <summary>
/// Inflate codes into Huffman trees
/// </summary>
private static int InflateCodes(HuffmanNode* tl, HuffmanNode* td, int bl, int bd, State state, byte[] inbuf, byte[] outbuf)
{
uint e; /* table entry flag/number of extra bits */
uint n, d; /* length and index for copy */
uint w; /* current window position */
HuffmanNode* t; /* pointer to table entry */
uint ml, md; /* masks for bl and bd bits */
uint b; /* bit buffer */
uint k; /* number of bits in bit buffer */
/* make local copies of globals */
b = state.bb; /* initialize bit buffer */
k = state.bk;
w = state.window_posn; /* initialize window position */
/* inflate the coded data */
ml = BitMasks[bl]; /* precompute masks for speed */
md = BitMasks[bd];
for (; ; )
{
ZIPNEEDBITS(bl, state, ref b, ref k);
if ((e = (t = tl + (b & ml))->e) > 16)
{
do
{
if (e == 99)
return 1;
ZIPDUMPBITS(t->b, ref b, ref k);
e -= 16;
ZIPNEEDBITS((int)e, state, ref b, ref k);
} while ((e = (*(t = t->t + (b & BitMasks[e]))).e) > 16);
}
ZIPDUMPBITS(t->b, ref b, ref k);
if (e == 16) /* then it's a literal */
{
outbuf[w++] = (byte)t->n;
}
else /* it's an EOB or a length */
{
/* exit if end of block */
if (e == 15)
break;
/* get length of block to copy */
ZIPNEEDBITS((int)e, state, ref b, ref k);
n = t->n + (b & BitMasks[e]);
ZIPDUMPBITS((int)e, ref b, ref k);
/* decode distance of block to copy */
ZIPNEEDBITS(bd, state, ref b, ref k);
if ((e = (*(t = td + (b & md))).e) > 16)
do
{
if (e == 99)
return 1;
ZIPDUMPBITS(t->b, ref b, ref k);
e -= 16;
ZIPNEEDBITS((int)e, state, ref b, ref k);
} while ((e = (*(t = t->t + (b & BitMasks[e]))).e) > 16);
ZIPDUMPBITS(t->b, ref b, ref k);
ZIPNEEDBITS((int)e, state, ref b, ref k);
d = w - t->n - (b & BitMasks[e]);
ZIPDUMPBITS((int)e, ref b, ref k);
do
{
d &= ZIPWSIZE - 1;
e = ZIPWSIZE - Math.Max(d, w);
e = Math.Min(e, n);
n -= e;
do
{
outbuf[w++] = outbuf[d++];
} while (--e > 0);
} while (n > 0);
}
}
/* restore the globals from the locals */
state.window_posn = w; /* restore global window pointer */
state.bb = b; /* restore global bit buffer */
state.bk = k;
/* done */
return 0;
}
#region Macros
private static void ZIPNEEDBITS(int n, State state, ref uint bitBuffer, ref uint bitCount)
{
while (bitCount < n)
{
int c = *state.inpos++;
bitBuffer |= (uint)(c << (int)bitCount);
bitCount += 8;
}
}
private static void ZIPDUMPBITS(int n, ref uint bitBuffer, ref uint bitCount)
{
bitBuffer >>= n;
bitCount -= (uint)n;
}
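// Unlike the MSB-aligned LZX helpers, these keep the live bits at the
// bottom of the buffer. Worked example (hypothetical byte): with an empty
// buffer and next input byte 0xA5, ZIPNEEDBITS(3) sets bitBuffer = 0xA5
// and bitCount = 8; the caller reads (bitBuffer & 7) = 5 and then
// ZIPDUMPBITS(3) shifts it out, leaving 0x14 with 5 bits remaining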
#endregion
}
}

View File

@@ -1,29 +0,0 @@
namespace BinaryObjectScanner.Compression.MSZIP
{
public unsafe struct HuffmanNode
{
/// <summary>
/// Number of extra bits or operation
/// </summary>
public byte e;
/// <summary>
/// Number of bits in this code or subcode
/// </summary>
public byte b;
#region v
/// <summary>
/// Literal, length base, or distance base
/// </summary>
public ushort n;
/// <summary>
/// Pointer to next level of table
/// </summary>
public HuffmanNode* t;
#endregion
}
}

View File

@@ -1,56 +0,0 @@
using static BinaryObjectScanner.Models.Compression.MSZIP.Constants;
namespace BinaryObjectScanner.Compression.MSZIP
{
/// <see href="https://github.com/wine-mirror/wine/blob/master/dlls/cabinet/cabinet.h"/>
public unsafe class State
{
/// <summary>
/// Current offset within the window
/// </summary>
public uint window_posn;
/// <summary>
/// Bit buffer
/// </summary>
public uint bb;
/// <summary>
/// Bits in bit buffer
/// </summary>
public uint bk;
/// <summary>
/// Literal/length and distance code lengths
/// </summary>
public uint[] ll = new uint[288 + 32];
/// <summary>
/// Bit length count table
/// </summary>
public uint[] c = new uint[ZIPBMAX + 1];
/// <summary>
/// Memory for l[-1..ZIPBMAX-1]
/// </summary>
public int[] lx = new int[ZIPBMAX + 1];
/// <summary>
/// Table stack
/// </summary>
public HuffmanNode*[] u = new HuffmanNode*[ZIPBMAX];
/// <summary>
/// Values in order of bit length
/// </summary>
public uint[] v = new uint[ZIPN_MAX];
/// <summary>
/// Bit offsets, then code stack
/// </summary>
public uint[] x = new uint[ZIPBMAX + 1];
/// <remarks>byte*</remarks>
public byte* inpos;
}
}

View File

@@ -1,499 +0,0 @@
using System;
using System.Linq;
using BinaryObjectScanner.Models.Compression.Quantum;
using BinaryObjectScanner.Models.MicrosoftCabinet;
namespace BinaryObjectScanner.Compression.Quantum
{
/// <see href="https://github.com/wine-mirror/wine/blob/master/dlls/cabinet/cabinet.h"/>
/// <see href="https://github.com/wine-mirror/wine/blob/master/dlls/cabinet/fdi.c"/>
/// <see href="https://github.com/wine-mirror/wine/blob/master/include/fdi.h"/>
/// <see href="http://www.russotto.net/quantumcomp.html"/>
public static class Decompressor
{
/// <summary>
/// Decompress a byte array using a given State
/// </summary>
public static int Decompress(State state, int inlen, byte[] inbuf, int outlen, byte[] outbuf)
{
int inpos = 0, outpos = 0; // inbuf[0], outbuf[0]
int window = 0; // state.Window[0]
int runsrc, rundest;
uint windowPosition = state.WindowPosition;
uint windowSize = state.WindowSize;
int extra, togo = outlen, matchLength = 0, copyLength;
byte selector, sym;
uint matchOffset = 0;
// Make local copies of state variables
uint bitBuffer = state.BitBuffer;
int bitsLeft = state.BitsLeft;
ushort H = 0xFFFF, L = 0;
// Read initial value of C
ushort C = (ushort)Q_READ_BITS(16, inbuf, ref inpos, ref bitsLeft, ref bitBuffer);
// Apply 2^x-1 mask
windowPosition &= windowSize - 1;
while (togo > 0)
{
selector = (byte)GET_SYMBOL(state.SelectorModel, ref H, ref L, ref C, inbuf, ref inpos, ref bitsLeft, ref bitBuffer);
switch (selector)
{
// Selector 0 = literal model, 64 entries, 0x00-0x3F
case 0:
sym = (byte)GET_SYMBOL(state.Model0, ref H, ref L, ref C, inbuf, ref inpos, ref bitsLeft, ref bitBuffer);
state.Window[window + windowPosition++] = sym;
togo--;
break;
// Selector 1 = literal model, 64 entries, 0x40-0x7F
case 1:
sym = (byte)GET_SYMBOL(state.Model1, ref H, ref L, ref C, inbuf, ref inpos, ref bitsLeft, ref bitBuffer);
state.Window[window + windowPosition++] = sym;
togo--;
break;
// Selector 2 = literal model, 64 entries, 0x80-0xBF
case 2:
sym = (byte)GET_SYMBOL(state.Model2, ref H, ref L, ref C, inbuf, ref inpos, ref bitsLeft, ref bitBuffer);
state.Window[window + windowPosition++] = sym;
togo--;
break;
// Selector 3 = literal model, 64 entries, 0xC0-0xFF
case 3:
sym = (byte)GET_SYMBOL(state.Model3, ref H, ref L, ref C, inbuf, ref inpos, ref bitsLeft, ref bitBuffer);
state.Window[window + windowPosition++] = sym;
togo--;
break;
// Selector 4 = fixed length of 3
case 4:
sym = (byte)GET_SYMBOL(state.Model4, ref H, ref L, ref C, inbuf, ref inpos, ref bitsLeft, ref bitBuffer);
extra = (int)Q_READ_BITS(state.ExtraBits[sym], inbuf, ref inpos, ref bitsLeft, ref bitBuffer);
matchOffset = (uint)(state.PositionSlotBases[sym] + extra + 1);
matchLength = 3;
break;
// Selector 5 = fixed length of 4
case 5:
sym = (byte)GET_SYMBOL(state.Model5, ref H, ref L, ref C, inbuf, ref inpos, ref bitsLeft, ref bitBuffer);
extra = (int)Q_READ_BITS(state.ExtraBits[sym], inbuf, ref inpos, ref bitsLeft, ref bitBuffer);
matchOffset = (uint)(state.PositionSlotBases[sym] + extra + 1);
matchLength = 4;
break;
// Selector 6 = variable length
case 6:
sym = (byte)GET_SYMBOL(state.Model6Length, ref H, ref L, ref C, inbuf, ref inpos, ref bitsLeft, ref bitBuffer);
extra = (int)Q_READ_BITS(state.LengthExtraBits[sym], inbuf, ref inpos, ref bitsLeft, ref bitBuffer);
matchLength = state.LengthBases[sym] + extra + 5;
sym = (byte)GET_SYMBOL(state.Model6Position, ref H, ref L, ref C, inbuf, ref inpos, ref bitsLeft, ref bitBuffer);
extra = (int)Q_READ_BITS(state.ExtraBits[sym], inbuf, ref inpos, ref bitsLeft, ref bitBuffer);
matchOffset = (uint)(state.PositionSlotBases[sym] + extra + 1);
break;
default:
return inpos;
}
// If this is a match
if (selector >= 4)
{
rundest = (int)(window + windowPosition);
togo -= matchLength;
// Copy any wrapped around source data
if (windowPosition >= matchOffset)
{
// No wrap
runsrc = (int)(rundest - matchOffset);
}
else
{
runsrc = (int)(rundest + (windowSize - matchOffset));
copyLength = (int)(matchOffset - windowPosition);
if (copyLength < matchLength)
{
matchLength -= copyLength;
windowPosition += (uint)copyLength;
while (copyLength-- > 0)
{
state.Window[rundest++] = state.Window[runsrc++];
}
runsrc = window;
}
}
windowPosition += (uint)matchLength;
// Copy match data - no worries about destination wraps
while (matchLength-- > 0)
{
state.Window[rundest++] = state.Window[runsrc++];
// Handle wraparounds that aren't supposed to happen
if (rundest >= state.Window.Length)
rundest = 0;
if (runsrc >= state.Window.Length)
runsrc = 0;
}
}
// If we hit the end of the window, copy to the output and wrap
if (windowPosition >= state.Window.Length)
{
Array.Copy(state.Window, 0, outbuf, outpos, Math.Min(windowSize, outlen));
outpos += (int)Math.Min(windowSize, outlen);
outlen -= (int)Math.Min(windowSize, outlen);
windowPosition = 0;
}
}
if (togo > 0)
return inpos;
if (outlen > 0)
{
int sourceIndex = (int)((windowPosition == 0 ? windowSize : windowPosition) - outlen);
Array.Copy(state.Window, sourceIndex, outbuf, outpos, outlen);
}
// Cache the decompression state variables
state.BitBuffer = bitBuffer;
state.BitsLeft = bitsLeft;
state.WindowPosition = windowPosition;
return inpos;
}
/// <summary>
/// Initialize a Quantum decompressor state
/// </summary>
public static bool InitState(State state, CFFOLDER folder)
{
int window = ((ushort)folder.CompressionType >> 8) & 0x1f;
int level = ((ushort)folder.CompressionType >> 4) & 0xF;
return InitState(state, window, level);
}
/// <summary>
/// Initialize a Quantum decompressor state
/// </summary>
public static bool InitState(State state, int window, int level)
{
uint windowSize = (uint)(1 << window);
int maxSize = window * 2;
// QTM supports window sizes of 2^10 (1Kb) through 2^21 (2Mb)
// If a previously allocated window is big enough, keep it
if (window < 10 || window > 21)
return false;
// If we don't have the proper window size
if (state.ActualSize < windowSize)
state.Window = null;
// If we have no window
if (state.Window == null)
{
state.Window = new byte[windowSize];
state.ActualSize = windowSize;
}
// Set the window size and position
state.WindowSize = windowSize;
state.WindowPosition = 0;
// Initialize arithmetic coding models
state.SelectorModel = CreateModel(state.SelectorModelSymbols, 7, 0);
state.Model0 = CreateModel(state.Model0Symbols, 0x40, 0x00);
state.Model1 = CreateModel(state.Model1Symbols, 0x40, 0x40);
state.Model2 = CreateModel(state.Model2Symbols, 0x40, 0x80);
state.Model3 = CreateModel(state.Model3Symbols, 0x40, 0xC0);
// Model 4 depends on table size, ranges from 20 to 24
state.Model4 = CreateModel(state.Model4Symbols, (maxSize < 24) ? maxSize : 24, 0);
// Model 5 depends on table size, ranges from 20 to 36
state.Model5 = CreateModel(state.Model5Symbols, (maxSize < 36) ? maxSize : 36, 0);
// Model 6 Position depends on table size, ranges from 20 to 42
state.Model6Position = CreateModel(state.Model6PositionSymbols, (maxSize < 42) ? maxSize : 42, 0);
state.Model6Length = CreateModel(state.Model6LengthSymbols, 27, 0);
return true;
}
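// Usage sketch (hypothetical caller): InitState(state, 16, 1) prepares a
// 64 KiB window plus the selector, literal, and match models; Decompress
// can then be called once per frame with the same State carried across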
/// <summary>
/// Initialize a Quantum model that decodes symbols from s to (s + n - 1)
/// </summary>
private static Model CreateModel(ModelSymbol[] symbols, int entryCount, int initialSymbol)
{
// Set the basic values
Model model = new Model
{
TimeToReorder = 4,
Entries = entryCount,
Symbols = symbols,
};
// Clear out the look-up table
model.LookupTable = Enumerable.Repeat<ushort>(0xFFFF, model.LookupTable.Length).ToArray();
// Loop through and build the look-up table
for (ushort i = 0; i < entryCount; i++)
{
// Set up a look-up entry for symbol
model.LookupTable[i + initialSymbol] = i;
// Create the symbol in the table
model.Symbols[i] = new ModelSymbol
{
Symbol = (ushort)(i + initialSymbol),
CumulativeFrequency = (ushort)(entryCount - i),
};
}
// Set the last symbol frequency to 0
model.Symbols[entryCount] = new ModelSymbol { CumulativeFrequency = 0 };
return model;
}
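// Initialization sketch (hypothetical model): CreateModel(symbols, 7, 0)
// for the selector model yields Symbols[0..6] with cumulative frequencies
// 7 down to 1 and a terminating Symbols[7] of frequency 0, i.e. a flat
// starting distribution that UpdateModel then adapts per decoded symbol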
/// <summary>
/// Update the Quantum model for a particular symbol
/// </summary>
private static void UpdateModel(Model model, int symbol)
{
// Update the cumulative frequency for all symbols less than the provided
for (int i = 0; i < symbol; i++)
{
model.Symbols[i].CumulativeFrequency += 8;
}
// If the first symbol still has a cumulative frequency under 3800
if (model.Symbols[0].CumulativeFrequency <= 3800)
return;
// If we have more than 1 shift left in the model
if (--model.TimeToReorder != 0)
{
// Loop through the entries from highest to lowest,
// performing the shift on the cumulative frequencies
for (int i = model.Entries - 1; i >= 0; i--)
{
// -1, not -2; the 0 entry saves this
model.Symbols[i].CumulativeFrequency >>= 1;
if (model.Symbols[i].CumulativeFrequency <= model.Symbols[i + 1].CumulativeFrequency)
model.Symbols[i].CumulativeFrequency = (ushort)(model.Symbols[i + 1].CumulativeFrequency + 1);
}
}
// If we have no shifts left in the model
else
{
// Reset the shifts left value to 50
model.TimeToReorder = 50;
// Loop through the entries setting the cumulative frequencies
for (int i = 0; i < model.Entries; i++)
{
// No -1, want to include the 0 entry
// This converts cumfreqs into frequencies, then shifts right
model.Symbols[i].CumulativeFrequency -= model.Symbols[i + 1].CumulativeFrequency;
model.Symbols[i].CumulativeFrequency++; // Avoid losing things entirely
model.Symbols[i].CumulativeFrequency >>= 1;
}
// Now sort by frequencies, decreasing order -- this must be an
// inplace selection sort, or a sort with the same (in)stability
// characteristics
for (int i = 0; i < model.Entries - 1; i++)
{
for (int j = i + 1; j < model.Entries; j++)
{
if (model.Symbols[i].CumulativeFrequency < model.Symbols[j].CumulativeFrequency)
{
var temp = model.Symbols[i];
model.Symbols[i] = model.Symbols[j];
model.Symbols[j] = temp;
}
}
}
// Then convert frequencies back to cumfreq
for (int i = model.Entries - 1; i >= 0; i--)
{
model.Symbols[i].CumulativeFrequency += model.Symbols[i + 1].CumulativeFrequency;
}
// Then update the other part of the table
for (ushort i = 0; i < model.Entries; i++)
{
model.LookupTable[model.Symbols[i].Symbol] = i;
}
}
}
// Bitstream reading macros (Quantum / normal byte order)
#region Macros
/*
* These bit access routines work by using the area beyond the MSB and the
* LSB as a free source of zeroes. This avoids having to mask any bits.
* So we have to know the bit width of the bitbuffer variable. This is
* defined as Uint_BITS.
*
* Uint_BITS should be at least 16 bits. Unlike LZX's Huffman decoding,
* Quantum's arithmetic decoding only needs 1 bit at a time, it doesn't
* need an assured number. Retrieving larger bitstrings can be done with
* multiple reads and fills of the bitbuffer. The code should work fine
* for machines where Uint >= 32 bits.
*
* Also note that Quantum reads bytes in normal order; LZX is in
* little-endian order.
*/
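// Byte-order example (hypothetical input): for bytes { 0xAB, 0xCD },
// Q_FILL_BUFFER loads bitbuf = 0xABCD0000 (normal order), whereas the
// LZX ENSURE_BITS above would load the same pair as 0xCDAB0000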
/// <summary>
/// Should be used first to set up the system
/// </summary>
private static void Q_INIT_BITSTREAM(out int bitsleft, out uint bitbuf)
{
bitsleft = 0;
bitbuf = 0;
}
/// <summary>
/// Adds more data to the bit buffer, if there is room for another 16 bits.
/// </summary>
private static void Q_FILL_BUFFER(byte[] inbuf, ref int inpos, ref int bitsleft, ref uint bitbuf)
{
if (bitsleft > 8)
return;
byte b0 = inpos + 0 < inbuf.Length ? inbuf[inpos + 0] : (byte)0;
byte b1 = inpos + 1 < inbuf.Length ? inbuf[inpos + 1] : (byte)0;
bitbuf |= (uint)(((b0 << 8) | b1) << (16 - bitsleft));
bitsleft += 16;
inpos += 2;
}
/// <summary>
/// Extracts (without removing) N bits from the bit buffer
/// </summary>
private static uint Q_PEEK_BITS(int n, uint bitbuf)
{
return bitbuf >> (32 - n);
}
/// <summary>
/// Removes N bits from the bit buffer
/// </summary>
private static void Q_REMOVE_BITS(int n, ref int bitsleft, ref uint bitbuf)
{
bitbuf <<= n;
bitsleft -= n;
}
/// <summary>
/// Takes N bits from the buffer and puts them in v. Unlike LZX, this can loop
/// several times to get the requisite number of bits.
/// </summary>
private static uint Q_READ_BITS(int n, byte[] inbuf, ref int inpos, ref int bitsleft, ref uint bitbuf)
{
uint v = 0; int bitrun;
for (int bitsneed = n; bitsneed != 0; bitsneed -= bitrun)
{
Q_FILL_BUFFER(inbuf, ref inpos, ref bitsleft, ref bitbuf);
bitrun = (bitsneed > bitsleft) ? bitsleft : bitsneed;
v = (v << bitrun) | Q_PEEK_BITS(bitrun, bitbuf);
Q_REMOVE_BITS(bitrun, ref bitsleft, ref bitbuf);
}
return v;
}
/// <summary>
/// Fetches the next symbol from the stated model and puts it in symbol.
/// It may need to read the bitstream to do this.
/// </summary>
private static ushort GET_SYMBOL(Model model, ref ushort H, ref ushort L, ref ushort C, byte[] inbuf, ref int inpos, ref int bitsleft, ref uint bitbuf)
{
ushort symf = GetFrequency(model.Symbols[0].CumulativeFrequency, H, L, C);
int i;
for (i = 1; i < model.Entries; i++)
{
if (model.Symbols[i].CumulativeFrequency <= symf)
break;
}
ushort symbol = model.Symbols[i - 1].Symbol;
GetCode(model.Symbols[i - 1].CumulativeFrequency,
model.Symbols[i].CumulativeFrequency,
model.Symbols[0].CumulativeFrequency,
ref H, ref L, ref C,
inbuf, ref inpos, ref bitsleft, ref bitbuf);
UpdateModel(model, i);
return symbol;
}
/// <summary>
/// Get the frequency for a given range and total frequency
/// </summary>
private static ushort GetFrequency(ushort totalFrequency, ushort H, ushort L, ushort C)
{
uint range = (uint)(((H - L) & 0xFFFF) + 1);
uint freq = (uint)(((C - L + 1) * totalFrequency - 1) / range);
return (ushort)(freq & 0xFFFF);
}
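// Worked example (hypothetical coder state): with H = 0xFFFF, L = 0,
// C = 0x8000 and totalFrequency = 100, range = 0x10000 and the helper
// returns ((0x8001 * 100) - 1) / 0x10000 = 50, i.e. the frequency band
// covering the midpoint of the current interval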
/// <summary>
/// The decoder renormalization loop
/// </summary>
private static void GetCode(int previousFrequency,
int cumulativeFrequency,
int totalFrequency,
ref ushort H,
ref ushort L,
ref ushort C,
byte[] inbuf,
ref int inpos,
ref int bitsleft,
ref uint bitbuf)
{
uint range = (uint)((H - L) + 1);
H = (ushort)(L + ((previousFrequency * range) / totalFrequency) - 1);
L = (ushort)(L + (cumulativeFrequency * range) / totalFrequency);
while (true)
{
if ((L & 0x8000) != (H & 0x8000))
{
if ((L & 0x4000) == 0 || (H & 0x4000) != 0)
break;
// Underflow case
C ^= 0x4000;
L &= 0x3FFF;
H |= 0x4000;
}
L <<= 1;
H = (ushort)((H << 1) | 1);
C = (ushort)((C << 1) | Q_READ_BITS(1, inbuf, ref inpos, ref bitsleft, ref bitbuf));
}
}
#endregion
}
}

View File

@@ -1,193 +0,0 @@
using BinaryObjectScanner.Models.Compression.Quantum;
namespace BinaryObjectScanner.Compression.Quantum
{
/// <see href="https://github.com/kyz/libmspack/blob/master/libmspack/mspack/qtmd.c"/>
/// <see href="https://github.com/wine-mirror/wine/blob/master/dlls/cabinet/cabinet.h"/>
public class State
{
/// <summary>
/// The actual decoding window
/// </summary>
public byte[] Window;
/// <summary>
/// Window size (1Kb through 2Mb)
/// </summary>
public uint WindowSize;
/// <summary>
/// Window size when it was first allocated
/// </summary>
public uint ActualSize;
/// <summary>
/// Current offset within the window
/// </summary>
public uint WindowPosition;
#region Models
/// <summary>
/// Symbol table for selector model
/// </summary>
public ModelSymbol[] SelectorModelSymbols = new ModelSymbol[7 + 1];
/// <summary>
/// Model for selector values
/// </summary>
public Model SelectorModel;
/// <summary>
/// Model for Selector 0
/// </summary>
public Model Model0;
/// <summary>
/// Model for Selector 1
/// </summary>
public Model Model1;
/// <summary>
/// Model for Selector 2
/// </summary>
public Model Model2;
/// <summary>
/// Model for Selector 3
/// </summary>
public Model Model3;
/// <summary>
/// Model for Selector 4
/// </summary>
public Model Model4;
/// <summary>
/// Model for Selector 5
/// </summary>
public Model Model5;
/// <summary>
/// Model for Selector 6 Position
/// </summary>
public Model Model6Position;
/// <summary>
/// Model for Selector 6 Length
/// </summary>
public Model Model6Length;
#endregion
#region Symbol Tables
/// <summary>
/// Symbol table for Selector 0
/// </summary>
public ModelSymbol[] Model0Symbols = new ModelSymbol[0x40 + 1];
/// <summary>
/// Symbol table for Selector 1
/// </summary>
public ModelSymbol[] Model1Symbols = new ModelSymbol[0x40 + 1];
/// <summary>
/// Symbol table for Selector 2
/// </summary>
public ModelSymbol[] Model2Symbols = new ModelSymbol[0x40 + 1];
/// <summary>
/// Symbol table for Selector 3
/// </summary>
public ModelSymbol[] Model3Symbols = new ModelSymbol[0x40 + 1];
/// <summary>
/// Symbol table for Selector 4
/// </summary>
public ModelSymbol[] Model4Symbols = new ModelSymbol[0x18 + 1];
/// <summary>
/// Symbol table for Selector 5
/// </summary>
public ModelSymbol[] Model5Symbols = new ModelSymbol[0x24 + 1];
/// <summary>
/// Symbol table for Selector 6 Position
/// </summary>
public ModelSymbol[] Model6PositionSymbols = new ModelSymbol[0x2a + 1];
/// <summary>
/// Symbol table for Selector 6 Length
/// </summary>
public ModelSymbol[] Model6LengthSymbols = new ModelSymbol[0x1b + 1];
#endregion
#region Decompression Tables
/// <summary>
/// An index to the position slot bases
/// </summary>
public uint[] PositionSlotBases = new uint[42]
{
0x00000, 0x00001, 0x00002, 0x00003, 0x00004, 0x00006, 0x00008, 0x0000c,
0x00010, 0x00018, 0x00020, 0x00030, 0x00040, 0x00060, 0x00080, 0x000c0,
0x00100, 0x00180, 0x00200, 0x00300, 0x00400, 0x00600, 0x00800, 0x00c00,
0x01000, 0x01800, 0x02000, 0x03000, 0x04000, 0x06000, 0x08000, 0x0c000,
0x10000, 0x18000, 0x20000, 0x30000, 0x40000, 0x60000, 0x80000, 0xc0000,
0x100000, 0x180000
};
/// <summary>
/// How many bits of offset-from-base data is needed
/// </summary>
public byte[] ExtraBits = new byte[42]
{
0, 0, 0, 0, 1, 1, 2, 2,
3, 3, 4, 4, 5, 5, 6, 6,
7, 7, 8, 8, 9, 9, 10, 10,
11, 11, 12, 12, 13, 13, 14, 14,
15, 15, 16, 16, 17, 17, 18, 18,
19, 19
};
/// <summary>
/// An index to the match length bases [Selector 6]
/// </summary>
public byte[] LengthBases = new byte[27]
{
0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x08,
0x0a, 0x0c, 0x0e, 0x12, 0x16, 0x1a, 0x1e, 0x26,
0x2e, 0x36, 0x3e, 0x4e, 0x5e, 0x6e, 0x7e, 0x9e,
0xbe, 0xde, 0xfe
};
/// <summary>
/// How many bits of length-from-base data are needed [Selector 6]
/// </summary>
public byte[] LengthExtraBits = new byte[27]
{
0, 0, 0, 0, 0, 0, 1, 1,
1, 1, 2, 2, 2, 2, 3, 3,
3, 3, 4, 4, 4, 4, 5, 5,
5, 5, 0
};
#endregion
#region Decompression State
/// <summary>
/// Bit buffer to persist between runs
/// </summary>
public uint BitBuffer = 0;
/// <summary>
/// Bits remaining to persist between runs
/// </summary>
public int BitsLeft = 0;
#endregion
}
}
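
For orientation, here is a minimal sketch (not part of the original file) of how a Quantum decoder typically consumes the two decompression tables above: the decoded position slot indexes PositionSlotBases for a base offset, and ExtraBits gives the number of raw bits to read and add on top, the same base-plus-extra-bits scheme libmspack's qtmd.c uses. The readExtraBits delegate is a hypothetical stand-in for the Q_READ_BITS-style reader shown earlier, and the trailing +1 reflects qtmd.c's offset adjustment, assumed here rather than copied from this port.

using System;

// Illustrative only: reconstruct a match offset from a position slot
static uint DecodeMatchOffset(State state, int positionSlot, Func<int, uint> readExtraBits)
{
    // Base offset for the slot, plus however many extra bits the slot needs
    uint offset = state.PositionSlotBases[positionSlot];
    int extra = state.ExtraBits[positionSlot];
    if (extra > 0)
        offset += readExtraBits(extra);

    // Assumed qtmd.c-style adjustment to get the real match offset
    return offset + 1;
}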

@@ -1,304 +0,0 @@
namespace BinaryObjectScanner.Compression.bzip2
{
/// <see href="https://github.com/ladislav-zezula/StormLib/blob/master/src/bzip2/bzlib.h"/>
/// <see href="https://github.com/ladislav-zezula/StormLib/blob/master/src/bzip2/bzlib_private.h"/>
/// <see href="https://github.com/ladislav-zezula/StormLib/blob/master/src/bzip2/blocksort.c"/>
/// <see href="https://github.com/ladislav-zezula/StormLib/blob/master/src/bzip2/crctable.c"/>
/// <see href="https://github.com/ladislav-zezula/StormLib/blob/master/src/bzip2/randtable.c"/>
public static class Constants
{
#region bzlib.h
public const int BZ_RUN = 0;
public const int BZ_FLUSH = 1;
public const int BZ_FINISH = 2;
public const int BZ_OK = 0;
public const int BZ_RUN_OK = 1;
public const int BZ_FLUSH_OK = 2;
public const int BZ_FINISH_OK = 3;
public const int BZ_STREAM_END = 4;
public const int BZ_SEQUENCE_ERROR = (-1);
public const int BZ_PARAM_ERROR = (-2);
public const int BZ_MEM_ERROR = (-3);
public const int BZ_DATA_ERROR = (-4);
public const int BZ_DATA_ERROR_MAGIC = (-5);
public const int BZ_IO_ERROR = (-6);
public const int BZ_UNEXPECTED_EOF = (-7);
public const int BZ_OUTBUFF_FULL = (-8);
public const int BZ_CONFIG_ERROR = (-9);
public const int BZ_MAX_UNUSED = 5000;
#endregion
#region bzlib_private.h
internal const string BZ_VERSION = "1.0.5, 10-Dec-2007";
/*-- Header bytes. --*/
internal const byte BZ_HDR_B = 0x42; /* 'B' */
internal const byte BZ_HDR_Z = 0x5a; /* 'Z' */
internal const byte BZ_HDR_h = 0x68; /* 'h' */
internal const byte BZ_HDR_0 = 0x30; /* '0' */
/*-- Constants for the back end. --*/
internal const int BZ_MAX_ALPHA_SIZE = 258;
internal const int BZ_MAX_CODE_LEN = 23;
internal const int BZ_RUNA = 0;
internal const int BZ_RUNB = 1;
internal const int BZ_N_GROUPS = 6;
internal const int BZ_G_SIZE = 50;
internal const int BZ_N_ITERS = 4;
internal const int BZ_MAX_SELECTORS = (2 + (900000 / BZ_G_SIZE));
/*-- States and modes for compression. --*/
internal const int BZ_M_IDLE = 1;
internal const int BZ_M_RUNNING = 2;
internal const int BZ_M_FLUSHING = 3;
internal const int BZ_M_FINISHING = 4;
internal const int BZ_S_OUTPUT = 1;
internal const int BZ_S_INPUT = 2;
internal const int BZ_N_RADIX = 2;
internal const int BZ_N_QSORT = 12;
internal const int BZ_N_SHELL = 18;
internal const int BZ_N_OVERSHOOT = (BZ_N_RADIX + BZ_N_QSORT + BZ_N_SHELL + 2);
/*-- states for decompression. --*/
internal const int BZ_X_IDLE = 1;
internal const int BZ_X_OUTPUT = 2;
internal const int BZ_X_MAGIC_1 = 10;
internal const int BZ_X_MAGIC_2 = 11;
internal const int BZ_X_MAGIC_3 = 12;
internal const int BZ_X_MAGIC_4 = 13;
internal const int BZ_X_BLKHDR_1 = 14;
internal const int BZ_X_BLKHDR_2 = 15;
internal const int BZ_X_BLKHDR_3 = 16;
internal const int BZ_X_BLKHDR_4 = 17;
internal const int BZ_X_BLKHDR_5 = 18;
internal const int BZ_X_BLKHDR_6 = 19;
internal const int BZ_X_BCRC_1 = 20;
internal const int BZ_X_BCRC_2 = 21;
internal const int BZ_X_BCRC_3 = 22;
internal const int BZ_X_BCRC_4 = 23;
internal const int BZ_X_RANDBIT = 24;
internal const int BZ_X_ORIGPTR_1 = 25;
internal const int BZ_X_ORIGPTR_2 = 26;
internal const int BZ_X_ORIGPTR_3 = 27;
internal const int BZ_X_MAPPING_1 = 28;
internal const int BZ_X_MAPPING_2 = 29;
internal const int BZ_X_SELECTOR_1 = 30;
internal const int BZ_X_SELECTOR_2 = 31;
internal const int BZ_X_SELECTOR_3 = 32;
internal const int BZ_X_CODING_1 = 33;
internal const int BZ_X_CODING_2 = 34;
internal const int BZ_X_CODING_3 = 35;
internal const int BZ_X_MTF_1 = 36;
internal const int BZ_X_MTF_2 = 37;
internal const int BZ_X_MTF_3 = 38;
internal const int BZ_X_MTF_4 = 39;
internal const int BZ_X_MTF_5 = 40;
internal const int BZ_X_MTF_6 = 41;
internal const int BZ_X_ENDHDR_2 = 42;
internal const int BZ_X_ENDHDR_3 = 43;
internal const int BZ_X_ENDHDR_4 = 44;
internal const int BZ_X_ENDHDR_5 = 45;
internal const int BZ_X_ENDHDR_6 = 46;
internal const int BZ_X_CCRC_1 = 47;
internal const int BZ_X_CCRC_2 = 48;
internal const int BZ_X_CCRC_3 = 49;
internal const int BZ_X_CCRC_4 = 50;
/*-- Constants for the fast MTF decoder. --*/
internal const int MTFA_SIZE = 4096;
internal const int MTFL_SIZE = 16;
#endregion
#region blocksort.c
internal const int FALLBACK_QSORT_SMALL_THRESH = 10;
internal const int FALLBACK_QSORT_STACK_SIZE = 100;
/*--
Knuth's increments seem to work better
than Incerpi-Sedgewick here. Possibly
because the number of elems to sort is
usually small, typically <= 20.
--*/
internal static readonly int[] incs = new int[14]
{
1, 4, 13, 40, 121, 364, 1093, 3280,
9841, 29524, 88573, 265720, 797161, 2391484
};
/*--
The following is an implementation of
an elegant 3-way quicksort for strings,
described in a paper "Fast Algorithms for
Sorting and Searching Strings", by Robert
Sedgewick and Jon L. Bentley.
--*/
internal const int MAIN_QSORT_SMALL_THRESH = 20;
internal const int MAIN_QSORT_DEPTH_THRESH = (BZ_N_RADIX + BZ_N_QSORT);
internal const int MAIN_QSORT_STACK_SIZE = 100;
internal const uint SETMASK = 1 << 21;
internal const uint CLEARMASK = ~SETMASK;
#endregion
#region crctable.c
/// <summary>
/// Table for doing CRCs
/// </summary>
internal static readonly uint[] BZ2_crc32Table = new uint[256]
{
0x00000000, 0x04c11db7, 0x09823b6e, 0x0d4326d9,
0x130476dc, 0x17c56b6b, 0x1a864db2, 0x1e475005,
0x2608edb8, 0x22c9f00f, 0x2f8ad6d6, 0x2b4bcb61,
0x350c9b64, 0x31cd86d3, 0x3c8ea00a, 0x384fbdbd,
0x4c11db70, 0x48d0c6c7, 0x4593e01e, 0x4152fda9,
0x5f15adac, 0x5bd4b01b, 0x569796c2, 0x52568b75,
0x6a1936c8, 0x6ed82b7f, 0x639b0da6, 0x675a1011,
0x791d4014, 0x7ddc5da3, 0x709f7b7a, 0x745e66cd,
0x9823b6e0, 0x9ce2ab57, 0x91a18d8e, 0x95609039,
0x8b27c03c, 0x8fe6dd8b, 0x82a5fb52, 0x8664e6e5,
0xbe2b5b58, 0xbaea46ef, 0xb7a96036, 0xb3687d81,
0xad2f2d84, 0xa9ee3033, 0xa4ad16ea, 0xa06c0b5d,
0xd4326d90, 0xd0f37027, 0xddb056fe, 0xd9714b49,
0xc7361b4c, 0xc3f706fb, 0xceb42022, 0xca753d95,
0xf23a8028, 0xf6fb9d9f, 0xfbb8bb46, 0xff79a6f1,
0xe13ef6f4, 0xe5ffeb43, 0xe8bccd9a, 0xec7dd02d,
0x34867077, 0x30476dc0, 0x3d044b19, 0x39c556ae,
0x278206ab, 0x23431b1c, 0x2e003dc5, 0x2ac12072,
0x128e9dcf, 0x164f8078, 0x1b0ca6a1, 0x1fcdbb16,
0x018aeb13, 0x054bf6a4, 0x0808d07d, 0x0cc9cdca,
0x7897ab07, 0x7c56b6b0, 0x71159069, 0x75d48dde,
0x6b93dddb, 0x6f52c06c, 0x6211e6b5, 0x66d0fb02,
0x5e9f46bf, 0x5a5e5b08, 0x571d7dd1, 0x53dc6066,
0x4d9b3063, 0x495a2dd4, 0x44190b0d, 0x40d816ba,
0xaca5c697, 0xa864db20, 0xa527fdf9, 0xa1e6e04e,
0xbfa1b04b, 0xbb60adfc, 0xb6238b25, 0xb2e29692,
0x8aad2b2f, 0x8e6c3698, 0x832f1041, 0x87ee0df6,
0x99a95df3, 0x9d684044, 0x902b669d, 0x94ea7b2a,
0xe0b41de7, 0xe4750050, 0xe9362689, 0xedf73b3e,
0xf3b06b3b, 0xf771768c, 0xfa325055, 0xfef34de2,
0xc6bcf05f, 0xc27dede8, 0xcf3ecb31, 0xcbffd686,
0xd5b88683, 0xd1799b34, 0xdc3abded, 0xd8fba05a,
0x690ce0ee, 0x6dcdfd59, 0x608edb80, 0x644fc637,
0x7a089632, 0x7ec98b85, 0x738aad5c, 0x774bb0eb,
0x4f040d56, 0x4bc510e1, 0x46863638, 0x42472b8f,
0x5c007b8a, 0x58c1663d, 0x558240e4, 0x51435d53,
0x251d3b9e, 0x21dc2629, 0x2c9f00f0, 0x285e1d47,
0x36194d42, 0x32d850f5, 0x3f9b762c, 0x3b5a6b9b,
0x0315d626, 0x07d4cb91, 0x0a97ed48, 0x0e56f0ff,
0x1011a0fa, 0x14d0bd4d, 0x19939b94, 0x1d528623,
0xf12f560e, 0xf5ee4bb9, 0xf8ad6d60, 0xfc6c70d7,
0xe22b20d2, 0xe6ea3d65, 0xeba91bbc, 0xef68060b,
0xd727bbb6, 0xd3e6a601, 0xdea580d8, 0xda649d6f,
0xc423cd6a, 0xc0e2d0dd, 0xcda1f604, 0xc960ebb3,
0xbd3e8d7e, 0xb9ff90c9, 0xb4bcb610, 0xb07daba7,
0xae3afba2, 0xaafbe615, 0xa7b8c0cc, 0xa379dd7b,
0x9b3660c6, 0x9ff77d71, 0x92b45ba8, 0x9675461f,
0x8832161a, 0x8cf30bad, 0x81b02d74, 0x857130c3,
0x5d8a9099, 0x594b8d2e, 0x5408abf7, 0x50c9b640,
0x4e8ee645, 0x4a4ffbf2, 0x470cdd2b, 0x43cdc09c,
0x7b827d21, 0x7f436096, 0x7200464f, 0x76c15bf8,
0x68860bfd, 0x6c47164a, 0x61043093, 0x65c52d24,
0x119b4be9, 0x155a565e, 0x18197087, 0x1cd86d30,
0x029f3d35, 0x065e2082, 0x0b1d065b, 0x0fdc1bec,
0x3793a651, 0x3352bbe6, 0x3e119d3f, 0x3ad08088,
0x2497d08d, 0x2056cd3a, 0x2d15ebe3, 0x29d4f654,
0xc5a92679, 0xc1683bce, 0xcc2b1d17, 0xc8ea00a0,
0xd6ad50a5, 0xd26c4d12, 0xdf2f6bcb, 0xdbee767c,
0xe3a1cbc1, 0xe760d676, 0xea23f0af, 0xeee2ed18,
0xf0a5bd1d, 0xf464a0aa, 0xf9278673, 0xfde69bc4,
0x89b8fd09, 0x8d79e0be, 0x803ac667, 0x84fbdbd0,
0x9abc8bd5, 0x9e7d9662, 0x933eb0bb, 0x97ffad0c,
0xafb010b1, 0xab710d06, 0xa6322bdf, 0xa2f33668,
0xbcb4666d, 0xb8757bda, 0xb5365d03, 0xb1f740b4
};
#endregion
#region randtable.c
/// <summary>
/// Table for randomising repetitive blocks
/// </summary>
internal static readonly int[] BZ2_rNums = new int[512]
{
619, 720, 127, 481, 931, 816, 813, 233, 566, 247,
985, 724, 205, 454, 863, 491, 741, 242, 949, 214,
733, 859, 335, 708, 621, 574, 73, 654, 730, 472,
419, 436, 278, 496, 867, 210, 399, 680, 480, 51,
878, 465, 811, 169, 869, 675, 611, 697, 867, 561,
862, 687, 507, 283, 482, 129, 807, 591, 733, 623,
150, 238, 59, 379, 684, 877, 625, 169, 643, 105,
170, 607, 520, 932, 727, 476, 693, 425, 174, 647,
73, 122, 335, 530, 442, 853, 695, 249, 445, 515,
909, 545, 703, 919, 874, 474, 882, 500, 594, 612,
641, 801, 220, 162, 819, 984, 589, 513, 495, 799,
161, 604, 958, 533, 221, 400, 386, 867, 600, 782,
382, 596, 414, 171, 516, 375, 682, 485, 911, 276,
98, 553, 163, 354, 666, 933, 424, 341, 533, 870,
227, 730, 475, 186, 263, 647, 537, 686, 600, 224,
469, 68, 770, 919, 190, 373, 294, 822, 808, 206,
184, 943, 795, 384, 383, 461, 404, 758, 839, 887,
715, 67, 618, 276, 204, 918, 873, 777, 604, 560,
951, 160, 578, 722, 79, 804, 96, 409, 713, 940,
652, 934, 970, 447, 318, 353, 859, 672, 112, 785,
645, 863, 803, 350, 139, 93, 354, 99, 820, 908,
609, 772, 154, 274, 580, 184, 79, 626, 630, 742,
653, 282, 762, 623, 680, 81, 927, 626, 789, 125,
411, 521, 938, 300, 821, 78, 343, 175, 128, 250,
170, 774, 972, 275, 999, 639, 495, 78, 352, 126,
857, 956, 358, 619, 580, 124, 737, 594, 701, 612,
669, 112, 134, 694, 363, 992, 809, 743, 168, 974,
944, 375, 748, 52, 600, 747, 642, 182, 862, 81,
344, 805, 988, 739, 511, 655, 814, 334, 249, 515,
897, 955, 664, 981, 649, 113, 974, 459, 893, 228,
433, 837, 553, 268, 926, 240, 102, 654, 459, 51,
686, 754, 806, 760, 493, 403, 415, 394, 687, 700,
946, 670, 656, 610, 738, 392, 760, 799, 887, 653,
978, 321, 576, 617, 626, 502, 894, 679, 243, 440,
680, 879, 194, 572, 640, 724, 926, 56, 204, 700,
707, 151, 457, 449, 797, 195, 791, 558, 945, 679,
297, 59, 87, 824, 713, 663, 412, 693, 342, 606,
134, 108, 571, 364, 631, 212, 174, 643, 304, 329,
343, 97, 430, 751, 497, 314, 983, 374, 822, 928,
140, 206, 73, 263, 980, 736, 876, 478, 430, 305,
170, 514, 364, 692, 829, 82, 855, 953, 676, 246,
369, 970, 294, 750, 807, 827, 150, 790, 288, 923,
804, 378, 215, 828, 592, 281, 565, 555, 710, 82,
896, 831, 547, 261, 524, 462, 293, 465, 502, 56,
661, 821, 976, 991, 658, 869, 905, 758, 745, 193,
768, 550, 608, 933, 378, 286, 215, 979, 792, 961,
61, 688, 793, 644, 986, 403, 106, 366, 905, 644,
372, 567, 466, 434, 645, 210, 389, 550, 919, 135,
780, 773, 635, 389, 707, 100, 626, 958, 165, 504,
920, 176, 193, 713, 857, 265, 203, 50, 668, 108,
645, 990, 626, 197, 510, 357, 358, 850, 858, 364,
936, 638
};
#endregion
}
}
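
As a usage sketch (not part of the original file): bzlib drives BZ2_crc32Table through its BZ_INITIALISE_CRC, BZ_UPDATE_CRC, and BZ_FINALISE_CRC macros. A direct C# transliteration of those three one-liners looks like this:

using static BinaryObjectScanner.Compression.bzip2.Constants;

// BZ_INITIALISE_CRC: start from all ones
static uint InitialiseCrc() => 0xffffffffU;

// BZ_UPDATE_CRC: fold one byte into the running CRC via the table above
static uint UpdateCrc(uint crc, byte b) =>
    (crc << 8) ^ BZ2_crc32Table[(crc >> 24) ^ b];

// BZ_FINALISE_CRC: invert to produce the stored value
static uint FinaliseCrc(uint crc) => ~crc;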

@@ -1,100 +0,0 @@
using static BinaryObjectScanner.Compression.bzip2.Constants;
namespace BinaryObjectScanner.Compression.bzip2
{
/// <summary>
/// Structure holding all the decompression-side stuff.
/// </summary>
/// <see href="https://github.com/ladislav-zezula/StormLib/blob/master/src/bzip2/bzlib_private.h"/>
internal unsafe class DState
{
/* pointer back to the struct bz_stream */
public bz_stream strm;
/* state indicator for this stream */
public int state;
/* for doing the final run-length decoding */
public byte state_out_ch;
public int state_out_len;
public bool blockRandomised;
public int rNToGo;
public int rTPos;
/* the buffer for bit stream reading */
public uint bsBuff;
public int bsLive;
/* misc administratium */
public int blockSize100k;
public bool smallDecompress;
public int currBlockNo;
public int verbosity;
/* for undoing the Burrows-Wheeler transform */
public int origPtr;
public uint tPos;
public int k0;
public int[] unzftab = new int[256];
public int nblock_used;
public int[] cftab = new int[257];
public int[] cftabCopy = new int[257];
/* for undoing the Burrows-Wheeler transform (FAST) */
public uint* tt;
/* for undoing the Burrows-Wheeler transform (SMALL) */
public ushort* ll16;
public byte* ll4;
/* stored and calculated CRCs */
public uint storedBlockCRC;
public uint storedCombinedCRC;
public uint calculatedBlockCRC;
public uint calculatedCombinedCRC;
/* map of bytes used in block */
public int nInUse;
public bool[] inUse = new bool[256];
public bool[] inUse16 = new bool[16];
public byte[] seqToUnseq = new byte[256];
/* for decoding the MTF values */
public byte[] mtfa = new byte[MTFA_SIZE];
public int[] mtfbase = new int[256 / MTFL_SIZE];
public byte[] selector = new byte[BZ_MAX_SELECTORS];
public byte[] selectorMtf = new byte[BZ_MAX_SELECTORS];
public byte[,] len = new byte[BZ_N_GROUPS, BZ_MAX_ALPHA_SIZE];
public int[,] limit = new int[BZ_N_GROUPS, BZ_MAX_ALPHA_SIZE];
public int[,] @base = new int[BZ_N_GROUPS, BZ_MAX_ALPHA_SIZE];
public int[,] perm = new int[BZ_N_GROUPS, BZ_MAX_ALPHA_SIZE];
public int[] minLens = new int[BZ_N_GROUPS];
/* save area for scalars in the main decompress code */
public int save_i;
public int save_j;
public int save_t;
public int save_alphaSize;
public int save_nGroups;
public int save_nSelectors;
public int save_EOB;
public int save_groupNo;
public int save_groupPos;
public int save_nextSym;
public int save_nblockMAX;
public int save_nblock;
public int save_es;
public int save_N;
public int save_curr;
public int save_zt;
public int save_zn;
public int save_zvec;
public int save_zj;
public int save_gSel;
public int save_gMinlen;
public int* save_gLimit;
public int* save_gBase;
public int* save_gPerm;
}
}

@@ -1,80 +0,0 @@
using static BinaryObjectScanner.Compression.bzip2.Constants;
namespace BinaryObjectScanner.Compression.bzip2
{
/// <summary>
/// Structure holding all the compression-side stuff.
/// </summary>
/// <see href="https://github.com/ladislav-zezula/StormLib/blob/master/src/bzip2/bzlib_private.h"/>
internal unsafe class EState
{
/* pointer back to the struct bz_stream */
public bz_stream* strm;
/* mode this stream is in, and whether inputting */
/* or outputting data */
public int mode;
public int state;
/* remembers avail_in when flush/finish requested */
public uint avail_in_expect;
/* for doing the block sorting */
public uint* arr1;
public uint* arr2;
public uint* ftab;
public int origPtr;
/* aliases for arr1 and arr2 */
public uint* ptr;
public byte* block;
public ushort* mtfv;
public byte* zbits;
/* for deciding when to use the fallback sorting algorithm */
public int workFactor;
/* run-length-encoding of the input */
public uint state_in_ch;
public int state_in_len;
public int rNToGo;
public int rTPos;
/* input and output limits and current posns */
public int nblock;
public int nblockMAX;
public int numZ;
public int state_out_pos;
/* map of bytes used in block */
public int nInUse;
public bool[] inUse = new bool[256];
public byte[] unseqToSeq = new byte[256];
/* the buffer for bit stream creation */
public uint bsBuff;
public int bsLive;
/* block and combined CRCs */
public uint blockCRC;
public uint combinedCRC;
/* misc administratium */
public int verbosity;
public int blockNo;
public int blockSize100k;
/* stuff for coding the MTF values */
public int nMTF;
public int[] mtfFreq = new int[BZ_MAX_ALPHA_SIZE];
public byte[] selector = new byte[BZ_MAX_SELECTORS];
public byte[] selectorMtf = new byte[BZ_MAX_SELECTORS];
public byte[,] len = new byte[BZ_N_GROUPS, BZ_MAX_ALPHA_SIZE];
public int[,] code = new int[BZ_N_GROUPS, BZ_MAX_ALPHA_SIZE];
public int[,] rfreq = new int[BZ_N_GROUPS, BZ_MAX_ALPHA_SIZE];
/* second dimension: only 3 needed; 4 makes index calculations faster */
public uint[,] len_pack = new uint[BZ_MAX_ALPHA_SIZE, 4];
}
}

@@ -1,217 +0,0 @@
using static BinaryObjectScanner.Compression.bzip2.Constants;
namespace BinaryObjectScanner.Compression.bzip2
{
/// <summary>
/// Huffman coding low-level stuff
/// </summary>
/// <see href="https://github.com/ladislav-zezula/StormLib/blob/master/src/bzip2/huffman.c"/>
internal static unsafe class Huffman
{
public static void BZ2_hbMakeCodeLengths(byte* len, int* freq, int alphaSize, int maxLen)
{
/*--
Nodes and heap entries run from 1. Entry 0
for both the heap and nodes is a sentinel.
--*/
int nNodes, nHeap, n1, n2, i, j, k;
bool tooLong;
int[] heap = new int[BZ_MAX_ALPHA_SIZE + 2];
int[] weight = new int[BZ_MAX_ALPHA_SIZE * 2];
int[] parent = new int[BZ_MAX_ALPHA_SIZE * 2];
for (i = 0; i < alphaSize; i++)
{
weight[i + 1] = (freq[i] == 0 ? 1 : freq[i]) << 8;
}
while (true)
{
nNodes = alphaSize;
nHeap = 0;
heap[0] = 0;
weight[0] = 0;
parent[0] = -2;
for (i = 1; i <= alphaSize; i++)
{
parent[i] = -1;
nHeap++;
heap[nHeap] = i;
UPHEAP(nHeap, heap, weight);
}
//AssertH(nHeap < (BZ_MAX_ALPHA_SIZE + 2), 2001);
while (nHeap > 1)
{
n1 = heap[1]; heap[1] = heap[nHeap]; nHeap--; DOWNHEAP(1, nHeap, heap, weight);
n2 = heap[1]; heap[1] = heap[nHeap]; nHeap--; DOWNHEAP(1, nHeap, heap, weight);
nNodes++;
parent[n1] = parent[n2] = nNodes;
weight[nNodes] = ADDWEIGHTS(weight[n1], weight[n2]);
parent[nNodes] = -1;
nHeap++;
heap[nHeap] = nNodes;
UPHEAP(nHeap, heap, weight);
}
//AssertH(nNodes < (BZ_MAX_ALPHA_SIZE * 2), 2002);
tooLong = false;
for (i = 1; i <= alphaSize; i++)
{
j = 0;
k = i;
while (parent[k] >= 0) { k = parent[k]; j++; }
len[i - 1] = (byte)j;
if (j > maxLen) tooLong = true;
}
if (!tooLong) break;
/* 17 Oct 04: keep-going condition for the following loop used
to be 'i < alphaSize', which missed the last element,
theoretically leading to the possibility of the compressor
looping. However, this count-scaling step is only needed if
one of the generated Huffman code words is longer than
maxLen, which up to and including version 1.0.2 was 20 bits,
which is extremely unlikely. In version 1.0.3 maxLen was
changed to 17 bits, which has minimal effect on compression
ratio, but does mean this scaling step is used from time to
time, enough to verify that it works.
This means that bzip2-1.0.3 and later will only produce
Huffman codes with a maximum length of 17 bits. However, in
order to preserve backwards compatibility with bitstreams
produced by versions pre-1.0.3, the decompressor must still
handle lengths of up to 20. */
for (i = 1; i <= alphaSize; i++)
{
j = weight[i] >> 8;
j = 1 + (j / 2);
weight[i] = j << 8;
}
}
}
public static void BZ2_hbAssignCodes(int* code, byte* length, int minLen, int maxLen, int alphaSize)
{
int n, vec, i;
vec = 0;
for (n = minLen; n <= maxLen; n++)
{
for (i = 0; i < alphaSize; i++)
{
if (length[i] == n)
{
code[i] = vec;
vec++;
}
};
vec <<= 1;
}
}
public static void BZ2_hbCreateDecodeTables(int* limit, int* @base, int* perm, byte* length, int minLen, int maxLen, int alphaSize)
{
int pp, i, j, vec;
pp = 0;
for (i = minLen; i <= maxLen; i++)
{
for (j = 0; j < alphaSize; j++)
{
if (length[j] == i) { perm[pp] = j; pp++; }
}
};
for (i = 0; i < BZ_MAX_CODE_LEN; i++)
{
@base[i] = 0;
}
for (i = 0; i < alphaSize; i++)
{
@base[length[i] + 1]++;
}
for (i = 1; i < BZ_MAX_CODE_LEN; i++)
{
@base[i] += @base[i - 1];
}
for (i = 0; i < BZ_MAX_CODE_LEN; i++)
{
limit[i] = 0;
}
vec = 0;
for (i = minLen; i <= maxLen; i++)
{
vec += (@base[i + 1] - @base[i]);
limit[i] = vec - 1;
vec <<= 1;
}
for (i = minLen + 1; i <= maxLen; i++)
{
@base[i] = ((limit[i - 1] + 1) << 1) - @base[i];
}
}
#region Macros
private static int WEIGHTOF(int zz0) => (int)(zz0 & 0xffffff00);
private static int DEPTHOF(int zz1) => zz1 & 0x000000ff;
private static int MYMAX(int zz2, int zz3) => zz2 > zz3 ? zz2 : zz3;
private static int ADDWEIGHTS(int zw1, int zw2) => (WEIGHTOF(zw1) + WEIGHTOF(zw2)) | (1 + MYMAX(DEPTHOF(zw1), DEPTHOF(zw2)));
private static void UPHEAP(int z, int[] heap, int[] weight)
{
int zz, tmp;
zz = z; tmp = heap[zz];
while (weight[tmp] < weight[heap[zz >> 1]])
{
heap[zz] = heap[zz >> 1];
zz >>= 1;
}
heap[zz] = tmp;
}
private static void DOWNHEAP(int z, int nHeap, int[] heap, int[] weight)
{
int zz, yy, tmp;
zz = z; tmp = heap[zz];
while (true)
{
yy = zz << 1;
if (yy > nHeap)
break;
if (yy < nHeap && weight[heap[yy + 1]] < weight[heap[yy]])
yy++;
if (weight[tmp] < weight[heap[yy]])
break;
heap[zz] = heap[yy];
zz = yy;
}
heap[zz] = tmp;
}
#endregion
}
}
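
To show how the entry points above fit together, here is a hedged sketch of building canonical codes for a made-up four-symbol alphabet, roughly what bzip2's compressor does per coding group. The frequencies are invented, and the maxLen of 17 matches the 1.0.3+ limit discussed in the comment inside BZ2_hbMakeCodeLengths.

int alphaSize = 4;
int[] freq = { 45, 13, 12, 5 };   // invented symbol frequencies
byte[] len = new byte[alphaSize];
int[] code = new int[alphaSize];

unsafe
{
    fixed (byte* lenPtr = len)
    fixed (int* freqPtr = freq)
    fixed (int* codePtr = code)
    {
        // Derive code lengths first, capped at 17 bits
        Huffman.BZ2_hbMakeCodeLengths(lenPtr, freqPtr, alphaSize, 17);

        // Find the observed length range, then assign canonical codes
        int minLen = 32, maxLen = 0;
        for (int i = 0; i < alphaSize; i++)
        {
            if (len[i] < minLen) minLen = len[i];
            if (len[i] > maxLen) maxLen = len[i];
        }
        Huffman.BZ2_hbAssignCodes(codePtr, lenPtr, minLen, maxLen, alphaSize);
    }
}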

File diff suppressed because it is too large

@@ -1,22 +0,0 @@
namespace BinaryObjectScanner.Compression.bzip2
{
/// <see href="https://github.com/ladislav-zezula/StormLib/blob/master/src/bzip2/bzlib.h"/>
public unsafe struct bz_stream
{
public char* next_in;
public uint avail_in;
public uint total_in_lo32;
public uint total_in_hi32;
public char* next_out;
public uint avail_out;
public uint total_out_lo32;
public uint total_out_hi32;
public void* state;
// void *(*bzalloc)(void *,int,int);
// void (*bzfree)(void *,void *);
// void *opaque;
}
}

File diff suppressed because it is too large

@@ -1,50 +0,0 @@
using System;
using System.IO;
using BinaryObjectScanner.Interfaces;
namespace BinaryObjectScanner.FileType
{
/// <summary>
/// BFPK custom archive format
/// </summary>
public class BFPK : IExtractable
{
/// <inheritdoc/>
public string Extract(string file, bool includeDebug)
{
if (!File.Exists(file))
return null;
using (var fs = File.Open(file, FileMode.Open, FileAccess.Read, FileShare.Read))
{
return Extract(fs, file, includeDebug);
}
}
/// <inheritdoc/>
public string Extract(Stream stream, string file, bool includeDebug)
{
try
{
// Create the wrapper
Wrappers.BFPK bfpk = Wrappers.BFPK.Create(stream);
if (bfpk == null)
return null;
// Create a temp output directory
string tempPath = Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString());
Directory.CreateDirectory(tempPath);
// Extract all files
bfpk.ExtractAll(tempPath);
return tempPath;
}
catch (Exception ex)
{
if (includeDebug) Console.WriteLine(ex);
return null;
}
}
}
}
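
The FileType classes that follow all implement the same IExtractable contract, so a single hedged usage sketch covers them: Extract returns the temp directory the contents were unpacked to, or null on any failure. The file name below is invented for illustration.

using System;

var extractor = new BinaryObjectScanner.FileType.BFPK();
string outDir = extractor.Extract("sample.bfpk", includeDebug: true);
if (outDir != null)
    Console.WriteLine($"Extracted to {outDir}");
else
    Console.WriteLine("Not a BFPK archive, or extraction failed");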

@@ -1,51 +0,0 @@
using System;
using System.IO;
using BinaryObjectScanner.Interfaces;
namespace BinaryObjectScanner.FileType
{
/// <summary>
/// Half-Life Level
/// </summary>
public class BSP : IExtractable
{
/// <inheritdoc/>
public string Extract(string file, bool includeDebug)
{
if (!File.Exists(file))
return null;
using (var fs = File.Open(file, FileMode.Open, FileAccess.Read, FileShare.Read))
{
return Extract(fs, file, includeDebug);
}
}
/// <inheritdoc/>
public string Extract(Stream stream, string file, bool includeDebug)
{
try
{
// Create the wrapper
Wrappers.BSP bsp = Wrappers.BSP.Create(stream);
if (bsp == null)
return null;
// Create a temp output directory
string tempPath = Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString());
Directory.CreateDirectory(tempPath);
// Loop through and extract all files
bsp.ExtractAllLumps(tempPath);
bsp.ExtractAllTextures(tempPath);
return tempPath;
}
catch (Exception ex)
{
if (includeDebug) Console.WriteLine(ex);
return null;
}
}
}
}

@@ -1,39 +0,0 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFrameworks>net48;net6.0;net7.0</TargetFrameworks>
<RuntimeIdentifiers>win-x86;win-x64;linux-x64;osx-x64</RuntimeIdentifiers>
<Title>BinaryObjectScanner.FileType</Title>
<AssemblyName>BinaryObjectScanner.FileType</AssemblyName>
<Authors>Matt Nadareski</Authors>
<Product>BurnOutSharp</Product>
<Copyright>Copyright (c)2022 Matt Nadareski</Copyright>
<RepositoryUrl>https://github.com/mnadareski/BurnOutSharp</RepositoryUrl>
<Version>2.8</Version>
<AssemblyVersion>2.8</AssemblyVersion>
<FileVersion>2.8</FileVersion>
<IncludeSource>true</IncludeSource>
<IncludeSymbols>true</IncludeSymbols>
</PropertyGroup>
<PropertyGroup>
<AllowUnsafeBlocks>true</AllowUnsafeBlocks>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\BinaryObjectScanner.Compression\BinaryObjectScanner.Compression.csproj" />
<ProjectReference Include="..\BinaryObjectScanner.GameEngine\BinaryObjectScanner.GameEngine.csproj" />
<ProjectReference Include="..\BinaryObjectScanner.Interfaces\BinaryObjectScanner.Interfaces.csproj" />
<ProjectReference Include="..\BinaryObjectScanner.Matching\BinaryObjectScanner.Matching.csproj" />
<ProjectReference Include="..\BinaryObjectScanner.Packer\BinaryObjectScanner.Packer.csproj" />
<ProjectReference Include="..\BinaryObjectScanner.Protection\BinaryObjectScanner.Protection.csproj" />
<ProjectReference Include="..\BinaryObjectScanner.Wrappers\BinaryObjectScanner.Wrappers.csproj" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="OpenMcdf" Version="2.2.1.12" />
<PackageReference Include="SharpCompress" Version="0.32.2" />
<PackageReference Include="UnshieldSharp" Version="1.6.9" />
</ItemGroup>
</Project>

@@ -1,50 +0,0 @@
using System;
using System.IO;
using BinaryObjectScanner.Interfaces;
namespace BinaryObjectScanner.FileType
{
/// <summary>
/// Half-Life Game Cache File
/// </summary>
public class GCF : IExtractable
{
/// <inheritdoc/>
public string Extract(string file, bool includeDebug)
{
if (!File.Exists(file))
return null;
using (var fs = File.Open(file, FileMode.Open, FileAccess.Read, FileShare.Read))
{
return Extract(fs, file, includeDebug);
}
}
/// <inheritdoc/>
public string Extract(Stream stream, string file, bool includeDebug)
{
try
{
// Create the wrapper
Wrappers.GCF gcf = Wrappers.GCF.Create(stream);
if (gcf == null)
return null;
// Create a temp output directory
string tempPath = Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString());
Directory.CreateDirectory(tempPath);
// Loop through and extract all files
gcf.ExtractAll(tempPath);
return tempPath;
}
catch (Exception ex)
{
if (includeDebug) Console.WriteLine(ex);
return null;
}
}
}
}

@@ -1,58 +0,0 @@
using System;
using System.IO;
using BinaryObjectScanner.Interfaces;
using BinaryObjectScanner.Wrappers;
namespace BinaryObjectScanner.FileType
{
/// <summary>
/// Microsoft cabinet file
/// </summary>
/// <remarks>Specification available at <see href="http://download.microsoft.com/download/5/0/1/501ED102-E53F-4CE0-AA6B-B0F93629DDC6/Exchange/%5BMS-CAB%5D.pdf"/></remarks>
/// <see href="https://github.com/wine-mirror/wine/tree/master/dlls/cabinet"/>
public class MicrosoftCAB : IExtractable
{
/// <inheritdoc/>
public string Extract(string file, bool includeDebug)
{
if (!File.Exists(file))
return null;
using (var fs = File.Open(file, FileMode.Open, FileAccess.Read, FileShare.Read))
{
return Extract(fs, file, includeDebug);
}
}
/// <inheritdoc/>
public string Extract(Stream stream, string file, bool includeDebug)
{
try
{
// TODO: Fix/re-enable/do ANYTHING to get this working again
return null;
// Open the cab file
var cabFile = MicrosoftCabinet.Create(stream);
if (cabFile == null)
return null;
// Create a temp output directory
string tempPath = Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString());
Directory.CreateDirectory(tempPath);
// If entry extraction fails
bool success = cabFile.ExtractAll(tempPath);
if (!success)
return null;
return tempPath;
}
catch (Exception ex)
{
if (includeDebug) Console.WriteLine(ex);
return null;
}
}
}
}

@@ -1,50 +0,0 @@
using System;
using System.IO;
using BinaryObjectScanner.Interfaces;
namespace BinaryObjectScanner.FileType
{
/// <summary>
/// Half-Life Package File
/// </summary>
public class PAK : IExtractable
{
/// <inheritdoc/>
public string Extract(string file, bool includeDebug)
{
if (!File.Exists(file))
return null;
using (var fs = File.Open(file, FileMode.Open, FileAccess.Read, FileShare.Read))
{
return Extract(fs, file, includeDebug);
}
}
/// <inheritdoc/>
public string Extract(Stream stream, string file, bool includeDebug)
{
try
{
// Create the wrapper
Wrappers.PAK pak = Wrappers.PAK.Create(stream);
if (pak == null)
return null;
// Create a temp output directory
string tempPath = Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString());
Directory.CreateDirectory(tempPath);
// Loop through and extract all files
pak.ExtractAll(tempPath);
return tempPath;
}
catch (Exception ex)
{
if (includeDebug) Console.WriteLine(ex);
return null;
}
}
}
}

@@ -1,50 +0,0 @@
using System;
using System.IO;
using BinaryObjectScanner.Interfaces;
namespace BinaryObjectScanner.FileType
{
/// <summary>
/// NovaLogic Game Archive Format
/// </summary>
public class PFF : IExtractable
{
/// <inheritdoc/>
public string Extract(string file, bool includeDebug)
{
if (!File.Exists(file))
return null;
using (var fs = File.Open(file, FileMode.Open, FileAccess.Read, FileShare.Read))
{
return Extract(fs, file, includeDebug);
}
}
/// <inheritdoc/>
public string Extract(Stream stream, string file, bool includeDebug)
{
try
{
// Create the wrapper
Wrappers.PFF pff = Wrappers.PFF.Create(stream);
if (pff == null)
return null;
// Create a temp output directory
string tempPath = Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString());
Directory.CreateDirectory(tempPath);
// Extract all files
pff.ExtractAll(tempPath);
return tempPath;
}
catch (Exception ex)
{
if (includeDebug) Console.WriteLine(ex.Message);
return null;
}
}
}
}

@@ -1,50 +0,0 @@
using System;
using System.IO;
using BinaryObjectScanner.Interfaces;
namespace BinaryObjectScanner.FileType
{
/// <summary>
/// SGA game archive
/// </summary>
public class SGA : IExtractable
{
/// <inheritdoc/>
public string Extract(string file, bool includeDebug)
{
if (!File.Exists(file))
return null;
using (var fs = File.Open(file, FileMode.Open, FileAccess.Read, FileShare.Read))
{
return Extract(fs, file, includeDebug);
}
}
/// <inheritdoc/>
public string Extract(Stream stream, string file, bool includeDebug)
{
try
{
// Create the wrapper
Wrappers.SGA sga = Wrappers.SGA.Create(stream);
if (sga == null)
return null;
// Create a temp output directory
string tempPath = Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString());
Directory.CreateDirectory(tempPath);
// Loop through and extract all files
sga.ExtractAll(tempPath);
return tempPath;
}
catch (Exception ex)
{
if (includeDebug) Console.WriteLine(ex);
return null;
}
}
}
}

@@ -1,50 +0,0 @@
using System;
using System.IO;
using BinaryObjectScanner.Interfaces;
namespace BinaryObjectScanner.FileType
{
/// <summary>
/// Half-Life 2 Level
/// </summary>
public class VBSP : IExtractable
{
/// <inheritdoc/>
public string Extract(string file, bool includeDebug)
{
if (!File.Exists(file))
return null;
using (var fs = File.Open(file, FileMode.Open, FileAccess.Read, FileShare.Read))
{
return Extract(fs, file, includeDebug);
}
}
/// <inheritdoc/>
public string Extract(Stream stream, string file, bool includeDebug)
{
try
{
// Create the wrapper
Wrappers.VBSP vbsp = Wrappers.VBSP.Create(stream);
if (vbsp == null)
return null;
// Create a temp output directory
string tempPath = Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString());
Directory.CreateDirectory(tempPath);
// Loop through and extract all files
vbsp.ExtractAllLumps(tempPath);
return tempPath;
}
catch (Exception ex)
{
if (includeDebug) Console.WriteLine(ex.ToString());
return null;
}
}
}
}

@@ -1,50 +0,0 @@
using System;
using System.IO;
using BinaryObjectScanner.Interfaces;
namespace BinaryObjectScanner.FileType
{
/// <summary>
/// Valve Package File
/// </summary>
public class VPK : IExtractable
{
/// <inheritdoc/>
public string Extract(string file, bool includeDebug)
{
if (!File.Exists(file))
return null;
using (var fs = File.Open(file, FileMode.Open, FileAccess.Read, FileShare.Read))
{
return Extract(fs, file, includeDebug);
}
}
/// <inheritdoc/>
public string Extract(Stream stream, string file, bool includeDebug)
{
try
{
// Create the wrapper
Wrappers.VPK vpk = Wrappers.VPK.Create(stream);
if (vpk == null)
return null;
// Create a temp output directory
string tempPath = Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString());
Directory.CreateDirectory(tempPath);
// Loop through and extract all files
vpk.ExtractAll(tempPath);
return tempPath;
}
catch (Exception ex)
{
if (includeDebug) Console.WriteLine(ex);
return null;
}
}
}
}

@@ -1,50 +0,0 @@
using System;
using System.IO;
using BinaryObjectScanner.Interfaces;
namespace BinaryObjectScanner.FileType
{
/// <summary>
/// Half-Life Texture Package File
/// </summary>
public class WAD : IExtractable
{
/// <inheritdoc/>
public string Extract(string file, bool includeDebug)
{
if (!File.Exists(file))
return null;
using (var fs = File.Open(file, FileMode.Open, FileAccess.Read, FileShare.Read))
{
return Extract(fs, file, includeDebug);
}
}
/// <inheritdoc/>
public string Extract(Stream stream, string file, bool includeDebug)
{
try
{
// Create the wrapper
Wrappers.WAD wad = Wrappers.WAD.Create(stream);
if (wad == null)
return null;
// Create a temp output directory
string tempPath = Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString());
Directory.CreateDirectory(tempPath);
// Loop through and extract all files
wad.ExtractAllLumps(tempPath);
return tempPath;
}
catch (Exception ex)
{
if (includeDebug) Console.WriteLine(ex);
return null;
}
}
}
}

@@ -1,50 +0,0 @@
using System;
using System.IO;
using BinaryObjectScanner.Interfaces;
namespace BinaryObjectScanner.FileType
{
/// <summary>
/// XBox Package File
/// </summary>
public class XZP : IExtractable
{
/// <inheritdoc/>
public string Extract(string file, bool includeDebug)
{
if (!File.Exists(file))
return null;
using (var fs = File.Open(file, FileMode.Open, FileAccess.Read, FileShare.Read))
{
return Extract(fs, file, includeDebug);
}
}
/// <inheritdoc/>
public string Extract(Stream stream, string file, bool includeDebug)
{
try
{
// Create the wrapper
Wrappers.XZP xzp = Wrappers.XZP.Create(stream);
if (xzp == null)
return null;
// Create a temp output directory
string tempPath = Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString());
Directory.CreateDirectory(tempPath);
// Loop through and extract all files
xzp.ExtractAll(tempPath);
return tempPath;
}
catch (Exception ex)
{
if (includeDebug) Console.WriteLine(ex);
return null;
}
}
}
}

@@ -1,29 +0,0 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFrameworks>net48;net6.0;net7.0</TargetFrameworks>
<RuntimeIdentifiers>win-x86;win-x64;linux-x64;osx-x64</RuntimeIdentifiers>
<Title>BinaryObjectScanner.GameEngine</Title>
<AssemblyName>BinaryObjectScanner.GameEngine</AssemblyName>
<Authors>Matt Nadareski</Authors>
<Product>BurnOutSharp</Product>
<Copyright>Copyright (c)2022 Matt Nadareski</Copyright>
<RepositoryUrl>https://github.com/mnadareski/BurnOutSharp</RepositoryUrl>
<Version>2.8</Version>
<AssemblyVersion>2.8</AssemblyVersion>
<FileVersion>2.8</FileVersion>
<IncludeSource>true</IncludeSource>
<IncludeSymbols>true</IncludeSymbols>
</PropertyGroup>
<PropertyGroup>
<AllowUnsafeBlocks>true</AllowUnsafeBlocks>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\BinaryObjectScanner.Interfaces\BinaryObjectScanner.Interfaces.csproj" />
<ProjectReference Include="..\BinaryObjectScanner.Matching\BinaryObjectScanner.Matching.csproj" />
<ProjectReference Include="..\BinaryObjectScanner.Wrappers\BinaryObjectScanner.Wrappers.csproj" />
</ItemGroup>
</Project>

@@ -1,7 +0,0 @@
namespace BinaryObjectScanner.GameEngine
{
/// <summary>
/// This class exists for reflection purposes and should not be used
/// </summary>
public sealed class _DUMMY { }
}

@@ -1,27 +0,0 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFrameworks>net48;net6.0;net7.0</TargetFrameworks>
<RuntimeIdentifiers>win-x86;win-x64;linux-x64;osx-x64</RuntimeIdentifiers>
<Title>BinaryObjectScanner.Interfaces</Title>
<AssemblyName>BinaryObjectScanner.Interfaces</AssemblyName>
<Authors>Matt Nadareski</Authors>
<Product>BurnOutSharp</Product>
<Copyright>Copyright (c)2022 Matt Nadareski</Copyright>
<RepositoryUrl>https://github.com/mnadareski/BurnOutSharp</RepositoryUrl>
<Version>2.8</Version>
<AssemblyVersion>2.8</AssemblyVersion>
<FileVersion>2.8</FileVersion>
<IncludeSource>true</IncludeSource>
<IncludeSymbols>true</IncludeSymbols>
</PropertyGroup>
<PropertyGroup>
<AllowUnsafeBlocks>true</AllowUnsafeBlocks>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\BinaryObjectScanner.Wrappers\BinaryObjectScanner.Wrappers.csproj" />
</ItemGroup>
</Project>

@@ -1,23 +0,0 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFrameworks>net48;net6.0;net7.0</TargetFrameworks>
<RuntimeIdentifiers>win-x86;win-x64;linux-x64;osx-x64</RuntimeIdentifiers>
<Title>BinaryObjectScanner.Matching</Title>
<AssemblyName>BinaryObjectScanner.Matching</AssemblyName>
<Authors>Matt Nadareski</Authors>
<Product>BurnOutSharp</Product>
<Copyright>Copyright (c)2018-2022 Matt Nadareski</Copyright>
<RepositoryUrl>https://github.com/mnadareski/BurnOutSharp</RepositoryUrl>
<Version>2.8</Version>
<AssemblyVersion>2.8</AssemblyVersion>
<FileVersion>2.8</FileVersion>
<IncludeSource>true</IncludeSource>
<IncludeSymbols>true</IncludeSymbols>
</PropertyGroup>
<PropertyGroup>
<AllowUnsafeBlocks>true</AllowUnsafeBlocks>
</PropertyGroup>
</Project>

@@ -1,200 +0,0 @@
using System.IO;
namespace BinaryObjectScanner.Matching
{
/// <summary>
/// Content matching criteria
/// </summary>
public class ContentMatch : IMatch<byte?[]>
{
/// <summary>
/// Content to match
/// </summary>
public byte?[] Needle { get; set; }
/// <summary>
/// Starting index for matching
/// </summary>
public int Start { get; set; }
/// <summary>
/// Ending index for matching
/// </summary>
public int End { get; set; }
/// <summary>
/// Constructor
/// </summary>
/// <param name="needle">Byte array representing the search</param>
/// <param name="start">Optional starting index</param>
/// <param name="end">Optional ending index</param>
public ContentMatch(byte?[] needle, int start = -1, int end = -1)
{
Needle = needle;
Start = start;
End = end;
}
#region Array Matching
/// <summary>
/// Get if this match can be found in a stack
/// </summary>
/// <param name="stack">Array to search for the given content</param>
/// <param name="reverse">True to search from the end of the array, false from the start</param>
/// <returns>Tuple of success and found position</returns>
public (bool success, int position) Match(byte[] stack, bool reverse = false)
{
// If either array is null or empty, we can't do anything
if (stack == null || stack.Length == 0 || Needle == null || Needle.Length == 0)
return (false, -1);
// If the needle array is larger than the stack array, it can't be contained within
if (Needle.Length > stack.Length)
return (false, -1);
// Set the default start and end values
int start = Start;
int end = End;
// If start or end are not set properly, set them to defaults
if (start < 0)
start = 0;
if (end < 0)
end = stack.Length - Needle.Length;
for (int i = reverse ? end : start; reverse ? i > start : i < end; i += reverse ? -1 : 1)
{
// If we somehow have an invalid end and we haven't matched, return
if (i > stack.Length)
return (false, -1);
// Check to see if the values are equal
if (EqualAt(stack, i))
return (true, i);
}
return (false, -1);
}
/// <summary>
/// Get if a stack at a certain index is equal to a needle
/// </summary>
/// <param name="stack">Array to search for the given content</param>
/// <param name="index">Starting index to check equality</param>
/// <returns>True if the needle matches the stack at a given index</returns>
private bool EqualAt(byte[] stack, int index)
{
// If the index is invalid, we can't do anything
if (index < 0)
return false;
// If we're too close to the end of the stack, return false
if (Needle.Length > stack.Length - index)
return false;
// Loop through and check the value
for (int i = 0; i < Needle.Length; i++)
{
// A null value is a wildcard
if (Needle[i] == null)
continue;
else if (stack[i + index] != Needle[i])
return false;
}
return true;
}
#endregion
#region Stream Matching
/// <summary>
/// Get if this match can be found in a stack
/// </summary>
/// <param name="stack">Stream to search for the given content</param>
/// <param name="reverse">True to search from the end of the array, false from the start</param>
/// <returns>Tuple of success and found position</returns>
public (bool success, int position) Match(Stream stack, bool reverse = false)
{
// If either array is null or empty, we can't do anything
if (stack == null || stack.Length == 0 || Needle == null || Needle.Length == 0)
return (false, -1);
// If the needle array is larger than the stack array, it can't be contained within
if (Needle.Length > stack.Length)
return (false, -1);
// Set the default start and end values
int start = Start;
int end = End;
// If start or end are not set properly, set them to defaults
if (start < 0)
start = 0;
if (end < 0)
end = (int)(stack.Length - Needle.Length);
for (int i = reverse ? end : start; reverse ? i > start : i < end; i += reverse ? -1 : 1)
{
// If we somehow have an invalid end and we haven't matched, return
if (i > stack.Length)
return (false, -1);
// Check to see if the values are equal
if (EqualAt(stack, i))
return (true, i);
}
return (false, -1);
}
/// <summary>
/// Get if a stack at a certain index is equal to a needle
/// </summary>
/// <param name="stack">Stream to search for the given content</param>
/// <param name="index">Starting index to check equality</param>
/// <returns>True if the needle matches the stack at a given index</returns>
private bool EqualAt(Stream stack, int index)
{
// If the index is invalid, we can't do anything
if (index < 0)
return false;
// If we're too close to the end of the stack, return false
if (Needle.Length > stack.Length - index)
return false;
// Save the current position and move to the index
long currentPosition = stack.Position;
stack.Seek(index, SeekOrigin.Begin);
// Set the return value
bool matched = true;
// Loop through and check the value
for (int i = 0; i < Needle.Length; i++)
{
byte stackValue = (byte)stack.ReadByte();
// A null value is a wildcard
if (Needle[i] == null)
{
continue;
}
else if (stackValue != Needle[i])
{
matched = false;
break;
}
}
// Reset the position and return the value
stack.Seek(currentPosition, SeekOrigin.Begin);
return matched;
}
#endregion
}
}
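
A short sketch of the wildcard semantics above (byte values invented): a null entry in the needle matches any byte at that offset, so the search below still lands at index 1.

// null is a single-byte wildcard within the needle
var match = new ContentMatch(new byte?[] { 0x4D, null, 0x5A });
byte[] stack = { 0x00, 0x4D, 0xFF, 0x5A, 0x00 };

(bool success, int position) = match.Match(stack);
// success == true, position == 1 despite the 0xFF in the middle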

@@ -1,199 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
namespace BinaryObjectScanner.Matching
{
/// <summary>
/// A set of content matches that work together
/// </summary>
public class ContentMatchSet : MatchSet<ContentMatch, byte?[]>
{
/// <summary>
/// Function to get a content version
/// </summary>
/// <remarks>
/// A content version method takes the file path, the file contents,
/// and a list of found positions and returns a single string. That
/// string is either a version string, in which case it will be appended
/// to the protection name, or `null`, in which case it will cause
/// the protection to be omitted.
/// </remarks>
public Func<string, byte[], List<int>, string> GetArrayVersion { get; set; }
/// <summary>
/// Function to get a content version
/// </summary>
/// <remarks>
/// A content version method takes the file path, the file contents,
/// and a list of found positions and returns a single string. That
/// string is either a version string, in which case it will be appended
/// to the protection name, or `null`, in which case it will cause
/// the protection to be omitted.
/// </remarks>
public Func<string, Stream, List<int>, string> GetStreamVersion { get; set; }
#region Generic Constructors
public ContentMatchSet(byte?[] needle, string protectionName)
: this(new List<byte?[]> { needle }, getArrayVersion: null, protectionName) { }
public ContentMatchSet(List<byte?[]> needles, string protectionName)
: this(needles, getArrayVersion: null, protectionName) { }
public ContentMatchSet(ContentMatch needle, string protectionName)
: this(new List<ContentMatch>() { needle }, getArrayVersion: null, protectionName) { }
public ContentMatchSet(List<ContentMatch> needles, string protectionName)
: this(needles, getArrayVersion: null, protectionName) { }
#endregion
#region Array Constructors
public ContentMatchSet(byte?[] needle, Func<string, byte[], List<int>, string> getArrayVersion, string protectionName)
: this(new List<byte?[]> { needle }, getArrayVersion, protectionName) { }
public ContentMatchSet(List<byte?[]> needles, Func<string, byte[], List<int>, string> getArrayVersion, string protectionName)
: this(needles.Select(n => new ContentMatch(n)).ToList(), getArrayVersion, protectionName) { }
public ContentMatchSet(ContentMatch needle, Func<string, byte[], List<int>, string> getArrayVersion, string protectionName)
: this(new List<ContentMatch>() { needle }, getArrayVersion, protectionName) { }
public ContentMatchSet(List<ContentMatch> needles, Func<string, byte[], List<int>, string> getArrayVersion, string protectionName)
{
Matchers = needles;
GetArrayVersion = getArrayVersion;
ProtectionName = protectionName;
}
#endregion
#region Stream Constructors
public ContentMatchSet(byte?[] needle, Func<string, Stream, List<int>, string> getStreamVersion, string protectionName)
: this(new List<byte?[]> { needle }, getStreamVersion, protectionName) { }
public ContentMatchSet(List<byte?[]> needles, Func<string, Stream, List<int>, string> getStreamVersion, string protectionName)
: this(needles.Select(n => new ContentMatch(n)).ToList(), getStreamVersion, protectionName) { }
public ContentMatchSet(ContentMatch needle, Func<string, Stream, List<int>, string> getStreamVersion, string protectionName)
: this(new List<ContentMatch>() { needle }, getStreamVersion, protectionName) { }
public ContentMatchSet(List<ContentMatch> needles, Func<string, Stream, List<int>, string> getStreamVersion, string protectionName)
{
Matchers = needles;
GetStreamVersion = getStreamVersion;
ProtectionName = protectionName;
}
#endregion
#region Array Matching
/// <summary>
/// Determine whether all content matches pass
/// </summary>
/// <param name="stack">Array to search</param>
/// <returns>Tuple of passing status and matching positions</returns>
public (bool, List<int>) MatchesAll(byte[] stack)
{
// If no content matches are defined, we fail out
if (Matchers == null || !Matchers.Any())
return (false, new List<int>());
// Initialize the position list
List<int> positions = new List<int>();
// Loop through all content matches and make sure all pass
foreach (var contentMatch in Matchers)
{
(bool match, int position) = contentMatch.Match(stack);
if (!match)
return (false, new List<int>());
else
positions.Add(position);
}
return (true, positions);
}
/// <summary>
/// Determine whether any content matches pass
/// </summary>
/// <param name="stack">Array to search</param>
/// <returns>Tuple of passing status and first matching position</returns>
public (bool, int) MatchesAny(byte[] stack)
{
// If no content matches are defined, we fail out
if (Matchers == null || !Matchers.Any())
return (false, -1);
// Loop through all content matches and make sure all pass
foreach (var contentMatch in Matchers)
{
(bool match, int position) = contentMatch.Match(stack);
if (match)
return (true, position);
}
return (false, -1);
}
#endregion
#region Stream Matching
/// <summary>
/// Determine whether all content matches pass
/// </summary>
/// <param name="stack">Stream to search</param>
/// <returns>Tuple of passing status and matching positions</returns>
public (bool, List<int>) MatchesAll(Stream stack)
{
// If no content matches are defined, we fail out
if (Matchers == null || !Matchers.Any())
return (false, new List<int>());
// Initialize the position list
List<int> positions = new List<int>();
// Loop through all content matches and make sure all pass
foreach (var contentMatch in Matchers)
{
(bool match, int position) = contentMatch.Match(stack);
if (!match)
return (false, new List<int>());
else
positions.Add(position);
}
return (true, positions);
}
/// <summary>
/// Determine whether any content matches pass
/// </summary>
/// <param name="stack">Stream to search</param>
/// <returns>Tuple of passing status and first matching position</returns>
public (bool, int) MatchesAny(Stream stack)
{
// If no content matches are defined, we fail out
if (Matchers == null || !Matchers.Any())
return (false, -1);
// Loop through all content matches and make sure all pass
foreach (var contentMatch in Matchers)
{
(bool match, int position) = contentMatch.Match(stack);
if (match)
return (true, position);
}
return (false, -1);
}
#endregion
}
}
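
For illustration, a hedged sketch of wiring a version callback into a set: the protection name, needle, and version logic are all invented, and fileContents stands in for a scanned file's bytes. Note how returning null from the callback suppresses the protection, per the remarks above.

using System.Collections.Generic;

var set = new ContentMatchSet(
    new byte?[] { 0x50, 0x52, 0x4F, 0x54 }, // "PROT", invented marker
    getArrayVersion: (path, content, positions) =>
    {
        // Fake version read: the byte right after the first match;
        // returning null here would omit the protection entirely
        int pos = positions[0] + 4;
        return pos < content.Length ? content[pos].ToString() : null;
    },
    "Example Protection");

(bool passes, List<int> positions) = set.MatchesAll(fileContents);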

@@ -1,96 +0,0 @@
using System.Collections.Generic;
using System.Linq;
namespace BinaryObjectScanner.Matching
{
public static class Extensions
{
/// <summary>
/// Find all positions of one array in another, if possible
/// </summary>
public static List<int> FindAllPositions(this byte[] stack, byte?[] needle, int start = 0, int end = -1)
{
// Get the outgoing list
List<int> positions = new List<int>();
// Initialize the loop variables
bool found = true;
int lastPosition = start;
var matcher = new ContentMatch(needle, end: end);
// Loop over and get all positions
while (found)
{
matcher.Start = lastPosition;
(found, lastPosition) = matcher.Match(stack, false);
if (found)
{
positions.Add(lastPosition);
// Advance past this match so the next search makes progress
lastPosition++;
}
}
return positions;
}
/// <summary>
/// Find the first position of one array in another, if possible
/// </summary>
public static bool FirstPosition(this byte[] stack, byte[] needle, out int position, int start = 0, int end = -1)
{
byte?[] nullableNeedle = needle != null ? needle.Select(b => (byte?)b).ToArray() : null;
return stack.FirstPosition(nullableNeedle, out position, start, end);
}
/// <summary>
/// Find the first position of one array in another, if possible
/// </summary>
public static bool FirstPosition(this byte[] stack, byte?[] needle, out int position, int start = 0, int end = -1)
{
var matcher = new ContentMatch(needle, start, end);
(bool found, int foundPosition) = matcher.Match(stack, false);
position = foundPosition;
return found;
}
/// <summary>
/// Find the last position of one array in another, if possible
/// </summary>
public static bool LastPosition(this byte[] stack, byte?[] needle, out int position, int start = 0, int end = -1)
{
var matcher = new ContentMatch(needle, start, end);
(bool found, int foundPosition) = matcher.Match(stack, true);
position = foundPosition;
return found;
}
/// <summary>
/// See if a byte array starts with another
/// </summary>
public static bool StartsWith(this byte[] stack, byte[] needle)
{
return stack.FirstPosition(needle, out int _, start: 0, end: 1);
}
/// <summary>
/// See if a byte array starts with another
/// </summary>
public static bool StartsWith(this byte[] stack, byte?[] needle)
{
return stack.FirstPosition(needle, out int _, start: 0, end: 1);
}
/// <summary>
/// See if a byte array ends with another
/// </summary>
public static bool EndsWith(this byte[] stack, byte[] needle)
{
// End is exclusive in ContentMatch, so include the final valid start index
return stack.FirstPosition(needle, out int _, start: stack.Length - needle.Length, end: stack.Length - needle.Length + 1);
}
/// <summary>
/// See if a byte array ends with another
/// </summary>
public static bool EndsWith(this byte[] stack, byte?[] needle)
{
// End is exclusive in ContentMatch, so include the final valid start index
return stack.FirstPosition(needle, out int _, start: stack.Length - needle.Length, end: stack.Length - needle.Length + 1);
}
}
}
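
And a quick sketch of the helpers above in action (data invented):

byte[] data = { 0x4D, 0x5A, 0x90, 0x00, 0x4D, 0x5A };

// First occurrence of the two-byte needle
bool found = data.FirstPosition(new byte[] { 0x4D, 0x5A }, out int position);
// found == true, position == 0

// StartsWith is FirstPosition constrained to start: 0, end: 1
bool hasMagic = data.StartsWith(new byte[] { 0x4D, 0x5A }); // true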

@@ -1,7 +0,0 @@
namespace BinaryObjectScanner.Matching
{
public interface IMatch<T>
{
T Needle { get; set; }
}
}

@@ -1,20 +0,0 @@
using System.Collections.Generic;
namespace BinaryObjectScanner.Matching
{
/// <summary>
/// Wrapper for a single set of matching criteria
/// </summary>
public abstract class MatchSet<T, U> where T : IMatch<U>
{
/// <summary>
/// Set of all matchers
/// </summary>
public IEnumerable<T> Matchers { get; set; }
/// <summary>
/// Name of the protection to show
/// </summary>
public string ProtectionName { get; set; }
}
}

@@ -1,340 +0,0 @@
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.IO;
using System.Linq;
namespace BinaryObjectScanner.Matching
{
/// <summary>
/// Helper class for matching
/// </summary>
public static class MatchUtil
{
#region Array Content Matching
/// <summary>
/// Get all content matches for a given list of matchers
/// </summary>
/// <param name="file">File to check for matches</param>
/// <param name="stack">Array to search</param>
/// <param name="matchers">Enumerable of ContentMatchSets to be run on the file</param>
/// <param name="includeDebug">True to include positional data, false otherwise</param>
/// <returns>List of strings representing the matched protections, null or empty otherwise</returns>
public static ConcurrentQueue<string> GetAllMatches(
string file,
byte[] stack,
IEnumerable<ContentMatchSet> matchers,
bool includeDebug = false)
{
return FindAllMatches(file, stack, matchers, includeDebug, false);
}
/// <summary>
/// Get first content match for a given list of matchers
/// </summary>
/// <param name="file">File to check for matches</param>
/// <param name="stack">Array to search</param>
/// <param name="matchers">Enumerable of ContentMatchSets to be run on the file</param>
/// <param name="includeDebug">True to include positional data, false otherwise</param>
/// <returns>String representing the matched protection, null otherwise</returns>
public static string GetFirstMatch(
string file,
byte[] stack,
IEnumerable<ContentMatchSet> matchers,
bool includeDebug = false)
{
var contentMatches = FindAllMatches(file, stack, matchers, includeDebug, true);
if (contentMatches == null || !contentMatches.Any())
return null;
return contentMatches.First();
}
/// <summary>
/// Get the required set of content matches on a per Matcher basis
/// </summary>
/// <param name="file">File to check for matches</param>
/// <param name="stack">Array to search</param>
/// <param name="matchers">Enumerable of ContentMatchSets to be run on the file</param>
/// <param name="includeDebug">True to include positional data, false otherwise</param>
/// <param name="stopAfterFirst">True to stop after the first match, false otherwise</param>
/// <returns>List of strings representing the matched protections, null or empty otherwise</returns>
private static ConcurrentQueue<string> FindAllMatches(
string file,
byte[] stack,
IEnumerable<ContentMatchSet> matchers,
bool includeDebug,
bool stopAfterFirst)
{
// If there's no mappings, we can't match
if (matchers == null || !matchers.Any())
return null;
// Initialize the queue of matched protections
var matchedProtections = new ConcurrentQueue<string>();
// Loop through and try everything otherwise
foreach (var matcher in matchers)
{
// Determine if the matcher passes
(bool passes, List<int> positions) = matcher.MatchesAll(stack);
if (!passes)
continue;
// Format the list of all positions found
string positionsString = string.Join(", ", positions);
// If there is no version method, just return the protection name
if (matcher.GetArrayVersion == null)
{
matchedProtections.Enqueue((matcher.ProtectionName ?? "Unknown Protection") + (includeDebug ? $" (Index {positionsString})" : string.Empty));
}
// Otherwise, invoke the version method
else
{
// A null version returned means the check didn't pass at the version step
string version = matcher.GetArrayVersion(file, stack, positions);
if (version == null)
continue;
matchedProtections.Enqueue($"{matcher.ProtectionName ?? "Unknown Protection"} {version}".Trim() + (includeDebug ? $" (Index {positionsString})" : string.Empty));
}
// If we're stopping after the first protection, bail out here
if (stopAfterFirst)
return matchedProtections;
}
return matchedProtections;
}
#endregion
#region Stream Content Matching
/// <summary>
/// Get all content matches for a given list of matchers
/// </summary>
/// <param name="file">File to check for matches</param>
/// <param name="stack">Stream to search</param>
/// <param name="matchers">Enumerable of ContentMatchSets to be run on the file</param>
/// <param name="includeDebug">True to include positional data, false otherwise</param>
/// <returns>List of strings representing the matched protections, null or empty otherwise</returns>
public static ConcurrentQueue<string> GetAllMatches(
string file,
Stream stack,
IEnumerable<ContentMatchSet> matchers,
bool includeDebug = false)
{
return FindAllMatches(file, stack, matchers, includeDebug, false);
}
/// <summary>
/// Get first content match for a given list of matchers
/// </summary>
/// <param name="file">File to check for matches</param>
/// <param name="stack">Stream to search</param>
/// <param name="matchers">Enumerable of ContentMatchSets to be run on the file</param>
/// <param name="includeDebug">True to include positional data, false otherwise</param>
/// <returns>String representing the matched protection, null otherwise</returns>
public static string GetFirstMatch(
string file,
Stream stack,
IEnumerable<ContentMatchSet> matchers,
bool includeDebug = false)
{
var contentMatches = FindAllMatches(file, stack, matchers, includeDebug, true);
if (contentMatches == null || !contentMatches.Any())
return null;
return contentMatches.First();
}
/// <summary>
/// Get the required set of content matches on a per Matcher basis
/// </summary>
/// <param name="file">File to check for matches</param>
/// <param name="stack">Stream to search</param>
/// <param name="matchers">Enumerable of ContentMatchSets to be run on the file</param>
/// <param name="includeDebug">True to include positional data, false otherwise</param>
/// <param name="stopAfterFirst">True to stop after the first match, false otherwise</param>
/// <returns>List of strings representing the matched protections, null or empty otherwise</returns>
private static ConcurrentQueue<string> FindAllMatches(
string file,
Stream stack,
IEnumerable<ContentMatchSet> matchers,
bool includeDebug,
bool stopAfterFirst)
{
// If there are no mappings, we can't match
if (matchers == null || !matchers.Any())
return null;
// Initialize the queue of matched protections
var matchedProtections = new ConcurrentQueue<string>();
// Loop through and try everything otherwise
foreach (var matcher in matchers)
{
// Determine if the matcher passes
(bool passes, List<int> positions) = matcher.MatchesAll(stack);
if (!passes)
continue;
// Format the list of all positions found
string positionsString = string.Join(", ", positions);
// If there is no version method, just return the protection name
if (matcher.GetStreamVersion == null)
{
matchedProtections.Enqueue((matcher.ProtectionName ?? "Unknown Protection") + (includeDebug ? $" (Index {positionsString})" : string.Empty));
}
// Otherwise, invoke the version method
else
{
// A null version returned means the check didn't pass at the version step
string version = matcher.GetStreamVersion(file, stack, positions);
if (version == null)
continue;
matchedProtections.Enqueue($"{matcher.ProtectionName ?? "Unknown Protection"} {version}".Trim() + (includeDebug ? $" (Index {positionsString})" : string.Empty));
}
// If we're stopping after the first protection, bail out here
if (stopAfterFirst)
return matchedProtections;
}
return matchedProtections;
}
#endregion
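// Usage sketch (hypothetical helper, not part of the original file): the
// Stream overloads mirror the byte-array ones, so a caller can scan a file
// without loading it entirely into memory.
private static void ExampleStreamUsage(List<ContentMatchSet> matchers)
{
    using (var stream = System.IO.File.OpenRead("game.exe"))
    {
        var all = GetAllMatches("game.exe", stream, matchers, includeDebug: false);
    }
}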
#region Path Matching
/// <summary>
/// Get all path matches for a given list of matchers
/// </summary>
/// <param name="file">File path to check for matches</param>
/// <param name="matchers">Enumerable of PathMatchSets to be run on the file</param>
/// <param name="any">True if any path match is a success, false if all have to match</param>
/// <returns>List of strings representing the matched protections, null or empty otherwise</returns>
public static ConcurrentQueue<string> GetAllMatches(string file, IEnumerable<PathMatchSet> matchers, bool any = false)
{
return FindAllMatches(new List<string> { file }, matchers, any, false);
}
/// <summary>
/// Get all path matches for a given list of matchers
/// </summary>
/// <param name="files">File paths to check for matches</param>
/// <param name="matchers">Enumerable of PathMatchSets to be run on the file</param>
/// <param name="any">True if any path match is a success, false if all have to match</param>
/// <returns>List of strings representing the matched protections, null or empty otherwise</returns>
public static ConcurrentQueue<string> GetAllMatches(IEnumerable<string> files, IEnumerable<PathMatchSet> matchers, bool any = false)
{
return FindAllMatches(files, matchers, any, false);
}
/// <summary>
/// Get first path match for a given list of matchers
/// </summary>
/// <param name="file">File path to check for matches</param>
/// <param name="matchers">Enumerable of PathMatchSets to be run on the file</param>
/// <param name="any">True if any path match is a success, false if all have to match</param>
/// <returns>String representing the matched protection, null otherwise</returns>
public static string GetFirstMatch(string file, IEnumerable<PathMatchSet> matchers, bool any = false)
{
var contentMatches = FindAllMatches(new List<string> { file }, matchers, any, true);
if (contentMatches == null || !contentMatches.Any())
return null;
return contentMatches.First();
}
/// <summary>
/// Get first path match for a given list of matchers
/// </summary>
/// <param name="files">File paths to check for matches</param>
/// <param name="matchers">Enumerable of PathMatchSets to be run on the file</param>
/// <param name="any">True if any path match is a success, false if all have to match</param>
/// <returns>String representing the matched protection, null otherwise</returns>
public static string GetFirstMatch(IEnumerable<string> files, IEnumerable<PathMatchSet> matchers, bool any = false)
{
var contentMatches = FindAllMatches(files, matchers, any, true);
if (contentMatches == null || !contentMatches.Any())
return null;
return contentMatches.First();
}
/// <summary>
/// Get the required set of path matches on a per Matcher basis
/// </summary>
/// <param name="files">File paths to check for matches</param>
/// <param name="matchers">Enumerable of PathMatchSets to be run on the file</param>
/// <param name="any">True if any path match is a success, false if all have to match</param>
/// <param name="stopAfterFirst">True to stop after the first match, false otherwise</param>
/// <returns>List of strings representing the matched protections, null or empty otherwise</returns>
private static ConcurrentQueue<string> FindAllMatches(IEnumerable<string> files, IEnumerable<PathMatchSet> matchers, bool any, bool stopAfterFirst)
{
// If there are no mappings, we can't match
if (matchers == null || !matchers.Any())
return new ConcurrentQueue<string>();
// Initialize the list of matched protections
var matchedProtections = new ConcurrentQueue<string>();
// Loop through and try everything otherwise
foreach (var matcher in matchers)
{
// Determine if the matcher passes
bool passes;
string firstMatchedString;
if (any)
{
(bool anyPasses, string matchedString) = matcher.MatchesAny(files);
passes = anyPasses;
firstMatchedString = matchedString;
}
else
{
(bool allPasses, List<string> matchedStrings) = matcher.MatchesAll(files);
passes = allPasses;
firstMatchedString = matchedStrings.FirstOrDefault();
}
// If we don't have a pass, just continue
if (!passes)
continue;
// If there is no version method, just return the protection name
if (matcher.GetVersion == null)
{
matchedProtections.Enqueue(matcher.ProtectionName ?? "Unknown Protection");
}
// Otherwise, invoke the version method
else
{
// A null version returned means the check didn't pass at the version step
string version = matcher.GetVersion(firstMatchedString, files);
if (version == null)
continue;
matchedProtections.Enqueue($"{matcher.ProtectionName ?? "Unknown Protection"} {version}".Trim());
}
// If we're stopping after the first protection, bail out here
if (stopAfterFirst)
return matchedProtections;
}
return matchedProtections;
}
#endregion
}
}
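// Usage sketch (hypothetical, not part of the original file): driving the
// path-matching entry points with a directory listing. The needle is
// illustrative, and MatchUtil is assumed to be the enclosing static class.
var matchers = new List<PathMatchSet>
{
    new PathMatchSet(new PathMatch("00000001.TMP", useEndsWith: true), "SafeDisc"),
};
var files = System.IO.Directory.EnumerateFiles(@"C:\game", "*", System.IO.SearchOption.AllDirectories);
var found = MatchUtil.GetAllMatches(files, matchers, any: true);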


@@ -1,71 +0,0 @@
using System.Collections.Generic;
using System.Linq;
namespace BinaryObjectScanner.Matching
{
/// <summary>
/// Path matching criteria
/// </summary>
public class PathMatch : IMatch<string>
{
/// <summary>
/// String to match
/// </summary>
public string Needle { get; set; }
/// <summary>
/// Match exact casing instead of invariant
/// </summary>
public bool MatchExact { get; set; }
/// <summary>
/// Match values that end with the needle, rather than just contain it
/// </summary>
public bool UseEndsWith { get; set; }
/// <summary>
/// Constructor
/// </summary>
/// <param name="needle">String representing the search</param>
/// <param name="matchExact">True to match exact casing, false otherwise</param>
/// <param name="useEndsWith">True to match the end only, false for all contents</param>
public PathMatch(string needle, bool matchExact = false, bool useEndsWith = false)
{
Needle = needle;
MatchExact = matchExact;
UseEndsWith = useEndsWith;
}
#region Matching
/// <summary>
/// Get if this match can be found in a stack
/// </summary>
/// <param name="stack">List of strings to search for the given content</param>
/// <returns>Tuple of success and matched item</returns>
public (bool, string) Match(IEnumerable<string> stack)
{
// If the stack or the needle is null or empty, we can't do anything
if (stack == null || !stack.Any() || Needle == null || Needle.Length == 0)
return (false, null);
// Preprocess the needle, if necessary
string procNeedle = MatchExact ? Needle : Needle.ToLowerInvariant();
foreach (string stackItem in stack)
{
// Preprocess the stack item, if necessary
string procStackItem = MatchExact ? stackItem : stackItem.ToLowerInvariant();
if (UseEndsWith && procStackItem.EndsWith(procNeedle))
return (true, stackItem);
else if (!UseEndsWith && procStackItem.Contains(procNeedle))
return (true, stackItem);
}
return (false, null);
}
#endregion
}
}
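// Behavior sketch (hypothetical, not part of the original file): matching is
// case-insensitive unless MatchExact is set, and UseEndsWith compares only
// the tail of each path instead of its full contents.
var match = new PathMatch(".icd", useEndsWith: true);
(bool hit, string value) = match.Match(new List<string> { @"C:\Game\SETUP.ICD" });
// hit == true; value is the original, un-lowercased "C:\Game\SETUP.ICD".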


@@ -1,108 +0,0 @@
using System;
using System.Collections.Generic;
using System.Linq;
namespace BinaryObjectScanner.Matching
{
/// <summary>
/// A set of path matches that work together
/// </summary>
public class PathMatchSet : MatchSet<PathMatch, string>
{
/// <summary>
/// Function to get a path version for this Matcher
/// </summary>
/// <remarks>
/// A path version method takes the matched path and an enumerable of files
/// and returns a single string. That string is either a version string,
/// in which case it will be appended to the protection name, or `null`,
/// in which case it will cause the protection to be omitted.
/// </remarks>
public Func<string, IEnumerable<string>, string> GetVersion { get; set; }
#region Constructors
public PathMatchSet(string needle, string protectionName)
: this(new List<string> { needle }, null, protectionName) { }
public PathMatchSet(List<string> needles, string protectionName)
: this(needles, null, protectionName) { }
public PathMatchSet(string needle, Func<string, IEnumerable<string>, string> getVersion, string protectionName)
: this(new List<string> { needle }, getVersion, protectionName) { }
public PathMatchSet(List<string> needles, Func<string, IEnumerable<string>, string> getVersion, string protectionName)
: this(needles.Select(n => new PathMatch(n)).ToList(), getVersion, protectionName) { }
public PathMatchSet(PathMatch needle, string protectionName)
: this(new List<PathMatch>() { needle }, null, protectionName) { }
public PathMatchSet(List<PathMatch> needles, string protectionName)
: this(needles, null, protectionName) { }
public PathMatchSet(PathMatch needle, Func<string, IEnumerable<string>, string> getVersion, string protectionName)
: this(new List<PathMatch>() { needle }, getVersion, protectionName) { }
public PathMatchSet(List<PathMatch> needles, Func<string, IEnumerable<string>, string> getVersion, string protectionName)
{
Matchers = needles;
GetVersion = getVersion;
ProtectionName = protectionName;
}
#endregion
#region Matching
/// <summary>
/// Determine whether all path matches pass
/// </summary>
/// <param name="stack">List of strings to try to match</param>
/// <returns>Tuple of passing status and matching values</returns>
public (bool, List<string>) MatchesAll(IEnumerable<string> stack)
{
// If no path matches are defined, we fail out
if (Matchers == null || !Matchers.Any())
return (false, new List<string>());
// Initialize the value list
List<string> values = new List<string>();
// Loop through all path matches and make sure all pass
foreach (var pathMatch in Matchers)
{
(bool match, string value) = pathMatch.Match(stack);
if (!match)
return (false, new List<string>());
else
values.Add(value);
}
return (true, values);
}
/// <summary>
/// Determine whether any path matches pass
/// </summary>
/// <param name="stack">List of strings to try to match</param>
/// <returns>Tuple of passing status and first matching value</returns>
public (bool, string) MatchesAny(IEnumerable<string> stack)
{
// If no path matches are defined, we fail out
if (Matchers == null || !Matchers.Any())
return (false, null);
// Loop through all path matches and return on the first that passes
foreach (var pathMatch in Matchers)
{
(bool match, string value) = pathMatch.Match(stack);
if (match)
return (true, value);
}
return (false, null);
}
#endregion
}
}
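// Sketch of the GetVersion contract described in the remarks above: return a
// version string to append to the protection name, or null to suppress the
// match entirely. The needle, version logic, and protection name here are
// invented for illustration.
var set = new PathMatchSet(
    new PathMatch("DRVMGT.DLL", useEndsWith: true),
    (firstMatchedString, files) => System.IO.File.Exists(firstMatchedString) ? "1.x" : null,
    "Hypothetical Protection");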


@@ -1,13 +0,0 @@
namespace BinaryObjectScanner.Models.AACS
{
/// <summary>
/// This record type is undocumented but found in real media key blocks
/// </summary>
public sealed class CopyrightRecord : Record
{
/// <summary>
/// Null-terminated ASCII string representing the copyright
/// </summary>
public string Copyright;
}
}
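// Since Copyright is documented as a null-terminated ASCII string, a reader
// might decode the raw record payload like this sketch (the payload buffer
// and the helper itself are assumptions, not part of this model class):
static string ReadCopyright(byte[] payload)
{
    int end = System.Array.IndexOf(payload, (byte)0x00);
    if (end < 0)
        end = payload.Length;
    return System.Text.Encoding.ASCII.GetString(payload, 0, end);
}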


@@ -1,21 +0,0 @@
namespace BinaryObjectScanner.Models.AACS
{
/// <see href="https://aacsla.com/wp-content/uploads/2019/02/AACS_Spec_Common_Final_0953.pdf"/>
public sealed class DriveRevocationListEntry
{
/// <summary>
/// A 2-byte Range value indicates the range of revoked IDs starting
/// from the ID contained in the record. A value of zero in the Range
/// field indicates that only one ID is being revoked, a value of one
/// in the Range field indicates two IDs are being revoked, and so on.
/// </summary>
public ushort Range;
/// <summary>
/// A 6-byte Drive ID value identifying the Licensed Drive being revoked
/// (or the first in a range of Licensed Drives being revoked, in the
/// case of a non-zero Range value).
/// </summary>
public byte[] DriveID;
}
}


@@ -1,26 +0,0 @@
namespace BinaryObjectScanner.Models.AACS
{
/// <summary>
/// A properly formatted type 3 or type 4 Media Key Block contains exactly
/// one Drive Revocation List Record. It follows the Host Revocation List
/// Record, although it may not immediately follow it.
///
/// The Drive Revocation List Record is identical to the Host Revocation
/// List Record, except it has type 0x20, and it contains Drive Revocation
/// List Entries, not Host Revocation List Entries. The Drive Revocation List
/// Entries refer to Drive IDs in the Drive Certificates.
/// </summary>
/// <see href="https://aacsla.com/wp-content/uploads/2019/02/AACS_Spec_Common_Final_0953.pdf"/>
public sealed class DriveRevocationListRecord : Record
{
/// <summary>
/// The total number of Drive Revocation List Entry fields that follow.
/// </summary>
public uint TotalNumberOfEntries;
/// <summary>
/// Revocation list entries
/// </summary>
public DriveRevocationSignatureBlock[] SignatureBlocks;
}
}


@@ -1,17 +0,0 @@
namespace BinaryObjectScanner.Models.AACS
{
/// <see href="https://aacsla.com/wp-content/uploads/2019/02/AACS_Spec_Common_Final_0953.pdf"/>
public sealed class DriveRevocationSignatureBlock
{
/// <summary>
/// The number of Drive Revocation List Entry fields in the signature block.
/// </summary>
public uint NumberOfEntries;
/// <summary>
/// A list of 8-byte Drive Revocation List Entry fields, the length of this
/// list being equal to the number in the signature block.
/// </summary>
public DriveRevocationListEntry[] EntryFields;
}
}


@@ -1,23 +0,0 @@
namespace BinaryObjectScanner.Models.AACS
{
/// <summary>
/// A properly formatted MKB shall contain an End of Media Key Block Record.
/// When a device encounters this Record it stops processing the MKB, using
/// whatever Km value it has calculated up to that point as the final Km for
/// that MKB (pending possible checks for correctness of the key, as
/// described previously).
/// </summary>
/// <see href="https://aacsla.com/wp-content/uploads/2019/02/AACS_Spec_Common_Final_0953.pdf"/>
public sealed class EndOfMediaKeyBlockRecord : Record
{
/// <summary>
/// AACS LA's signature on the data in the Media Key Block up to,
/// but not including, this record. Devices depending on the Version
/// Number in the Type and Version Record must verify the signature.
/// Other devices may ignore the signature data. If any device
/// determines that the signature does not verify or is omitted, it
/// must refuse to use the Media Key.
/// </summary>
public byte[] SignatureData;
}
}


@@ -1,46 +0,0 @@
namespace BinaryObjectScanner.Models.AACS
{
public enum MediaKeyBlockType : uint
{
/// <summary>
/// (Type 3). This is a normal Media Key Block suitable for being recorded
/// on AACS Recordable Media. Both Class I and Class II Licensed Products
/// use it to directly calculate the Media Key.
/// </summary>
Type3 = 0x00031003,
/// <summary>
/// (Type 4). This is a Media Key Block that has been designed to use Key
/// Conversion Data (KCD). Thus, it is suitable only for pre-recorded media
/// from which the KCD is derived. Both Class I and Class II Licensed Products
/// use it to directly calculate the Media Key.
/// </summary>
Type4 = 0x00041003,
/// <summary>
/// (Type 10). This is a Class II Media Key Block (one that has the functionality
/// of a Sequence Key Block). This can only be processed by Class II Licensed
/// Products; Class I Licensed Products are revoked in Type 10 Media Key Blocks
/// and cannot process them. This type does not contain the Host Revocation List
/// Record, the Drive Revocation List Record, and the Media Key Data Record, as
/// described in the following sections. It does contain the records shown in
/// Section 3.2.5.2, which are only processed by Class II Licensed Products.
/// </summary>
Type10 = 0x000A1003,
}
public enum RecordType : byte
{
EndOfMediaKeyBlock = 0x02,
ExplicitSubsetDifference = 0x04,
MediaKeyData = 0x05,
SubsetDifferenceIndex = 0x07,
TypeAndVersion = 0x10,
DriveRevocationList = 0x20,
HostRevocationList = 0x21,
VerifyMediaKey = 0x81,
// Not documented
Copyright = 0x7F,
}
}


@@ -1,11 +0,0 @@
namespace BinaryObjectScanner.Models.AACS
{
/// <see href="https://aacsla.com/wp-content/uploads/2019/02/AACS_Spec_Common_Final_0953.pdf"/>
public sealed class ExplicitSubsetDifferenceRecord : Record
{
/// <summary>
/// In this record, each subset-difference is encoded with 5 bytes.
/// </summary>
public SubsetDifference[] SubsetDifferences;
}
}


@@ -1,21 +0,0 @@
namespace BinaryObjectScanner.Models.AACS
{
/// <see href="https://aacsla.com/wp-content/uploads/2019/02/AACS_Spec_Common_Final_0953.pdf"/>
public sealed class HostRevocationListEntry
{
/// <summary>
/// A 2-byte Range value indicates the range of revoked IDs starting
/// from the ID contained in the record. A value of zero in the Range
/// field indicates that only one ID is being revoked, a value of one
/// in the Range field indicates two IDs are being revoked, and so on.
/// </summary>
public ushort Range;
/// <summary>
/// A 6-byte Host ID value identifying the host being revoked (or the
/// first in a range of hosts being revoked, in the case of a non-zero
/// Range value).
/// </summary>
public byte[] HostID;
}
}
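// The Range semantics above (0 = one ID, 1 = two IDs, ...) mean each entry
// covers Range + 1 consecutive IDs. A sketch (not part of the original file)
// that expands an entry by incrementing the 6-byte big-endian Host ID:
static System.Collections.Generic.IEnumerable<byte[]> ExpandRevokedIds(HostRevocationListEntry entry)
{
    // Interpret the 6-byte ID as a big-endian integer.
    ulong id = 0;
    foreach (byte b in entry.HostID)
        id = (id << 8) | b;

    for (ulong i = 0; i <= entry.Range; i++)
    {
        ulong current = id + i;
        var bytes = new byte[6];
        for (int j = 5; j >= 0; j--)
        {
            bytes[j] = (byte)(current & 0xFF);
            current >>= 8;
        }
        yield return bytes;
    }
}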


@@ -1,29 +0,0 @@
namespace BinaryObjectScanner.Models.AACS
{
/// <summary>
/// A properly formatted type 3 or type 4 Media Key Block shall have exactly
/// one Host Revocation List Record as its second record. This record provides
/// a list of hosts that have been revoked by the AACS LA. The AACS specification
/// is applicable to PC-based systems where a Licensed Drive and PC Host act
/// together as the Recording Device and/or Playback Device for AACS Content.
/// AACS uses a drive-host authentication protocol for the host to verify the
/// integrity of the data received from the Licensed Drive, and for the Licensed
/// Drive to check the validity of the host application. The Type and Version
/// Record and the Host Revocation List Record are guaranteed to be the first two
/// records of a Media Key Block, to make it easier for Licensed Drives to extract
/// this data from an arbitrary Media Key Block.
/// </summary>
/// <see href="https://aacsla.com/wp-content/uploads/2019/02/AACS_Spec_Common_Final_0953.pdf"/>
public sealed class HostRevocationListRecord : Record
{
/// <summary>
/// The total number of Host Revocation List Entry fields that follow.
/// </summary>
public uint TotalNumberOfEntries;
/// <summary>
/// Revocation list entries
/// </summary>
public HostRevocationSignatureBlock[] SignatureBlocks;
}
}


@@ -1,17 +0,0 @@
namespace BinaryObjectScanner.Models.AACS
{
/// <see href="https://aacsla.com/wp-content/uploads/2019/02/AACS_Spec_Common_Final_0953.pdf"/>
public sealed class HostRevocationSignatureBlock
{
/// <summary>
/// The number of Host Revocation List Entry fields in the signature block.
/// </summary>
public uint NumberOfEntries;
/// <summary>
/// A list of 8-byte Host Revocation List Entry fields, the length of this
/// list being equal to the number in the signature block.
/// </summary>
public HostRevocationListEntry[] EntryFields;
}
}


@@ -1,14 +0,0 @@
namespace BinaryObjectScanner.Models.AACS
{
/// <summary>
/// A Media Key Block is formatted as a sequence of contiguous Records.
/// </summary>
/// <see href="https://aacsla.com/wp-content/uploads/2019/02/AACS_Spec_Common_Final_0953.pdf"/>
public sealed class MediaKeyBlock
{
/// <summary>
/// Records
/// </summary>
public Record[] Records { get; set; }
}
}


@@ -1,18 +0,0 @@
namespace BinaryObjectScanner.Models.AACS
{
/// <summary>
/// This record gives the associated encrypted media key data for the
/// subset-differences identified in the Explicit Subset-Difference Record.
/// </summary>
/// <see href="https://aacsla.com/wp-content/uploads/2019/02/AACS_Spec_Common_Final_0953.pdf"/>
public sealed class MediaKeyDataRecord : Record
{
/// <summary>
/// Each subset difference has its associated 16 bytes in this
/// record, in the same order it is encountered in the subset-difference
/// record. These 16 bytes are the ciphertext value C in the media
/// key calculation.
/// </summary>
public byte[][] MediaKeyData;
}
}


@@ -1,28 +0,0 @@
namespace BinaryObjectScanner.Models.AACS
{
/// <summary>
/// Each Record begins with a one-byte Record Type field, followed by a
/// three-byte Record Length field.
///
/// The following subsections describe the currently defined Record types,
/// and how a device processes each. All multi-byte integers, including
/// the length field, are “Big Endian”; in other words, the most significant
/// byte comes first in the record.
/// </summary>
/// <see href="https://aacsla.com/wp-content/uploads/2019/02/AACS_Spec_Common_Final_0953.pdf"/>
public abstract class Record
{
/// <summary>
/// The Record Type field value indicates the type of the Record.
/// </summary>
public RecordType RecordType;
/// <summary>
/// The Record Length field value indicates the number of bytes in
/// the Record, including the Record Type and the Record Length
/// fields themselves. Record lengths are always multiples of 4 bytes.
/// </summary>
/// <remarks>UInt24, not UInt32</remarks>
public uint RecordLength;
}
}
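// Per the header description above (a one-byte type, then a three-byte
// big-endian length that counts the whole record), a parsing sketch; the
// helper name and use of a Stream are assumptions, not part of this model:
static (RecordType Type, uint Length) ReadRecordHeader(System.IO.Stream s)
{
    var header = new byte[4];
    if (s.Read(header, 0, 4) != 4)
        throw new System.IO.EndOfStreamException();

    var type = (RecordType)header[0];
    // UInt24, most significant byte first.
    uint length = ((uint)header[1] << 16) | ((uint)header[2] << 8) | header[3];
    return (type, length);
}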


@@ -1,20 +0,0 @@
namespace BinaryObjectScanner.Models.AACS
{
/// <see href="https://aacsla.com/wp-content/uploads/2019/02/AACS_Spec_Common_Final_0953.pdf"/>
public sealed class SubsetDifference
{
/// <summary>
/// The mask for u is given by the first byte. That byte is
/// treated as a number, the number of low-order 0-bits in
/// the mask. For example, the value 0x01 denotes a mask of
/// 0xFFFFFFFE; value 0x0A denotes a mask of 0xFFFFFC00.
/// </summary>
public byte Mask;
/// <summary>
/// The last 4 bytes are the uv number, most significant
/// byte first.
/// </summary>
public uint Number;
}
}
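// The mask encoding above (0x01 => 0xFFFFFFFE, 0x0A => 0xFFFFFC00) is just
// "all ones shifted left by Mask bits"; a one-line sketch (helper name is
// hypothetical):
static uint MaskFor(byte maskByte) => maskByte >= 32 ? 0u : 0xFFFFFFFFu << maskByte;
// MaskFor(0x01) == 0xFFFFFFFE; MaskFor(0x0A) == 0xFFFFFC00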

Some files were not shown because too many files have changed in this diff.