Compare commits


1550 Commits

Author SHA1 Message Date
Matt Nadareski
092374a143 Bump version to 2.7 2023-03-06 08:57:43 -05:00
TheRogueArchivist
3cfb60430a Split SafeDisc and CDS-300 checks (#236)
* Split SafeDisc/CDS-300 checks.
* Add new DrvMgt.dll hash check.
2023-03-05 14:33:39 -08:00
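As background on the DrvMgt.dll hash check above: known-file hash checks identify a protection by hashing the candidate file and looking the digest up in a table of known values. A minimal C# sketch, assuming a SHA-1-keyed lookup; the hash and version string below are placeholders, not BurnOutSharp's actual data:

```csharp
using System;
using System.Collections.Generic;
using System.IO;
using System.Security.Cryptography;

static class DrvMgtCheck
{
    // Placeholder table: a real check maps known DrvMgt.dll hashes to
    // specific SafeDisc versions.
    static readonly Dictionary<string, string> KnownHashes = new()
    {
        ["0000000000000000000000000000000000000000"] = "SafeDisc (placeholder version)",
    };

    // Returns the matched protection string, or null if the hash is unknown.
    public static string? Match(string path)
    {
        using var sha1 = SHA1.Create();
        using var stream = File.OpenRead(path);
        string hash = Convert.ToHexString(sha1.ComputeHash(stream)); // .NET 5+
        return KnownHashes.TryGetValue(hash, out var version) ? version : null;
    }
}
```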
TheRogueArchivist
370cc68fa4 Move Freelock notes to DRML (#233)
* Move Freelock notes to DRML.
* Add new Freelock checks.
* Tweak some Freelock output.
* Fix existing DRML links from other protections.
2023-02-09 06:05:13 -08:00
TheRogueArchivist
8fe5046c19 Add new MGI Registration check (#232)
* Add new MGI Registration check
* Address PR comment
2023-02-08 13:04:47 -08:00
TheRogueArchivist
37e7604441 Add MGI Registration detection (#231)
* Add MGI Registration detection.
* Update README.
2023-02-08 11:43:15 -08:00
TheRogueArchivist
7651b34855 Add new CD-Key/Serial check (#230)
* Add new CD-Key/Serial check.
2023-02-07 07:00:04 -08:00
TheRogueArchivist
4bf89b1d5f Add new known C-Dilla versions (#229)
* Add two new known C-Dilla versions.

* Make IA samples more consistent.

* Remove TODO.
2023-02-07 06:59:49 -08:00
TheRogueArchivist
0287284909 Add more CD-Cops notes (#228)
* Add more CD-Cops notes and samples.
2023-02-07 06:59:31 -08:00
Matt Nadareski
a8453b3f21 Descriptions for all! 2023-01-18 11:18:53 -08:00
Matt Nadareski
2552564953 Create wrapper creation method 2023-01-18 10:56:19 -08:00
Matt Nadareski
0d4d19559a else if and a note 2023-01-18 08:39:27 -08:00
Matt Nadareski
52f4132ccb Be smarter about EXE checks 2023-01-18 08:36:16 -08:00
Matt Nadareski
cb6440662b Create LE during scan as well 2023-01-17 20:22:25 -08:00
Matt Nadareski
6293895611 Make it so debug doesn't return early 2023-01-17 20:21:25 -08:00
Matt Nadareski
f564fb6e9e Make JSON handling internally a bit easier 2023-01-16 22:15:45 -08:00
Matt Nadareski
3f2adfcf62 Add explicit note about JSON output 2023-01-16 22:12:54 -08:00
Matt Nadareski
2c979f291e Add Options class, allow multiple features 2023-01-16 21:52:32 -08:00
Matt Nadareski
7e7b2ee64a Support PFF version 0 (nw) 2023-01-16 14:34:28 -08:00
Matt Nadareski
87108405a8 Add PFF support (full) 2023-01-15 23:33:09 -08:00
Matt Nadareski
9fb055cbff Move file name into try/catch 2023-01-15 17:45:11 -08:00
Matt Nadareski
e690f0137e Don't try to unpack invalid IS-CAB files 2023-01-15 17:44:25 -08:00
Matt Nadareski
87c08d6fbd Replace EntryHeader with AudioHeader 2023-01-15 16:26:05 -08:00
Matt Nadareski
8c164d776e Start adding separate header information 2023-01-15 12:30:22 -08:00
Matt Nadareski
964271b4e1 Remove now-redundant note 2023-01-15 11:57:41 -08:00
Matt Nadareski
e99ba48f07 Determine PLJv2 Block 3 format, add notes 2023-01-15 02:07:56 -08:00
Matt Nadareski
62b1627b04 More PLJv2 notes/support 2023-01-15 00:19:16 -08:00
Matt Nadareski
3a54997d42 Fix corner case in rapid scans 2023-01-14 23:31:31 -08:00
Matt Nadareski
7d95a43b4b Fix RCDATA issue 2023-01-14 23:26:52 -08:00
Matt Nadareski
23ea8710c0 Read PLJv2 track ID and year 2023-01-14 23:14:50 -08:00
Matt Nadareski
0b62a52991 PLJv2 doesn't seem to have offsets 2023-01-14 23:06:50 -08:00
Matt Nadareski
1143c8a8b7 Add notes, fix v2 skipping 2023-01-14 22:52:22 -08:00
Matt Nadareski
a5b66caae6 "Support" PlayJ v2 by skipping fields 2023-01-14 22:36:57 -08:00
Matt Nadareski
b0b87d05fd Add PLJ builders/wrappers/printing 2023-01-14 22:24:25 -08:00
Matt Nadareski
cb3c666f64 Add PlayJ models 2023-01-14 21:43:59 -08:00
Matt Nadareski
12fdae7944 Fill in more notes before modelling 2023-01-14 21:20:45 -08:00
Matt Nadareski
e76bc70ec6 Fill out more PLJ notes before modelling 2023-01-14 19:47:24 -08:00
Matt Nadareski
e78bb8cb41 Add PLJ header format notes 2023-01-14 14:19:38 -08:00
Matt Nadareski
5153e73f42 Update README 2023-01-14 01:40:57 -08:00
Matt Nadareski
7f36ff8a2b More fixes to IS-CAB 2023-01-14 01:39:15 -08:00
TheRogueArchivist
99a8a39dda Move MediaCloQ notes to DRML (#227)
* Move MediaCloQ notes to DRML.
* Add TODO and fix typo.
2023-01-14 00:44:10 -08:00
Matt Nadareski
adbf983e65 Hook up IS-CAB printing 2023-01-14 00:42:03 -08:00
Matt Nadareski
d7639495ac Fill in some missing IS-CAB parts 2023-01-14 00:41:13 -08:00
Matt Nadareski
fbe09d9082 Add IS-CAB wrapper 2023-01-13 22:24:25 -08:00
Matt Nadareski
70468b72c3 Fix up IS-CAB a little 2023-01-13 21:40:01 -08:00
TheRogueArchivist
90f4af1121 Add version finding to DiscGuard (#226)
* Add version finding to DiscGuard.
* Update notes.
* Address PR comments
2023-01-13 20:30:42 -08:00
Matt Nadareski
aa37449bbf Update developer guide 2023-01-13 15:34:10 -08:00
Matt Nadareski
c835e04722 Update coding guide 2023-01-13 15:29:56 -08:00
Matt Nadareski
29b999b8ed Info should act like scan 2023-01-13 15:15:30 -08:00
Matt Nadareski
9ddd6cc317 Write info outputs to file for easier use 2023-01-13 14:20:41 -08:00
Matt Nadareski
3a694f0e31 Pretty print uses StringBuilder now 2023-01-13 14:04:21 -08:00
Matt Nadareski
080cbda588 Rename Print to PrettyPrint 2023-01-13 12:02:42 -08:00
Matt Nadareski
fd066e8aae Simplify printer code, don't duplicate print 2023-01-13 11:14:34 -08:00
Matt Nadareski
0fd0cf689a Add JSON serialization to wrappers (.NET 6) 2023-01-13 10:41:50 -08:00
Matt Nadareski
f85adda24c Add some DVD models 2023-01-12 23:38:09 -08:00
Matt Nadareski
2d1e8e02aa Overhaul BD+ to model/builder/wrapper 2023-01-12 14:45:04 -08:00
Matt Nadareski
371fbee7a4 Replace current AACS checks 2023-01-12 13:57:10 -08:00
Matt Nadareski
a5bb95e7c1 Hook up AACS media block printing 2023-01-12 13:29:02 -08:00
Matt Nadareski
53b5a443fe Add AACS wrapper and printing 2023-01-12 13:28:12 -08:00
Matt Nadareski
a230871f75 Add AACS media block builder 2023-01-12 12:28:31 -08:00
Matt Nadareski
f560ce17e8 Add AACS media key block to file types 2023-01-12 09:44:31 -08:00
Matt Nadareski
b96329bd33 Add AACS media key block models 2023-01-12 09:40:02 -08:00
Matt Nadareski
913f7802de Fix LE/LX debug parsing 2023-01-11 13:55:00 -08:00
Matt Nadareski
a9f61ed51e Hook up LE/LX printing 2023-01-11 13:39:49 -08:00
Matt Nadareski
04c0835228 Add LE/LX printing to wrapper 2023-01-11 13:39:23 -08:00
Matt Nadareski
af7ff05ecf Fill out LE/LX builder 2023-01-11 11:44:13 -08:00
Matt Nadareski
4ccf80189e Start filling out LE/LX builder 2023-01-10 23:41:15 -08:00
Matt Nadareski
b417229ee6 Add print debug, fix NE printing 2023-01-10 23:15:59 -08:00
Matt Nadareski
61457582b3 More summary info fleshing out 2023-01-10 12:23:48 -08:00
Matt Nadareski
ecc1613f49 Start adding some MSI-specific things 2023-01-10 11:59:33 -08:00
Matt Nadareski
5ea89eefe8 MSI was really CFB all along 2023-01-10 10:51:36 -08:00
Matt Nadareski
661808826a Add hex to outputs for debugging 2023-01-10 09:50:49 -08:00
Matt Nadareski
342f78ffd0 Add extension properties for sector sizes 2023-01-09 22:23:12 -08:00
Matt Nadareski
cc6a65d5e4 Slight reorganization 2023-01-09 22:17:58 -08:00
Matt Nadareski
068ee76983 Add generic data reading 2023-01-09 22:15:37 -08:00
Matt Nadareski
b980b33019 Add sector chain helpers 2023-01-09 22:04:29 -08:00
Matt Nadareski
4327ce7848 Fix typo in directory entry output 2023-01-09 21:40:56 -08:00
Matt Nadareski
2c813e7b3d Add CFB to the readme 2023-01-09 21:27:00 -08:00
Matt Nadareski
d69746f7ef Fix printing, hook up to printer 2023-01-09 21:18:45 -08:00
Matt Nadareski
ce8f73d30d Add CFB wrapper and printing 2023-01-09 21:14:21 -08:00
Matt Nadareski
d6602ac8a8 Make UPX a little safer 2023-01-09 16:34:45 -08:00
Matt Nadareski
6478af03e7 Add class IDs we care about 2023-01-09 16:34:22 -08:00
Matt Nadareski
f086e63914 Omit top 32-bits for version 3 2023-01-09 16:33:56 -08:00
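The "omit top 32-bits" fix above matches a quirk of the compound file (CFB) format: for version 3 files, the directory entry's 64-bit stream size field is only guaranteed in its low 32 bits, so the upper half must be ignored. A hedged sketch; names are illustrative, not the repo's exact API:

```csharp
// Version 3 CFB files only guarantee the low 32 bits of a directory
// entry's stream size; version 4 uses the full 64 bits.
static ulong EffectiveStreamSize(ulong rawStreamSize, ushort majorVersion)
    => majorVersion == 3 ? rawStreamSize & 0xFFFFFFFFUL : rawStreamSize;
```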
Matt Nadareski
6fbb6bd8dc Implement directory section parsing 2023-01-09 15:58:10 -08:00
Matt Nadareski
008629b61f Add all but directory sectors 2023-01-09 14:32:40 -08:00
Matt Nadareski
b7704dbe57 Add skeleton builder for CFB 2023-01-09 13:24:11 -08:00
Matt Nadareski
5aff2d0b1b Add CFB models 2023-01-09 13:11:06 -08:00
Matt Nadareski
771bbeed6a Add DS/3DS to readme 2023-01-09 11:43:07 -08:00
Matt Nadareski
53dd1e9aa5 Add alignment, partition parsing, fix BE 2023-01-09 11:33:54 -08:00
Matt Nadareski
6b7ed456ac Hook up CIA to printer 2023-01-09 11:31:17 -08:00
Matt Nadareski
5e2185dffd Add CIA printing 2023-01-09 10:53:15 -08:00
Matt Nadareski
b5d318013b Add CIA builder 2023-01-09 09:53:57 -08:00
Matt Nadareski
07a926e50c Forgot references 2023-01-08 21:39:48 -08:00
Matt Nadareski
78bbb63c11 Fix formatting issue; clarification 2023-01-08 21:34:52 -08:00
Matt Nadareski
1fd613c2b2 "Library" not "utility"; clarification 2023-01-08 21:31:41 -08:00
Matt Nadareski
792833ebc8 Update readme and description 2023-01-08 21:27:23 -08:00
Matt Nadareski
7392fce770 Add NDS name table/FAT printing 2023-01-08 10:24:27 -08:00
Matt Nadareski
6c621c743d Add NDS FAT parsing 2023-01-08 00:20:57 -08:00
Matt Nadareski
50a7883958 Add NDS name table parsing 2023-01-08 00:16:01 -08:00
Matt Nadareski
3882db6fc6 Add RomFS header parsing/printing 2023-01-07 23:09:26 -08:00
Matt Nadareski
c91691f79b Remove duplicate TODO 2023-01-07 22:57:46 -08:00
Matt Nadareski
17fbf1163d Add ExeFS printing 2023-01-07 22:50:31 -08:00
Matt Nadareski
9a1bbd7e0d Add ExeFS header parsing 2023-01-07 22:44:31 -08:00
Matt Nadareski
bf6f3bad46 Print extended headers 2023-01-07 22:33:55 -08:00
Matt Nadareski
b99b2a53cf Add extended header parsing for N3DS 2023-01-07 22:12:58 -08:00
Matt Nadareski
1fec5c15d4 Fix N3DS parsing and printing 2023-01-07 21:42:13 -08:00
Matt Nadareski
a8b13e60b6 Fix printing if-statements 2023-01-07 20:55:17 -08:00
Matt Nadareski
4c0c44de6b Add DS/3DS to Test printer 2023-01-07 14:50:21 -08:00
Matt Nadareski
af0623beea Add DS/3DS to supported file types 2023-01-07 14:47:36 -08:00
Matt Nadareski
761b418d21 Fix DS/3DS stuff, add 3DS wrapper/printing 2023-01-07 12:33:15 -08:00
Matt Nadareski
0316edb8cb Split header into 2 files 2023-01-06 23:48:26 -08:00
Matt Nadareski
48cf417d60 Add wrapper and printing for NDS 2023-01-06 23:39:32 -08:00
Matt Nadareski
83af2926aa Add N3DS builder 2023-01-06 16:14:00 -08:00
Matt Nadareski
ddfa820004 Add NDS builder 2023-01-06 15:34:04 -08:00
Matt Nadareski
80986978cb Add N3DS to models 2023-01-06 15:20:10 -08:00
Matt Nadareski
68283554e9 Add NDS to models 2023-01-06 14:37:40 -08:00
Matt Nadareski
0fcedfafbe Bump version to 2.6 2023-01-06 13:04:52 -08:00
Matt Nadareski
d075ce4ebd Relicense to MIT
I am relicensing this code with the permission of all contributors to this port. The code that this port is based on is unaffected.
2023-01-06 11:43:50 -08:00
Matt Nadareski
3c6fcb4e4d Update to UnshieldSharp 1.6.9 2023-01-06 11:23:29 -08:00
Matt Nadareski
5d37b7947e Use license expression 2023-01-06 10:57:55 -08:00
Matt Nadareski
537b5c83f2 Add commit hash to artifacts 2023-01-06 10:56:49 -08:00
Matt Nadareski
44d2c90aea Update to WiseUnpacker 1.0.4 2023-01-05 23:19:44 -08:00
Matt Nadareski
af0c984b08 Skip trying to parse null debug data 2023-01-05 22:42:54 -08:00
Matt Nadareski
e9f01c7a10 Make ImpulseReactor check safer for null values 2023-01-05 22:30:56 -08:00
TheRogueArchivist
0c468e3489 Fix typo in SafeDisc detection (#224)
* Fix typo in SafeDisc detection.
2023-01-05 20:34:05 -08:00
TheRogueArchivist
aff43b7625 Add encrypted Link Data Security file detection (#223)
* Add encrypted Link Data Security file (LDSCRYPT) detection.

* Update CD-Cops notes.
2023-01-05 14:55:21 -08:00
Matt Nadareski
dc53ebd378 Try outputting directly to the correct folder 2023-01-05 13:15:06 -08:00
Matt Nadareski
7a7518d92a Builders not Builder 2023-01-05 13:13:22 -08:00
Matt Nadareski
ee182b8ea7 Remove old Nuget remnants, fix relative path 2023-01-05 13:07:45 -08:00
Matt Nadareski
c1bf48480c Avoid moves, reference directly 2023-01-05 13:02:25 -08:00
Matt Nadareski
b5bb88553c Try quotes again, just differently 2023-01-05 12:57:12 -08:00
Matt Nadareski
9b9fc09bdf Add trailing slash for mv 2023-01-05 12:00:46 -08:00
Matt Nadareski
5709c88232 Disable Nuget, rely on dotnet instead 2023-01-05 12:00:13 -08:00
Matt Nadareski
522c9fba08 Try using different mv command 2023-01-05 11:55:14 -08:00
Matt Nadareski
3a7b15f231 Add Nuget packages to auto-build 2023-01-05 11:43:18 -08:00
TheRogueArchivist
cb2c96ef7d Add basic HyperTech CrackProof detection (#222)
* Add super basic, incomplete HyperTech CrackProof detection.
* Address PR comments
2023-01-05 11:24:40 -08:00
Matt Nadareski
f89f691ee3 Archive scanning is not content scanning 2023-01-05 11:00:29 -08:00
Matt Nadareski
a0f59b774a Enable content scanning flag in Test 2023-01-05 10:54:34 -08:00
Matt Nadareski
65adf2109d Add content scanning flag in Scanner/Test 2023-01-05 10:53:06 -08:00
Matt Nadareski
92da1695bf Add path scanning flag to Test 2023-01-05 10:49:01 -08:00
Matt Nadareski
e46011beff Add path scanning flag in Scanner 2023-01-05 10:46:13 -08:00
Matt Nadareski
e733dea80e CD-Cops path checks are case-sensitive 2023-01-05 10:38:11 -08:00
Matt Nadareski
6a27161322 Add WiseInst for old NE Wise 2023-01-04 23:53:46 -08:00
Matt Nadareski
6bb4193167 Hook up NE Wise extraction thru WiseUnpacker again 2023-01-04 23:31:52 -08:00
Matt Nadareski
52c34250ff Add NE Wise note for later 2023-01-04 23:13:45 -08:00
Matt Nadareski
cd72cccbaa Fix incorrect printer output 2023-01-04 23:00:00 -08:00
Matt Nadareski
ad9689440a Use NE format property too (nw) 2023-01-04 22:53:52 -08:00
Matt Nadareski
dd193352e2 Remove unused directory creation 2023-01-04 22:44:14 -08:00
Matt Nadareski
a376041be4 Separate out Wise PE extraction 2023-01-04 22:43:11 -08:00
Matt Nadareski
7d14eb35ba Support PKZIP-compressed Wise installer data 2023-01-04 22:37:44 -08:00
Matt Nadareski
fdad3b0c87 Print first 16 bytes of unknown resources 2023-01-04 20:29:41 -08:00
Matt Nadareski
8ebaa59b5f Add precursor position check for segment validity 2023-01-04 20:24:31 -08:00
Matt Nadareski
6037be404c Fix bad LaserLok check 2023-01-04 20:22:20 -08:00
Matt Nadareski
1ead65126f WIP bzip2 code 2023-01-04 20:19:15 -08:00
Matt Nadareski
cfa1bc8875 Port ADPCM compression from stormlib 2023-01-04 11:03:03 -08:00
Matt Nadareski
cfbac7a9ab Ensure required field marked as such 2023-01-04 10:27:37 -08:00
Matt Nadareski
6f62777033 Split files for clarity 2023-01-04 10:23:03 -08:00
Matt Nadareski
0a523d74d5 x, not e 2023-01-04 10:13:36 -08:00
Matt Nadareski
907d0eb724 Enable experimental extraction feature 2023-01-04 10:10:42 -08:00
Matt Nadareski
edcf20885c Extraction is possible 2023-01-04 10:05:21 -08:00
Matt Nadareski
bb838c5b1f Fix error for small executables 2023-01-03 23:43:34 -08:00
Matt Nadareski
fa90618200 Remove one more note in README 2023-01-03 23:14:24 -08:00
Matt Nadareski
b421d8f818 Order of operations is important 2023-01-03 23:01:43 -08:00
Matt Nadareski
c1151b7e93 Minor cleanup and bugfixing 2023-01-03 22:32:22 -08:00
Matt Nadareski
b1d6b1be9c Fix errantly deleted line in csproj 2023-01-03 22:24:41 -08:00
Matt Nadareski
cb45405e09 Remove remnants of Dtf and LibMSPackSharp 2023-01-03 22:21:29 -08:00
Matt Nadareski
6ef9f2856a Remove Dtf and LibMSPackSharp 2023-01-03 22:15:53 -08:00
Matt Nadareski
c14300ffbb Port MS-ZIP for MS-CAB 2023-01-03 22:11:57 -08:00
Matt Nadareski
42b4c40d87 Port LZX init and decompression 2023-01-03 19:00:21 -08:00
Matt Nadareski
1c78dac79f Port LZX read lengths 2023-01-03 16:11:30 -08:00
Matt Nadareski
bb6a045dd3 Port LZX macros 2023-01-03 16:08:50 -08:00
Matt Nadareski
b273d8314a Migrate some easy LZX stuff 2023-01-03 14:46:30 -08:00
Matt Nadareski
015b895177 Start trying to get Quantum archives to extract 2023-01-03 13:53:44 -08:00
Matt Nadareski
fce9ce4eb4 Handle unknown data better 2023-01-03 10:59:41 -08:00
Matt Nadareski
e6463adb65 Add Quantum to README 2023-01-03 10:46:09 -08:00
Matt Nadareski
b07fd29753 Minor tweaks from issues found during extraction 2023-01-03 10:42:57 -08:00
Matt Nadareski
2416a035c7 Add Quantum archive models/builder/wrapper 2023-01-03 09:28:16 -08:00
Matt Nadareski
c2c125fd29 Add extract all to BFPK 2023-01-03 09:19:35 -08:00
Matt Nadareski
580bf0494d Fix writing cabinet file data 2023-01-03 00:19:03 -08:00
Matt Nadareski
eefc52d1dd Be safer with uneven lengths 2023-01-03 00:14:42 -08:00
Matt Nadareski
c28de855e2 Take slightly more academic approach 2023-01-03 00:12:31 -08:00
Matt Nadareski
5e2d7505da Consistency tweaks (nw) 2023-01-02 23:54:40 -08:00
Matt Nadareski
f550c96541 Take some cues from libmspack 2023-01-02 23:35:09 -08:00
Matt Nadareski
d222eec1b1 I want to get off the Quantum ride 2023-01-02 22:03:36 -08:00
Matt Nadareski
be129262da Next level is a set of values, not a single one 2023-01-02 15:04:00 -08:00
Matt Nadareski
87080c906d Port more of the easy stuff over 2023-01-02 15:01:24 -08:00
Matt Nadareski
cdeaf09ed6 Start doing a better job with MSZIP 2023-01-02 11:19:06 -08:00
Matt Nadareski
b17f8dac7a Start fixing Quantum with safeguards, hook up 2023-01-02 10:17:27 -08:00
Matt Nadareski
ec758e5c18 Finalize Quantum port to C# (pass 1) 2023-01-02 00:03:03 -08:00
Matt Nadareski
350f9630df Convert macros to methods, mostly 2023-01-01 23:45:13 -08:00
Matt Nadareski
557c760197 Move Quantum macro notes to Compression 2023-01-01 22:06:09 -08:00
Matt Nadareski
895f40414d Some Quantum things to models / compression 2023-01-01 22:02:54 -08:00
Matt Nadareski
96fbb38b1c Bring in the rest of CAB code commented 2023-01-01 21:34:56 -08:00
Matt Nadareski
650d01d7be Update README with SharpZipLib 2022-12-31 11:44:49 -08:00
Matt Nadareski
3977342a67 Fix empty-not-null data 2022-12-31 11:39:35 -08:00
Matt Nadareski
8a61f01e1b Handle last blocks more efficiently 2022-12-31 11:35:22 -08:00
Matt Nadareski
75a4371f36 Handle MS-CAB compression type masking better 2022-12-31 11:21:29 -08:00
Matt Nadareski
86ee4786a0 Add mostly-working MS-ZIP based on zlib 2022-12-31 10:57:49 -08:00
Matt Nadareski
ef710463ae Port more CAB code (nw, commented out) 2022-12-30 23:07:41 -08:00
Matt Nadareski
e6b153bcbd Add last models from headers 2022-12-30 21:25:46 -08:00
Matt Nadareski
9c9eb8ca7b Align to correct boundary, look for executables 2022-12-30 14:00:10 -08:00
Matt Nadareski
295f438ff1 Forgot the after_build tag 2022-12-30 11:15:06 -08:00
TheRogueArchivist
cc1ad3e690 Update Cenega ProtectDVD (#221)
* Add new export check for Cenega ProtectDVD.

* Update notes and add link to DRML.
2022-12-30 09:54:44 -08:00
Matt Nadareski
3ebb3822dd Revert changes to launch.json 2022-12-30 09:38:11 -08:00
Matt Nadareski
6e22bd4c8d Fix hidden resource parsing 2022-12-30 09:35:35 -08:00
Matt Nadareski
1027956892 Lock section names, scan for hidden resources 2022-12-30 09:09:42 -08:00
Matt Nadareski
a46d52ddbb Shout out DRML in README 2022-12-29 23:34:00 -08:00
Matt Nadareski
fd5e78eb8a Open the path to future .NET versions 2022-12-29 23:01:28 -08:00
Matt Nadareski
8a326cbb91 Fix build zipfile names 2022-12-29 22:15:55 -08:00
Matt Nadareski
09a7893021 Try to get better at artifacts 2022-12-29 22:01:07 -08:00
Matt Nadareski
9c10af58d8 Remove AnyCPU from configuration 2022-12-29 21:27:45 -08:00
Matt Nadareski
81fbe251ba Specify all files in publish directories 2022-12-29 21:22:08 -08:00
Matt Nadareski
5203f0ea57 Attempt to fix AppVeyor build issues 2022-12-29 21:13:42 -08:00
Matt Nadareski
51644c2178 Update to latest LibMSPackSharp 2022-12-29 21:11:12 -08:00
Matt Nadareski
f326a20019 Runtime identifiers, explicit .NET Framework 4.8 2022-12-29 21:06:14 -08:00
Matt Nadareski
daea4ea460 Add CrypKey PE content checks 2022-12-28 23:28:38 -08:00
Matt Nadareski
c9c14bcebf Add note of where to find implementation 2022-12-28 23:07:23 -08:00
Matt Nadareski
9dc21c01f1 Add Microsoft LZ-compressed files support 2022-12-28 22:54:56 -08:00
Matt Nadareski
555dbd592c Re-disable MS-CAB extraction on .NET 6.0 2022-12-28 22:31:46 -08:00
Matt Nadareski
5e027b75b2 Update README with CExe extraction support 2022-12-28 22:21:18 -08:00
Matt Nadareski
01a504dab7 Remove unused using 2022-12-28 22:20:32 -08:00
Matt Nadareski
70eeaaac28 Replace direct LZ ports with cleaned versions 2022-12-28 22:18:23 -08:00
Matt Nadareski
0397d529bb Add LZ decompression to CExe 2022-12-28 17:24:55 -08:00
Matt Nadareski
25bed747f2 First ported implementation of LZ 2022-12-28 17:24:30 -08:00
Matt Nadareski
d17b90e782 Fix constant strings 2022-12-28 17:15:42 -08:00
Matt Nadareski
7868e22a95 Fix constant strings 2022-12-28 17:10:42 -08:00
Matt Nadareski
81f6e2057e Add note for future work 2022-12-28 15:37:09 -08:00
Matt Nadareski
1c3e37ee2b Add and use XZP constants 2022-12-28 15:35:54 -08:00
Matt Nadareski
27756db621 Add and use WAD constants 2022-12-28 15:30:22 -08:00
Matt Nadareski
7f71b04ef7 Add and use VPK constants 2022-12-28 15:27:10 -08:00
Matt Nadareski
c48522e6c0 Add and use VBSP constants 2022-12-28 15:22:16 -08:00
Matt Nadareski
fbf629dd8b Add and use SGA constants 2022-12-28 15:17:34 -08:00
Matt Nadareski
404b2889ff Add SFFS constants 2022-12-28 15:13:38 -08:00
Matt Nadareski
5ccb9d16a8 Add and use PE constants 2022-12-28 15:09:31 -08:00
Matt Nadareski
66b562f24b Add and use PAK constants 2022-12-28 15:03:41 -08:00
Matt Nadareski
ef25b88717 Get CExe prepped for LZ 2022-12-28 15:01:59 -08:00
Matt Nadareski
f22f7273a9 Add and use MZ constants 2022-12-28 14:50:48 -08:00
Matt Nadareski
ea9902c946 Add and use MS-CAB constants 2022-12-28 14:40:40 -08:00
Matt Nadareski
051c38c6df Add and use MoPaQ constants 2022-12-28 14:39:22 -08:00
Matt Nadareski
6b832026b4 Add and use NE constants 2022-12-28 14:26:12 -08:00
Matt Nadareski
81d7151f8f Create new Compression library (nw) 2022-12-28 14:18:50 -08:00
Matt Nadareski
283672e909 Add and use NCF constants 2022-12-28 14:02:09 -08:00
Matt Nadareski
94dfba4b4f Add LZ models 2022-12-28 10:46:33 -08:00
Matt Nadareski
2b66efd11b Add LE/LX constants 2022-12-28 10:31:28 -08:00
Matt Nadareski
14fb3f5758 Add and use IS-CAB constants 2022-12-28 10:21:19 -08:00
Matt Nadareski
9354f0f092 Add and use GCF constants 2022-12-28 10:14:00 -08:00
Matt Nadareski
c3636a0743 Add and use BSP constants 2022-12-28 10:04:10 -08:00
Matt Nadareski
e07b66812c Add and use BFPK constants 2022-12-28 09:57:22 -08:00
Matt Nadareski
c1d231db60 Create Compression models subfolder 2022-12-28 09:47:25 -08:00
TheRogueArchivist
757ab1f228 Fix FreeLock False Positive (#219)
* Fix FreeLock False Positive
2022-12-27 23:30:46 -08:00
TheRogueArchivist
6e42e7fb6b Remove dummy Roxxe file (#217)
* Remove dummy Roxxe file. The notes have been moved to DRML.
2022-12-27 23:13:53 -08:00
TheRogueArchivist
92f88efd4e Fix false positives in Bitpool and LaserLok (#218)
* Fix false positives in Bitpool and LaserLok.
2022-12-27 23:13:47 -08:00
Matt Nadareski
7ba2194d97 Add CExe extraction (partial) 2022-12-27 23:12:52 -08:00
Matt Nadareski
a5f2e2f5c8 Use slightly different zlib port 2022-12-27 22:25:49 -08:00
Matt Nadareski
8658c24ef0 Be more overzealous when locking 2022-12-27 22:25:16 -08:00
Matt Nadareski
0cded076e4 Fix invocation of scannable classes 2022-12-27 22:11:01 -08:00
Matt Nadareski
135c0b6d38 Use slightly different zlib port 2022-12-27 21:47:56 -08:00
Matt Nadareski
769fe42a7a Add TAR skeleton models, for kicks 2022-12-27 21:46:42 -08:00
Matt Nadareski
ab6fcd73e0 Seal as many of the models as possible 2022-12-27 17:12:55 -08:00
Matt Nadareski
3278420d72 Create placeholder for IS-CAB info printing 2022-12-27 16:56:00 -08:00
Matt Nadareski
98395387a7 Add IS-CAB models and builder 2022-12-27 16:54:06 -08:00
TheRogueArchivist
180a097213 Remove dummy Alcatraz file (#215)
* Remove dummy Alcatraz file. The notes have been moved to DRML.
2022-12-27 11:00:17 -08:00
TheRogueArchivist
18ac00080a Move Bitpool comments into DRML (#213)
* Move Bitpool comments into DRML, tidying up BOS.
2022-12-27 11:00:00 -08:00
Matt Nadareski
4da713702a Add header matchers for Wise 2022-12-27 10:53:28 -08:00
Matt Nadareski
68be17de66 Fix SGA builder and wrapper 2022-12-27 10:19:11 -08:00
Matt Nadareski
18cdf9d7ed Use new SGA extraction 2022-12-27 01:07:46 -08:00
Matt Nadareski
c389ea1e49 Add SGA extraction 2022-12-27 00:55:24 -08:00
Matt Nadareski
1f65b0352d Add SGA wrapper 2022-12-26 23:22:03 -08:00
Matt Nadareski
0e63b6638c Do some more work on MSZIP (nw) 2022-12-26 21:39:52 -08:00
Matt Nadareski
fcda1f119b Split MS-CAB wrapper 2022-12-26 15:04:17 -08:00
Matt Nadareski
bb130849ee Fix build with missed changes 2022-12-26 14:52:12 -08:00
Matt Nadareski
7b209eec6c Update to latest LibMSPackSharp 2022-12-26 14:46:17 -08:00
Matt Nadareski
9e7a84d2d6 Remove generic "Valve" 2022-12-26 14:45:49 -08:00
Matt Nadareski
baf43ea307 Remove remnants of HLLibSharp 2022-12-26 14:42:41 -08:00
Matt Nadareski
f524f0da3e Remove HLLibSharp as submodule 2022-12-26 14:37:59 -08:00
Matt Nadareski
49781c47a9 Add executables to support table 2022-12-26 13:01:37 -08:00
Matt Nadareski
aa11ce807a Better container support matrix 2022-12-26 12:58:03 -08:00
Matt Nadareski
702115c55a Make interfaces public 2022-12-26 12:36:09 -08:00
Matt Nadareski
17cb1bf9b0 Add XZP wrapper, extraction, and use it 2022-12-26 12:33:58 -08:00
Matt Nadareski
590c4e0a23 Make extension checks better 2022-12-26 11:43:04 -08:00
Matt Nadareski
fd6196a880 Add WAD wrapper, extraction, and use it 2022-12-26 11:34:17 -08:00
Matt Nadareski
2875f7ff7a Add VBSP wrapper, extraction, and use it 2022-12-26 10:58:16 -08:00
Matt Nadareski
50fe127a8d Add PAK wrapper, extraction, and use it 2022-12-26 10:26:26 -08:00
Matt Nadareski
94ebe5b707 Add NCF wrapper 2022-12-25 23:33:52 -08:00
Matt Nadareski
dc3914e976 Use new GCF extraction code 2022-12-25 22:55:48 -08:00
Matt Nadareski
ef2f037909 Add GCF extraction 2022-12-25 22:53:01 -08:00
Matt Nadareski
374f286585 Add GCF wrapper 2022-12-25 21:27:06 -08:00
Matt Nadareski
34cd933f78 Reset launch.json 2022-12-24 22:31:21 -08:00
Matt Nadareski
f8d533e592 Use new BSP extraction code 2022-12-24 22:30:29 -08:00
Matt Nadareski
21263cf0fd Add BSP extraction 2022-12-24 22:02:30 -08:00
Matt Nadareski
1a62ac2006 Add BSP wrapper 2022-12-24 20:15:58 -08:00
Matt Nadareski
8f3d4d5fb2 Use new VPK extraction code 2022-12-24 15:31:38 -08:00
Matt Nadareski
81902455ff Fix VPK extraction 2022-12-24 15:25:56 -08:00
Matt Nadareski
fbb33d9ef8 Add VPK to info 2022-12-24 14:24:34 -08:00
Matt Nadareski
b025c7a7fa Add VPK wrapper (nw) 2022-12-24 13:49:03 -08:00
Matt Nadareski
d8aec5aa97 Migrate recent HLLib work to BOS
It's better suited to the BOS model than to an update to HLLibSharp
2022-12-24 12:57:10 -08:00
Matt Nadareski
3a862f343c Add headers and extensions for Valve 2022-12-23 23:27:14 -08:00
Matt Nadareski
69724cfb1c Sync to newest HLLibSharp 2022-12-23 23:13:45 -08:00
Matt Nadareski
f75cdea678 Sync to newest HLLibSharp 2022-12-23 23:04:40 -08:00
Matt Nadareski
32e650eff5 Update README comment 2022-12-23 15:46:42 -08:00
Matt Nadareski
ab022f9049 Sync to newest HLLibSharp 2022-12-23 15:26:48 -08:00
Matt Nadareski
7962e148fa Sync to newest HLLibSharp 2022-12-23 14:29:34 -08:00
Matt Nadareski
efdf3a0691 Sync to newest HLLibSharp 2022-12-23 13:13:57 -08:00
Matt Nadareski
000ab5e856 Sync to newest HLLibSharp 2022-12-23 12:36:48 -08:00
Matt Nadareski
4873133c92 Sync to newest HLLibSharp 2022-12-23 11:10:55 -08:00
Matt Nadareski
360bbef43a Remove redundant note in README 2022-12-22 22:31:45 -08:00
Matt Nadareski
9a21f4987d Sync to newest HLLibSharp 2022-12-22 22:24:13 -08:00
Matt Nadareski
63948767ef Use more granular file opening 2022-12-22 22:03:32 -08:00
Matt Nadareski
e2098f6f71 Disable StormLibSharp for .NET 6.0 2022-12-22 21:58:26 -08:00
Matt Nadareski
609c30da38 Sync to newest HLLibSharp 2022-12-22 21:51:33 -08:00
Matt Nadareski
0896842268 Builder -> Builders 2022-12-22 16:02:10 -08:00
Matt Nadareski
ec9506b9eb Sync to newest HLLibSharp 2022-12-22 15:53:50 -08:00
Matt Nadareski
526526975c Sync to newest HLLibSharp 2022-12-22 15:18:51 -08:00
Matt Nadareski
098734c471 Remove outdated note 2022-12-22 13:06:18 -08:00
Matt Nadareski
47df62534e Add back ActiveMARK entry point checks 2022-12-20 14:40:43 -08:00
Matt Nadareski
7295001892 Read entry point data in safe way 2022-12-20 14:19:48 -08:00
Matt Nadareski
7c820b7fd2 Start re-adding entry point 2022-12-20 13:03:25 -08:00
Matt Nadareski
36429cc1e9 Fix build 2022-12-20 12:21:34 -08:00
Matt Nadareski
a1c22ca9da Make a couple things consistent 2022-12-20 11:52:50 -08:00
Matt Nadareski
0c37932631 Cleanup and notes for 3P-Lock 2022-12-20 11:26:22 -08:00
Matt Nadareski
dbf1f6dcca Integrate changes from TheRogueArchivist 2022-12-19 23:53:52 -08:00
Matt Nadareski
7bb26c0faf Add PE checks to ByteShield 2022-12-19 23:49:34 -08:00
Matt Nadareski
31a0b55556 Add new info to developer guide 2022-12-19 23:28:12 -08:00
Matt Nadareski
64cc4785ca Add PE checks to SoftLock 2022-12-19 21:39:24 -08:00
Matt Nadareski
28391de50c Confirm SoftLock path checks 2022-12-19 20:49:10 -08:00
Matt Nadareski
b2ed69ab78 Add 7-zip SFX detection 2022-12-18 14:18:35 -08:00
Matt Nadareski
2f08940927 Simplify Gefest checks 2022-12-17 22:52:35 -08:00
Matt Nadareski
70928227e4 Fix launch.json 2022-12-17 22:02:12 -08:00
Matt Nadareski
546bd70418 Start fixing MSZIP decoding (nw) 2022-12-17 22:01:00 -08:00
Matt Nadareski
f2521a0110 Write MS-CAB file extraction (nw) 2022-12-16 23:11:06 -08:00
Matt Nadareski
02b83513a1 Hook up MSZIP decompression (nw) 2022-12-16 22:58:07 -08:00
Matt Nadareski
f26b2ff61b Migrate MSZIP to wrapper, where possible 2022-12-16 22:41:36 -08:00
Matt Nadareski
4d26535d07 Add BitStream type and add BitArray extensions 2022-12-16 21:59:46 -08:00
Matt Nadareski
01a365033e Migrate some Quantum stuff to models 2022-12-16 10:19:32 -08:00
Matt Nadareski
318a89a4bc Move some LZX things to models 2022-12-16 09:48:52 -08:00
Matt Nadareski
b20f22fb92 Subfolder it 2022-12-16 00:19:18 -08:00
Matt Nadareski
fedf76e534 Migrate some MSZIP pieces to Models 2022-12-15 23:51:12 -08:00
Matt Nadareski
e0e16292eb ASN.1 and OID to its own library 2022-12-15 22:07:12 -08:00
Matt Nadareski
b3c0e48bdd Address some MPQ issues 2022-12-15 16:50:24 -08:00
Matt Nadareski
aded5ee03a Stream safety and better streams 2022-12-15 14:20:27 -08:00
Matt Nadareski
16e71c910e Use MemoryStream in builders 2022-12-15 13:37:34 -08:00
Matt Nadareski
bbe234b459 Use MemoryStream in wrappers 2022-12-15 12:41:08 -08:00
Matt Nadareski
715b9eb156 Fix MS-CAB info 2022-12-15 12:20:06 -08:00
Matt Nadareski
4cc441afcf Get rid of code duplication 2022-12-15 00:13:24 -08:00
Matt Nadareski
f79cd759bd Add SFFS models, no encryption 2022-12-14 23:16:37 -08:00
Matt Nadareski
1b232e4405 Fix wrapper printing, add to info 2022-12-14 23:06:09 -08:00
Matt Nadareski
6d43afb258 Use wrapper in BFPK scans 2022-12-14 23:01:06 -08:00
Matt Nadareski
a47c778b0e Add BFPK wrapper 2022-12-14 22:58:18 -08:00
Matt Nadareski
ddb82842bc Add BFPK builder 2022-12-14 22:41:17 -08:00
Matt Nadareski
199914b19f Remove BinaryReader from BFPK 2022-12-14 22:28:35 -08:00
Matt Nadareski
9fadd84597 Add unused BFPK models 2022-12-14 22:25:35 -08:00
Matt Nadareski
95dd670c7c Add format note to SFFS, fix magic 2022-12-14 22:15:43 -08:00
Matt Nadareski
adc9def0c9 Slight MoPaQ builder cleanup 2022-12-14 22:07:11 -08:00
Matt Nadareski
8dcc9d9b0e Add BET/HET parsing to MoPaQ (nw) 2022-12-14 22:06:31 -08:00
Matt Nadareski
b5177b16ea Add hi-block table parsing to MoPaQ (nw) 2022-12-14 21:59:48 -08:00
Matt Nadareski
f9b4693aae Add block table parsing to MoPaQ (nw) 2022-12-14 21:48:22 -08:00
Matt Nadareski
f2a479e35c Update SafeDisc with better finding 2022-12-14 21:31:51 -08:00
Matt Nadareski
1f40c2e052 Fix locking exception 2022-12-14 21:30:53 -08:00
Matt Nadareski
b5c8d05814 Add CodeView debug parsing/finding 2022-12-14 21:07:02 -08:00
Matt Nadareski
f99634bc08 Add generic debug check to SafeDisc 2022-12-14 20:57:26 -08:00
Matt Nadareski
ab88e2f553 Add NB10 debug data type 2022-12-14 20:56:13 -08:00
Matt Nadareski
5465abe1ac Add RSDS debug data type 2022-12-14 20:47:18 -08:00
Matt Nadareski
b0df7a8f3b Add debug data to PE wrapper 2022-12-14 20:46:24 -08:00
Matt Nadareski
0d4fab100d Add PE table data/string caching 2022-12-14 17:24:14 -08:00
Matt Nadareski
8c5e10fd88 First attempt at MoPaQ hash table parsing 2022-12-14 17:03:34 -08:00
Matt Nadareski
e8aef1596b Merge branch 'master' of https://github.com/mnadareski/BurnOutSharp 2022-12-14 16:48:52 -08:00
Matt Nadareski
386c86f04f Add placeholders for parsing in MoPaQ 2022-12-14 16:47:45 -08:00
TheRogueArchivist
3f5a66f170 Add support for Gefest Protection System (#185)
* Add detection and notes for Gefest Protection System.

* Add Gefest Protection System to README.
2022-12-14 16:37:06 -08:00
TheRogueArchivist
a961d9534c Add SafeDisc "SD0XXX.dll" checks (#184)
* Add detection and notes for SafeDisc "SD0XXX.dll" files.
2022-12-14 16:35:54 -08:00
TheRogueArchivist
d1919c18f5 Add nProtect DRM (#183)
* Add nProtect DRM

* Add detection and notes for nProtect GameGuard and nProtect KeyCrypt.

* Add nProtect DRM to README.

* Fix missing "|" character.
2022-12-14 16:35:01 -08:00
Matt Nadareski
afa8b24ba9 Add MoPaQ printing skeleton 2022-12-14 16:33:26 -08:00
Matt Nadareski
b793b74b32 Add MoPaQ builder (nw) 2022-12-14 16:29:07 -08:00
Matt Nadareski
65499d1f46 Add MoPaQ models 2022-12-14 15:43:13 -08:00
Matt Nadareski
5f387cdb74 Migrate WIP MS-CAB to model/builder/wrapper 2022-12-14 15:23:54 -08:00
Matt Nadareski
ed2e88c781 Start filling out LZX 2022-12-14 13:57:54 -08:00
Matt Nadareski
1cb3157110 Add more notes, including Quantum 2022-12-14 13:17:29 -08:00
TheRogueArchivist
a480b53787 CD-Cops is a mess why don't we have more samples (#182)
* Confirm CD-Cops check.

* Add a few notes for CD-Cops.
2022-12-14 12:12:12 -08:00
TheRogueArchivist
405c895352 Add support for CD-Guard (#181)
* why is there so much DRM I am going to cry

* Add support for detecting CD-Guard, as well as notes.

* Add CD-Guard to README.

* Address PR comments

* Ignore case in import/export checks.
2022-12-14 12:11:23 -08:00
Matt Nadareski
53dc251a0c Continue with deflate 2022-12-14 12:10:59 -08:00
Matt Nadareski
d715072cbc Start writing Inflate implementation 2022-12-14 10:55:56 -08:00
Matt Nadareski
aaee56f44e Start adding MSZIP notes 2022-12-14 00:05:49 -08:00
Matt Nadareski
27ceb4ed48 Streamline MS-CAB reading, add to info printing 2022-12-13 23:06:49 -08:00
Matt Nadareski
2d51bd8f37 Implement MS-CAB checksum routine 2022-12-13 22:11:19 -08:00
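For reference, the MS-CAB data-block checksum XORs the block into a seed as 32-bit little-endian words, then folds in any trailing bytes. A C# sketch modeled on libmspack's cabd_checksum (the library these commits port from), not necessarily this repo's exact code:

```csharp
// XOR-based MS-CAB block checksum, after libmspack's cabd_checksum.
static uint Checksum(byte[] data, uint seed)
{
    uint csum = seed;
    int i = 0;

    // Fold in whole 4-byte little-endian words.
    for (; i + 4 <= data.Length; i += 4)
        csum ^= (uint)(data[i] | data[i + 1] << 8 | data[i + 2] << 16 | data[i + 3] << 24);

    // Fold in the remaining 1-3 bytes, highest byte first.
    uint tail = 0;
    switch (data.Length & 3)
    {
        case 3: tail |= (uint)data[i++] << 16; goto case 2;
        case 2: tail |= (uint)data[i++] << 8; goto case 1;
        case 1: tail |= data[i]; break;
    }
    return csum ^ tail;
}
```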
Matt Nadareski
645a366dc5 Split MS-CAB into subfiles, LibMSPackSharp for .NET 6 2022-12-13 21:51:24 -08:00
Matt Nadareski
756a74eda6 Disable printing by default again 2022-12-13 21:13:20 -08:00
Matt Nadareski
8052ee2afb Some PE resource handling cleanup 2022-12-13 21:05:52 -08:00
Matt Nadareski
6171c0defd Fix SFFS not being scanned 2022-12-13 11:55:12 -08:00
TheRogueArchivist
56c27d0b8f Why is there so much StarForce, geez (#180)
* Begin work on overhauling StarForce detection, and to add notes.

* Attempt to add SFFS file detection.

* Fix minor TAGES issue.
2022-12-13 11:42:55 -08:00
Matt Nadareski
9c173fd3a1 Update extension method for AddD section 2022-12-12 21:40:29 -08:00
Matt Nadareski
fa3ccf9953 Add more SecuROM AddD section notes 2022-12-12 21:25:00 -08:00
Matt Nadareski
229e645c8e Bump version 2022-12-12 13:35:36 -08:00
Matt Nadareski
b4606a8b84 Merge pull request #179 from TheRogueArchivist/TTR-Subchannel-Notes
Add notes about various TTR protections
2022-12-12 00:11:20 -08:00
SilasLaspada
d11782a87b Add notes about various TTR protections
* Add notes about DiscGuard's use of subchannels.

* Add reference to DiscAudit and MusicGuard.
2022-12-12 00:13:22 -07:00
Matt Nadareski
4faaf3d251 Add notes to README 2022-12-11 15:42:39 -08:00
Matt Nadareski
89dea30211 Update everything to support .NET 6.0 correctly 2022-12-11 15:32:09 -08:00
Matt Nadareski
7563634f53 Add coding guide 2022-12-11 14:55:29 -08:00
Matt Nadareski
d74dcc1d26 Proofreading 2022-12-11 01:28:17 -08:00
Matt Nadareski
cd75fd0329 Add developer guide 2022-12-11 01:09:49 -08:00
Matt Nadareski
0390ee3363 Don't print section table on debug now 2022-12-10 22:42:03 -08:00
Matt Nadareski
64f52698c3 Add extension to convert byte array 2022-12-10 22:36:02 -08:00
Matt Nadareski
8bd4a87f0b Add SecuROM AddD overlay data models 2022-12-10 22:26:42 -08:00
Matt Nadareski
4ad7c60443 Add overlay string finding 2022-12-10 20:10:25 -08:00
Matt Nadareski
d1a6c9be00 Add header padding strings 2022-12-10 17:31:41 -08:00
Matt Nadareski
3c3fd1be50 Add more guards to PE builder 2022-12-10 14:21:37 -08:00
Matt Nadareski
92c5745ac8 Remove obsolete PE helper method 2022-12-10 14:11:25 -08:00
Matt Nadareski
792de5cda2 Fix Macrovision and SafeCast checks 2022-12-10 14:11:10 -08:00
Matt Nadareski
bfe2381896 Slight tweaks and additions to AegiSoft 2022-12-09 22:48:23 -08:00
Matt Nadareski
ded5ecb3ed Fix broken COFF string table reading 2022-12-09 22:37:35 -08:00
Matt Nadareski
7895b40a77 Convert LabelGate to string finding 2022-12-09 22:36:21 -08:00
Matt Nadareski
3f2319093f Partially convert SolidShield to string finding 2022-12-09 21:54:21 -08:00
Matt Nadareski
3806b383c9 Convert SmartE to string finding 2022-12-09 21:40:25 -08:00
Matt Nadareski
84a65e998f Partially convert ActiveMARK to string finding 2022-12-09 21:37:33 -08:00
Matt Nadareski
88049affb5 Add note to Macrovision 2022-12-09 21:36:12 -08:00
Matt Nadareski
5fd59b9eac Convert WTM to string finding 2022-12-09 21:34:00 -08:00
Matt Nadareski
8288cc7f96 Convert Wise Installer to string finding 2022-12-09 21:30:37 -08:00
Matt Nadareski
063b643597 Convert Themida to string finding 2022-12-09 21:28:21 -08:00
Matt Nadareski
ff4214877e Convert Sysiphus to string finding 2022-12-09 21:25:19 -08:00
Matt Nadareski
847fa7d2ad Partially convert SecuROM to string finding 2022-12-09 21:22:55 -08:00
Matt Nadareski
886284b42f Convert Rainbow Sentinel to string finding 2022-12-09 20:56:27 -08:00
Matt Nadareski
46776b461b Partially convert ProtectDISC to string finding 2022-12-09 20:53:25 -08:00
Matt Nadareski
e0150d7bb5 Convert NSIS to string finding 2022-12-09 15:18:17 -08:00
Matt Nadareski
ad16260c53 Convert Microsoft CAB SFX to string finding 2022-12-09 15:13:46 -08:00
Matt Nadareski
3cfe9138a5 Convert MediaMax CD-3 to string finding 2022-12-09 15:10:23 -08:00
Matt Nadareski
9e43babe0c Convert SafeCast to string finding 2022-12-09 15:06:17 -08:00
Matt Nadareski
4efbf54edf Convert C-Dilla to string finding 2022-12-09 15:04:14 -08:00
Matt Nadareski
37121a1fd2 Convert Installer VISE to string finding 2022-12-09 15:01:23 -08:00
Matt Nadareski
8a77a8a009 Simplify PE section helper methods 2022-12-09 14:59:31 -08:00
Matt Nadareski
1aef137cb2 Convert Inno Setup to string finding 2022-12-09 14:58:12 -08:00
Matt Nadareski
86707b28c0 Convert Impulse Reactor to string finding 2022-12-09 14:36:43 -08:00
Matt Nadareski
5e42bae77f Convert Gentee Installer to string finding 2022-12-09 14:22:59 -08:00
Matt Nadareski
9396804379 Convert EA to string finding 2022-12-09 14:21:18 -08:00
Matt Nadareski
5613cf9aae Convert dotFuscator to string finding 2022-12-09 14:17:01 -08:00
Matt Nadareski
ff44c717a2 Convert Code-Lock to string finding 2022-12-09 13:55:28 -08:00
Matt Nadareski
42faeb1402 Convert CDSHiELD SE to string finding 2022-12-09 13:41:09 -08:00
Matt Nadareski
7c243ac6ff Convert CDS to string finding 2022-12-09 13:09:27 -08:00
Matt Nadareski
85c6353cba Convert Armadillo to string finding 2022-12-09 12:53:19 -08:00
Matt Nadareski
c7d049efe2 Convert Alpha-ROM to string finding 2022-12-09 12:49:22 -08:00
Matt Nadareski
97aabdca33 Convert Advanced Installer to string finding 2022-12-09 12:35:58 -08:00
Matt Nadareski
9562a0eaf7 Convert XCP to string finding 2022-12-09 12:33:12 -08:00
Matt Nadareski
04bffd4889 Add string finding to wrapper base 2022-12-09 12:32:44 -08:00
Matt Nadareski
a9c71ced47 Add skeleton for section string finding 2022-12-09 11:53:58 -08:00
Matt Nadareski
a03bf60ca5 Tweak LaserLok check to pre-screen 2022-12-09 11:21:27 -08:00
Matt Nadareski
02ee94f732 Reduce WMDS checks 2022-12-09 11:18:07 -08:00
Matt Nadareski
0a5ffd247c Slightly update JoWooD 2022-12-09 11:11:18 -08:00
Matt Nadareski
3f4f6a2d07 Update C-Dilla checks 2022-12-09 10:37:09 -08:00
Matt Nadareski
0d22f78b10 Remove standalone ExecutableTest 2022-12-08 23:40:39 -08:00
Matt Nadareski
ca1e3e8e63 Port ExecutableTest to Test 2022-12-08 23:38:28 -08:00
Matt Nadareski
debb7cde2d Start preparing ExecutableTest 2022-12-08 23:31:48 -08:00
Matt Nadareski
40c35a5d50 Update Nuget packages 2022-12-08 23:01:28 -08:00
Matt Nadareski
971f769031 Update project files 2022-12-08 22:53:07 -08:00
Matt Nadareski
a1a3adfafa Move psxt001z to own library 2022-12-08 22:50:42 -08:00
Matt Nadareski
912b49ecc4 Cleanup ProtectionProgress 2022-12-08 22:37:57 -08:00
Matt Nadareski
dcccd6e313 Remove System.Diagnostics dependency 2022-12-08 22:33:02 -08:00
Matt Nadareski
005529f959 Add stream support to Matching 2022-12-08 22:27:17 -08:00
Matt Nadareski
3d5904c997 Move Matching to own library 2022-12-08 22:07:14 -08:00
Matt Nadareski
dcc8915dd2 FileTypes -> SupportedFileType 2022-12-08 21:46:22 -08:00
Matt Nadareski
4a589603e4 Clarify the IScannable description 2022-12-08 21:43:39 -08:00
Matt Nadareski
4f16b9d9e8 Fix misnamed parameter in IScannable 2022-12-08 21:42:24 -08:00
Matt Nadareski
ce7ecc78cc Remove now-useless ShouldScan, part 2 2022-12-08 21:41:33 -08:00
Matt Nadareski
9c8a677f13 Remove now-useless ShouldScan 2022-12-08 21:37:11 -08:00
Matt Nadareski
39f2dd88aa Make it easier to support new file types 2022-12-08 21:32:52 -08:00
Matt Nadareski
3b4929a368 Invert if in ProtectDisc check for clarity 2022-12-08 17:19:42 -08:00
Matt Nadareski
f290b8462d Fix getting last section for SmartE 2022-12-08 17:18:09 -08:00
Matt Nadareski
5ce4f3be56 Clean up awkward SolidShield check 2022-12-08 17:16:47 -08:00
Matt Nadareski
a5158f04db Add ActiveMARK resource check 2022-12-08 17:16:12 -08:00
Matt Nadareski
2aa9f088a4 Add WTM legal trademarks check 2022-12-08 17:06:36 -08:00
Matt Nadareski
bb37c3f94c Add missing commented out print 2022-12-08 17:06:05 -08:00
Matt Nadareski
92b3f14d7b Add SolidShield import directory table checks 2022-12-08 16:53:43 -08:00
Matt Nadareski
2dcd30e346 Make unknown resource printing better 2022-12-08 16:40:04 -08:00
Matt Nadareski
c4ed59dc46 Replace 2 SecuROM PA checks 2022-12-08 16:38:56 -08:00
Matt Nadareski
81141fb2fa Fix Impulse Reactor 2022-12-08 16:08:58 -08:00
Matt Nadareski
58c6f00a6c Add another embedded resource check 2022-12-08 16:01:53 -08:00
Matt Nadareski
c1f4e42219 Convert one MediaMax CD-3 check 2022-12-08 16:01:30 -08:00
Matt Nadareski
02b16843e5 Add note to LaserLok 2022-12-08 15:46:01 -08:00
Matt Nadareski
86bfcc15f9 Add note to JoWooD 2022-12-08 15:37:11 -08:00
Matt Nadareski
4fab4e71f7 Convert one Impulse Reactor check 2022-12-08 15:24:53 -08:00
Matt Nadareski
b230462860 Update GFWL with new framework 2022-12-08 15:16:43 -08:00
Matt Nadareski
5f49b56c3d Add a couple more named version fields 2022-12-08 14:59:15 -08:00
Matt Nadareski
51482dd225 Rename Code-Lock file 2022-12-08 14:55:07 -08:00
Matt Nadareski
c1cb1a41a9 Add note to CDSHiELD SE 2022-12-08 14:53:59 -08:00
Matt Nadareski
b496c79b34 Remove previously added note 2022-12-08 14:49:39 -08:00
Matt Nadareski
fcad7db5ab Add note to CD-Lock 2022-12-08 14:44:15 -08:00
Matt Nadareski
e1bd26f712 Add undocumented version info keys 2022-12-08 14:40:30 -08:00
Matt Nadareski
0518184786 Make CD-Check simpler 2022-12-08 14:36:43 -08:00
Matt Nadareski
576e234216 Remove last explicit .rsrc section read 2022-12-08 14:22:32 -08:00
Matt Nadareski
3c750dac86 Clean up most of the StarForce rsrc checks 2022-12-08 14:01:12 -08:00
Matt Nadareski
714cee586d Make printing methods for all PE resources 2022-12-08 14:00:29 -08:00
Matt Nadareski
cf9cc30d5e Handle casting exceptions 2022-12-08 13:13:16 -08:00
Matt Nadareski
713b1c83e1 Fix yet more string data reading 2022-12-08 10:38:46 -08:00
Matt Nadareski
ddf289d747 Force reading name table as ASCII 2022-12-08 10:17:26 -08:00
Matt Nadareski
ae1edf0f21 Sync resource building in byte and stream paths 2022-12-08 10:13:22 -08:00
Matt Nadareski
38665ddbf3 Add note to Wise installer 2022-12-08 00:07:11 -08:00
Matt Nadareski
2af0f166eb Update WinRAR SFX with new framework 2022-12-07 23:56:38 -08:00
Matt Nadareski
aeaeff28d3 Better alignment in PE resources 2022-12-07 23:56:08 -08:00
Matt Nadareski
28948a0511 Add back truncation 2022-12-07 23:09:18 -08:00
Matt Nadareski
52d190e339 Rewrite WinZip SFX PE checks 2022-12-07 22:52:49 -08:00
Matt Nadareski
d47707c433 Add some CD-Cops notes 2022-12-06 22:31:46 -08:00
Matt Nadareski
11bc46ae86 Update WZ-SFX NE checks a little 2022-12-06 21:04:52 -08:00
Matt Nadareski
d229b23ea6 Fix CD-Cops table checks 2022-12-06 21:04:08 -08:00
Matt Nadareski
41dc7f7b14 Add notes for NE Inno detection 2022-12-05 20:44:34 -08:00
Matt Nadareski
dbd8b14cd2 Convert arbitrary data read for NE WZ-SFX 2022-12-05 19:57:57 -08:00
Matt Nadareski
3af8adb067 Add unversioned CD-Cops NE checks 2022-12-05 17:05:41 -08:00
Matt Nadareski
5baa470d54 Use header padding data for UPX 2022-12-05 16:50:18 -08:00
Matt Nadareski
1f5d5215f7 Clean up SafeDisc checks, add header padding check 2022-12-05 15:58:44 -08:00
Matt Nadareski
24c77ecd07 Add header padding data for searching 2022-12-05 15:56:37 -08:00
Matt Nadareski
2264fc0172 Fix IIF product name check 2022-12-05 15:46:56 -08:00
Matt Nadareski
00028ac59b Add notes to EXE Stealth 2022-12-05 14:11:52 -08:00
Matt Nadareski
ad5735a559 Update printing for import tables 2022-12-05 13:57:07 -08:00
Matt Nadareski
9411f5044a Fix import table parsing 2022-12-05 13:52:15 -08:00
Matt Nadareski
7293d55239 Fix PlayJ description check 2022-12-05 12:57:54 -08:00
Matt Nadareski
31dc347df4 Merge pull request #178 from mnadareski/new-exe-framework
Use new executable framework for protection checks
2022-12-05 11:30:54 -08:00
Matt Nadareski
d72d0d4dc2 Clean up printing methods 2022-12-05 11:01:22 -08:00
Matt Nadareski
7181dc9d5b Fix formatting/tagging 2022-12-05 10:47:17 -08:00
Matt Nadareski
95fa8681fe Make SmartE checks even simpler 2022-12-05 10:21:15 -08:00
Matt Nadareski
3a63755d96 Simplify section printing 2022-12-05 10:06:37 -08:00
Matt Nadareski
5390970054 Fix segment validity checks 2022-12-05 10:04:37 -08:00
Matt Nadareski
2b43f2b261 Add notes to SmartE 2022-12-04 23:21:18 -08:00
Matt Nadareski
4330cd1aac Fix WTM code/CODE section 2022-12-04 23:20:22 -08:00
Matt Nadareski
fc9dd8a34d Add base relocation table to passthrough 2022-12-04 23:17:34 -08:00
Matt Nadareski
c64abc15c9 Move .rsrc StarForce checks to new file 2022-12-04 23:00:30 -08:00
Matt Nadareski
fca12c639c Update CDS PlayJ check 2022-12-04 22:44:59 -08:00
Matt Nadareski
a1522aabd6 Add relocation real address 2022-12-04 22:37:59 -08:00
Matt Nadareski
9be4b339f8 Add PE base relocation table parsing and printing 2022-12-04 22:32:41 -08:00
Matt Nadareski
ce1c74aec3 Update DiscGuard 2022-12-04 21:11:55 -08:00
Matt Nadareski
e824428e0f String tables are always Unicode 2022-12-04 13:38:04 -08:00
Matt Nadareski
2fd4a8a9b1 Fix launch.json 2022-12-04 00:41:56 -08:00
Matt Nadareski
82de7e8b8e Off by one 2022-12-03 23:58:20 -08:00
Matt Nadareski
26831b4732 Both, both is good 2022-12-03 23:48:48 -08:00
Matt Nadareski
a6862925ca Use endOffset instead of EOF 2022-12-03 23:46:19 -08:00
Matt Nadareski
1e2ce169af Fix unaligned end-of-file certificates 2022-12-03 23:41:42 -08:00
Matt Nadareski
dda9b3551a Cast certificate length more safely 2022-12-03 23:32:11 -08:00
Matt Nadareski
b1760d3541 Fix version printing 2022-12-03 23:30:32 -08:00
Matt Nadareski
2c1e087bc6 Include negative numbers 2022-12-03 23:23:55 -08:00
Matt Nadareski
90d5bd52a2 Check more EA resources 2022-12-03 23:17:29 -08:00
Matt Nadareski
26db75853b Fix malformed PE certificates 2022-12-03 23:09:45 -08:00
Matt Nadareski
fe5a674518 Add more safety around resource finding 2022-12-03 22:55:03 -08:00
Matt Nadareski
2fe56cd6af Register encoding provider in scanner 2022-12-03 22:47:57 -08:00
Matt Nadareski
f26e82d2bc Fix some PE resource caching issues 2022-12-03 22:47:32 -08:00
Matt Nadareski
65892f067a Merge remote-tracking branch 'origin' into new-exe-framework 2022-12-03 22:29:12 -08:00
Matt Nadareski
768717d7b3 Remove old executable framework 2022-12-03 22:28:03 -08:00
Matt Nadareski
f78b3daf8b Attempt to use new executable framework 2022-12-03 22:17:48 -08:00
Matt Nadareski
8a6f481118 Fix indexed section data read 2022-12-03 22:13:17 -08:00
Matt Nadareski
f420434fd3 Handle overlay data better 2022-12-03 21:59:21 -08:00
Matt Nadareski
8e73d7970f Fix PE data locking, add offset read helper 2022-12-03 21:37:32 -08:00
Matt Nadareski
9699af93bc Add temporary helper method for NE 2022-12-03 21:37:05 -08:00
Matt Nadareski
44ca0a94b7 Better PE wrapper section handling 2022-12-03 20:56:06 -08:00
Matt Nadareski
b3bf008e31 Fix NE table printing 2022-12-03 14:44:18 -08:00
Matt Nadareski
ce5e2982d2 Fix resource entry handling 2022-12-03 14:22:54 -08:00
Matt Nadareski
7d2edd315c Add stub data to cache, resource/section finding 2022-12-03 13:17:29 -08:00
Matt Nadareski
8ae0452873 Fix reading 0-length string data 2022-12-03 13:08:10 -08:00
Matt Nadareski
e0efc0d9ab Cache PE overlay data 2022-12-02 22:44:55 -08:00
Matt Nadareski
3ce3b7ca2b Cache PE resource, version, manifest 2022-12-02 22:24:22 -08:00
Matt Nadareski
6997608b63 Split printing into methods, add notes 2022-12-02 21:20:52 -08:00
Matt Nadareski
cee7f12974 Add printing regions 2022-12-02 20:09:55 -08:00
Matt Nadareski
5b4a8d5775 Create wrapper base class, PE raw reads 2022-12-02 20:05:20 -08:00
Matt Nadareski
a59bedec5d Add data source to all wrappers, add note 2022-12-02 17:19:59 -08:00
Matt Nadareski
3c5d670924 Add printing to wrappers, remove from test exe 2022-12-02 16:39:49 -08:00
Matt Nadareski
7bab251915 Add PE passthrough properties 2022-12-02 16:16:12 -08:00
Matt Nadareski
57e47eee5d Add LE passthrough properties 2022-12-02 15:58:06 -08:00
Matt Nadareski
4f09c57755 Add NE passthrough properties 2022-12-02 15:44:33 -08:00
Matt Nadareski
7c1edab6ca Add DOS stub passthrough for LE/NE/PE 2022-12-02 15:35:10 -08:00
Matt Nadareski
f24004c949 Add proof-of-concept MS-DOS wrapper 2022-12-02 15:29:10 -08:00
Matt Nadareski
c4bf3931e2 Add skeletons for all wrappers 2022-12-02 15:20:44 -08:00
Matt Nadareski
fe13562f3e Add notes about ByteShield (TheRogueArchivist) 2022-12-02 15:06:31 -08:00
Matt Nadareski
64334d72ea Improve SolidShield detection
- Add new SolidShield executable and file checks.
- Fix false positives in file name checks due to not using a directory separator in the check.
- Add a few notes and reorganize slightly.
2022-12-02 15:02:45 -08:00
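To illustrate the directory-separator fix in the SolidShield commit above: a raw suffix test on the full path also matches longer file names, while comparing the file name component itself cannot. A C# sketch; the file name is chosen for illustration:

```csharp
using System;
using System.IO;

static class FileNameCheck
{
    // Buggy form: also fires on names like "windvm.dll".
    public static bool NaiveMatch(string path)
        => path.EndsWith("dvm.dll", StringComparison.OrdinalIgnoreCase);

    // Fixed form: comparing the file name component implies a directory
    // separator immediately before the name.
    public static bool Match(string path)
        => Path.GetFileName(path).Equals("dvm.dll", StringComparison.OrdinalIgnoreCase);
}
```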
Matt Nadareski
a915980187 Improve SafeDisc detection
- Add support for detecting 4.60.000's drvmgt.
- Add version checks for Diag.exe to remove one case of "SafeCast/SafeDisc" ambiguity.
2022-12-02 14:59:26 -08:00
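The Diag.exe disambiguation above likely keys off the executable's embedded version resource; a hedged sketch using .NET's FileVersionInfo (the version boundary below is a placeholder, not the real cutoff):

```csharp
using System.Diagnostics;

static class DiagExeCheck
{
    // Placeholder logic: branch on Diag.exe's file version to split the
    // ambiguous "SafeCast/SafeDisc" result into a specific product.
    public static string Classify(string diagExePath)
    {
        var info = FileVersionInfo.GetVersionInfo(diagExePath);
        return info.FileMajorPart >= 4 ? "SafeDisc" : "SafeCast/SafeDisc";
    }
}
```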
Matt Nadareski
af882fa588 Properly differentiate between Code-Lock and CopyLok (TheRogueArchivist) 2022-12-02 14:56:08 -08:00
Matt Nadareski
7fcaa16835 Add first Themida check (TheRogueArchivist) 2022-12-02 14:52:28 -08:00
Matt Nadareski
4d640f3cf2 Add Wrapper skeleton project 2022-12-02 14:44:06 -08:00
Matt Nadareski
25d495b1d0 ASN.1 OID parsing (nw) 2022-12-02 14:18:15 -08:00
Matt Nadareski
7fd936c4a8 Handle empty resource name strings 2022-12-02 14:15:50 -08:00
Matt Nadareski
fe753fc4fd Try to fix null resource types issue 2022-12-02 14:15:44 -08:00
Matt Nadareski
0a4763fcc1 Fix PE dialog item printing 2022-12-02 14:15:37 -08:00
Matt Nadareski
e281faf664 Add first attempt at PE certificate parsing 2022-11-12 21:56:24 -08:00
Matt Nadareski
dcb291c1c6 Disable printing raw PE cert data 2022-11-11 16:23:25 -08:00
Matt Nadareski
ecd1c93bb9 Add PE message resource printing, fix parsing 2022-11-11 16:20:17 -08:00
Matt Nadareski
eeb555a6ce Add PE message resource data to parser 2022-11-11 15:52:05 -08:00
Matt Nadareski
27d53abd10 Add PE message resource models 2022-11-11 15:41:37 -08:00
Matt Nadareski
91eef55173 Fix tiny formatting issue 2022-11-11 15:33:41 -08:00
Matt Nadareski
f9e1518da6 Add PE menu resource reading and writing 2022-11-11 15:31:00 -08:00
Matt Nadareski
5b974260cc Add PE extended dialog templates 2022-11-11 14:22:53 -08:00
Matt Nadareski
554374b710 Add PE dialog template extended 2022-11-11 14:12:03 -08:00
Matt Nadareski
475669ac1b Add PE standard dialog parsing and writing 2022-11-11 13:56:23 -08:00
Matt Nadareski
623d1e6a40 Invert PE "if" logic where possible 2022-11-11 10:08:15 -08:00
Matt Nadareski
08fa4a997f Fix PE resource data entry printing 2022-11-11 09:58:50 -08:00
Matt Nadareski
4e21cf8494 Make PE RVA checks simpler 2022-11-11 09:58:19 -08:00
Matt Nadareski
2ebbda6852 Don't trust PE sections 2022-11-10 23:38:59 -08:00
Matt Nadareski
010a6d6e42 Safeguard all PE virtual address uses 2022-11-10 23:06:21 -08:00
Matt Nadareski
3b1481879a Fix PE debug table parsing 2022-11-10 22:39:10 -08:00
Matt Nadareski
3ddcc3884b Fix PE printing typo 2022-11-10 22:23:36 -08:00
Matt Nadareski
260ab1ec89 Safeguard PE invalid virtual addresses 2022-11-10 22:19:58 -08:00
Matt Nadareski
69803a999f Fix PE virtual address for section-aligned RVAs 2022-11-10 22:09:58 -08:00
Matt Nadareski
d4a75ed871 Reorganize PE notes for delay-load 2022-11-10 21:41:42 -08:00
Matt Nadareski
7394f14218 Add missing PE notes for sections 2022-11-10 21:38:52 -08:00
Matt Nadareski
23cd7b9ebd Add note for PE .drectve section 2022-11-10 21:32:01 -08:00
Matt Nadareski
477cfee78e Add PE debug section printing 2022-11-10 21:29:17 -08:00
Matt Nadareski
750cecfdaf Add PE partial debug table parsing 2022-11-10 21:24:28 -08:00
TheRogueArchivist
32a28fba32 Add more checks for Rainbow Sentinel (#171)
* Add more checks for Rainbow Sentinel.

* Make comments more consistent.

* Add more notes on versions.
2022-11-10 16:23:10 -08:00
Matt Nadareski
fe926cbf9a Rewrite PE accelerator table extension 2022-11-10 13:09:23 -08:00
Matt Nadareski
d18e65ca6c Add PE debug directory skeleton, notes 2022-11-10 12:57:41 -08:00
Matt Nadareski
ec67ca605c Fix PE virtual directory size issues 2022-11-10 12:57:19 -08:00
Matt Nadareski
9cb3c963a1 Add PE .sxdata section notes 2022-11-10 12:42:34 -08:00
Matt Nadareski
8a4caf82bb Add PE .coremeta section note 2022-11-10 12:40:18 -08:00
Matt Nadareski
7a5941cfa9 Add PE import table printing 2022-11-10 12:16:48 -08:00
Matt Nadareski
690c49ae1f Fix PE import table parsing 2022-11-10 11:58:46 -08:00
Matt Nadareski
c77c095893 Add initial PE import table parsing 2022-11-10 11:31:06 -08:00
Matt Nadareski
98ddc65fa2 Add PE import table to model 2022-11-10 10:10:12 -08:00
Matt Nadareski
41a7c71b7d Fix PE bitmasks 2022-11-10 00:06:29 -08:00
Matt Nadareski
cb1d3d1db4 Add PE export table to printing 2022-11-09 23:27:06 -08:00
Matt Nadareski
5ba2a31d7d Add PE export table to builder 2022-11-09 23:06:52 -08:00
Matt Nadareski
0768a93bcb Fix Stream ReadString extension 2022-11-09 23:04:07 -08:00
Matt Nadareski
e690c6d0ff Add PE .edata components (not hooked up)
This also does a pretty major cleanup of TODOs
2022-11-09 22:23:40 -08:00
Matt Nadareski
0c6bf406c1 Fix PE delay-load directory 2022-11-09 22:02:38 -08:00
Matt Nadareski
95b5f12226 Add PE grouped sections note 2022-11-09 21:55:15 -08:00
Matt Nadareski
5b4b622834 Add PE delay-load directory to printing 2022-11-09 21:50:36 -08:00
Matt Nadareski
b908b77a34 Add PE delay-load directory table to builder 2022-11-09 21:47:39 -08:00
Matt Nadareski
dbba310385 Add console print to PE string table parsing 2022-11-09 21:32:36 -08:00
Matt Nadareski
0a0ca9ba93 Be slightly safer on PE string table parsing 2022-11-09 21:31:40 -08:00
Matt Nadareski
8aa574a7c4 Add PE COFF string table printing 2022-11-09 21:28:00 -08:00
Matt Nadareski
37ac8c038f Add PE COFF string table to builder 2022-11-09 21:22:29 -08:00
Matt Nadareski
9b6456a80f Register encoding provider for ExecutableTest 2022-11-09 21:09:37 -08:00
Matt Nadareski
f6ffd314b1 Add PE attribute certificate table printing 2022-11-09 21:08:33 -08:00
Matt Nadareski
b569c6a6dd Add PE certificate attribute table to builder 2022-11-09 21:02:02 -08:00
Matt Nadareski
c84f416973 Add better TODO 2022-11-09 20:44:14 -08:00
Matt Nadareski
6ebc476d2b Remove partially completed TODO 2022-11-09 20:43:47 -08:00
Matt Nadareski
98c340d94d Add unused PE font group parser 2022-11-09 20:39:20 -08:00
Matt Nadareski
78d80918aa Remove incorrect console statement 2022-11-09 20:38:37 -08:00
Matt Nadareski
e8d7d6b4e7 Print nonstandard PE manifest items 2022-11-09 20:07:03 -08:00
Matt Nadareski
53341b0dc0 Add remaining unused PE manifest types 2022-11-09 20:04:06 -08:00
Matt Nadareski
f64c7d81ad Add full PE assembly manifest printing 2022-11-09 19:59:39 -08:00
Matt Nadareski
197de59089 Add PE assembly manifest deserialization 2022-11-09 19:09:30 -08:00
Matt Nadareski
13eb37cc46 Add full PE file info printing 2022-11-09 16:17:40 -08:00
Matt Nadareski
c21c0ff411 Add PE version info parsing 2022-11-09 15:53:40 -08:00
Matt Nadareski
72f6af7019 Add PE version resource models 2022-11-09 15:17:53 -08:00
Matt Nadareski
6b14321505 Clean up PE string table parsing 2022-11-09 14:27:32 -08:00
Matt Nadareski
4fcb719613 Start adding PE cursor and icon resources 2022-11-09 14:19:23 -08:00
Matt Nadareski
50915d9100 Clean up PE accelerator table 2022-11-09 14:19:14 -08:00
Matt Nadareski
834792bc2d Print NE string table resources 2022-11-09 14:18:59 -08:00
Matt Nadareski
04b225711f Better add TODOs to PE resource printing 2022-11-09 13:29:30 -08:00
Matt Nadareski
eee4a75353 Remove PE resource header writing
This was incorrectly assuming all resources had this header; in fact, only a few do. Another debug statement, printing the data out as Unicode characters, helped solve this.
2022-11-09 13:22:07 -08:00
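
A minimal C# sketch of the fix described above (the type and member names are illustrative, not BurnOutSharp's actual API): only a few resource kinds carry a per-resource header, so writing one is gated on the kind rather than done unconditionally.

```csharp
using System.Linq;

// Sketch only: names are illustrative, not BurnOutSharp's API. Only a few
// resource kinds carry their own local header; most entries are raw bytes,
// so unconditionally writing a header corrupts them.
enum ResourceKind { Other = 0, Cursor = 1, Icon = 3 }

static class ResourceWriterSketch
{
    public static byte[] Serialize(ResourceKind kind, byte[] localHeader, byte[] data)
    {
        switch (kind)
        {
            case ResourceKind.Cursor:
            case ResourceKind.Icon:
                // Header-bearing kinds: prepend the per-resource header
                return localHeader.Concat(data).ToArray();
            default:
                // Everything else is raw data; no per-resource header exists
                return data;
        }
    }
}
```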
Matt Nadareski
15d0df1a12 Add PE resource tree printing (incomplete) 2022-11-09 13:17:14 -08:00
Matt Nadareski
5c3e8c35c4 Add PE accelerator table 2022-11-09 12:05:30 -08:00
Matt Nadareski
ac514fce30 Add PE resource header 2022-11-09 11:58:35 -08:00
Matt Nadareski
f7343ea305 Update PE resource type enum 2022-11-09 11:35:31 -08:00
Matt Nadareski
1435421c3c Add PE accelerator table resource 2022-11-09 11:26:21 -08:00
Matt Nadareski
735c0fe367 Add PE resource table parsing (incomplete) 2022-11-09 11:11:30 -08:00
Matt Nadareski
af99cfa6f9 Add PE virtual address extension 2022-11-08 22:05:30 -08:00
Matt Nadareski
525ff009b6 Fix COFF symbol table entries 2022-11-08 21:43:20 -08:00
Matt Nadareski
ee46167320 Add PE COFF symbol table parsing, printing 2022-11-08 21:36:46 -08:00
Matt Nadareski
6fc03403b4 Add PE section table parsing, writing 2022-11-08 16:31:30 -08:00
Matt Nadareski
760c481d39 Fix PE section header 2022-11-08 16:31:15 -08:00
Matt Nadareski
2c4906534b Enable PE headers writing 2022-11-08 15:11:18 -08:00
Matt Nadareski
2ed79f3f9c Safer PE optional header builder; start printing PE 2022-11-08 15:02:31 -08:00
Matt Nadareski
7e9be878c4 Add PE optional header parsing to builder 2022-11-08 14:27:41 -08:00
Matt Nadareski
0a5ae3b090 Add PE COFF file header parsing to builder 2022-11-08 14:12:05 -08:00
Matt Nadareski
70b8c1f9b7 NE nonresident table absolute, not relative 2022-11-08 14:07:02 -08:00
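
A small sketch of the offset convention this fix is about, assuming hypothetical field names: most NE table offsets are relative to the NE header, while the nonresident-name table offset is measured from the start of the file.

```csharp
// Sketch; field names are assumptions, not the real model.
static class NeOffsetsSketch
{
    public static long ResidentNamesStart(long neHeaderOffset, ushort tableOffset)
        => neHeaderOffset + tableOffset;   // header-relative, like most NE tables

    public static long NonResidentNamesStart(uint tableOffset)
        => tableOffset;                    // already absolute within the file
}
```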
Matt Nadareski
b067b671db Add PE signature parsing to builder 2022-11-08 14:04:02 -08:00
Matt Nadareski
268ccac7e1 Add NE nonresident name table printing 2022-11-08 13:41:51 -08:00
Matt Nadareski
5e487bc4ba Add NE entry table printing 2022-11-08 13:41:37 -08:00
Matt Nadareski
ffeda7d60b Print NE imported-name table 2022-11-08 13:35:21 -08:00
Matt Nadareski
2779d1a489 Add NE module reference table printing 2022-11-08 13:30:12 -08:00
Matt Nadareski
1752815654 Print NE resident name table 2022-11-08 13:27:12 -08:00
Matt Nadareski
429ca400e5 Fix all NE table bounded offsets 2022-11-08 13:25:31 -08:00
Matt Nadareski
984853bd66 Add NE resource name/type string output 2022-11-08 13:16:24 -08:00
Matt Nadareski
ff2549d4b7 Fix NE resource table parsing 2022-11-08 13:16:00 -08:00
Matt Nadareski
d33bb6b4bb Add first part of NE printing 2022-11-08 13:07:48 -08:00
Matt Nadareski
a3c0eca063 Address some NE parsing issues 2022-11-08 11:52:24 -08:00
Matt Nadareski
10de4ac78e Create executable info test program 2022-11-08 11:35:39 -08:00
Matt Nadareski
895394ebb9 Fix MS-DOS header parsing 2022-11-08 11:30:23 -08:00
Matt Nadareski
349f8d936a Add NE nonresident table to builder 2022-11-08 10:36:16 -08:00
Matt Nadareski
bbc65391a1 Add NE entry table to builder 2022-11-08 10:09:23 -08:00
Matt Nadareski
751c248657 Update NE entry table bundle 2022-11-08 09:59:15 -08:00
Matt Nadareski
9052cd3cdd Add NE imported-name table to builder 2022-11-07 14:13:23 -08:00
Matt Nadareski
2736527fc1 NE imported name table needs indexes 2022-11-07 13:22:59 -08:00
Matt Nadareski
0c7001acf6 Add NE module-reference table parsing
This also fixes a critical flaw in the resident-name table parsing since there is no defined size, just the point where the next table begins.
2022-11-07 10:42:48 -08:00
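
A minimal sketch of this kind of offset-bounded parsing: the table stores no size of its own, so parsing runs until the offset where the next table begins. The entry layout follows the documented NE resident-name format (length-prefixed ASCII name plus a 2-byte ordinal); the method names are not the project's own.

```csharp
using System.Collections.Generic;
using System.IO;
using System.Text;

// Sketch of offset-bounded table parsing (illustrative names).
static class ResidentNameSketch
{
    public static List<string> Parse(BinaryReader reader, long tableStart, long nextTableStart)
    {
        var names = new List<string>();
        reader.BaseStream.Seek(tableStart, SeekOrigin.Begin);

        while (reader.BaseStream.Position < nextTableStart)
        {
            byte length = reader.ReadByte();
            if (length == 0)
                break; // defensive: a zero-length entry also terminates the table

            names.Add(Encoding.ASCII.GetString(reader.ReadBytes(length)));
            reader.ReadUInt16(); // ordinal that follows each name
        }

        return names;
    }
}
```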
Matt Nadareski
9e9be5bf09 Add NE resident-name table parsing to builder 2022-11-07 10:13:10 -08:00
Matt Nadareski
008e1ad27b Finalize NE resource table building 2022-11-07 09:38:34 -08:00
Matt Nadareski
3e5ae14a54 Implement first half of NE resource table builder 2022-11-07 09:18:20 -08:00
Matt Nadareski
0c28833b14 Fix offsets for NE tables in builder 2022-11-06 23:58:53 -08:00
Matt Nadareski
75ef95c6bf Fix missing LE/LX/PE stub setting in builders 2022-11-06 23:52:52 -08:00
Matt Nadareski
b906f3c654 Add NE resource table skeleton to builder 2022-11-06 23:49:46 -08:00
Matt Nadareski
4b274a454b Add extensions for NE resource entries 2022-11-06 23:47:54 -08:00
Matt Nadareski
6554005742 Return incomplete NE from builder 2022-11-06 21:48:53 -08:00
Matt Nadareski
9f04022afc Add regions for easier code navigation 2022-11-06 21:48:19 -08:00
Matt Nadareski
fbab512975 Add NE segment table parsing to builder 2022-11-06 21:45:37 -08:00
Matt Nadareski
c9b3a67c8b Merge branch 'master' of https://github.com/mnadareski/BurnOutSharp 2022-11-06 21:39:02 -08:00
Matt Nadareski
3169cd6591 Let private methods be simpler 2022-11-06 21:38:42 -08:00
TheRogueArchivist
b116e487d3 Overhaul Freelock (#170)
* Overhaul Freelock

* Overhaul Freelock, including notes and new checks, along with confirming the existing checks.

* Add text-based checks for Freelock.

* Update README.

* Fix whitespace and re-add return
2022-11-06 21:30:59 -08:00
Matt Nadareski
aa57044bb8 Add skeleton for NE segment table parsing 2022-11-06 21:30:19 -08:00
TheRogueArchivist
fdd578dad9 Confirm existing WinLock checks (#169)
* Confirm existing WinLock checks.

* Add WinLock notes.

* Update README.

* Rename Winlock.cs to WinLock.cs
2022-11-06 21:22:58 -08:00
Matt Nadareski
2801520546 Add NE header parsing to builder 2022-11-06 21:19:48 -08:00
TheRogueArchivist
caaf983b3d Overhaul TZCopyProtection (#168)
* Add notes and research relating to TZCopyProtection.

* Fix name (was previously "TZCopyProtector").

* Add new file check for "ZakMcCrack.Ghost".

* Update README.
2022-11-06 21:05:58 -08:00
TheRogueArchivist
aaba13530c Confirm existing CD-Protector checks (#167)
* Confirm existing CD-Protector checks, and add one new one.

* Add CD-Protector notes.

* Update README.
2022-11-06 21:04:03 -08:00
TheRogueArchivist
e05ec3bcee Initial addition of Rainbow Sentinel (#166)
* Initial addition of Rainbow Sentinel

* Basic detection based off of one sample, no specific research/notes.

* Update README.

* Add additional sample sources for Rainbow Sentinel, with no new functionality.

* Add Rainbow Sentinel text checks.
2022-11-06 21:03:23 -08:00
Matt Nadareski
703a132a61 Add missing MS-DOS stub to builders 2022-11-06 00:01:24 -07:00
Matt Nadareski
e55226e685 Add header skeletons for NE/LE/LX/PE builders 2022-11-05 23:59:38 -07:00
Matt Nadareski
9a4e6de5f9 Add MS-DOS stub to NE/LE/LX/PE builders 2022-11-05 23:52:16 -07:00
Matt Nadareski
a4e55a328c Add boilerplate for NE, LE, PE builders 2022-11-05 23:47:50 -07:00
Matt Nadareski
2705685f07 Add placeholder LE interface (fixes #165) 2022-11-05 23:42:55 -07:00
Matt Nadareski
b7fb17a79f Add skeletons for other executable types 2022-11-05 23:36:15 -07:00
Matt Nadareski
ffeb73ab7c Add proof-of-concept MS-DOS builder 2022-11-05 23:29:04 -07:00
Matt Nadareski
427dec56e4 Add COFF archive note 2022-11-05 22:46:50 -07:00
Matt Nadareski
94ce87d953 Add PE resource classes 2022-11-05 22:45:18 -07:00
Matt Nadareski
0dc4f0f11a Add PE load configuration directory; fix naming 2022-11-05 22:34:33 -07:00
Matt Nadareski
a1d7e65ffb Add PE TLS directory 2022-11-05 22:11:41 -07:00
Matt Nadareski
61702d9c2a Add PE base relocation blocks 2022-11-05 22:03:17 -07:00
Matt Nadareski
5b08bef53f Add "missing" pieces list, so far 2022-11-05 21:54:36 -07:00
Matt Nadareski
53a6588054 Add PE hint name table entries 2022-11-05 21:49:34 -07:00
Matt Nadareski
9855c0c13e Add PE import directory table entries 2022-11-05 21:41:33 -07:00
Matt Nadareski
c5d005bdeb Add PE export address table entries 2022-11-05 21:37:54 -07:00
Matt Nadareski
1eb844c75b Add PE export directory table 2022-11-05 21:34:30 -07:00
Matt Nadareski
7e177f3cbf Add PE debug directory 2022-11-05 21:29:36 -07:00
Matt Nadareski
eb91cfbda1 Add PE delay load directory 2022-11-05 21:12:41 -07:00
Matt Nadareski
54082c1fce Add PE attribute certificate table 2022-11-05 21:02:30 -07:00
Matt Nadareski
b5caf6dacf Add PE COFF string table 2022-11-05 15:40:48 -07:00
Matt Nadareski
f4d1ce5388 Add PE CLR token definition symbol 2022-11-05 15:37:25 -07:00
Matt Nadareski
7d7ec69dc1 Add PE auxiliary symbol record formats 2022-11-05 15:34:14 -07:00
Matt Nadareski
7208288c00 Add PE section numbers 2022-11-05 15:16:38 -07:00
Matt Nadareski
aff3745859 Add PE COFF symbol table 2022-11-05 00:17:26 -07:00
Matt Nadareski
e103ddd216 Add PE COFF line numbers to section headers 2022-11-05 00:08:00 -07:00
Matt Nadareski
41a4965775 Add PE COFF relocations to section headers 2022-11-05 00:04:17 -07:00
Matt Nadareski
49a06f513b Add PE section table 2022-11-04 23:56:56 -07:00
Matt Nadareski
1308f3684b Add PE data directories 2022-11-04 23:50:54 -07:00
Matt Nadareski
c51eccac38 Add PE optional header 2022-11-04 23:41:31 -07:00
Matt Nadareski
09157767bf Add PE COFF file header 2022-11-04 23:25:02 -07:00
Matt Nadareski
32cc2c708a Add PE enums 2022-11-04 23:19:28 -07:00
Matt Nadareski
7f2de233fc Add PE skeleton, change MZ stubs 2022-11-04 21:05:03 -07:00
Matt Nadareski
7cb150606c Add LE/LX fix-up record table 2022-11-04 21:00:02 -07:00
Matt Nadareski
87cac010eb Fill out most of FixupRecordTableEntry 2022-11-04 17:23:21 -07:00
Matt Nadareski
03926754e7 Add LE/LX FRT source offset/count 2022-11-04 16:59:29 -07:00
Matt Nadareski
65efda1a7a Add LE/LX entry table 2022-11-04 16:51:04 -07:00
Matt Nadareski
5941d4ca16 Add skeleton for FixupRecordTableEntry 2022-11-04 16:27:01 -07:00
Matt Nadareski
e77101af89 Add LE/LX fixup record enums 2022-11-04 16:19:42 -07:00
Matt Nadareski
e766be6af9 Add LE/LX Fix-up page table 2022-11-04 16:09:20 -07:00
Matt Nadareski
95d1658324 Add LE/LX debug information 2022-11-04 16:04:01 -07:00
Matt Nadareski
9b24550738 Add LE/LX verify record directive table 2022-11-04 15:56:08 -07:00
Matt Nadareski
7947568019 Add LE/LX entry table bundle type enum 2022-11-04 15:46:24 -07:00
Matt Nadareski
399ee98923 Add LE/LX import procedure name table 2022-11-04 15:36:38 -07:00
Matt Nadareski
7b3b4a2ec5 Add LE/LX imported module name table 2022-11-04 15:31:59 -07:00
Matt Nadareski
09177da620 Add LE/LX per-page checksum table 2022-11-04 15:29:04 -07:00
Matt Nadareski
8392cfb2fa Add LE/LX module format directives table 2022-11-04 15:24:15 -07:00
Matt Nadareski
01face7315 Add LE/LX resident name tables 2022-11-04 15:00:11 -07:00
Matt Nadareski
5b6f4d65bf Add LE/LX resource table 2022-11-04 14:51:57 -07:00
Matt Nadareski
cd6f8f3db3 Add LE/LX object page table 2022-11-04 13:48:30 -07:00
Matt Nadareski
a9b07ddf1d Add more thorough LE/LX notes 2022-11-04 13:42:58 -07:00
Matt Nadareski
f3710c575b Add LE object table 2022-11-04 12:59:50 -07:00
Matt Nadareski
1f5ab45a1e Add LE information block 2022-11-04 12:55:43 -07:00
Matt Nadareski
58d453db11 Update csproj 2022-11-04 10:35:26 -07:00
Matt Nadareski
bd426b763c Add skeleton for LE work 2022-11-04 10:34:49 -07:00
Matt Nadareski
2e42efa71f Add NE per segment data 2022-11-04 10:25:48 -07:00
Matt Nadareski
58181bd723 Add NE entry table 2022-11-04 10:09:09 -07:00
Matt Nadareski
dcef3115b8 Add NE resource table 2022-11-04 09:56:06 -07:00
Matt Nadareski
4bdc5dc90f Add simple NE tables 2022-11-04 09:40:29 -07:00
TheRogueArchivist
e33d6b3a0a Add support for C-Dilla protections (#164)
* Fuck C-Dilla

* Add initial detection of C-Dilla LMS/CD-Secure.

* Add a few code comments for Macrovision.

* Update README.

* Reorganize C-Dilla NE Checks.
* Add NE skeleton for C-Dilla and other Macrovision protections.
* Add more detections for CD-Secure 1.

* Let Macrovision return multiple protections.
* Add new C-Dilla and SafeCast checks.

* why is C-Dilla so confusing

* Add additional checks for C-Dilla and SafeCast.

* Add skeleton for NE checks for SafeCast.

* Address PR comments
2022-11-04 09:31:20 -07:00
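
A rough sketch of the "return multiple protections" shape the PR above describes; all names and predicates here are placeholders, not the real Macrovision checks. One file in this family can legitimately match more than one product, so the check collects every hit instead of returning the first one found.

```csharp
using System.Collections.Generic;

static class MacrovisionSketch
{
    public static List<string> CheckAll(byte[] fileData)
    {
        var found = new List<string>();
        if (LooksLikeCDilla(fileData))   found.Add("C-Dilla");
        if (LooksLikeSafeCast(fileData)) found.Add("SafeCast");
        return found; // zero, one, or several protections
    }

    // Placeholder predicates standing in for the real content checks
    static bool LooksLikeCDilla(byte[] data) => false;
    static bool LooksLikeSafeCast(byte[] data) => false;
}
```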
Matt Nadareski
d8ddaccf07 Add OS/2 flags 2022-11-03 23:51:04 -07:00
Matt Nadareski
ca55ea16f0 Fill in missing NE header flags 2022-11-03 23:49:05 -07:00
Matt Nadareski
cb42330e22 Add missing operating systems to enum 2022-11-03 23:42:19 -07:00
Matt Nadareski
4e55cf0baa Add NE segment table 2022-11-03 23:38:51 -07:00
Matt Nadareski
abec45c492 Add skeleton of NE to Models project 2022-11-03 23:32:35 -07:00
Matt Nadareski
610e25b98a Add empty folders for future executable formats 2022-11-03 23:11:26 -07:00
Matt Nadareski
ad11e63338 Add template BurnOutSharp.Models project 2022-11-03 22:57:51 -07:00
TheRogueArchivist
440eb72ae4 The End Is Never The End Is Never The End Is (#163)
* Add notes about C-Dilla.
* Add related notes for other Macrovision/C-Dilla products.
2022-11-02 11:41:34 -07:00
TheRogueArchivist
c4553de302 Add Denuvo Anti-Cheat detection (#162)
* Add support for detecting Denuvo Anti-Cheat

* Update README
2022-10-28 23:10:08 -07:00
TheRogueArchivist
32904b75e4 Begin porting protections made by Macrovision to Macrovision sub-protections (#161)
* Fuck Macrovision

* Port SafeCast/SafeDisc checks to Macrovision sub-protections.

* Move generic checks into the main Macrovision checks.

* Add basic detection for FLEXnet.

* Add C-Dilla notes.

* Add TODO's for porting CactusDataShield.

* Address PR comments
2022-10-27 16:40:16 -07:00
Matt Nadareski
35a4771f89 Split out obvious SafeCast from SafeDisc (nw) 2022-10-26 21:10:33 -07:00
Matt Nadareski
bbb2e9391e Add SafeDisc as Macrovision example (nw) 2022-10-26 21:03:33 -07:00
Matt Nadareski
9dc186f455 Merge branch 'master' of https://github.com/mnadareski/BurnOutSharp 2022-10-26 20:38:39 -07:00
Matt Nadareski
82b9c03a7e Add skeleton for Macrovision sub-protections 2022-10-26 20:38:12 -07:00
TheRogueArchivist
f4f9ba9efa Add "SafeDisc.exe" checks (#160)
* Add checks for "SafeDisc.exe".
* Add note about previously unknown SafeDisc version.
* Add note about Drvmgt file with known sample but no known version.
* Add various SafeDisc/SafeCast resources.
* Add resource for CDS.
2022-10-25 10:31:17 -07:00
TheRogueArchivist
495864e8e0 Add summary for Denuvo (#159)
Add summary and various notes for Denuvo.
2022-10-23 20:46:58 -07:00
Matt Nadareski
ac917df519 Bump version to 2.3.4 2022-10-17 13:18:55 -07:00
Matt Nadareski
3b12fef948 Fix build, fix Nuget 2022-10-17 12:26:57 -07:00
Matt Nadareski
a74b769aef Bump version to 2.3.3 2022-10-17 10:26:21 -07:00
Matt Nadareski
ea17ad1eae Bump version to 2.3.2 2022-10-17 10:10:01 -07:00
Matt Nadareski
6087e6e0c6 Update Nuget packages 2022-10-14 09:28:27 -07:00
TheRogueArchivist
535f14bc54 Cleanup CDS detections (#158)
* Specify CDS-200 for more detections

* Fix the use of GetVersion
2022-10-13 22:14:50 -07:00
Matt Nadareski
6b3d42b81f Include exception in dictionary during debug 2022-10-13 21:52:16 -07:00
TheRogueArchivist
e645b8e1c0 Add notes to unconfirmed detections (#157)
* Add notes to unconfirmed detections to let users know what detections we need samples of.

* Add sources for some protections to confirm them.

* Confirm some RingPROTECH checks.

* Add notes for IndyVCD.
2022-10-13 20:58:49 -07:00
TheRogueArchivist
955c8685b6 Overhaul CD-Lock (#156)
* Confirm CD-Lock's current detections

* Reduce possibility for CD-Lock false positives

* Add notes about CD-Lock
2022-10-13 11:50:43 -07:00
Matt Nadareski
6184ae22aa Fix StartsWith equal length issue 2022-10-11 21:14:58 -07:00
TheRogueArchivist
4e2c5313f3 Add support for PlayJ (#155)
* Add support for PlayJ

* Add support for detecting PlayJ format audio files and PlayJ Music Player related files.
* Add an incomplete summary for Cactus Data Shield.
* Improve Cactus Data Shield Detection for CDS-200 (PlayJ).

* Address PlayJ PR comments

Fix whitespace and improve safety of PlayJ header check.

* Address further PlayJ PR comments

Reduce redundancy and further improve safety.
2022-10-11 21:00:30 -07:00
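
A sketch of what a "safer" header check can look like; the magic bytes below are placeholders (the actual PlayJ signature is not given here). Safety here means verifying the buffer is long enough before touching any header field.

```csharp
static class HeaderCheckSketch
{
    static readonly byte[] Magic = { 0x50, 0x4C, 0x41, 0x59 }; // placeholder, not the real magic

    public static bool HasHeader(byte[] data)
    {
        if (data == null || data.Length < Magic.Length)
            return false; // too short to contain the header at all

        for (int i = 0; i < Magic.Length; i++)
        {
            if (data[i] != Magic[i])
                return false;
        }
        return true;
    }
}
```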
TheRogueArchivist
137f7fcc01 Add Doc.loc as an undetected protection and add notes (#153)
* Add Doc.loc as an undetected copy protection, along with basic notes.
* Fix link in "Special Thanks" in README.md
2022-09-26 09:35:04 -07:00
TheRogueArchivist
d771c4931c Add grouped name check for mcp.dll (#149) 2022-08-30 21:32:21 -07:00
TheRogueArchivist
61a2181924 Improve Alpha-ROM detection (#152)
Check for an existing string in the overlay as well as the .data section.
2022-08-30 20:10:56 -07:00
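
A sketch of the two-location check described above, with hypothetical helper names; the actual marker string is not reproduced here. The marker may live either in the .data section or in the overlay (the bytes past the last section), so both regions are searched.

```csharp
static class AlphaRomSketch
{
    public static bool HasMarker(byte[] dataSection, byte[] overlay, byte[] marker)
        => Contains(dataSection, marker) || Contains(overlay, marker);

    // Naive byte-pattern search; returns true if needle occurs in haystack
    static bool Contains(byte[] haystack, byte[] needle)
    {
        if (haystack == null || needle == null || needle.Length == 0)
            return false;
        for (int i = 0; i <= haystack.Length - needle.Length; i++)
        {
            int j = 0;
            while (j < needle.Length && haystack[i + j] == needle[j])
                j++;
            if (j == needle.Length)
                return true;
        }
        return false;
    }
}
```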
Matt Nadareski
baad181e5f Bump version to 2.3.1 2022-08-25 10:03:48 -07:00
Matt Nadareski
1b0fcdcf32 Bump version to 2.3.0 2022-08-22 09:30:49 -07:00
Matt Nadareski
9fa1ef1d2e Slight cleanup of CDS 2022-08-21 20:36:37 -07:00
Matt Nadareski
e2492c9e5b Consolidate field string checks (fixes #141) 2022-08-21 20:34:59 -07:00
TheRogueArchivist
297514ef17 Overhaul SafeDisc detection (#133)
* Begin overhauling SafeDisc detection

* A new utility method for obtaining the SHA1 hash of a file.
* SHA1-based detection for drvmgt.dll, which is vastly more accurate than the existing size checks.
* (Currently nonfunctional) PEX based checks for newer secdrv.sys versions.
* General clean-up and minor additions.

* Address PR review comments

* Address further PR comments and remove some file size checks

Remove file size checks that are now redundant.

* Add CLCD16 hash based version detection

Add support for detecting a rough version range from the hash of CLCD16.dll, as well as general cleanup.

* Add CLCD32 hash based version detection

Add hash based version checks for CLCD32.dll, which provides far more reliable detection for 1.X than CLCD16.dll.

* Add CLOKSPL hash based version detection

Add CLOKSPL hash based version detection, which is an excellent indicator of version within 1.X.

* Add detailed SafeDisc version notes, address PR reviews

* Add a note that includes every known SafeDisc and SafeCast version.

* General cleanup and minor detection additions.

* Address PR reviews.

* Various SafeDisc detection improvements

* Add broad version checks for 00000001.TMP.

* Add a few SafeDisc Lite specific CLCD32.DLL checks.

* Remove unneeded dplayerx.dll size checks that were already covered by executable string checks.

* Improve DPlayerX existing version size checks and add new ones.

Add new hash checks for previously undetected files.

* Improve secdrv.sys version detection using both file size checks and product version checks.

* Fix various false positives, as well as incomplete detections.

* Address PR comments

* Properly set check for File Description
2022-08-21 20:20:28 -07:00
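
A sketch of the SHA-1 utility plus hash-to-version lookup this PR describes; the names are illustrative, and the lookup table is left empty here since the real values come from verified samples.

```csharp
using System;
using System.Collections.Generic;
using System.IO;
using System.Security.Cryptography;

static class SafeDiscHashSketch
{
    static readonly Dictionary<string, string> KnownDrvmgtHashes = new Dictionary<string, string>
    {
        // { "<sha1 of drvmgt.dll>", "<SafeDisc version>" } entries go here
    };

    public static string GetFileSHA1(string path)
    {
        using (var sha1 = SHA1.Create())
        using (var stream = File.OpenRead(path))
        {
            byte[] hash = sha1.ComputeHash(stream);
            return BitConverter.ToString(hash).Replace("-", string.Empty);
        }
    }

    public static string GetVersion(string drvmgtPath)
    {
        string hash = GetFileSHA1(drvmgtPath);
        return KnownDrvmgtHashes.TryGetValue(hash, out string version) ? version : null;
    }
}
```

Hashing the whole file sidesteps the ambiguity of size-only checks, which is why it is described above as vastly more accurate.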
SilasLaspada
2acb0a037c Add initial support for detecting Easy Anti-Cheat (#148)
Add initial support for detecting Easy Anti-Cheat. This is presumed to only detect later versions, though it's possible that older versions may be detected as well.
2022-08-13 20:09:46 -07:00
SilasLaspada
c38eb0cc71 Various note additions and minor fixes (#147)
- Add notes for Alpha-ROM, Hexalock, SVKP, and Zzxzz.

- Fix typos for Intenium.
2022-08-13 20:04:26 -07:00
SilasLaspada
258369bb9e Add support for detecting "AegiSoft License Manager" (#144)
Add support for "AegiSoft License Manager", as well as notes about what's known about the protection so far.
2022-08-13 20:03:27 -07:00
SilasLaspada
8cba9529d7 Overhaul DiscGuard detection (#140)
* Overhaul DiscGuard detection

* Verify some existing checks for DiscGuard.
* Add several new executable and path checks.
* Add note for remaining unconfirmed check.
* Add notes and research in comments.
* Add related protection "DiscAudit" to list of protections that needs to be added.

* Address DiscGuard PR review comments
2022-07-28 23:31:04 -07:00
SilasLaspada
10296f40b4 Improve Alpha-ROM detection for Siglus and RealLive games (#139)
* Improve Alpha-ROM detection for games that use the Siglus and RealLive VN game engines.

* Address PR comments for Alpha-ROM

* Add comment and address PR review
2022-07-28 21:03:35 -07:00
Matt Nadareski
73d085deac Update stream path for VersionInfo 2022-07-27 22:31:15 -07:00
SilasLaspada
c7261c342a Vastly improve MediaCloQ checks (#138)
* Vastly improve MediaCloQ checks, including adding content and text file checks. Also, add notes relating to it in comments.

* Add comment to MediaCloQ

Add mention of "sunncomm.ico" being used as a previous check.
2022-07-27 13:06:52 -07:00
Matt Nadareski
14d905fba3 Handle alternatively named version resources 2022-07-27 11:28:24 -07:00
Matt Nadareski
1f66edc201 Fix some resource finding 2022-07-27 11:10:46 -07:00
SilasLaspada
ade95c3210 Improve Hexalock AutoLock detection (#136)
* Improve Hexalock AutoLock detection

Verify the last remaining unverified checks, and add more checks and notes.

* Add special thanks for Hexalock
2022-07-26 20:17:28 -07:00
SilasLaspada
8a419f50db Fix MediaMax CD-3 false positive (#135)
The LaunchCD files checked for were completely unrelated to the copy protection.
2022-07-23 23:55:04 -07:00
Matt Nadareski
52efca767e Make sure start/end not reset 2022-07-17 21:14:20 -07:00
Matt Nadareski
421dfa2591 Add LibMSPackSharp note in readme 2022-07-13 14:02:52 -07:00
Matt Nadareski
76183c529c Remove explicit .NET Core 3.1 and .NET 5.0 builds 2022-07-13 13:46:30 -07:00
Matt Nadareski
463506d1e8 Add note to Wise Installer 2022-07-13 12:54:42 -07:00
Matt Nadareski
1f2a187f55 Add note to UPX 2022-07-13 12:54:08 -07:00
Matt Nadareski
40de5c4d0a Add note to Shrinker 2022-07-13 12:53:28 -07:00
Matt Nadareski
5e72acb44c Add note to Setup Factory 2022-07-13 12:53:08 -07:00
Matt Nadareski
fab3c935f8 Add note to PEtite 2022-07-13 12:52:41 -07:00
Matt Nadareski
a498513662 Add note to PECompact 2022-07-13 12:52:20 -07:00
Matt Nadareski
3eaebffff1 Add note to MS-CAB SFX 2022-07-13 12:51:54 -07:00
Matt Nadareski
280ae5babe Add note to Installer VISE 2022-07-13 12:51:21 -07:00
Matt Nadareski
5bf64d46bd Add note to InstallAnywhere 2022-07-13 12:51:00 -07:00
Matt Nadareski
6fb510c852 Add note to Inno Setup 2022-07-13 12:50:36 -07:00
Matt Nadareski
20223eea87 Add note to Gentee Installer 2022-07-13 12:50:06 -07:00
Matt Nadareski
3f796e4e0e Add note to EXEStealth 2022-07-13 12:49:34 -07:00
Matt Nadareski
74732a1b50 Add note to CExe 2022-07-13 12:48:49 -07:00
Matt Nadareski
f252681364 Add note to Armadillo 2022-07-13 12:48:16 -07:00
Matt Nadareski
82cb0f934d Add note to Themida 2022-07-13 12:47:06 -07:00
Matt Nadareski
7dc5644d21 Add note to SVKP 2022-07-13 12:46:17 -07:00
Matt Nadareski
313cc2bfb8 Add another source for CrypKey 2022-07-13 12:18:39 -07:00
Matt Nadareski
f24a8763fd Add missing ActiveMark entry point checks 2022-07-13 12:14:39 -07:00
Matt Nadareski
47845a2409 Split Cucko into separate file 2022-07-13 11:52:49 -07:00
SilasLaspada
42da9f4a82 Add support for detecting Cucko (#132)
* Add support for detecting Cucko.

* Remove debug line
2022-07-12 22:57:22 -07:00
Matt Nadareski
e9fa86343a Add special thanks 2022-07-11 22:45:12 -07:00
Matt Nadareski
8ccf2272d6 Add proper SecuROM White Label detection 2022-07-11 22:38:02 -07:00
Matt Nadareski
8fb42bc12d Add ASPack 2022-07-11 21:23:44 -07:00
Matt Nadareski
6202ee5d5c Print sections during debug 2022-07-11 16:10:52 -07:00
Matt Nadareski
655a8adb1c Disable .shr section for SecuROM for now 2022-07-11 10:08:08 -07:00
Matt Nadareski
d7fcc99fc2 Add volatile by default 2022-07-08 14:50:03 -07:00
Matt Nadareski
8601f373bd More work on MS-CAB and MSZIP 2022-07-08 13:07:03 -07:00
Matt Nadareski
3c783fdc68 Re-format some info data 2022-07-07 17:02:43 -07:00
Matt Nadareski
a75388cf17 Merge branch 'master' of https://github.com/mnadareski/BurnOutSharp 2022-07-07 16:34:52 -07:00
Matt Nadareski
7968a79fe6 Add start of decompression 2022-07-07 16:33:15 -07:00
SilasLaspada
8502924083 Overhaul existing and add new audio CD protections (#131)
* Overhaul existing and add new audio CD protections

Overhaul many audio CD protections, on top of adding a few more. Unfortunately, key2Audio detection has also been disabled, as the existing checks were simply detecting OpenMG components and not key2Audio itself.

* Fix issues from PR review
2022-07-07 15:24:41 -07:00
Matt Nadareski
af95ca08c3 Add dec/hex outputs 2022-07-07 15:24:24 -07:00
Matt Nadareski
06995b75d6 Move printing to classes 2022-07-07 15:16:34 -07:00
Matt Nadareski
1a2be26c72 Finalize initial info output 2022-07-07 15:10:31 -07:00
Matt Nadareski
a2a583e317 Add GetInfo, fix MSCABCabinet deserialize 2022-07-07 14:55:29 -07:00
Matt Nadareski
289a55ca21 Seek to file header location instead 2022-07-07 14:20:31 -07:00
Matt Nadareski
598f625ed1 Add some deserialization for MPQ 2022-07-07 14:15:44 -07:00
Matt Nadareski
d604a6d784 Fix endregion tags 2022-07-07 13:27:31 -07:00
Matt Nadareski
cf34a0adee Add Quantum framework 2022-07-07 13:27:13 -07:00
Matt Nadareski
083ded8a7e Add framework for extract all files 2022-07-07 12:43:35 -07:00
Matt Nadareski
1517a66724 Add cabinet finding 2022-07-07 12:11:23 -07:00
Matt Nadareski
5385de0f0a Add MS-ZIP deserialize and deflate 2022-07-07 12:02:25 -07:00
Matt Nadareski
43729b53f0 Add multi-cabinet note 2022-07-07 11:51:27 -07:00
Matt Nadareski
e4aa618f0b Add automatic date-time translation 2022-07-07 11:49:57 -07:00
Matt Nadareski
4953673caf Start writing CAB extraction 2022-07-07 11:37:53 -07:00
Matt Nadareski
35c6d4f36c Add unused cabinet-level deserialization 2022-07-07 11:24:21 -07:00
Matt Nadareski
9108fa5a11 Add unused MS-CAB deserialization 2022-07-07 11:15:35 -07:00
Matt Nadareski
dc97feae39 Add unused structures for MS-CAB 2022-07-07 10:39:41 -07:00
Matt Nadareski
beac29c650 Re-add Wix for MS-CAB
Added until LibMSPackSharp can be fixed
2022-07-06 15:33:10 -07:00
Matt Nadareski
21a041dad6 Update MS-CAB 2022-07-06 15:17:02 -07:00
Matt Nadareski
26eee23511 Update LibMSPackSharp to newest Git version 2022-07-06 15:16:41 -07:00
Matt Nadareski
0915c7eccd Add a few more things for MPQ 2022-07-06 13:39:54 -07:00
Matt Nadareski
e2e65bfbdf Add some unused structures for MPQ 2022-07-05 22:35:58 -07:00
SilasLaspada
033f2e0a4e Improve Cenega ProtectDVD detection (#129)
Improve detection of Cenega ProtectDVD and add notes.
2022-07-05 21:03:27 -07:00
SilasLaspada
71ee0863eb Add shell for Themida (#128)
Add shell and a few notes for Themida/WinLicense/Code Virtualize.
2022-07-04 21:15:18 -07:00
SilasLaspada
3203c3ac83 Improve Steam detection (#127)
Improve Steam detection. Fixes #114
2022-06-30 16:18:12 -07:00
SilasLaspada
1733f60a0f Improve HexaLock AutoLock detection (#126)
Vastly improve HexaLock AutoLock detection, and add notes.
2022-06-30 16:17:18 -07:00
SilasLaspada
b01abdcce3 Improve Bitpool detection (#125)
Improve Bitpool detection on top of adding many more notes.
2022-06-30 16:14:09 -07:00
SilasLaspada
53c90533e3 Improve ProtectDISC checks (#124)
This check is known to be used in at least ProtectDISC 6.8.
2022-06-29 14:30:13 -07:00
SilasLaspada
03bd7bd1f5 Add initial detection of WMDS, and improve detection of MediaMax CD-3 (#122)
* Add initial detection of Windows Media Data Session (WMDS), and improve detection of MediaMax CD-3.

* Fix whitespace as requested in PR review.

* Add comment to WMDS

* Further improve MediaMax CD-3 checks
2022-06-29 14:28:46 -07:00
Matt Nadareski
637579b0fc Remove Wix 2022-06-23 14:02:18 -07:00
Matt Nadareski
bd40ca6d9d Use OpenMcdf for MSI 2022-06-23 13:58:48 -07:00
Matt Nadareski
3c1623cb22 Remove libgsf/libmsi/ComHandler 2022-06-23 13:57:41 -07:00
SilasLaspada
66da32b5c1 Initial Detection of "Dinamic Multimedia Protection"/LockBlocks (#121)
Add initial detection of and notes about "Dinamic Multimedia Protection"/LockBlocks.

Co-authored-by: Matt Nadareski <mnadareski@outlook.com>
2022-06-22 13:52:58 -07:00
Matt Nadareski
c21b64b5bd Add initial Denuvo checks (fixes #75) 2022-06-22 13:40:28 -07:00
Matt Nadareski
e968eeea96 Add entry point note to Tages 2022-06-22 10:17:56 -07:00
Matt Nadareski
c14005dcc4 Add entry point note to Steam 2022-06-22 10:03:14 -07:00
Matt Nadareski
84c6c257df Add missing protection note for StarForce 2022-06-22 10:00:35 -07:00
Matt Nadareski
e28b930f85 Add entry point note to SafeDisc 2022-06-22 09:57:12 -07:00
Matt Nadareski
9dac2eb91b Add entry point note to LaserLok 2022-06-22 09:54:38 -07:00
Matt Nadareski
2209f362fa Add notes for CrypKey 2022-06-22 09:50:16 -07:00
Matt Nadareski
614413ab76 Add missing checks for ActiveMARK 2022-06-22 09:42:52 -07:00
Matt Nadareski
35c15d0ff8 Add missing SecuROM 8 check 2022-06-22 09:35:41 -07:00
Matt Nadareski
b521df2ad4 Fix finding, update SecuROM 2022-06-22 09:29:29 -07:00
Matt Nadareski
778fe106f9 Read overlay data (unused) 2022-06-22 09:13:01 -07:00
Matt Nadareski
9171014dcd Add missing PKZIP signature 2022-06-21 23:39:19 -07:00
Matt Nadareski
7ee74d9b81 Port remaining psxt001z code 2022-06-21 23:38:30 -07:00
Matt Nadareski
826011f532 More small cleanup (nw) 2022-06-20 22:37:46 -07:00
Matt Nadareski
adb349932f Start overlay framework 2022-06-20 21:39:19 -07:00
Matt Nadareski
f4f13c03fe Update SharpCompress 2022-06-20 21:38:01 -07:00
Matt Nadareski
e29e87444e ComHandler 2022-06-20 21:36:53 -07:00
Matt Nadareski
b233b3c17b Add data at PE entry point 2022-06-19 22:40:07 -07:00
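
A sketch of what reading "data at the entry point" involves, with illustrative field names: AddressOfEntryPoint is an RVA, so it must be mapped to a file offset through the section that contains it before the bytes can be compared against known packer or protection stubs.

```csharp
using System;

static class EntryPointSketch
{
    public static byte[] Read(byte[] file, uint entryPointRva,
        (uint VirtualAddress, uint VirtualSize, uint PointerToRawData)[] sections,
        int count = 64)
    {
        foreach (var s in sections)
        {
            if (entryPointRva < s.VirtualAddress || entryPointRva >= s.VirtualAddress + s.VirtualSize)
                continue;

            long offset = s.PointerToRawData + (entryPointRva - s.VirtualAddress);
            int length = (int)Math.Min(count, file.Length - offset);
            if (length <= 0)
                return Array.Empty<byte>();

            byte[] result = new byte[length];
            Array.Copy(file, offset, result, 0, length);
            return result;
        }
        return Array.Empty<byte>(); // entry point not inside any section
    }
}
```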
Matt Nadareski
a8fca77331 Minor cleanup attempt 2022-06-18 23:19:57 -07:00
Matt Nadareski
5e4ee07646 Partially fix caching 2022-06-18 21:59:13 -07:00
Matt Nadareski
52b2a9bef8 Fix name decode 2022-06-18 21:49:06 -07:00
Matt Nadareski
dc7f8da52f Simplify and fix MS-OLE 2022-06-18 21:16:11 -07:00
Matt Nadareski
d46b0768a0 LibMSI cleanup and notes (nw) 2022-06-17 23:12:21 -07:00
Matt Nadareski
f136af3457 Cleanup and minor MS-OLE fixes 2022-06-17 23:11:12 -07:00
Matt Nadareski
2e2b6068c5 Fix SQL parsing 2022-06-17 21:22:06 -07:00
Matt Nadareski
7a29d8a8f9 Fix StringTable read/build 2022-06-17 14:53:18 -07:00
Matt Nadareski
ae0c47066e Safety and trimming 2022-06-17 14:47:32 -07:00
Matt Nadareski
11188c9488 Buffer pointer 2022-06-17 14:46:59 -07:00
Matt Nadareski
4d5d2d8690 Seek -> SeekImpl 2022-06-17 13:41:51 -07:00
SilasLaspada
1860a863b8 Add initial detection of phenoProtect (#120)
Add initial detection of phenoProtect, which is currently text file based only.
2022-06-16 22:13:02 -07:00
Matt Nadareski
8dbf6d9362 GsfInfileZip second pass 2022-06-16 22:11:09 -07:00
Matt Nadareski
e3e8a1170f GsfInfileTar second pass 2022-06-16 16:43:07 -07:00
Matt Nadareski
b22641b7ab GsfInfileStdio second pass 2022-06-16 15:43:10 -07:00
Matt Nadareski
d3d75b9e58 GsfInfileMSVBA second pass 2022-06-16 15:35:58 -07:00
Matt Nadareski
6083c64b87 GsfInfileMSOle second pass 2022-06-16 15:17:38 -07:00
Matt Nadareski
4185367a49 GsfBlob, GsfClipData, and GsfDocMetaData second pass 2022-06-16 13:54:35 -07:00
Matt Nadareski
c027b1798a Ensure consistent reads and writes 2022-06-16 13:07:41 -07:00
Matt Nadareski
76e52292a8 Fix GsfInputMemory.ReadImpl 2022-06-15 23:43:59 -07:00
Matt Nadareski
dd5f7c4e0b Fix GsfInputStdio.MakeLocalCopy 2022-06-15 23:35:06 -07:00
Matt Nadareski
6e70991e86 Fix build, add export all (nw) 2022-06-15 23:31:08 -07:00
Matt Nadareski
f1803b9f3a Organize files, clean up formatting, add TODOs 2022-06-15 22:53:41 -07:00
Matt Nadareski
9e0abe7c5d Merge branch 'master' of https://github.com/mnadareski/BurnOutSharp 2022-06-15 17:19:59 -07:00
Matt Nadareski
1a01102252 LibMSI (nw) 2022-06-15 17:19:44 -07:00
Matt Nadareski
ce849d312f Fix naming in GSF 2022-06-15 17:19:25 -07:00
SilasLaspada
71da8a53c0 Add detection for new TAGES variant and add references to Redump entries (#119)
Add detection for new TAGES variant found in Robocop, and add the Redump entries for every listed file.
2022-06-13 00:12:34 -07:00
Matt Nadareski
b8f7d00dc9 Fix LibGSF build 2022-06-10 20:59:36 -07:00
Matt Nadareski
7e69a56892 LibGSF (nw) 2022-06-10 15:29:58 -07:00
Matt Nadareski
8cedd3e469 Handle malformed IS-CAB filenames 2022-06-04 22:02:13 -07:00
Matt Nadareski
d5e60439fb Remove now-unused Wix references 2022-06-04 22:01:02 -07:00
Matt Nadareski
1a0e16a05b Use new debug flag 2022-06-01 12:01:14 -07:00
Matt Nadareski
bee6d2b885 Update LibMSPackSharp 2022-06-01 11:50:49 -07:00
Matt Nadareski
5d515c7ddd Update LibMSPackSharp 2022-06-01 11:34:50 -07:00
Matt Nadareski
013301fa99 Update LibMSPackSharp 2022-05-31 16:40:12 -07:00
Matt Nadareski
152ae8bf6c Update LibMSPackSharp 2022-05-31 16:38:21 -07:00
Matt Nadareski
439f71ef5c Migrate to external libmspack 2022-05-31 16:34:40 -07:00
Matt Nadareski
66c7afe20b Use placeholder cabinet to start at first file 2022-05-31 16:06:29 -07:00
Matt Nadareski
6a609ea3f5 Break and set fixing flags if there's an appending error 2022-05-31 16:04:20 -07:00
Matt Nadareski
6752d0cfa3 Append cabinets where needed 2022-05-31 15:54:53 -07:00
Matt Nadareski
1d26b06592 Fix 0 checks, fix aligned table creation 2022-05-31 15:44:22 -07:00
Matt Nadareski
9ca24a3053 LZX and MSCAB format cleanup 2022-05-26 23:12:53 -07:00
Matt Nadareski
76ca87f7a9 Fully fix LZX 2022-05-25 23:52:26 -07:00
Matt Nadareski
c913b10286 Partially fix CAB decompression 2022-05-25 23:46:45 -07:00
Matt Nadareski
768c77a6bc Attempts at changes 2022-05-25 21:55:36 -07:00
Matt Nadareski
20d90d6a60 More reorganization 2022-05-24 21:39:33 -07:00
Matt Nadareski
54a48e0729 Less static methods 2022-05-24 17:05:34 -07:00
Matt Nadareski
15c05c65e7 Partial classes 2022-05-24 16:52:41 -07:00
Matt Nadareski
4dbaa415c5 Constants 2022-05-24 15:52:05 -07:00
Matt Nadareski
1a0b83a9f1 Split last MSB/LSB 2022-05-24 09:35:11 -07:00
Matt Nadareski
4316427980 Nearly finish demacroification 2022-05-24 00:47:09 -07:00
Matt Nadareski
21ecaca761 Properly implement methods from macros 2022-05-23 23:31:55 -07:00
Matt Nadareski
7022a957d6 Start making common read methods again 2022-05-23 22:36:24 -07:00
Matt Nadareski
32cfcbff0a Rename a couple things 2022-05-23 21:21:13 -07:00
Matt Nadareski
283fa425d4 Partial OAB cleanup 2022-05-23 21:16:42 -07:00
Matt Nadareski
d37eac02d5 KWAJ objectification and cleanup 2022-05-23 21:13:55 -07:00
Matt Nadareski
c3e0b99002 CHM decompressor cleanup 2022-05-23 20:48:34 -07:00
Matt Nadareski
9e49703bc5 Clean up some CAB decompressor code 2022-05-23 15:05:03 -07:00
Matt Nadareski
98a7149cc9 CHM objectification cleanup 2022-05-23 14:25:22 -07:00
Matt Nadareski
394b4e70fb libmspack cleanup 2022-05-23 13:36:19 -07:00
Matt Nadareski
b29198b3d4 CHM objectification 2022-05-22 23:46:29 -07:00
Matt Nadareski
044c398fa7 Minor LZX cleanup (nw) 2022-05-22 20:41:41 -07:00
Matt Nadareski
ae7087e8d0 Attempts at LZX fixes 2022-05-22 13:57:33 -07:00
Matt Nadareski
90bc78982c Safer opening of CABs 2022-05-22 13:13:42 -07:00
Matt Nadareski
2ce8bda394 Skip other null writes for other packages 2022-05-22 10:21:54 -07:00
Matt Nadareski
ac0ed050dc Partially fix MSZIP decompression 2022-05-22 10:17:41 -07:00
Matt Nadareski
e4aadb3794 Minor tweaks that might help 2022-05-22 00:19:04 -07:00
Matt Nadareski
80626f3b9e Cleanup to get some things working 2022-05-21 23:44:03 -07:00
Matt Nadareski
93263dfeb9 Finish KWAJ LZH and get building 2022-05-21 22:38:28 -07:00
Matt Nadareski
51e5f82cc2 Complete LZX implementation 2022-05-21 22:18:25 -07:00
Matt Nadareski
b69c5cf928 Implement Huffman tree for MSZIP (nw) 2022-05-21 21:47:54 -07:00
Matt Nadareski
e45b593c7b LZH KWAJ demacroification (nw) 2022-05-21 21:26:42 -07:00
Matt Nadareski
c245ebb80a Focus on QTM for demacroification 2022-05-21 21:00:26 -07:00
Matt Nadareski
cd38407e4e Continue demacroization (nw) 2022-05-20 23:33:57 -07:00
Matt Nadareski
f34b1ba5cf Continue demacroification (nw) 2022-05-20 22:29:43 -07:00
Matt Nadareski
08c97d291e Start of de-macro-ification 2022-05-19 23:52:08 -07:00
Matt Nadareski
3ea294f4d9 DefaultFileImpl -> FileStream 2022-05-19 22:32:58 -07:00
Matt Nadareski
ddb2be278a More CAB-specific cleanup 2022-05-19 22:19:06 -07:00
Matt Nadareski
b6b5dc4efa Make more objects for CAB 2022-05-19 20:46:58 -07:00
Matt Nadareski
218fecb273 Start cleanup on libmspack 2022-05-19 17:07:08 -07:00
Matt Nadareski
b23a504c7b Missed a few cast->as 2022-05-19 12:11:23 -07:00
Matt Nadareski
c54181e9ac "as" not "cast" 2022-05-19 11:56:38 -07:00
Matt Nadareski
7e100a261c Make Library static 2022-05-19 11:24:34 -07:00
Matt Nadareski
38e1154bad Add initial port of libmspack (nw) 2022-05-19 11:20:44 -07:00
Matt Nadareski
901804e9e4 Add OS limitation note to readme 2022-05-15 21:09:03 -07:00
Matt Nadareski
dfee4a8d76 Use debug flag for exception printing 2022-05-15 20:58:27 -07:00
Matt Nadareski
295b86fbd0 Replace HLExtract with HLLibSharp 2022-05-15 14:18:45 -07:00
Matt Nadareski
177543a51c Use WixToolset for MS-CAB 2022-05-14 21:25:41 -07:00
Matt Nadareski
dfff702e5d Remove x86 build limitation on test program 2022-05-14 14:37:52 -07:00
Matt Nadareski
c0ad427b5e Remove x86 build limitation on library 2022-05-14 14:33:15 -07:00
Matt Nadareski
2f68f95d80 Path.DirectorySeparatorChar 2022-05-13 21:03:13 -07:00
Matt Nadareski
a8ba104d0f Better interface comments 2022-05-01 21:06:52 -07:00
Matt Nadareski
6f9e92d222 Clean up interface comments 2022-05-01 21:03:48 -07:00
Matt Nadareski
1e20c1b147 Ensure packer consistency 2022-05-01 21:02:59 -07:00
Matt Nadareski
c16946ace7 Add IScannable to GenteeInstaller 2022-05-01 20:44:45 -07:00
Matt Nadareski
9d7cc4012c Move interfaces to own namespace 2022-05-01 17:41:50 -07:00
Matt Nadareski
a44bdf9013 Reorder inherited interfaces 2022-05-01 17:23:00 -07:00
Matt Nadareski
f9f2e0d932 Better naming 2022-05-01 17:17:15 -07:00
Matt Nadareski
2dd3e21ea6 Separate stream helper 2022-05-01 17:06:46 -07:00
Matt Nadareski
81bb47b634 Progress tracker doesn't need to be public 2022-05-01 16:59:03 -07:00
Matt Nadareski
c8efc1430a Get fancy 2022-05-01 14:46:01 -07:00
Matt Nadareski
7883638f0a Make helper method easier to read 2022-05-01 14:28:28 -07:00
Matt Nadareski
e930be12c8 Reduce parameters for helper 2022-05-01 14:27:04 -07:00
Matt Nadareski
c45ae4b693 Consolidate ShouldAddProtection checks 2022-05-01 14:24:46 -07:00
Matt Nadareski
478f28b513 Create Initializer class 2022-05-01 14:16:53 -07:00
Matt Nadareski
aac3c391db Simplify construction and access in Scanner 2022-05-01 14:11:09 -07:00
Matt Nadareski
802734b515 Remove another implicit assignment 2022-05-01 14:04:21 -07:00
Matt Nadareski
ef212fc8d9 Remove library-level assignments in Scanner 2022-05-01 14:00:20 -07:00
Matt Nadareski
ee85f2f6f0 Remove useless all files flag 2022-05-01 13:58:43 -07:00
Matt Nadareski
9a160b3127 Split Directory/File checks for SafeDisc 2022-04-25 12:34:39 -07:00
Matt Nadareski
4486c5ed62 ICD overmatches for SafeDisc, somehow 2022-04-25 12:27:07 -07:00
Matt Nadareski
d6feab3958 Bump version to 2.1.0 2022-04-17 22:02:43 -07:00
Matt Nadareski
2d2207a1ee Update UnshieldSharp to 1.6.8 2022-04-17 13:35:14 -07:00
Matt Nadareski
89b86630d8 Update WiseUnpacker to 1.0.3 2022-04-17 13:33:59 -07:00
Matt Nadareski
bcb1571a23 Use .NET Standard 2.0, add .NET 6.0 2022-04-16 21:58:54 -07:00
Matt Nadareski
5d658ebe4a Upgrade to VS2022 for AppVeyor 2022-04-16 21:56:45 -07:00
Matt Nadareski
7d2de80e77 Fix over-matching SafeLock 2022-04-02 21:36:29 -07:00
Matt Nadareski
b933249ff7 Add resource finding on creation 2022-04-02 16:12:23 -07:00
Matt Nadareski
61c09e3c97 Move resource helpers to PortableExecutable 2022-04-02 15:54:51 -07:00
SilasLaspada
32695ee6dd Add support for detecting AutoPlay Media Studio (#116)
* Add support for detecting AutoPlay Media Studio

* Comment out too vague AutoPlay Media Studio check

* Tweak comment
2022-04-01 23:23:32 -07:00
Matt Nadareski
4b66cd8cd2 Update file version resource handling 2022-04-01 10:16:31 -07:00
SilasLaspada
edc4cc1706 Refactor Setup Factory detection (#115)
* Refactor Setup Factory detection

* Address Setup Factory PR comments

* Fix whitespace
2022-04-01 09:58:02 -07:00
Matt Nadareski
35acb77bf7 Bump version to 2.0.0 2022-03-27 20:43:49 -07:00
Matt Nadareski
e970a7b4d9 Clean up SafeDisc a little more 2022-03-18 21:05:09 -07:00
Matt Nadareski
f155291139 Update comments after confirmation of existence 2022-03-17 12:18:08 -07:00
Matt Nadareski
b0293419e1 Add note to Tages for future research 2022-03-17 12:16:41 -07:00
Matt Nadareski
09db225929 Simplify TAGES version checking 2022-03-17 12:13:11 -07:00
Matt Nadareski
0c52b4e236 Update to UnshieldSharp 1.6.7 2022-03-17 10:03:39 -07:00
Matt Nadareski
5dc30942ff Add missing TAGES version byte 2022-03-15 23:04:10 -07:00
Matt Nadareski
cab200e893 Add Shrinker PE detection 2022-03-15 22:44:10 -07:00
Matt Nadareski
c349f3a3c4 Add Gentee Installer detection (fixes #93) 2022-03-15 22:35:44 -07:00
Matt Nadareski
0acb29f2e9 Add Steam Client Engine check 2022-03-15 22:23:23 -07:00
Matt Nadareski
b66e01f7b4 Fix SLL comment 2022-03-15 22:11:37 -07:00
Matt Nadareski
8d6d215e57 Remove commented debug code 2022-03-15 22:10:13 -07:00
Matt Nadareski
d54a90a034 Add some missing SecuROM checks 2022-03-15 22:09:28 -07:00
Matt Nadareski
e1e7172561 Make ReadArbitraryRange safer 2022-03-15 21:30:46 -07:00
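
A sketch of what "safer" likely means here (the signature is an assumption): clamp the requested range to the buffer instead of throwing or reading past the end.

```csharp
using System;

static class RangeReadSketch
{
    public static byte[] ReadArbitraryRange(byte[] file, long offset, long length)
    {
        if (file == null || offset < 0 || offset >= file.Length || length <= 0)
            return Array.Empty<byte>(); // bad inputs yield an empty result

        long clamped = Math.Min(length, file.Length - offset); // never over-read
        byte[] result = new byte[clamped];
        Array.Copy(file, offset, result, 0, clamped);
        return result;
    }
}
```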
Matt Nadareski
6606b388f6 Remove duplicate comment 2022-03-15 15:48:05 -07:00
Matt Nadareski
b6c6c01358 Slightly rearrange generic content check invocation 2022-03-15 15:47:37 -07:00
Matt Nadareski
6886c5a4a2 Convert SVKP to PE content check 2022-03-15 15:39:35 -07:00
Matt Nadareski
87546a3dc8 Remove lingering unconfirmed TAGES check 2022-03-15 15:37:13 -07:00
Matt Nadareski
6e3028639a Fix one TAGES PE check 2022-03-15 15:05:08 -07:00
Matt Nadareski
386da02e27 Convert CExe to PE content check 2022-03-15 13:19:06 -07:00
Matt Nadareski
ec8c395ffa Streams 2022-03-15 12:39:22 -07:00
Matt Nadareski
9b98215fc9 Make SourceArray private in NE 2022-03-15 11:18:53 -07:00
Matt Nadareski
40e037fb2a Make SourceStream private 2022-03-15 11:11:54 -07:00
Matt Nadareski
17f8569a7e Only read resource in WinZipSFX 2022-03-15 11:11:44 -07:00
Matt Nadareski
1105f36cee Add hacky thing for Inno for now 2022-03-15 11:11:22 -07:00
Matt Nadareski
f9fcd8749b Add arbitrary reads to NE 2022-03-15 10:50:40 -07:00
Matt Nadareski
eef76d362a Fix arbitrary reads, update SecuROM check 2022-03-15 10:39:06 -07:00
Matt Nadareski
3b0e3693eb Add arbitrary range reading 2022-03-15 10:26:29 -07:00
Matt Nadareski
ba4c56997a Add Relocation section skeleton 2022-03-15 10:15:05 -07:00
Matt Nadareski
ca4d08567d Fix resetting position for DebugSection 2022-03-15 10:02:10 -07:00
Matt Nadareski
3211149996 Remove NE Inno check from PE path 2022-03-15 09:01:54 -07:00
Matt Nadareski
5a7e60cabb Use backward read for UPX 2022-03-15 00:30:33 -07:00
Matt Nadareski
46ff4b6ef9 Remove one use of SourceArray in SecuROM 2022-03-14 23:44:17 -07:00
Matt Nadareski
dc252e8d86 Add comments around remaining SourceArray usages 2022-03-14 23:32:19 -07:00
Matt Nadareski
133e29dc2e Add NameString to SectionHeader 2022-03-14 23:28:31 -07:00
Matt Nadareski
368cec4fc6 Remove more explicit content array usages 2022-03-14 23:17:45 -07:00
Matt Nadareski
65eea4301d Hide section complexity from content checks 2022-03-14 23:01:06 -07:00
Matt Nadareski
ceae505f4d Switch order of interface parameters 2022-03-14 22:51:17 -07:00
Matt Nadareski
a7e9164f4f Use SourceArray for PE checks 2022-03-14 22:49:35 -07:00
Matt Nadareski
3820546c07 Use SourceArray for NE checks 2022-03-14 22:43:26 -07:00
Matt Nadareski
0fa6673d21 Add debug section (nw) 2022-03-14 15:27:42 -07:00
Matt Nadareski
0a486c2195 Add another Uplay check, note 2022-03-14 15:08:27 -07:00
Matt Nadareski
a723fbefc3 Add some resource checks for WTM 2022-03-14 15:00:20 -07:00
Matt Nadareski
70e64e57dd Add PE content checks for Uplay 2022-03-14 14:56:41 -07:00
Matt Nadareski
edfc3c6c5d Add PE checks for Steam 2022-03-14 12:16:38 -07:00
Matt Nadareski
c4447fc505 Modernize path check for SolidShield a little 2022-03-14 12:09:03 -07:00
Matt Nadareski
a1d2292381 Add content checks for key2AudioXS 2022-03-14 12:08:35 -07:00
Matt Nadareski
033fb0c1ac Add utility checks to ImpulseReactor 2022-03-14 11:56:18 -07:00
Matt Nadareski
e80034abf1 Simplify CDS code a bit 2022-03-14 11:52:49 -07:00
Matt Nadareski
27e4a6c452 Add comment to old interface 2022-03-14 11:31:57 -07:00
Matt Nadareski
914497b76f Slightly safer checks before invoking 2022-03-14 11:26:10 -07:00
Matt Nadareski
513e799aa3 Migrate protections to new interfaces 2022-03-14 11:20:11 -07:00
Matt Nadareski
fcbf006e4e Migrate packers to new interfaces 2022-03-14 11:00:17 -07:00
Matt Nadareski
bef26e0fd7 Add more helpers for NE/PE 2022-03-14 10:49:02 -07:00
Matt Nadareski
3dde84f683 Add new helpers for NE/PE specific 2022-03-14 10:45:01 -07:00
Matt Nadareski
74c6aa06e0 Add new interfaces 2022-03-14 10:43:08 -07:00
Matt Nadareski
ffb529edb3 Granularly separate out executable types 2022-03-14 10:40:44 -07:00
Matt Nadareski
d1279a471c Add NE Resident Name table structures 2022-03-14 10:01:01 -07:00
Matt Nadareski
a7f406537e Add more SecuROM checks (fixes #70) 2022-03-14 09:03:43 -07:00
Matt Nadareski
df7d5150c1 Add yet another Steam exe (fixes #92) 2022-03-14 08:54:58 -07:00
Matt Nadareski
73e4569b3b Clean up recent TAGES change 2022-03-09 14:35:38 -08:00
SilasLaspada
30c249ce74 Massively overhaul TAGES detection (#87)
* Massively overhaul TAGES detection

* Address TAGES PR comments

* Address further PR comments
2022-03-09 14:00:33 -08:00
Matt Nadareski
ec83669d7d Create Executable constructors 2022-03-08 23:03:26 -08:00
Matt Nadareski
e765fb6c0b Simplify PSX Anti-Modchip a little 2022-03-08 22:33:39 -08:00
Matt Nadareski
76465d30ec Change fileContent to sectionContent in SmartE 2022-03-08 22:30:12 -08:00
Matt Nadareski
71d3771c1d Add "check disc" to LaserLok 2022-03-07 13:44:10 -08:00
Matt Nadareski
bfd9c12163 Update nuget packages 2022-03-07 13:39:04 -08:00
Matt Nadareski
eb57065562 Aggregate paths instead of relying on breaking 2022-03-03 16:36:32 -08:00
Matt Nadareski
3875f3b8fb Fix potential off-by-one error 2022-03-02 14:58:29 -08:00
Matt Nadareski
8c2bedd21e Add test program parameters 2022-03-02 10:17:50 -08:00
Matt Nadareski
b199a6aa54 Update README 2022-03-02 09:12:59 -08:00
Matt Nadareski
1b1f64c2de Lock unknown checks behind debug flag
This also re-enables some previously commented out checks that could not be verified.
2022-03-02 08:56:26 -08:00
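
A sketch of the gating described above, with assumed option and method names: unverified checks stay out of normal scans but remain reachable for research when the debug flag is set.

```csharp
using System.Collections.Generic;

static class DebugGateSketch
{
    public static List<string> RunChecks(byte[] data, bool includeDebug)
    {
        var results = new List<string>();

        // ... confirmed, verified checks always run here ...

        if (includeDebug)
        {
            // ... previously commented-out checks that could not be verified
            //     against real samples run only when explicitly requested ...
        }

        return results;
    }
}
```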
Matt Nadareski
7b73cc9d9b Add alternate checks for StarForce (fixes #79) 2022-02-10 11:06:35 -08:00
Matt Nadareski
d9d84a01e5 Fix crash in SolidShield scanning (fixes #76) 2022-02-10 10:37:57 -08:00
Matt Nadareski
56f009ac56 Fail slower on resource parsing (fixes #81) 2022-02-10 10:28:59 -08:00
Matt Nadareski
96daf90ae8 Update protection notes in README 2022-02-04 15:24:41 -08:00
Matt Nadareski
b581cb3124 Disable content checks for RPT/ProRing 2022-02-04 15:24:05 -08:00
Matt Nadareski
4b0e39b950 Add Steam API DLLs to detection 2022-02-04 15:19:24 -08:00
Matt Nadareski
3a1c476edc Remove StarForce directory checks for now (fixes #77) 2022-01-30 21:07:35 -08:00
Matt Nadareski
0d62d5336c Add older Uplay installer to file list 2022-01-15 11:46:38 -08:00
Matt Nadareski
cf87279dfc Add content notes to SafeLock 2021-11-24 21:59:54 -08:00
Matt Nadareski
0006f7932a Remove overly-broad CDS checks 2021-11-22 20:30:58 -08:00
Matt Nadareski
841a39c6c7 Overhaul SafeLock checks 2021-11-21 21:18:56 -08:00
Matt Nadareski
60b12f25a6 Disable SafeLock content check for now 2021-11-21 14:04:16 -08:00
SilasLaspada
f2b96b6c50 Fix InstallAnywhere reporting (#71)
* Fix InstallAnywhere reporting

* Fix formatting

* Fix formatting again
2021-11-20 23:22:10 -08:00
Matt Nadareski
d2fad1ab29 Fix Alpha-ROM... again (fixes #69) 2021-10-29 15:19:50 -07:00
Matt Nadareski
6f6755b218 Remove over-matching TAGES check 2021-10-27 23:08:16 -07:00
SilasLaspada
9a2f2e6f17 Add initial detection for InstallAnywhere (#67) 2021-10-26 10:23:08 -07:00
Matt Nadareski
d9ca550e3b Add ProRing path checks; add note (fixes #68) 2021-10-26 10:12:21 -07:00
Matt Nadareski
ec66e87ee6 Remove one note from Alpha-ROM 2021-10-21 21:33:50 -07:00
Matt Nadareski
53ce3aee74 Refine Alpha-ROM checks; add notes 2021-10-20 21:06:43 -07:00
Matt Nadareski
1ecb06f020 Bump AppVeyor version 2021-09-24 10:44:55 -07:00
Matt Nadareski
3ce4ac785e Comment out probable NE-only check 2021-09-23 15:13:57 -07:00
Matt Nadareski
1df157434d Remove debug print 2021-09-23 15:05:46 -07:00
Matt Nadareski
594f001dda Add NE check for CD-Cops; add notes 2021-09-23 15:05:30 -07:00
Matt Nadareski
c2c6bc268e Update README 2021-09-23 13:51:28 -07:00
Matt Nadareski
7aa2207edd Add PEtite detection; add notes 2021-09-23 13:43:57 -07:00
Matt Nadareski
22aa1642a6 Partial cleanup of CD/DVD-Cops; add notes 2021-09-23 13:33:48 -07:00
Matt Nadareski
844a9686af Bump version to 1.8.0 2021-09-22 11:06:34 -07:00
Matt Nadareski
8f929366b3 Merge branch 'master' of https://github.com/mnadareski/BurnOutSharp 2021-09-20 21:02:40 -07:00
Matt Nadareski
dfa0fab979 Update README 2021-09-20 21:02:14 -07:00
dependabot[bot]
415d6c587f Bump SharpCompress from 0.28.3 to 0.29.0 in /BurnOutSharp (#65)
Bumps [SharpCompress](https://github.com/adamhathcock/sharpcompress) from 0.28.3 to 0.29.0.
- [Release notes](https://github.com/adamhathcock/sharpcompress/releases)
- [Commits](https://github.com/adamhathcock/sharpcompress/compare/0.28.3...0.29)

---
updated-dependencies:
- dependency-name: SharpCompress
  dependency-type: direct:production
...

Signed-off-by: dependabot[bot] <support@github.com>

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2021-09-20 12:59:41 -07:00
Matt Nadareski
b1034b964e Update success/failure tracking in AppVeyor 2021-09-16 14:40:49 -07:00
Matt Nadareski
85d2382680 Only use extra checks for NE executables in Wise 2021-09-15 15:44:26 -07:00
Matt Nadareski
a6e694fe5d Convert EXE Stealth to section based; add notes 2021-09-15 14:52:11 -07:00
Matt Nadareski
a579bfea1f Fix reading generic sections 2021-09-15 11:47:12 -07:00
Matt Nadareski
ba97abed44 Convert dotFuscator to section based 2021-09-15 11:45:35 -07:00
Matt Nadareski
9fe6b101bd Convert CDSHiELD SE to section-based 2021-09-15 11:01:51 -07:00
Matt Nadareski
1345182eea Update small things about Ring PROTECH 2021-09-14 23:01:44 -07:00
Matt Nadareski
a84ac8d3cc Update CodeLock with a few things; notes 2021-09-14 22:57:17 -07:00
Matt Nadareski
1eb07c52e5 Address comments in SolidShield 2021-09-14 14:31:03 -07:00
Matt Nadareski
0d75ee135c Combine SafeCast into SafeDisc; improvements 2021-09-14 13:56:43 -07:00
Matt Nadareski
c915f29c05 Minor formatting changes 2021-09-14 13:53:30 -07:00
Matt Nadareski
38d35d1991 Convert remaining XCP full content check 2021-09-14 11:51:01 -07:00
Matt Nadareski
dbc841cb7f Clean up formatting of remaining full content checks 2021-09-14 11:33:53 -07:00
Matt Nadareski
46f53221c9 Clean up ActiveMARK 2021-09-14 00:51:44 -07:00
Matt Nadareski
708fd01d1e Clean up CD Check 2021-09-13 23:46:59 -07:00
Matt Nadareski
4aa3ba0545 Disable possibly overly-broad CD Check check 2021-09-13 23:43:24 -07:00
Matt Nadareski
57499002d2 Use beginning seek 2021-09-13 23:16:57 -07:00
Matt Nadareski
630f628598 Update UnshieldSharp to 1.6.6 2021-09-13 11:19:55 -07:00
Matt Nadareski
d66c890b71 Combine SafeDisc and Lite 2021-09-12 16:03:58 -07:00
Matt Nadareski
2b5649588a Remove errant content checks from Bitpool 2021-09-12 13:43:08 -07:00
Matt Nadareski
5425578f78 Clean up Origin 2021-09-12 13:40:29 -07:00
Matt Nadareski
8c39adcc04 Simplify EA checks, add note to Registration 2021-09-11 23:03:56 -07:00
Matt Nadareski
7773b32847 Update comment in Inno 2021-09-11 22:48:05 -07:00
Matt Nadareski
143b261a67 Fix NE reading for Inno 2021-09-11 22:40:01 -07:00
Matt Nadareski
25fc2b9b04 Last easily-removed fileContent reliance removal 2021-09-11 22:31:06 -07:00
Matt Nadareski
d6fd0c4d2c Reduce reliance on fileContent; add notes 2021-09-11 22:27:52 -07:00
Matt Nadareski
44c44be412 Read MS-DOS stub data directly; use with CExe 2021-09-11 21:54:38 -07:00
Matt Nadareski
7b71d7b4bf Fix resource parsing, fix MS-CAB SFX 2021-09-11 21:41:17 -07:00
Matt Nadareski
2c2aee6797 Add a couple of sections to raw read with notes 2021-09-11 21:10:29 -07:00
Matt Nadareski
afdd032f73 Start using more methods to make life easier 2021-09-11 21:03:36 -07:00
Matt Nadareski
9d52ca4b4c Fix invalid reads 2021-09-11 20:58:44 -07:00
Matt Nadareski
1bc8fe7ff6 Fix version finding for MS-CAB SFX 2021-09-11 16:58:05 -07:00
Matt Nadareski
6ab7a06dd5 Lock stream when reading raw section 2021-09-11 16:54:00 -07:00
Matt Nadareski
7195ed3587 Combine CD-Cops and DVD-Cops 2021-09-11 16:49:54 -07:00
Matt Nadareski
214e8d41c7 Pre-read 3 most commonly-used section data
This also adds comprehensive notes around the sections used in various protections, how they're used, and what we can do with them. It also adds various other notes based on the findings from the protection audit.
2021-09-11 16:47:25 -07:00
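
A sketch of the pre-read idea; which three sections get cached is an assumption here (.data, .rdata, and .text are typical candidates in these checks). Reading them once per executable saves every content check from re-reading the same bytes out of the stream.

```csharp
using System;
using System.Collections.Generic;

class SectionCacheSketch
{
    readonly Dictionary<string, byte[]> _cache = new Dictionary<string, byte[]>();

    public SectionCacheSketch(Func<string, byte[]> readSection)
    {
        foreach (string name in new[] { ".data", ".rdata", ".text" })
            _cache[name] = readSection(name); // null when the section is absent
    }

    public byte[] Get(string name)
        => _cache.TryGetValue(name, out byte[] data) ? data : null;
}
```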
Matt Nadareski
bd9f583659 Add sections note to PE 2021-09-11 15:22:17 -07:00
Matt Nadareski
abbf0b7ff5 Work on PE export data section 2021-09-11 15:08:58 -07:00
Matt Nadareski
f2b9e3a31b Clean up a significant number of TODOs 2021-09-11 00:32:48 -07:00
Matt Nadareski
73dd669c20 Add and use byte array extension methods 2021-09-10 22:35:32 -07:00
Matt Nadareski
32390149f3 Identify and use .rsrc item for 321 Studios 2021-09-10 22:19:24 -07:00
Matt Nadareski
9e73d8762e Clean up StarForce; add notes 2021-09-10 22:15:49 -07:00
Matt Nadareski
09854b469e Remove redundant .rsrc check in SolidShield 2021-09-10 22:09:03 -07:00
Matt Nadareski
e817063e53 Remove debug write from NSIS 2021-09-10 22:05:43 -07:00
Matt Nadareski
7cdf6a8c79 Identify and use .rsrc item for MediaMax 2021-09-10 22:02:57 -07:00
Matt Nadareski
d87087dcfb Add note to Itenium 2021-09-10 21:58:27 -07:00
Matt Nadareski
7c27fcd8a4 Identify and use .rsrc item for EA 2021-09-10 21:52:31 -07:00
Matt Nadareski
56408ed9f4 Add note and future code in Executable 2021-09-10 21:45:34 -07:00
Matt Nadareski
bf385f0bbf Identify and use .rsrc item for NSIS 2021-09-10 21:45:14 -07:00
Matt Nadareski
2a6a2930c1 Handle some TODOs 2021-09-10 21:42:42 -07:00
Matt Nadareski
9f676732a4 A little extra safety 2021-09-10 20:59:39 -07:00
Matt Nadareski
44fac8cc92 Fix reading resource table from stream 2021-09-10 16:21:55 -07:00
Matt Nadareski
e510915098 Add note about streams 2021-09-10 16:15:20 -07:00
Matt Nadareski
b779f2f546 Only deserialize a file once per round of checks 2021-09-10 16:10:15 -07:00
Matt Nadareski
5344de96b2 Cleanup and bugfixes; additional notes 2021-09-10 15:32:37 -07:00
Matt Nadareski
1e70d960ba Remove unnecessary trims 2021-09-10 13:59:35 -07:00
Matt Nadareski
e03808fbc5 Fix trailing whitespace in resource strings 2021-09-10 13:54:12 -07:00
Matt Nadareski
373268a6a8 Convert resource checks to header checks 2021-09-10 13:51:32 -07:00
Matt Nadareski
905d440367 Process file info resources; cleanup; refactors 2021-09-10 02:58:59 -07:00
Matt Nadareski
5628cf8d73 Add Wix to the solution 2021-09-09 18:51:04 -07:00
Matt Nadareski
4aaea417f0 Make generic resource finding methods 2021-09-09 18:45:50 -07:00
Matt Nadareski
892886b730 Ensure CodePage package is installed 2021-09-09 16:32:24 -07:00
Matt Nadareski
1028050464 Use resource section to find assembly manifest 2021-09-09 16:29:17 -07:00
Matt Nadareski
af79b00bd6 Finalize resource reading 2021-09-09 16:05:17 -07:00
Matt Nadareski
dc9a581e1c Fix resource entry checking 2021-09-09 15:10:22 -07:00
Matt Nadareski
4d800fd644 Fix ResourceDirectoryString reading 2021-09-09 11:25:02 -07:00
Matt Nadareski
126e8827de Move WixToolset to separate folder now that it supports .NET Standard 2021-09-09 10:51:18 -07:00
Matt Nadareski
23c79d4452 Update WixToolset version 2021-09-09 10:44:05 -07:00
Matt Nadareski
173fc69a08 Update UnshieldSharp to 1.6.5 2021-09-09 10:37:51 -07:00
Matt Nadareski
0411278f1d Remove unused and outdated classes 2021-09-08 10:33:28 -07:00
Matt Nadareski
bb1f9bdcdc Remove search result 2021-09-08 10:28:50 -07:00
Matt Nadareski
9d5ab935de Add .txt2 check to SafeDisc 2021-09-08 10:28:35 -07:00
Matt Nadareski
1df9d145e4 Add another note to IContentCheck 2021-09-08 10:22:28 -07:00
Matt Nadareski
fba30949bd Fix one ActiveMARK check; add note 2021-09-08 10:14:02 -07:00
Matt Nadareski
198c320ad8 Convert XCP to section based; add note 2021-09-08 09:58:11 -07:00
Matt Nadareski
e798ba1104 Convert WTM to section based 2021-09-08 00:51:25 -07:00
Matt Nadareski
f8f02a54f6 Combine VOB into ProtectDISC; add notes
This also means that EVORE is no longer relevant to the code and has been fully removed.
2021-09-07 23:53:05 -07:00
Matt Nadareski
da01668cbe Convert 321Studios Activation to section based 2021-09-07 21:08:25 -07:00
Matt Nadareski
95770c63af Convert 3PLock to section based 2021-09-07 21:02:52 -07:00
Matt Nadareski
af6e5d7441 Partially convert TAGES to section based; add notes 2021-09-07 21:02:19 -07:00
Matt Nadareski
ae5bdcc97a Convert Sysiphus to section based 2021-09-06 13:58:16 -07:00
Matt Nadareski
0fc415fb34 Convert SVKP to header based; add note 2021-09-05 23:31:10 -07:00
Matt Nadareski
0fe30392d8 Add note to Steam 2021-09-05 23:29:48 -07:00
Matt Nadareski
77fc11289c Convert StarForce to section-based; add notes 2021-09-05 23:22:48 -07:00
Matt Nadareski
9d3969d4ce Remove debug output 2021-09-05 23:08:41 -07:00
Matt Nadareski
2ba2756a8f Partially convert SolidShield to section based; add notes 2021-09-05 23:02:55 -07:00
Matt Nadareski
53088b4e60 Convert SmartE to section based 2021-09-03 13:26:52 -07:00
Matt Nadareski
0dc83739e7 Add v8 white label notes to SecuROM 2021-09-03 11:16:15 -07:00
Matt Nadareski
e8a205b221 Convert SecuROM to section based; add notes 2021-09-02 22:32:06 -07:00
Matt Nadareski
02c3d3fb4a Add note to SafeLock 2021-09-02 16:09:29 -07:00
Matt Nadareski
2d3d66f077 Convert SafeDisc to section based; add notes 2021-09-02 15:22:33 -07:00
Matt Nadareski
a5f21adeee Add content matches to SafeCast; add notes 2021-09-02 09:30:37 -07:00
Matt Nadareski
cbb4cdddfa Add note to Ring PROTECH 2021-09-02 00:54:36 -07:00
Matt Nadareski
e6b898882d Add notes to PSX Anti-modchip 2021-09-01 23:12:16 -07:00
Matt Nadareski
3bd7f5c890 Convert ProtectDisc to section based 2021-09-01 23:09:01 -07:00
Matt Nadareski
39c20fd0cd Wrap file scanning in try/catch for more safety 2021-09-01 22:22:14 -07:00
Matt Nadareski
21117e81a3 Fix EVORE IsPEExecutable check 2021-09-01 16:10:06 -07:00
Matt Nadareski
df172b49db Add note to Origin 2021-09-01 14:15:38 -07:00
Matt Nadareski
1ae0f694de Convert EReg to fvinfo and section based; add note 2021-09-01 14:10:12 -07:00
Matt Nadareski
040aa8daf6 Convert MediaMax CD-3 to section based 2021-09-01 14:06:19 -07:00
Matt Nadareski
3b9aa2d45c Convert LaserLok to section based; add notes 2021-09-01 13:46:08 -07:00
Matt Nadareski
8705cac648 Convert ImpulseReactor to section based; add notes 2021-09-01 10:27:16 -07:00
Matt Nadareski
5a4e3caea8 Add note to Key-Lock 2021-08-31 22:57:28 -07:00
Matt Nadareski
593d4a35b7 Convert JoWood to section based; add notes 2021-08-31 22:56:57 -07:00
SilasLaspada
801eef5f37 Improve Steam detection (#62) 2021-08-31 22:22:47 -07:00
Matt Nadareski
a0ac0ea189 Convert Itenium to section based 2021-08-31 21:15:56 -07:00
Matt Nadareski
f249455b00 Convert GFWL to fvinfo and section based; add note 2021-08-31 20:53:17 -07:00
Matt Nadareski
8dbb8d9fe1 Disable Valve scanning until further notice 2021-08-31 20:33:32 -07:00
Matt Nadareski
ed698e05d8 Partially convert EA to section based; add notes 2021-08-30 15:08:14 -07:00
Matt Nadareski
47b189bf87 Improve CD-Key matching 2021-08-30 14:24:16 -07:00
Matt Nadareski
460eb78ecd Improve Setup Factory matching 2021-08-30 14:18:15 -07:00
Matt Nadareski
ffcaf4d16b Improve MS-CAB SFX matching 2021-08-30 12:08:17 -07:00
Matt Nadareski
64de357257 Remove debug from IIF again 2021-08-30 12:06:29 -07:00
Matt Nadareski
cc3f6622b4 Improve IIF matching 2021-08-30 11:47:49 -07:00
Matt Nadareski
f0b66d4bfb Improve NSIS matching 2021-08-30 11:40:14 -07:00
Matt Nadareski
9c32f663b0 Add notes to DVD-Cops 2021-08-30 10:02:40 -07:00
Matt Nadareski
e0e22d91e1 Add notes to CopyKiller 2021-08-30 10:00:18 -07:00
Matt Nadareski
dbc72cb4c2 Fix typo in Wise Installer 2021-08-29 22:39:34 -07:00
Matt Nadareski
17d6c6aa6b Have exception dump all info 2021-08-29 22:39:04 -07:00
Matt Nadareski
7be5916041 Partially convert CodeLock to section based; add note 2021-08-29 22:26:58 -07:00
Matt Nadareski
2d8a25178e Fix Cenega naming 2021-08-29 22:20:25 -07:00
Matt Nadareski
5195025849 Remove leftover debug in CD Check 2021-08-29 22:14:46 -07:00
Matt Nadareski
c3c2fc6171 Convert Cenega to section based; add note 2021-08-29 21:51:43 -07:00
Matt Nadareski
6fa5e9a67f Add note to CDSHiELDSE 2021-08-29 21:49:56 -07:00
Matt Nadareski
834018b325 Convert CD-Lock to section based 2021-08-29 21:49:14 -07:00
Matt Nadareski
027388f587 Add fvinfo to CDKey; add note 2021-08-29 21:40:25 -07:00
Matt Nadareski
6452d39de1 Partially convert CD-Cops to section based; add note 2021-08-29 21:38:19 -07:00
Matt Nadareski
6d78e2fff7 Partially convert CD Check to section based; add notes 2021-08-29 21:13:50 -07:00
Matt Nadareski
56ae245305 Partially convert CDS to section based; add note 2021-08-29 21:07:26 -07:00
Matt Nadareski
76b16ca6d4 Add note to Bitpool 2021-08-29 20:58:10 -07:00
Matt Nadareski
d0a174d71c Add note to AlphaROM 2021-08-29 20:56:48 -07:00
Matt Nadareski
8e62f12f61 Add note to ActiveMARK 2021-08-29 20:55:36 -07:00
Matt Nadareski
621bcdf380 Convert Wise to section based; add note 2021-08-29 20:52:00 -07:00
Matt Nadareski
1b54dd92ab Convert WZ-SFX to section and header based 2021-08-29 11:43:43 -07:00
Matt Nadareski
2b0a43ca3e Disable possibility of ReadLine in Valve check 2021-08-29 11:31:17 -07:00
Matt Nadareski
81ce49c219 Fix manifest description finding 2021-08-29 11:15:37 -07:00
Matt Nadareski
b287c7236b Fix NE header deserialization 2021-08-28 15:55:08 -07:00
Matt Nadareski
b63d4a3da0 Update EVORE 2021-08-28 00:13:19 -07:00
Matt Nadareski
e652e43cba Add more NE structures 2021-08-27 23:29:34 -07:00
Matt Nadareski
e6b2be1738 Formalize New Executable classes; renames 2021-08-27 22:34:57 -07:00
Matt Nadareski
d2606e21fe Convert WinRAR SFX to section based 2021-08-27 21:58:05 -07:00
Matt Nadareski
22235cbe84 Add Setup Factory version info checks; add notes 2021-08-27 21:49:14 -07:00
Matt Nadareski
6bd5fae1cd Add TODO on IContentCheck 2021-08-27 21:42:05 -07:00
Matt Nadareski
ebb20bbe5e Fix overmatching on JoWooDX; add note 2021-08-27 21:37:46 -07:00
Matt Nadareski
82d7395b79 Make PE parsing attempts safer 2021-08-27 21:32:12 -07:00
Matt Nadareski
451cb04714 Reduce StarForce over-matching check 2021-08-27 21:12:09 -07:00
Matt Nadareski
15e5feafef Add helper methods to PE class for later 2021-08-27 14:30:03 -07:00
Matt Nadareski
4d19bd27f0 Make MachineType enum values consistent 2021-08-27 14:29:38 -07:00
Matt Nadareski
2400f2d0ad Convert PEC to section based; add notes 2021-08-27 14:28:17 -07:00
Matt Nadareski
ee0193eb71 Clean up some usings, add note to NSIS 2021-08-27 13:30:24 -07:00
Matt Nadareski
eb76acb767 Add note to MS-CAB SFX 2021-08-27 13:13:41 -07:00
Matt Nadareski
6c77cccf53 Clarify and comment out in IIF 2021-08-27 12:03:00 -07:00
Matt Nadareski
b4ab969f88 Reorganize and create Resource-related things 2021-08-27 10:38:42 -07:00
Matt Nadareski
2de4f3f808 Continue exe organization, start IIF migration 2021-08-27 09:42:05 -07:00
Matt Nadareski
4b5d0980f7 Convert Installer VISE to section based 2021-08-26 23:18:55 -07:00
Matt Nadareski
2bdbad1ba6 Convert Inno Setup to section based 2021-08-26 23:07:04 -07:00
Matt Nadareski
3b634877d0 Add note to EXE Stealth 2021-08-26 22:22:15 -07:00
Matt Nadareski
2996bbb18f Add note to dotFuscator 2021-08-26 22:13:14 -07:00
Matt Nadareski
c4ca27608b Convert Advanced Installer to section based 2021-08-26 21:57:56 -07:00
Matt Nadareski
5a85ff2ad3 Clean up Armadillo, fix edge case 2021-08-26 21:47:44 -07:00
Matt Nadareski
a27b3cc43f Add old version UPX detection 2021-08-26 20:43:58 -07:00
Matt Nadareski
ea8f557097 Start converting Armadillo checks 2021-08-26 20:38:01 -07:00
Matt Nadareski
7bbed5985b A little EVORE cleanup 2021-08-26 16:05:38 -07:00
Matt Nadareski
0ec6dfb287 Use UPX as a guinea pig for new exe handling 2021-08-26 15:50:38 -07:00
Matt Nadareski
3b753c137b Fill out and fix way more executable stuff 2021-08-26 15:48:56 -07:00
Matt Nadareski
6cde7b8bef Reduce redundant code in content matchers now 2021-08-25 20:26:43 -07:00
Matt Nadareski
d26a89b8ab Add time elapsed to debug output 2021-08-25 20:25:45 -07:00
Matt Nadareski
3ab0bcc0ae ContentMatchSets are now expected in IContentCheck 2021-08-25 19:37:32 -07:00
Matt Nadareski
7548646ba2 Create and use the Tools namespace 2021-08-25 15:09:42 -07:00
Matt Nadareski
0b75c6f046 What I like about EVORE... 2021-08-25 14:23:11 -07:00
SilasLaspada
958d306f42 Fix NullReferenceExceptions (#59) 2021-08-24 23:13:27 -07:00
Matt Nadareski
742b25e4dd Split manifest reading into helper methods 2021-08-24 15:28:23 -07:00
Matt Nadareski
43845cf722 Rename position flag -> debug flag 2021-08-24 15:19:23 -07:00
Matt Nadareski
a2a0e5c2ee Clean up TODOs in IContentCheck 2021-08-24 15:19:10 -07:00
Matt Nadareski
93e8322ba5 Add skeleton code to MS-CAB SFX 2021-08-24 14:29:30 -07:00
Matt Nadareski
8a07c9cf4e Add byte array checks for IIF 2021-08-24 09:26:27 -07:00
Matt Nadareski
6049eda580 Add byte array checks for MS-CAB SFX 2021-08-24 09:13:58 -07:00
Matt Nadareski
177641894e Clean up MS-CAB SFX a little 2021-08-23 23:09:05 -07:00
Matt Nadareski
dc49335ace Add notes for later 2021-08-23 23:04:01 -07:00
SilasLaspada
3dcce8a8ac Add support for Intel Installation Framework detection (#57)
* Add support for Intel Installation Framework detection

* Address reviews
2021-08-23 22:56:31 -07:00
Matt Nadareski
04651d46d8 Clean up usings 2021-08-23 22:07:24 -07:00
Matt Nadareski
56aeded8eb String and EVORE cleanups 2021-08-23 22:05:18 -07:00
SilasLaspada
97c9c7e5ed Add support for Microsoft SFX CAB detection (#56)
* Add support for Microsoft SFX CAB detection

* Address reviews

* Simplify GetVersion

* Fix GetVersion
2021-08-23 22:03:28 -07:00
Matt Nadareski
a891391879 Merge branch 'master' of https://github.com/mnadareski/BurnOutSharp 2021-08-23 20:58:24 -07:00
Matt Nadareski
5aae9b01d4 Make file version finding safer 2021-08-23 20:45:13 -07:00
SilasLaspada
b74a370b11 Fix WTM false positive (#55) 2021-08-16 21:58:36 -07:00
Matt Nadareski
5e560661d4 Add extra try/catch around Valve archives 2021-07-31 21:41:06 -07:00
Matt Nadareski
93e450c2bf Update README for IS-Z 2021-07-21 14:21:39 -07:00
Matt Nadareski
cc762754c5 Add support of IS-Z archives 2021-07-21 13:40:32 -07:00
Matt Nadareski
7065436033 Update nuget packages 2021-07-21 13:33:52 -07:00
Matt Nadareski
debe091502 Update protection list 2021-07-19 23:02:04 -07:00
Matt Nadareski
80905b56cd Better attempt at narrowing down 2021-07-19 21:56:31 -07:00
Matt Nadareski
0a7cd8a69e Comment out broader UPX (NOS) check 2021-07-19 21:38:19 -07:00
Matt Nadareski
c3957977a2 Reorder StarForce 3-5 checks for accuracy 2021-07-18 21:56:15 -07:00
Matt Nadareski
ff602c77ed Improve GFWL support
Adds support for the previously-undetected Games for Windows LIVE - Zero Day Piracy Protection Module and adds a generic check for any other modules
2021-07-18 21:50:58 -07:00
Matt Nadareski
3667a5b57a Concurrent protection scans per file (#52)
* Move to ConcurrentDictionary

* Convert to ConcurrentQueue
2021-07-18 09:44:23 -07:00
Matt Nadareski
3ac57b1c0c Fix static matcher issues (fixes #51)
Note: This may result in slower, but more accurate, scans
2021-07-17 23:40:16 -07:00
Matt Nadareski
957d82b2f7 Add new SecuROM PA detection 2021-07-17 23:06:11 -07:00
Matt Nadareski
6d0817ad15 Path protections separator-agnostic (fixes #47) 2021-07-17 22:31:29 -07:00
Matt Nadareski
5b10e6d614 Add check note for Tivola 2021-07-17 21:54:47 -07:00
Matt Nadareski
2d39b8c532 Add more GFWL checks 2021-07-17 21:45:37 -07:00
Matt Nadareski
2ae860e8ca Add Tivola Ring Protection detection (fixes #45) 2021-07-17 21:17:45 -07:00
Matt Nadareski
9e21c28e52 Temporary fix for StarForce directory scan 2021-07-17 21:11:38 -07:00
Matt Nadareski
2f5053b49f Make StarForce checks AND not OR (fixes #46) 2021-07-17 17:08:15 -07:00
Matt Nadareski
7024136919 Make large file parsing safer (fixes #44) 2021-07-15 09:57:06 -07:00
SilasLaspada
c74b5b3d29 Improve Bitpool detection (#43)
* Improve Bitpool detection

* Address comments
2021-07-02 10:16:06 -07:00
SilasLaspada
9c3201aa4b Improve XCP detection to prevent false positives (#42)
* Improve XCP detection to prevent false positives

* Address comments
2021-07-01 20:51:40 -07:00
SilasLaspada
dfd1141635 Vastly improve WTM detection (#41)
* Fix false positive in WTM detection

* Forgot to improve part of the detection

* Vastly improve detection
2021-06-30 09:36:02 -07:00
SilasLaspada
1188cad5e6 Slightly improve PE Compact version detection (#40)
* Slightly improve PE Compact version detection

* Address comments

* Address comments
2021-06-24 11:58:38 -07:00
SilasLaspada
65fa2f8481 Greatly improve WinZip SFX version detection (#39) 2021-06-21 21:48:25 -07:00
SilasLaspada
475e0b9d91 Add support for detecting Installer VISE (#38)
* Add support for detecting Installer VISE

* Add comment about extraction
2021-06-05 15:08:10 -07:00
SilasLaspada
b76d09aa20 Improve Inno Setup detection (#37)
* Improve Inno Setup detection

* Split "GetVersion"

* Remove unneeded check

* Make version detection more robust

* Add Unicode version detection

* Address review comments
2021-06-05 11:46:04 -07:00
Matt Nadareski
9a931eae67 Bump version 2021-04-14 09:09:48 -07:00
Matt Nadareski
47caa714c4 Remove pause from Valve entirely 2021-04-14 09:08:36 -07:00
Matt Nadareski
6740011c11 Bump version to 1.6.0 2021-04-09 09:32:12 -07:00
SilasLaspada
732078f24d Improve Steam detection (#35)
* Improve Steam detection

* Alphabetize the order of detections
2021-04-03 21:25:22 -07:00
Matt Nadareski
5218aaaeb1 Remove all uses of Pause in Valve 2021-04-03 20:49:53 -07:00
Matt Nadareski
6b9df94613 Remove unused library from README 2021-04-02 16:02:39 -07:00
Matt Nadareski
b1ac88fc20 Use submodule for WixToolset.Dtf 2021-04-02 16:01:23 -07:00
Matt Nadareski
ca83019a58 Move wix mirror to backup 2021-04-02 15:51:46 -07:00
Matt Nadareski
d68272a5bb Use submodule for stormlibsharp
This also gets CascLibSharp for free. Maybe keep that in mind
2021-04-02 15:48:57 -07:00
Matt Nadareski
6ab0fd5e3b Move StormLibSharp mirror to backup 2021-04-02 15:38:59 -07:00
Matt Nadareski
a0034b8fa8 Use submodule for hllib 2021-04-02 15:34:47 -07:00
Matt Nadareski
a884737242 Move HLLib mirror to backup 2021-04-02 15:19:00 -07:00
Matt Nadareski
c7b3776386 Use submodule for libmspack4n 2021-04-02 15:17:26 -07:00
Matt Nadareski
b387c77179 Move libmspack4n mirror to backup 2021-04-02 15:09:56 -07:00
Matt Nadareski
f6b58223de Replace LessIO code mirror with submodule 2021-04-02 15:08:05 -07:00
Matt Nadareski
8960ad3b16 Backup current LessIO code mirror 2021-04-02 14:54:46 -07:00
Matt Nadareski
28e95f9eb7 Slight SolidShield cleanup 2021-04-01 15:00:22 -07:00
Matt Nadareski
0bf5065cbc Add future work note 2021-04-01 14:53:25 -07:00
Matt Nadareski
579c9c0f84 Fix missing SafeDisc version 2021-04-01 11:38:32 -07:00
Matt Nadareski
b2e8b66eae Fix SmartE overmatching 2021-04-01 11:20:13 -07:00
Matt Nadareski
80e71f43de Try to make structs more marshalable 2021-04-01 11:09:20 -07:00
Matt Nadareski
e0497e6fee Migrate SafeDisc to modern path checking 2021-04-01 10:38:50 -07:00
Matt Nadareski
18a78f44c0 Make EVORE a bit safer 2021-04-01 10:36:03 -07:00
Matt Nadareski
f9e7fd5725 Fix marshalling issue 2021-04-01 10:34:53 -07:00
Matt Nadareski
e9c1a170ad Move generic CD key to own file 2021-03-31 19:07:00 -07:00
SilasLaspada
9ce84c75dd Comment out an overmatching definition (#34) 2021-03-31 18:58:34 -07:00
Matt Nadareski
0bd5339b78 Update to UnshieldSharp 1.5.0 2021-03-29 14:35:06 -07:00
Matt Nadareski
e06b1987b9 Update to WiseUnpacker 1.0.2 2021-03-29 14:17:42 -07:00
Matt Nadareski
c8b271ac76 Port over executable header code from Wise 2021-03-25 15:53:45 -07:00
Matt Nadareski
1672c73a57 Fix PECompact scanning
Thanks Silas for noticing the regression
2021-03-25 15:25:15 -07:00
Matt Nadareski
32f6e0e8fc Further making test executable a bit nicer 2021-03-24 20:50:58 -07:00
Matt Nadareski
5c21de5a0f Make the test executable a bit nicer 2021-03-23 16:55:19 -07:00
Matt Nadareski
9f40a8c4c0 Perform some post-removal cleanup 2021-03-23 16:43:23 -07:00
Matt Nadareski
c179f29e2e Remove .NET Framework 4.7.2, update SharpCompress 2021-03-23 16:37:21 -07:00
Matt Nadareski
f9d6fce3bd Reduce boilerplate for directory checks 2021-03-23 13:35:12 -07:00
Matt Nadareski
aa83896963 Final batch of first pass for path check conversions 2021-03-23 10:36:14 -07:00
Matt Nadareski
7d13b8c9db Optimize checking with better caching 2021-03-23 10:04:09 -07:00
Matt Nadareski
921292e077 Static list of content matchers
This also includes some more path matcher conversions that I couldn't reasonably split out
2021-03-23 09:52:09 -07:00
Matt Nadareski
c3e7f0b99f Return empty not null 2021-03-23 08:47:36 -07:00
Matt Nadareski
b9cc5e9ada Second batch of path check conversions 2021-03-22 23:02:01 -07:00
Matt Nadareski
76d76b2bf2 Convert a few more path checks 2021-03-22 22:23:55 -07:00
Matt Nadareski
532e912a2d Accidental comment issue 2021-03-22 22:12:25 -07:00
Matt Nadareski
8ada667dfe Be consistent with var naming 2021-03-22 22:11:01 -07:00
Matt Nadareski
28a4f7ce82 File path should only get first match 2021-03-22 22:09:35 -07:00
Matt Nadareski
3a66183d0e Convert AACS to use new matching 2021-03-22 22:07:14 -07:00
Matt Nadareski
7f91346878 Fix assumptions for path matching 2021-03-22 22:06:55 -07:00
Matt Nadareski
2af0dc4a8c Don't include PDBs 2021-03-22 21:38:49 -07:00
Matt Nadareski
5240f2eb70 Simplify util method naming 2021-03-22 21:32:58 -07:00
Matt Nadareski
f25800510b MatchSet is abstract 2021-03-22 21:30:30 -07:00
Matt Nadareski
6400c954ef Split matchers more cleanly, comment better 2021-03-22 21:25:14 -07:00
Matt Nadareski
e43423d2c9 Fix misleading version results 2021-03-22 16:25:40 -07:00
Matt Nadareski
bc613a0413 Fix build 2021-03-22 11:44:16 -07:00
Matt Nadareski
e47a52dbe0 Use framework in even more content protections 2021-03-22 11:43:51 -07:00
Matt Nadareski
da165345b6 Use framework for more content protections 2021-03-22 11:13:14 -07:00
Matt Nadareski
8ea54328ef Use framework for WZ-SFX v2 checks 2021-03-22 10:22:56 -07:00
Matt Nadareski
a4aeebee68 Split content and path version checks 2021-03-22 10:04:33 -07:00
Matt Nadareski
95a61c3b28 Add path matching to util 2021-03-22 09:55:29 -07:00
Matt Nadareski
9aadf5e948 Add invariance 2021-03-22 01:26:33 -07:00
Matt Nadareski
129ac1bb78 More granular on path search 2021-03-22 01:18:49 -07:00
Matt Nadareski
e4278c55b7 Add path matching matcher 2021-03-22 01:11:59 -07:00
Matt Nadareski
7aca58a6c9 Better split matching code, fix UPX name 2021-03-22 00:41:18 -07:00
Matt Nadareski
ea022de022 Fix a couple things:
- Fix PECompact 2 version string
- Fix UPX (NOS Variant) over-matching
2021-03-21 23:14:37 -07:00
Matt Nadareski
bb4f16d91f Use content matching helper, part 6 2021-03-21 22:45:06 -07:00
Matt Nadareski
f1c165845f Merge branch 'master' of https://github.com/mnadareski/BurnOutSharp 2021-03-21 22:37:25 -07:00
Matt Nadareski
15ae2441c3 Use content matching helper, part 5 2021-03-21 22:37:16 -07:00
SilasLaspada
6b8f8957de Add Advanced Installer Detection (#32)
* Add Advanced Installer Detection

* Updated README

* Address comments
2021-03-21 22:36:01 -07:00
Matt Nadareski
cf9bd99f3d Use content matching helper, part 4 2021-03-21 22:19:38 -07:00
SilasLaspada
557114d92d Add CExe detection (#30)
* Add CExe detection

* Optimize check
2021-03-21 21:39:01 -07:00
Matt Nadareski
d01826ffa4 Use content matching helper, part 3 2021-03-21 15:34:19 -07:00
Matt Nadareski
7e3ef544f0 Use content matching helper, part 2 2021-03-21 15:24:23 -07:00
Matt Nadareski
ab07eb96ce Use content matching helper, part 1 2021-03-21 14:30:37 -07:00
Matt Nadareski
7c53eaab13 Merge branch 'master' of https://github.com/mnadareski/BurnOutSharp 2021-03-21 14:04:19 -07:00
SilasLaspada
fad7d87282 Add WinRAR SFX detection and extraction (#31) 2021-03-21 14:03:47 -07:00
Matt Nadareski
578519169a Add helper method for simple content matches 2021-03-20 22:37:56 -07:00
Matt Nadareski
fe106d23ec Fix BD+ in reverse 2021-03-20 22:28:57 -07:00
Matt Nadareski
e1669f031f BD+ using BitConverter 2021-03-20 22:00:38 -07:00
SilasLaspada
9bff6d5fe1 Improve version detection (#29)
* Improve version detection

* Address comments

* Address comments

Co-authored-by: Matt Nadareski <mnadareski@outlook.com>
2021-03-20 21:29:19 -07:00
Matt Nadareski
544aaed9da Modify array finding, part 2 2021-03-20 20:47:56 -07:00
Matt Nadareski
b6b7a5e7aa Clean up terminology 2021-03-20 19:23:59 -07:00
Matt Nadareski
cdc4d509ee Modify array finding, part 1 2021-03-20 19:00:22 -07:00
Matt Nadareski
07882f7632 Create and use manifest version utility 2021-03-20 17:34:31 -07:00
Matt Nadareski
deb3c01362 Merge branch 'master' of https://github.com/mnadareski/BurnOutSharp 2021-03-20 16:26:19 -07:00
SilasLaspada
a51b16aed2 Add Setup Factory detection (#28)
* Add Setup Factory detection

* Improve version detection

* Address comments
2021-03-20 16:18:17 -07:00
Matt Nadareski
0e7d98158e Split out generic online registration 2021-03-20 13:29:46 -07:00
Matt Nadareski
56d8518ee4 Comment out over-matching EA check 2021-03-19 17:31:53 -07:00
Matt Nadareski
8897fd8650 Make SafeDisc more like Tages path check 2021-03-19 15:56:07 -07:00
Matt Nadareski
6f811e43d0 Split method in Scanner too 2021-03-19 15:48:53 -07:00
Matt Nadareski
a2888e3371 Split IPathCheck method 2021-03-19 15:41:49 -07:00
SilasLaspada
30d3312d87 Added BD+ version detection (#26)
* Added BD+ version detection

* Address Comments

* Address comment

* Remove unnecessary assignment
2021-03-18 10:50:45 -07:00
SilasLaspada
854a257fd6 Implement AACS version detection (#25)
* Implement AACS version detection

* Address comments

* Address more comments

* Address comment
2021-03-17 22:27:37 -07:00
Matt Nadareski
28344c6e3f Add WinZip SFX to readme 2021-03-14 14:39:31 -07:00
SilasLaspada
11a82addd8 Implement BD+ detection (#24) 2021-03-14 14:23:24 -07:00
Matt Nadareski
152f6c7051 Reduce size of 32-bit checks 2021-03-14 14:16:54 -07:00
Matt Nadareski
bedbceafa7 Use NE headers for all 16-bit versions 2021-03-14 13:55:26 -07:00
Matt Nadareski
2f19bf7ceb SFX cleanup and add missing check 2021-03-14 00:19:10 -08:00
SilasLaspada
a39ae9facf Add support for WinZip SFX archives (#23)
* Add initial check for WinZip SFX archives

Every version of WinZip SFX has the string "WinZip Self-Extractor" in it.

* Add basic version detection

Versions 3+ and 2.x are identified radically differently, so make separate methods for them.

* Implement version 3+ detection

Should be very thorough detection, detects every 3+ file I have accurately.

* Cleanup code

General cleanup

* Improve version 3+ detection

Use an XML string to determine the version.

* Harden against false positives

* Implement basic extraction

* Partial 2.X version detection

Very crude but effective 2.X detection for 2.0 versions

* Add version detection for 2.1 RC2 variants

* Add 2.1 version detection

* Add 2.2 version detection

Aside from clean-ups, this is the final functional addition

* Address comments
2021-03-13 20:18:03 -08:00
Matt Nadareski
56b234bc96 A little cleanup 2021-03-02 15:21:14 -08:00
Matt Nadareski
633fe23b80 Reflection
This change eliminates the need to explicitly list out every single protection in the same way now that we have interfaces that we can rely on.
2021-03-02 15:10:52 -08:00
Matt Nadareski
2867ce2e9a Add more EA CDKey checks (fixes #21) 2021-03-02 13:48:10 -08:00
Matt Nadareski
ac0e5e95a9 Add note to RPT scan 2021-03-02 13:34:15 -08:00
Matt Nadareski
e3bed19e79 Import WixToolset code as external 2021-03-02 13:09:15 -08:00
Matt Nadareski
73aae8118f Wrap in libmspack4n and LessIO as external code 2021-03-02 12:14:14 -08:00
Matt Nadareski
b3671a430e Swap order of params for IPathCheck 2021-02-26 11:02:10 -08:00
Matt Nadareski
54465ff4e7 Move both installers to packers 2021-02-26 09:34:07 -08:00
Matt Nadareski
52eef84374 Make InnoSetup like WiseInstaller 2021-02-26 09:32:41 -08:00
Matt Nadareski
f4310206e9 Add IScannable interface 2021-02-26 09:26:23 -08:00
Matt Nadareski
7cfa9649e4 Add IContentCheck interface 2021-02-26 01:26:49 -08:00
Matt Nadareski
c6eaafebbe Add IPathCheck interface 2021-02-26 00:32:09 -08:00
Matt Nadareski
df1e14b6c9 Rename NOS variant of UPX 2021-02-25 13:38:13 -08:00
Matt Nadareski
ad2d854969 Add versioned NOS check, fix naming 2021-02-25 11:27:08 -08:00
Matt Nadareski
61202a87fb Add UPX detection for odd cases 2021-02-25 11:13:57 -08:00
Matt Nadareski
9ebbeaed0f Make EA CDKey checks more robust 2021-02-23 13:16:25 -08:00
Matt Nadareski
aebc139d52 I lied, keep it separate 2021-02-20 22:13:48 -08:00
Matt Nadareski
0e82eea891 Origin is an EA protection 2021-02-20 22:06:18 -08:00
Matt Nadareski
7ec76acf2f Remove Cucko until more investigation 2021-02-20 13:16:52 -08:00
Matt Nadareski
21f17791ff No... that's not right 2021-02-19 21:26:49 -08:00
Matt Nadareski
fff5f2610a Add Cucko notes 2021-02-19 10:14:02 -08:00
Matt Nadareski
d574fb5e44 Fix link to MPF in README 2021-01-25 09:53:18 -08:00
Matt Nadareski
d8aacbcc5d Treat Wise internally a bit strangely 2021-01-25 09:51:16 -08:00
Matt Nadareski
6467ef97d5 Only scan Wise internals if scanning archives 2021-01-24 21:23:05 -08:00
Matt Nadareski
5ec4872b36 Fix framework string 2021-01-22 11:25:24 -08:00
Matt Nadareski
30bfff833f Bump version to 1.5.1 2021-01-22 11:10:19 -08:00
Matt Nadareski
e37d5a80ab Add .NET 5.0 target framework 2021-01-21 13:17:59 -08:00
Matt Nadareski
df1081507d Thrown exceptions get logged to file in Test 2021-01-21 13:17:34 -08:00
Matt Nadareski
a140e1c444 Fix MPQ extraction (fixes #16) 2021-01-21 11:54:05 -08:00
Matt Nadareski
f9a990b27b Rename this to be more accurate 2020-11-12 22:47:33 -08:00
Matt Nadareski
b841c7aa94 More comments in EVORE for later 2020-11-12 22:40:50 -08:00
Matt Nadareski
554520ae1f Less EVORE code to fail 2020-11-12 22:33:18 -08:00
Matt Nadareski
3155b3fe41 Move EVORE things around 2020-11-12 22:10:45 -08:00
Matt Nadareski
e52cfd244a Little more reorganization 2020-11-12 22:00:50 -08:00
Matt Nadareski
c52b22fb4e Split things out for executables 2020-11-12 21:58:06 -08:00
Matt Nadareski
b3f72bbbe1 Add EVORE note, VSCode stuff 2020-11-12 21:35:43 -08:00
Matt Nadareski
9ebcfdba53 Bump version to 1.5.0 2020-11-03 21:15:36 -08:00
Matt Nadareski
15f020cb06 Fix textfile content scan 2020-11-03 14:57:23 -08:00
Matt Nadareski
4b387f86c1 Hook up MediaMax to scan 2020-11-03 14:47:15 -08:00
Matt Nadareski
1a3a73a86d Add MediaMax CD-3 content detections 2020-11-03 14:41:05 -08:00
Matt Nadareski
31eff196e6 Forgot to fix path in Wise 2020-11-02 15:18:03 -08:00
Matt Nadareski
828d3403f1 Upgrade UnshieldSharp, re-enable ISCab for Core 2020-11-01 23:13:35 -08:00
Matt Nadareski
28d6d06033 Unshield uses zlib.net which isn't .NET Core... 2020-11-01 22:13:46 -08:00
Matt Nadareski
0a2477e1b3 Remove console writes, make LibCrypt read better 2020-11-01 21:53:42 -08:00
Matt Nadareski
01451d7009 A bit of cleanup 2020-11-01 16:01:45 -08:00
Matt Nadareski
cff9582bf5 Comment out one of the 3PLock checks 2020-11-01 14:30:32 -08:00
Matt Nadareski
428d839700 Remove outdated TODOs 2020-11-01 14:18:01 -08:00
Matt Nadareski
50db0044b0 Fix MSCab corner case of trailing periods 2020-11-01 14:06:46 -08:00
SilasLaspada
68b1ec7b3f Fix name of INTENIUM protection (#15)
Change ITENIUM to INTENIUM
2020-11-01 10:45:30 -08:00
Matt Nadareski
fd524e1d5c List archive formats in README 2020-10-31 23:57:02 -07:00
Matt Nadareski
ef581c3f36 Add note for MS-CAB 2020-10-31 23:29:27 -07:00
Matt Nadareski
64e67b8daa Gate non-core compatible includes 2020-10-31 23:17:03 -07:00
Matt Nadareski
8624350b82 Why am I doing this? 2020-10-31 22:44:26 -07:00
Matt Nadareski
e3b32fd974 Clear empty keys as you go 2020-10-31 21:20:16 -07:00
Matt Nadareski
6f789d2454 Fix output of in-archive files 2020-10-31 14:57:47 -07:00
Matt Nadareski
d365dd1164 More consistent naming 2020-10-31 14:48:25 -07:00
Matt Nadareski
aa3afd676b Add option for including packers in scan 2020-10-31 14:46:08 -07:00
Matt Nadareski
69a04ff825 Hook up ITENIUM 2020-10-31 14:43:27 -07:00
Matt Nadareski
938e1f94bb Fix CDCheck message (non-false-positive in XCP) 2020-10-31 14:19:41 -07:00
Matt Nadareski
3eda785a5a Skip files with no protection on output 2020-10-31 14:16:51 -07:00
Matt Nadareski
6b895fa7c8 Fix invalid UPX packing versions 2020-10-31 14:15:33 -07:00
Matt Nadareski
45e10a84ae Strip temp paths, add archive name as prefix 2020-10-31 14:14:35 -07:00
Matt Nadareski
81f0400790 Pass-thru scanner, better return types 2020-10-31 14:00:31 -07:00
Matt Nadareski
df90583f73 Scanner shouldn't include path intrinsically 2020-10-31 13:20:54 -07:00
Matt Nadareski
5b980e138a Add scanner to all archive signatures (nw) 2020-10-31 00:06:41 -07:00
Matt Nadareski
0dd71d72ca Add new, but unused, Scanner class 2020-10-30 23:56:27 -07:00
Matt Nadareski
a7364eab67 Comments and a better guard 2020-10-30 21:43:40 -07:00
Matt Nadareski
99013e3448 Separate out archive and non-archive in code 2020-10-30 21:14:07 -07:00
Matt Nadareski
7f5e93db95 Fix 3PLock scanning finally 2020-10-30 17:02:58 -07:00
Matt Nadareski
4aa9662ebe Update README 2020-10-30 09:16:16 -07:00
Matt Nadareski
68dc6c3139 Region-ize the executable scans 2020-10-30 09:11:17 -07:00
Matt Nadareski
8b99577c66 New namespace for packers 2020-10-30 09:09:16 -07:00
Matt Nadareski
20ea1b4427 Fix formatting 2020-10-30 09:02:48 -07:00
SilasLaspada
cf0c6126b9 Add support for detecting NSIS (#14) 2020-10-30 08:56:34 -07:00
Matt Nadareski
49eef3f45a Update README with changes 2020-10-29 11:07:00 -07:00
Matt Nadareski
23e852bbdb Add first ITENIUM check, not hooked up 2020-10-29 11:01:52 -07:00
Matt Nadareski
f182dccbf2 21 -> 321, add a couple protection notes 2020-10-29 10:05:56 -07:00
Matt Nadareski
2d2cff4d0e Add MSI extraction and scanning 2020-10-28 22:51:33 -07:00
Matt Nadareski
c0621a83cb Enable 21Studios scan 2020-10-28 22:51:14 -07:00
Matt Nadareski
3c5da9f7ec Add 21Studios activator detection 2020-10-28 22:49:55 -07:00
Matt Nadareski
a7b5288277 One more EA CDKey detection 2020-10-28 21:03:45 -07:00
Matt Nadareski
fee980e048 CD Check has a valid case again 2020-10-28 16:33:20 -07:00
Matt Nadareski
21eac43e9f Remove unused vars 2020-10-28 13:31:15 -07:00
Matt Nadareski
b55698004c Make it blindingly obvious the files are from an archive 2020-10-28 13:25:58 -07:00
Matt Nadareski
40bdb9b2d9 Use 'i+1' during progress 2020-10-28 13:21:42 -07:00
Matt Nadareski
dfabb1a244 Strip out temp paths from results and progress 2020-10-28 13:17:26 -07:00
Matt Nadareski
cd5f9561bf Pre-scan progress indicator 2020-10-28 13:06:45 -07:00
Matt Nadareski
d8d6fac67e Fix issue with libmspack4n, again 2020-10-28 12:34:33 -07:00
Matt Nadareski
965c32482d Remove note 2020-10-28 12:24:04 -07:00
Matt Nadareski
c8c9d4ac64 Scan whole path for Wise 2020-10-28 12:23:25 -07:00
Matt Nadareski
5f5abf8a14 Scan whole path for Valve 2020-10-28 12:22:29 -07:00
Matt Nadareski
406791b938 Scan whole path for TAP 2020-10-28 12:21:59 -07:00
Matt Nadareski
eb2e6e7029 Scan whole path for 7zip 2020-10-28 12:21:44 -07:00
Matt Nadareski
9f0b41c6aa Scan whole path for RAR 2020-10-28 12:21:26 -07:00
Matt Nadareski
e2b72d8a5b Scan whole folder for PKZIP 2020-10-28 12:20:55 -07:00
Matt Nadareski
9802840309 Scan whole folder for MPQ 2020-10-28 12:20:36 -07:00
Matt Nadareski
4a6e2fd62d Scan whole folder for MSCab 2020-10-28 12:20:07 -07:00
Matt Nadareski
79c706e35a Scan whole folder for ISCab 2020-10-28 12:19:10 -07:00
Matt Nadareski
9be19d6925 Scan whole folder for BFPK 2020-10-28 12:17:10 -07:00
Matt Nadareski
0cfb9907d0 Make archives use full scan, not just content 2020-10-28 12:05:48 -07:00
Matt Nadareski
c18e9b3538 Combine EA protection checks, add/fix reg checks 2020-10-28 11:13:26 -07:00
Matt Nadareski
3754c3a65b Ensure mspack.dll exists for libmspack4n 2020-10-28 10:49:24 -07:00
Matt Nadareski
513a64df4c Rename Cucko and cleanup misc 2020-10-28 10:42:54 -07:00
Matt Nadareski
d18a51c7fa Add SolidShield wrapper check 2020-10-28 10:23:57 -07:00
Matt Nadareski
b43433b9ed Add more EA protection checks 2020-10-28 10:10:43 -07:00
Matt Nadareski
d553395f3f Reorder CDS path checks, change one to fit code 2020-10-27 17:13:35 -07:00
Matt Nadareski
28dbe8542b Add CDS content checks, fix XCP over-detection 2020-10-27 17:01:33 -07:00
Matt Nadareski
912e605dc8 Clarify SafeDisc version identifiers 2020-10-27 14:47:02 -07:00
Matt Nadareski
039982d02d Cleanup on Cactus for future dev work 2020-10-27 14:37:14 -07:00
Matt Nadareski
1a40c6cbb2 SafeDisc... one OR greater 2020-10-27 13:35:57 -07:00
Matt Nadareski
35921e3cac Be smarter about SecuROM strings 2020-10-26 23:30:35 -07:00
Matt Nadareski
a42040d644 Add UPX checking 2020-10-26 23:30:06 -07:00
Matt Nadareski
e1fb1c7bcf Minor renaming 2020-10-26 22:22:46 -07:00
Matt Nadareski
095de1441d Add XCP content checks (thanks to Silas) 2020-10-26 21:08:39 -07:00
Matt Nadareski
43cbafc0f5 Add better detection for XCP2 2020-10-26 20:34:47 -07:00
Matt Nadareski
9143e4c02c Disable CD Check in source due to false positives 2020-10-09 23:12:11 -07:00
Matt Nadareski
2af0692ab4 Update libraries, fix deps, bump to 1.04.1 2020-09-30 23:19:50 -07:00
Matt Nadareski
e00c8786b9 Update version to 1.4.0 2020-09-10 22:32:00 -07:00
Matt Nadareski
ab6298ac67 Include all builds for Test 2020-09-10 21:59:50 -07:00
Matt Nadareski
aaad56794b Fix odd case for secdrv.sys (fixes #8) 2020-09-10 21:50:59 -07:00
Matt Nadareski
77d99460ab Fix exception in EVORE (fixes #9) 2020-09-10 21:49:46 -07:00
Matt Nadareski
4b222003d2 Minor cleanup 2020-09-10 21:47:14 -07:00
Matt Nadareski
517d5528c9 Remove .NET Framework 4.6.2 2020-09-10 21:44:42 -07:00
Matt Nadareski
0c137e97f0 Make protection location optional (default off) 2020-09-10 21:43:18 -07:00
Matt Nadareski
c4f8fa4b0d Location, Location, Location (#11)
* Add index to all content checks

* Get mostly onto byte arrays

* Migrate as much as possible to byte array

* Minor cleanup

* Cleanup comments, fix search

* Safer CABs and auto-log on test

* Comments and better SecuROM

* Cleanup, Wise Detection, archives

* Minor fixes

* Add externals, cleanup README

* Add WiseUnpacker

* Add Wise extraction

* Better separation of special file format handling

* Consistent licensing

* Add to README

* Fix StartsWith

* Fix Valve scanning

* Fix build

* Remove old TODO

* Fix BFPK extraction

* More free decompression formats

* Fix EVORE

* Fix LibCrypt detection

* Fix EVORE deletion
2020-09-10 21:10:32 -07:00
Matt Nadareski
0bc1d1efa6 CheckPath should not call CheckContents 2020-02-20 14:28:26 -08:00
Matt Nadareski
c78229c3cd Use Any() instead of Count() > 0 2020-02-20 14:23:39 -08:00
Matt Nadareski
aa41cb9edf Clean up some of the psxt001z code (fixes #6) 2020-02-18 13:54:39 -08:00
Matt Nadareski
11f1fec53c Update copyright 2020-01-29 11:32:05 -08:00
Matt Nadareski
249064580a Sync nuspec and assembly info 2020-01-29 11:31:44 -08:00
Matt Nadareski
72d29aabd2 Update version and add to README 2020-01-29 11:30:22 -08:00
Matt Nadareski
18a17d2579 Port LibCrypt sub detection from psxt001z 2020-01-29 11:28:03 -08:00
Matt Nadareski
4e664cbe0f Add *nix and Mac executable headers to checking 2019-10-30 12:06:40 -07:00
Matt Nadareski
99f4909e9b Missing or moved files shouldn't cause an issue (#5) 2019-10-25 23:25:30 -04:00
Matt Nadareski
482644af85 Fix Memory Issues (#4)
* Fix a couple of protection scans (possible mem issues)

* Don't open the file contents on path scan for antimodchip

* IS-CAB intermediate filtering to reduce scan times

* Update NuGet version
2019-10-24 16:09:43 -04:00
695 changed files with 103466 additions and 4700 deletions

.gitmodules vendored Normal file

@@ -0,0 +1,3 @@
[submodule "BurnOutSharp/External/stormlibsharp"]
path = BurnOutSharp/External/stormlibsharp
url = https://github.com/robpaveza/stormlibsharp.git
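Note: with the external mirrors replaced by submodules (see the stormlibsharp, hllib, libmspack4n, and LessIO commits above), a fresh checkout likely needs git clone --recursive, or git submodule update --init --recursive in an existing clone, before the solution builds.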

.vscode/launch.json vendored Normal file

@@ -0,0 +1,27 @@
{
// Use IntelliSense to find out which attributes exist for C# debugging
// Use hover for the description of the existing attributes
// For further information visit https://github.com/OmniSharp/omnisharp-vscode/blob/master/debugger-launchjson.md
"version": "0.2.0",
"configurations": [
{
"name": ".NET Core Launch (Test)",
"type": "coreclr",
"request": "launch",
"preLaunchTask": "build",
// If you have changed target frameworks, make sure to update the program path.
"program": "${workspaceFolder}/Test/bin/Debug/net6.0/Test.dll",
"args": [],
"cwd": "${workspaceFolder}/Test",
// For more information about the 'console' field, see https://aka.ms/VSCode-CS-LaunchJson-Console
"console": "internalConsole",
"stopAtEntry": false
},
{
"name": ".NET Core Attach",
"type": "coreclr",
"request": "attach",
"processId": "${command:pickProcess}"
}
]
}

.vscode/tasks.json vendored Normal file

@@ -0,0 +1,42 @@
{
"version": "2.0.0",
"tasks": [
{
"label": "build",
"command": "dotnet",
"type": "process",
"args": [
"build",
"${workspaceFolder}/BurnOutSharp.sln",
"/property:GenerateFullPaths=true",
"/consoleloggerparameters:NoSummary"
],
"problemMatcher": "$msCompile"
},
{
"label": "publish",
"command": "dotnet",
"type": "process",
"args": [
"publish",
"${workspaceFolder}/BurnOutSharp.sln",
"/property:GenerateFullPaths=true",
"/consoleloggerparameters:NoSummary"
],
"problemMatcher": "$msCompile"
},
{
"label": "watch",
"command": "dotnet",
"type": "process",
"args": [
"watch",
"run",
"${workspaceFolder}/BurnOutSharp.sln",
"/property:GenerateFullPaths=true",
"/consoleloggerparameters:NoSummary"
],
"problemMatcher": "$msCompile"
}
]
}
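Note: the build task above is what the launch configuration's preLaunchTask invokes; it is equivalent to running dotnet build BurnOutSharp.sln /property:GenerateFullPaths=true /consoleloggerparameters:NoSummary from the repository root.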


@@ -0,0 +1,31 @@
using System.Collections.Generic;
namespace BurnOutSharp.ASN1
{
/// <summary>
/// ASN.1 Parser
/// </summary>
public static class AbstractSyntaxNotationOne
{
/// <summary>
/// Parse a byte array into a DER-encoded ASN.1 structure
/// </summary>
/// <param name="data">Byte array representing the data</param>
/// <param name="pointer">Current pointer into the data</param>
/// <returns>List of top-level TypeLengthValue objects parsed from the data</returns>
public static List<TypeLengthValue> Parse(byte[] data, int pointer)
{
// Create the output list to return
var topLevelValues = new List<TypeLengthValue>();
// Loop through the data and return all top-level values
while (pointer < data.Length)
{
var topLevelValue = new TypeLengthValue(data, ref pointer);
topLevelValues.Add(topLevelValue);
}
return topLevelValues;
}
}
}
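Note: a minimal usage sketch for the parser above, assuming only the Parse signature and the TypeLengthValue constructor shown in this diff; the input file name is hypothetical.

using System;
using System.Collections.Generic;
using System.IO;
using BurnOutSharp.ASN1;

// Parse a DER-encoded blob (e.g. an extracted PKCS#7 signature) and
// report how many top-level type-length-value records it contains.
byte[] derData = File.ReadAllBytes("signature.der"); // hypothetical input
List<TypeLengthValue> values = AbstractSyntaxNotationOne.Parse(derData, pointer: 0);
Console.WriteLine($"Parsed {values.Count} top-level value(s)");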


@@ -0,0 +1,27 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFrameworks>net48;net6.0</TargetFrameworks>
<RuntimeIdentifiers>win-x86;win-x64;linux-x64;osx-x64</RuntimeIdentifiers>
<Title>BurnOutSharp.ASN1</Title>
<AssemblyName>BurnOutSharp.ASN1</AssemblyName>
<Authors>Matt Nadareski</Authors>
<Product>BurnOutSharp</Product>
<Copyright>Copyright (c)2022 Matt Nadareski</Copyright>
<RepositoryUrl>https://github.com/mnadareski/BurnOutSharp</RepositoryUrl>
<Version>2.7</Version>
<AssemblyVersion>2.7</AssemblyVersion>
<FileVersion>2.7</FileVersion>
<IncludeSource>true</IncludeSource>
<IncludeSymbols>true</IncludeSymbols>
</PropertyGroup>
<PropertyGroup>
<AllowUnsafeBlocks>true</AllowUnsafeBlocks>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\BurnOutSharp.Utilities\BurnOutSharp.Utilities.csproj" />
</ItemGroup>
</Project>
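Note: the project multi-targets net48 and net6.0 with win/linux/osx runtime identifiers; a single framework can presumably be built with the stock CLI, e.g. dotnet build -f net6.0.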


@@ -0,0 +1,55 @@
using System;
namespace BurnOutSharp.ASN1
{
/// <summary>
/// ASN.1 type indicators
/// </summary>
[Flags]
public enum ASN1Type : byte
{
#region Modifiers
V_ASN1_UNIVERSAL = 0x00,
V_ASN1_PRIMITIVE_TAG = 0x1F,
V_ASN1_CONSTRUCTED = 0x20,
V_ASN1_APPLICATION = 0x40,
V_ASN1_CONTEXT_SPECIFIC = 0x80,
V_ASN1_PRIVATE = 0xC0,
#endregion
#region Types
V_ASN1_EOC = 0x00,
V_ASN1_BOOLEAN = 0x01,
V_ASN1_INTEGER = 0x02,
V_ASN1_BIT_STRING = 0x03,
V_ASN1_OCTET_STRING = 0x04,
V_ASN1_NULL = 0x05,
V_ASN1_OBJECT = 0x06,
V_ASN1_OBJECT_DESCRIPTOR = 0x07,
V_ASN1_EXTERNAL = 0x08,
V_ASN1_REAL = 0x09,
V_ASN1_ENUMERATED = 0x0A,
V_ASN1_UTF8STRING = 0x0C,
V_ASN1_SEQUENCE = 0x10,
V_ASN1_SET = 0x11,
V_ASN1_NUMERICSTRING = 0x12,
V_ASN1_PRINTABLESTRING = 0x13,
V_ASN1_T61STRING = 0x14,
V_ASN1_TELETEXSTRING = 0x14,
V_ASN1_VIDEOTEXSTRING = 0x15,
V_ASN1_IA5STRING = 0x16,
V_ASN1_UTCTIME = 0x17,
V_ASN1_GENERALIZEDTIME = 0x18,
V_ASN1_GRAPHICSTRING = 0x19,
V_ASN1_ISO64STRING = 0x1A,
V_ASN1_VISIBLESTRING = 0x1A,
V_ASN1_GENERALSTRING = 0x1B,
V_ASN1_UNIVERSALSTRING = 0x1C,
V_ASN1_BMPSTRING = 0x1E,
#endregion
}
}
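Note: a short sketch (not repository code) of how the modifier flags above compose with the primitive tag numbers; in DER, a constructed SEQUENCE is encoded with tag byte 0x30.

using System;
using BurnOutSharp.ASN1;

// 0x20 (V_ASN1_CONSTRUCTED) OR'd with 0x10 (V_ASN1_SEQUENCE) yields tag byte 0x30.
ASN1Type sequenceTag = ASN1Type.V_ASN1_CONSTRUCTED | ASN1Type.V_ASN1_SEQUENCE;
Console.WriteLine($"0x{(byte)sequenceTag:X2}"); // prints 0x30

// Masking with V_ASN1_PRIMITIVE_TAG (0x1F) recovers the bare tag number.
ASN1Type tagNumber = (ASN1Type)(0x30 & (byte)ASN1Type.V_ASN1_PRIMITIVE_TAG);
Console.WriteLine(tagNumber); // V_ASN1_SEQUENCE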


@@ -0,0 +1,25 @@
namespace BurnOutSharp.ASN1
{
#pragma warning disable IDE0011
/// <summary>
/// Methods related to Object Identifiers (OID) and ASN.1 notation
/// </summary>
public static partial class ObjectIdentifier
{
/// <summary>
/// Parse an OID in separated-value notation into ASN.1 notation
/// </summary>
/// <param name="values">List of values to check against</param>
/// <param name="index">Current index into the list</param>
/// <returns>ASN.1 formatted string, if possible</returns>
public static string ParseOIDToASN1Notation(ulong[] values, ref int index)
{
// TODO: Once the modified OID-IRI formatting is done, make an ASN.1 notation version
return null;
}
}
#pragma warning restore IDE0011
}

File diff suppressed because it is too large


@@ -0,0 +1,870 @@
using System.Linq;
using System.Text;
namespace BurnOutSharp.ASN1
{
#pragma warning disable IDE0011
/// <summary>
/// Methods related to Object Identifiers (OID) and OID-IRI formatting
/// </summary>
public static partial class ObjectIdentifier
{
/// <summary>
/// Parse an OID in separated-value notation into OID-IRI notation
/// </summary>
/// <param name="values">List of values to check against</param>
/// <param name="index">Current index into the list</param>
/// <returns>OID-IRI formatted string, if possible</returns>
/// <see href="http://www.oid-info.com/index.htm"/>
public static string ParseOIDToOIDIRINotation(ulong[] values)
{
// If we have an invalid set of values, we can't do anything
if (values == null || values.Length == 0)
return null;
// Set the initial index
int index = 0;
// Get a string builder for the path
var nameBuilder = new StringBuilder();
// Try to parse the standard value
string standard = ParseOIDToOIDIRINotation(values, ref index);
if (standard == null)
return null;
// Add the standard value to the output
nameBuilder.Append(standard);
// If we have no more items
if (index == values.Length)
return nameBuilder.ToString();
// Add trailing items as just values
nameBuilder.Append("/");
nameBuilder.Append(string.Join("/", values.Skip(index)));
// Create and return the string
return nameBuilder.ToString();
}
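// Example (illustrative, not in the original source): for the dotted OID 2.16.840
// (joint-iso-itu-t(2) country(16) us(840)) this walks the tables below and yields
// "/Country/US"; unmapped trailing arcs are appended verbatim, so
// ParseOIDToOIDIRINotation(new ulong[] { 2, 16, 840, 1 }) returns "/Country/US/1".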
/// <summary>
/// Parse an OID in separated-value notation into OID-IRI notation
/// </summary>
/// <param name="values">List of values to check against</param>
/// <param name="index">Current index into the list</param>
/// <returns>OID-IRI formatted string, if possible</returns>
/// <see href="http://www.oid-info.com/index.htm"/>
private static string ParseOIDToOIDIRINotation(ulong[] values, ref int index)
{
// If we have an invalid set of values, we can't do anything
if (values == null || values.Length == 0)
return null;
// If we have an invalid index, we can't do anything
if (index < 0 || index >= values.Length)
return null;
#region Start
switch (values[index++])
{
case 0: goto oid_0;
case 1: goto oid_1;
case 2: goto oid_2;
default: return $"/{values[index - 1]}";
}
#endregion
// itu-t, ccitt, itu-r
#region 0.*
oid_0:
if (index == values.Length) return "/ITU-T";
switch (values[index++])
{
case 0: goto oid_0_0;
case 2: return "/ITU-T/Administration";
case 3: return "/ITU-T/Network-Operator";
case 4: return "/ITU-T/Identified-Organization";
case 5: return "/ITU-R/R-Recommendation";
case 9: return "/ITU-T/Data";
default: return $"/ITU-T/{values[index - 1]}";
};
// recommendation
#region 0.0.*
oid_0_0:
if (index == values.Length) return "/ITU-T/Recommendation";
switch (values[index++])
{
case 1: return "/ITU-T/Recommendation/A";
case 2: return "/ITU-T/Recommendation/B";
case 3: return "/ITU-T/Recommendation/C";
case 4: return "/ITU-T/Recommendation/D";
case 5: return "/ITU-T/Recommendation/E";
case 6: return "/ITU-T/Recommendation/F";
case 7: return "/ITU-T/Recommendation/G";
case 8: return "/ITU-T/Recommendation/H";
case 9: return "/ITU-T/Recommendation/I";
case 10: return "/ITU-T/Recommendation/J";
case 11: return "/ITU-T/Recommendation/K";
case 12: return "/ITU-T/Recommendation/L";
case 13: return "/ITU-T/Recommendation/M";
case 14: return "/ITU-T/Recommendation/N";
case 15: return "/ITU-T/Recommendation/O";
case 16: return "/ITU-T/Recommendation/P";
case 17: return "/ITU-T/Recommendation/Q";
case 18: return "/ITU-T/Recommendation/R";
case 19: return "/ITU-T/Recommendation/S";
case 20: return "/ITU-T/Recommendation/T";
case 21: return "/ITU-T/Recommendation/U";
case 22: return "/ITU-T/Recommendation/V";
case 24: return "/ITU-T/Recommendation/X";
case 25: return "/ITU-T/Recommendation/Y";
case 26: return "/ITU-T/Recommendation/Z";
default: return $"/ITU-T/Recommendation/{values[index - 1]}";
}
#endregion
#endregion
// iso
#region 1.*
oid_1:
if (index == values.Length) return "/ISO";
switch (values[index++])
{
case 0: return "/ISO/Standard";
case 1: return "/ISO/Registration-Authority";
case 2: goto oid_1_2;
case 3: return "/ISO/Identified-Organization";
default: return $"/ISO/{values[index - 1]}";
}
// member-body
#region 1.2.*
oid_1_2:
if (index == values.Length) return "/ISO/Member-Body";
switch (values[index++])
{
case 36: return "/ISO/Member-Body/AU";
case 40: return "/ISO/Member-Body/AT";
case 56: return "/ISO/Member-Body/BE";
case 124: return "/ISO/Member-Body/CA";
case 156: return "/ISO/Member-Body/CN";
case 203: return "/ISO/Member-Body/CZ";
case 208: return "/ISO/Member-Body/DK";
case 246: return "/ISO/Member-Body/FI";
case 250: return "/ISO/Member-Body/FR";
case 276: return "/ISO/Member-Body/DE";
case 300: return "/ISO/Member-Body/GR";
case 344: return "/ISO/Member-Body/HK";
case 372: return "/ISO/Member-Body/IE";
case 392: return "/ISO/Member-Body/JP";
case 398: return "/ISO/Member-Body/KZ";
case 410: return "/ISO/Member-Body/KR";
case 498: return "/ISO/Member-Body/MD";
case 528: return "/ISO/Member-Body/NL";
case 566: return "/ISO/Member-Body/NG";
case 578: return "/ISO/Member-Body/NO";
case 616: return "/ISO/Member-Body/PL";
case 643: return "/ISO/Member-Body/RU";
case 702: return "/ISO/Member-Body/SG";
case 752: return "/ISO/Member-Body/SE";
case 804: return "/ISO/Member-Body/UA";
case 826: return "/ISO/Member-Body/GB";
case 840: return "/ISO/Member-Body/US";
default: return $"/ISO/Member-Body/{values[index - 1]}";
}
#endregion
#endregion
// joint-iso-itu-t, joint-iso-ccitt
#region 2.*
oid_2:
if (index == values.Length) return "/Joint-ISO-ITU-T";
switch (values[index++])
{
case 1: return "/ASN.1";
case 16: goto oid_2_16;
case 17: return "/Joint-ISO-ITU-T/Registration-Procedures";
case 23: return "/Joint-ISO-ITU-T/International-Organizations";
case 25: goto oid_2_25;
case 27: return "/Tag-Based";
case 28: return "/Joint-ISO-ITU-T/ITS";
case 41: return "/BIP";
case 42: goto oid_2_42;
case 48: goto oid_2_48;
case 49: goto oid_2_49;
case 50: return "/OIDResolutionSystem";
case 51: return "/GS1";
case 52: return "/Joint-ISO-ITU-T/UAV";
case 999: return "/Joint-ISO-ITU-T/Example";
default: return $"/Joint-ISO-ITU-T/{values[index - 1]}";
}
// country
#region 2.16.*
oid_2_16:
if (index == values.Length) return "/Country";
switch (values[index++])
{
case 4: return "/Country/AF";
case 8: return "/Country/AL";
case 12: return "/Country/DZ";
case 20: return "/Country/AD";
case 24: return "/Country/AO";
case 28: return "/Country/AG";
case 31: return "/Country/AZ";
case 32: return "/Country/AR";
case 36: return "/Country/AU";
case 40: return "/Country/AT";
case 44: return "/Country/BS";
case 48: return "/Country/BH";
case 50: return "/Country/BD";
case 51: return "/Country/AM";
case 52: return "/Country/BB";
case 56: return "/Country/BE";
case 60: return "/Country/BM";
case 64: return "/Country/BT";
case 68: return "/Country/BO";
case 70: return "/Country/BA";
case 72: return "/Country/BW";
case 76: return "/Country/BR";
case 84: return "/Country/BZ";
case 90: return "/Country/SB";
case 96: return "/Country/BN";
case 100: return "/Country/BG";
case 104: return "/Country/MM";
case 108: return "/Country/BI";
case 112: return "/Country/BY";
case 116: return "/Country/KH";
case 120: return "/Country/CM";
case 124: return "/Country/CA";
case 132: return "/Country/CV";
case 140: return "/Country/CF";
case 144: return "/Country/LK";
case 148: return "/Country/TD";
case 152: return "/Country/CL";
case 156: return "/Country/CN";
case 158: return "/Country/TW";
case 170: return "/Country/CO";
case 174: return "/Country/KM";
case 178: return "/Country/CG";
case 180: return "/Country/CD";
case 188: return "/Country/CR";
case 191: return "/Country/HR";
case 192: return "/Country/CU";
case 196: return "/Country/CY";
case 203: return "/Country/CZ";
case 204: return "/Country/BJ";
case 208: return "/Country/DK";
case 212: return "/Country/DM";
case 214: return "/Country/DO";
case 218: return "/Country/EC";
case 222: return "/Country/SV";
case 226: return "/Country/GQ";
case 231: return "/Country/ET";
case 232: return "/Country/ER";
case 233: return "/Country/EE";
case 242: return "/Country/FJ";
case 246: return "/Country/FI";
case 250: return "/Country/FR";
case 262: return "/Country/DJ";
case 266: return "/Country/GA";
case 268: return "/Country/GE";
case 270: return "/Country/GM";
case 275: return "/Country/PS";
case 276: return "/Country/DE";
case 288: return "/Country/GH";
case 296: return "/Country/KI";
case 300: return "/Country/GR";
case 308: return "/Country/GD";
case 320: return "/Country/GT";
case 324: return "/Country/GN";
case 328: return "/Country/GY";
case 332: return "/Country/HT";
case 336: return "/Country/VA";
case 340: return "/Country/HN";
case 344: return "/Country/HK";
case 348: return "/Country/HU";
case 352: return "/Country/IS";
case 356: return "/Country/IN";
case 360: return "/Country/ID";
case 364: return "/Country/IR";
case 368: return "/Country/IQ";
case 372: return "/Country/IE";
case 376: return "/Country/IL";
case 380: return "/Country/IT";
case 384: return "/Country/CI";
case 388: return "/Country/JM";
case 392: return "/Country/JP";
case 398: return "/Country/KZ";
case 400: return "/Country/JO";
case 404: return "/Country/KE";
case 408: return "/Country/KP";
case 410: return "/Country/KR";
case 414: return "/Country/KW";
case 417: return "/Country/KG";
case 418: return "/Country/LA";
case 422: return "/Country/LB";
case 426: return "/Country/LS";
case 428: return "/Country/LV";
case 430: return "/Country/LR";
case 434: return "/Country/LY";
case 438: return "/Country/LI";
case 440: return "/Country/LT";
case 442: return "/Country/LU";
case 450: return "/Country/MG";
case 454: return "/Country/MW";
case 458: return "/Country/MY";
case 462: return "/Country/MV";
case 466: return "/Country/ML";
case 470: return "/Country/MT";
case 478: return "/Country/MR";
case 480: return "/Country/MU";
case 484: return "/Country/MX";
case 492: return "/Country/MC";
case 496: return "/Country/MN";
case 498: return "/Country/MD";
case 499: return "/Country/ME";
case 504: return "/Country/MA";
case 508: return "/Country/MZ";
case 512: return "/Country/OM";
case 516: return "/Country/NA";
case 520: return "/Country/NR";
case 524: return "/Country/NP";
case 528: return "/Country/NL";
case 530: return "/Country/AN";
case 548: return "/Country/VU";
case 554: return "/Country/NZ";
case 558: return "/Country/NI";
case 562: return "/Country/NE";
case 566: return "/Country/NG";
case 578: return "/Country/NO";
case 583: return "/Country/FM";
case 584: return "/Country/MH";
case 585: return "/Country/PW";
case 586: return "/Country/PK";
case 591: return "/Country/PA";
case 598: return "/Country/PG";
case 600: return "/Country/PY";
case 604: return "/Country/PE";
case 608: return "/Country/PH";
case 616: return "/Country/PL";
case 620: return "/Country/PT";
case 624: return "/Country/GW";
case 626: return "/Country/TL";
case 634: return "/Country/QA";
case 642: return "/Country/RO";
case 643: return "/Country/RU";
case 646: return "/Country/RW";
case 659: return "/Country/KN";
case 662: return "/Country/LC";
case 670: return "/Country/VC";
case 674: return "/Country/SM";
case 678: return "/Country/ST";
case 682: return "/Country/SA";
case 686: return "/Country/SN";
case 688: return "/Country/RS";
case 690: return "/Country/SC";
case 694: return "/Country/SL";
case 702: return "/Country/SG";
case 703: return "/Country/SK";
case 704: return "/Country/VN";
case 705: return "/Country/SI";
case 706: return "/Country/SO";
case 710: return "/Country/ZA";
case 716: return "/Country/ZW";
case 724: return "/Country/ES";
case 728: return "/Country/SS";
case 729: return "/Country/SD";
case 740: return "/Country/SR";
case 748: return "/Country/SZ";
case 752: return "/Country/SE";
case 756: return "/Country/CH";
case 760: return "/Country/SY";
case 762: return "/Country/TJ";
case 764: return "/Country/TH";
case 768: return "/Country/TG";
case 776: return "/Country/TO";
case 780: return "/Country/TT";
case 784: return "/Country/AE";
case 788: return "/Country/TN";
case 792: return "/Country/TR";
case 795: return "/Country/TM";
case 798: return "/Country/TV";
case 800: return "/Country/UG";
case 804: return "/Country/UA";
case 807: return "/Country/MK";
case 818: return "/Country/EG";
case 826: return "/Country/GB";
case 834: return "/Country/TZ";
case 840: return "/Country/US";
case 854: return "/Country/BF";
case 858: return "/Country/UY";
case 860: return "/Country/UZ";
case 862: return "/Country/VE";
case 882: return "/Country/WS";
case 887: return "/Country/YE";
case 894: return "/Country/ZM";
default: return $"/Country/{values[index - 1]}";
}
#endregion
// uuid [TODO: Requires 128-bit values]
#region 2.25.*
oid_2_25:
if (index == values.Length) return "/Joint-ISO-ITU-T/UUID";
switch (values[index++])
{
case 0: return "/Joint-ISO-ITU-T/UUID/00000000-0000-0000-0000-000000000000";
//case 288786655511405443130567505384701230: return "/Joint-ISO-ITU-T/UUID/00379e48-0a2b-1085-b288-0002a5d5fd2e";
//case 987895962269883002155146617097157934: return "/Joint-ISO-ITU-T/UUID/00be4308-0c89-1085-8ea0-0002a5d5fd2e";
//case 1858228783942312576083372383319475483: return "/Joint-ISO-ITU-T/UUID/0165e1c0-a655-11e0-95b8-0002a5d5c51b";
//case 2474299330026746002885628159579243803: return "/Joint-ISO-ITU-T/UUID/01dc8860-25fb-11da-82b2-0002a5d5c51b";
//case 3263645701162998421821186056373271854: return "/Joint-ISO-ITU-T/UUID/02748e28-08c4-1085-b21d-0002a5d5fd2e";
//case 3325839809379844461264382260940242222: return "/Joint-ISO-ITU-T/UUID/02808890-0ad8-1085-9bdf-0002a5d5fd2e";
// TODO: Left off at http://www.oid-info.com/get/2.25.3664154270495270126161055518190585115
default: return $"/Joint-ISO-ITU-T/UUID/{values[index - 1]}";
}
#endregion
// telebiometrics
#region 2.42.*
oid_2_42:
if (index == values.Length) return "/Telebiometrics";
switch (values[index++])
{
case 0: goto oid_2_42_0;
case 1: goto oid_2_42_1;
case 2: goto oid_2_42_2;
case 3: goto oid_2_42_3;
default: return $"/Telebiometrics/{values[index - 1]}";
}
// modules
#region 2.42.0.*
oid_2_42_0:
if (index == values.Length) return "/Telebiometrics/Modules";
switch (values[index++])
{
case 0: goto oid_2_42_0_0;
default: return $"/Telebiometrics/Modules/{values[index - 1]}";
}
// main
#region 2.42.0.0.*
oid_2_42_0_0:
if (index == values.Length) return "/Telebiometrics/Modules/Main_Module";
switch (values[index++])
{
case 1: return "/Telebiometrics/Modules/Main_Module/Version1";
default: return $"/Telebiometrics/Modules/Main_Module/{values[index - 1]}";
}
#endregion
#endregion
// tmm
#region 2.42.1.*
oid_2_42_1:
if (index == values.Length) return "/Telebiometrics/TMM";
switch (values[index++])
{
case 0: goto oid_2_42_1_0;
case 1: goto oid_2_42_1_1;
case 2: goto oid_2_42_1_2;
case 3: goto oid_2_42_1_3;
case 4: return "/Telebiometrics/TMM/Practitioners";
default: return $"/Telebiometrics/TMM/{values[index - 1]}";
}
// modules
#region 2.42.1.0.*
oid_2_42_1_0:
if (index == values.Length) return "/Telebiometrics/TMM/Modules";
switch (values[index++])
{
case 0: goto oid_2_42_1_0_0;
default: return $"/Telebiometrics/TMM/Modules/{values[index - 1]}";
}
// main
#region 2.42.1.0.0.*
oid_2_42_1_0_0:
if (index == values.Length) return "/Telebiometrics/TMM/Modules/Main";
switch (values[index++])
{
case 0: return "/Telebiometrics/TMM/Modules/Main/First_Version";
default: return $"/Telebiometrics/TMM/Modules/Main/{values[index - 1]}";
}
#endregion
#endregion
// measures, metric
#region 2.42.1.1.*
oid_2_42_1_1:
if (index == values.Length) return "/Telebiometrics/TMM/Measures";
switch (values[index++])
{
case 1: goto oid_2_42_1_1_1;
case 2: return "/Telebiometrics/TMM/Measures/Units";
case 3: return "/Telebiometrics/TMM/Measures/Symbols";
case 4: return "/Telebiometrics/TMM/Measures/Conditions";
case 5: goto oid_2_42_1_1_5;
default: return $"/Telebiometrics/TMM/Measures/{values[index - 1]}";
}
// quantities
#region 2.42.1.1.1.*
oid_2_42_1_1_1:
if (index == values.Length) return "/Telebiometrics/TMM/Measures/Quantities";
switch (values[index++])
{
case 1: return "/Telebiometrics/TMM/Measures/Quantities/Physics";
case 2: return "/Telebiometrics/TMM/Measures/Quantities/Chemistry";
case 3: return "/Telebiometrics/TMM/Measures/Quantities/Biology";
case 4: return "/Telebiometrics/TMM/Measures/Quantities/Culturology";
case 5: return "/Telebiometrics/TMM/Measures/Quantities/Psychology";
default: return $"/Telebiometrics/TMM/Measures/Quantities/{values[index - 1]}";
}
#endregion
// methods
#region 2.42.1.1.5.*
oid_2_42_1_1_5:
if (index == values.Length) return "/Telebiometrics/TMM/Measures/Methods";
switch (values[index++])
{
case 1: return "/Telebiometrics/TMM/Measures/Methods/Physics";
case 2: return "/Telebiometrics/TMM/Measures/Methods/Chemistry";
case 3: return "/Telebiometrics/TMM/Measures/Methods/Biology";
case 4: return "/Telebiometrics/TMM/Measures/Methods/Culturology";
case 5: return "/Telebiometrics/TMM/Measures/Methods/Psychology";
default: return $"/Telebiometrics/TMM/Measures/Methods/{values[index - 1]}";
}
#endregion
#endregion
// fields-of-study, scientific
#region 2.42.1.2.*
oid_2_42_1_2:
if (index == values.Length) return "/Telebiometrics/TMM/Fields_of_Study";
switch (values[index++])
{
case 1: return "/Telebiometrics/TMM/Fields_of_Study/Physics";
case 2: return "/Telebiometrics/TMM/Fields_of_Study/Chemistry";
case 3: return "/Telebiometrics/TMM/Fields_of_Study/Biology";
case 4: return "/Telebiometrics/TMM/Fields_of_Study/Culturology";
case 5: return "/Telebiometrics/TMM/Fields_of_Study/Psychology";
default: return $"/Telebiometrics/TMM/Fields_of_Study/{values[index - 1]}";
}
#endregion
// modalities, sensory
#region 2.42.1.3.*
oid_2_42_1_3:
if (index == values.Length) return "/Telebiometrics/TMM/Modalities";
switch (values[index++])
{
case 1: return "/Telebiometrics/TMM/Modalities/Tango";
case 2: return "/Telebiometrics/TMM/Modalities/Video";
case 3: return "/Telebiometrics/TMM/Modalities/Audio";
case 4: return "/Telebiometrics/TMM/Modalities/Chemo";
case 5: return "/Telebiometrics/TMM/Modalities/Radio";
case 6: return "/Telebiometrics/TMM/Modalities/Calor";
case 7: return "/Telebiometrics/TMM/Modalities/Electro";
default: return $"/Telebiometrics/TMM/Modalities/{values[index - 1]}";
}
#endregion
#endregion
// human-physiology
#region 2.42.2.*
oid_2_42_2:
if (index == values.Length) return "/Telebiometrics/Human_Physiology";
switch (values[index++])
{
case 0: goto oid_2_42_2_0;
case 1: goto oid_2_42_2_1;
case 2: return "/Telebiometrics/Human_Physiology/Symbol_Combinations";
default: return $"/Telebiometrics/Human_Physiology/{values[index - 1]}";
}
// modules
#region 2.42.2.0.*
oid_2_42_2_0:
if (index == values.Length) return "/Telebiometrics/Human_Physiology/Modules";
switch (values[index++])
{
case 0: goto oid_2_42_2_0_0;
default: return $"/Telebiometrics/Human_Physiology/Modules/{values[index - 1]}";
}
// main
#region 2.42.2.0.0.*
oid_2_42_2_0_0:
if (index == values.Length) return "/Telebiometrics/Human_Physiology/Modules/Main_Module";
switch (values[index++])
{
case 0: return "/Telebiometrics/Human_Physiology/Modules/Main_Module/First_Version";
default: return $"/Telebiometrics/Human_Physiology/Modules/Main_Module/{values[index - 1]}";
}
#endregion
#endregion
// symbols
#region 2.42.2.1.*
oid_2_42_2_1:
if (index == values.Length) return "/Telebiometrics/Human_Physiology/Symbols";
switch (values[index++])
{
case 1: return "/Telebiometrics/Human_Physiology/Symbols/Tango_in";
case 2: return "/Telebiometrics/Human_Physiology/Symbols/Video_in";
case 3: return "/Telebiometrics/Human_Physiology/Symbols/Audio_in";
case 4: return "/Telebiometrics/Human_Physiology/Symbols/Chemo_in";
case 5: return "/Telebiometrics/Human_Physiology/Symbols/Radio_in";
case 6: return "/Telebiometrics/Human_Physiology/Symbols/Calor_in";
case 7: return "/Telebiometrics/Human_Physiology/Symbols/Tango_out";
case 8: return "/Telebiometrics/Human_Physiology/Symbols/Video_out";
case 9: return "/Telebiometrics/Human_Physiology/Symbols/Audio_out";
case 10: return "/Telebiometrics/Human_Physiology/Symbols/Chemo_out";
case 11: return "/Telebiometrics/Human_Physiology/Symbols/Radio_out";
case 12: return "/Telebiometrics/Human_Physiology/Symbols/Calor_out";
case 13: return "/Telebiometrics/Human_Physiology/Symbols/Safe";
case 14: return "/Telebiometrics/Human_Physiology/Symbols/Threshold";
default: return $"/Telebiometrics/Human_Physiology/Symbols/{values[index - 1]}";
}
#endregion
#endregion
// obj-cat, telehealth, e-health-protocol, th
#region 2.42.3.*
oid_2_42_3:
if (index == values.Length) return "/Telebiometrics/E_Health_Protocol";
switch (values[index++])
{
case 0: goto oid_2_42_3_0;
case 1: return "/Telebiometrics/E_Health_Protocol/[Patient schemes]";
case 2: return "/Telebiometrics/E_Health_Protocol/[Medical staff schemes]";
case 3: return "/Telebiometrics/E_Health_Protocol/[Observer schemes]";
case 4: return "/Telebiometrics/E_Health_Protocol/[Pharmaceutical schemes]";
case 5: return "/Telebiometrics/E_Health_Protocol/[Laboratory schemes]";
case 6: return "/Telebiometrics/E_Health_Protocol/[Drug manufacturer schemes]";
case 7: return "/Telebiometrics/E_Health_Protocol/[Medical device schemes]";
case 8: return "/Telebiometrics/E_Health_Protocol/[Medical software schemes]";
case 9: return "/Telebiometrics/E_Health_Protocol/[Medical insurance schemes]";
case 10: return "/Telebiometrics/E_Health_Protocol/[Medical record schemes]";
default: return $"/Telebiometrics/E_Health_Protocol/{values[index - 1]}";
}
// obj-cat, telehealth, e-health-protocol, th
#region 2.42.3.0.*
oid_2_42_3_0:
if (index == values.Length) return "/Telebiometrics/E_Health_Protocol/Modules";
switch (values[index++])
{
case 0: goto oid_2_42_3_0_0;
case 1: goto oid_2_42_3_0_1;
case 2: goto oid_2_42_3_0_2;
case 3: goto oid_2_42_3_0_3;
case 4: goto oid_2_42_3_0_4;
case 5: goto oid_2_42_3_0_5;
default: return $"/Telebiometrics/E_Health_Protocol/Modules/{values[index - 1]}";
}
// identification
#region 2.42.3.0.0.*
oid_2_42_3_0_0:
if (index == values.Length) return "/Telebiometrics/E_Health_Protocol/Modules/Identification";
switch (values[index++])
{
case 1: return "/Telebiometrics/E_Health_Protocol/Modules/Identification/Version1";
default: return $"/Telebiometrics/E_Health_Protocol/Modules/Identification/{values[index - 1]}";
}
#endregion
// set-up
#region 2.42.3.0.1.*
oid_2_42_3_0_1:
if (index == values.Length) return "/Telebiometrics/E_Health_Protocol/Modules/Setup";
switch (values[index++])
{
case 1: return "/Telebiometrics/E_Health_Protocol/Modules/Setup/Version1";
default: return $"/Telebiometrics/E_Health_Protocol/Modules/Setup/{values[index - 1]}";
}
#endregion
// send-and-ack
#region 2.42.3.0.2.*
oid_2_42_3_0_2:
if (index == values.Length) return "/Telebiometrics/E_Health_Protocol/Modules/Send-and-ack";
switch (values[index++])
{
case 1: return "/Telebiometrics/E_Health_Protocol/Modules/Send-and-ack/Version1";
default: return $"/Telebiometrics/E_Health_Protocol/Modules/Send-and-ack/{values[index - 1]}";
}
#endregion
// command-response
#region 2.42.3.0.3.*
oid_2_42_3_0_3:
if (index == values.Length) return "/Telebiometrics/E_Health_Protocol/Modules/Command-response";
switch (values[index++])
{
case 1: return "/Telebiometrics/E_Health_Protocol/Modules/Command-response/Version1";
default: return $"/Telebiometrics/E_Health_Protocol/Modules/Command-response/{values[index - 1]}";
}
#endregion
// quantity-and-units
#region 2.42.3.0.4.*
oid_2_42_3_0_4:
if (index == values.Length) return "/Telebiometrics/E_Health_Protocol/Modules/Quantities_And_Units";
switch (values[index++])
{
case 1: return "/Telebiometrics/E_Health_Protocol/Modules/Quantities_And_Units/Version1";
default: return $"/Telebiometrics/E_Health_Protocol/Modules/Quantities_And_Units/{values[index - 1]}";
}
#endregion
// examples
#region 2.42.3.0.5.*
oid_2_42_3_0_5:
if (index == values.Length) return "/Telebiometrics/E_Health_Protocol/Modules/Examples";
switch (values[index++])
{
case 0: return "/Telebiometrics/E_Health_Protocol/Modules/Examples/Command_Response";
case 1: return "/Telebiometrics/E_Health_Protocol/Modules/Examples/Data_Message";
default: return $"/Telebiometrics/E_Health_Protocol/Modules/Examples/{values[index - 1]}";
}
#endregion
#endregion
#endregion
#endregion
// cybersecurity
#region 2.48.*
oid_2_48:
if (index == values.Length) return "/Cybersecurity";
switch (values[index++])
{
case 1: return "/Cybersecurity/Country";
case 2: return "/Cybersecurity/International-Org";
default: return $"/Cybersecurity/{values[index - 1]}";
}
#endregion
// alerting
#region 2.49.*
oid_2_49:
if (index == values.Length) return "/Alerting";
switch (values[index++])
{
case 0: return "/Alerting/WMO";
default: return $"/Alerting/{values[index - 1]}";
}
#endregion
#endregion
}
}
#pragma warning restore IDE0011
}
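For reference, a minimal sketch of how the mapping above is consumed (assuming the surrounding method is the ParseOIDToOIDIRINotation referenced by the TLV formatter later in this diff; the input array is a hypothetical example):
// A hypothetical lookup: 2.49.0 resolves through the Alerting arc above
ulong[] alertingOid = new ulong[] { 2, 49, 0 };
string iri = ObjectIdentifier.ParseOIDToOIDIRINotation(alertingOid);
// iri == "/Alerting/WMO"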

View File

@@ -0,0 +1,71 @@
using System;
using System.Collections.Generic;
namespace BurnOutSharp.ASN1
{
/// <summary>
/// Methods related to Object Identifiers (OID)
/// </summary>
public static partial class ObjectIdentifier
{
// TODO: ulong[] isn't going to work. If we can use .NET 7, we can use UInt128
// We might want to look into storing all values as GUID? I don't remember if
// you can do value comparisons between an integral value and a GUID, though.
/// <summary>
/// Parse an OID in DER-encoded byte notation into a list of values
/// </summary>
/// <param name="data">Byte array representing the data to read</param>
/// <param name="length">Total length of the data according to the DER TLV</param>
/// <returns>Array of values representing the OID</returns>
public static ulong[] ParseDERIntoArray(byte[] data, ulong length)
{
// The first byte encodes nodes 1 and 2; single-byte values of 80 or
// greater always indicate a first node of 2 per ITU-T X.690
// TODO: Handle multi-byte first subidentifiers (e.g. 2.48 and above)
int firstNode = data[0] < 80 ? data[0] / 40 : 2;
int secondNode = data[0] < 80 ? data[0] % 40 : data[0] - 80;
// Create a list for all nodes
List<ulong> nodes = new List<ulong> { (ulong)firstNode, (ulong)secondNode };
// All other nodes are encoded uniquely
int offset = 1;
while (offset < (long)length)
{
// If bit 7 is not set
if ((data[offset] & 0x80) == 0)
{
nodes.Add(data[offset]);
offset++;
continue;
}
// Otherwise, read the encoded value in a loop
ulong dotValue = 0;
bool doneProcessing = false;
do
{
// Shift the current encoded value
dotValue <<= 7;
// If bit 7 is clear, this is the last byte of the value
if ((data[offset] & 0x80) == 0)
doneProcessing = true;
// Mask off the continuation bit and add the low 7 bits to the result,
// without mutating the caller's buffer
dotValue |= (ulong)(data[offset] & 0x7F);
// Increment the offset
offset++;
} while (offset < data.Length && !doneProcessing);
// Add the parsed value to the output
nodes.Add(dotValue);
}
return nodes.ToArray();
}
}
}

View File

@@ -0,0 +1,26 @@
namespace BurnOutSharp.ASN1
{
#pragma warning disable IDE0011
/// <summary>
/// Methods related to Object Identifiers (OID) and dot notation
/// </summary>
public static partial class ObjectIdentifier
{
/// <summary>
/// Parse an OID in separated-value notation into dot notation
/// </summary>
/// <param name="values">List of values to check against</param>
/// <returns>List of values representing the dot notation</returns>
public static string ParseOIDToDotNotation(ulong[] values)
{
// If we have an invalid set of values, we can't do anything
if (values == null || values.Length == 0)
return null;
return string.Join(".", values);
}
}
#pragma warning restore IDE0011
}
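A minimal end-to-end sketch tying the two helpers above together. The byte sequence is the standard DER body for OID 1.2.840.113549 (the RSA Data Security arc); everything else comes from the methods shown above:
// Decode a DER-encoded OID body and render it in dot notation
byte[] encoded = new byte[] { 0x2A, 0x86, 0x48, 0x86, 0xF7, 0x0D };
ulong[] nodes = ObjectIdentifier.ParseDERIntoArray(encoded, (ulong)encoded.Length);
string dotted = ObjectIdentifier.ParseOIDToDotNotation(nodes);
// dotted == "1.2.840.113549"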

View File

@@ -0,0 +1,259 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Numerics;
using System.Text;
using BurnOutSharp.Utilities;
namespace BurnOutSharp.ASN1
{
/// <summary>
/// ASN.1 type/length/value class that all types are based on
/// </summary>
public class TypeLengthValue
{
/// <summary>
/// The ASN.1 type
/// </summary>
public ASN1Type Type { get; private set; }
/// <summary>
/// Length of the value
/// </summary>
public ulong Length { get; private set; }
/// <summary>
/// Generic value associated with <see cref="Type"/>
/// </summary>
public object Value { get; private set; }
/// <summary>
/// Read from the source data array at an index
/// </summary>
/// <param name="data">Byte array representing data to read</param>
/// <param name="index">Index within the array to read at</param>
public TypeLengthValue(byte[] data, ref int index)
{
// Get the type and modifiers
this.Type = (ASN1Type)data[index++];
// If we have an end indicator, we just return
if (this.Type == ASN1Type.V_ASN1_EOC)
return;
// Get the length of the value
this.Length = ReadLength(data, ref index);
// Read the value
if (this.Type.HasFlag(ASN1Type.V_ASN1_CONSTRUCTED))
{
var valueList = new List<TypeLengthValue>();
int currentIndex = index;
while (index < currentIndex + (int)this.Length)
{
valueList.Add(new TypeLengthValue(data, ref index));
}
this.Value = valueList.ToArray();
}
else
{
// TODO: Get more granular based on type
this.Value = data.ReadBytes(ref index, (int)this.Length);
}
}
/// <summary>
/// Format the TLV as a string
/// </summary>
/// <param name="paddingLevel">Padding level of the item when formatting</param>
/// <returns>String representing the TLV, if possible</returns>
public string Format(int paddingLevel = 0)
{
// Create the left-padding string
string padding = new string(' ', paddingLevel);
// If we have an invalid item
if (this.Type == 0)
return $"{padding}UNKNOWN TYPE";
// Create the string builder
StringBuilder formatBuilder = new StringBuilder();
// Append the type
formatBuilder.Append($"{padding}Type: {this.Type}");
if (this.Type == ASN1Type.V_ASN1_EOC)
return formatBuilder.ToString();
// Append the length
formatBuilder.Append($", Length: {this.Length}");
if (this.Length == 0)
return formatBuilder.ToString();
// If we have a constructed type
if (this.Type.HasFlag(ASN1Type.V_ASN1_CONSTRUCTED))
{
var valueAsObjectArray = this.Value as TypeLengthValue[];
if (valueAsObjectArray == null)
{
formatBuilder.Append(", Value: [INVALID DATA TYPE]");
return formatBuilder.ToString();
}
formatBuilder.Append(", Value:\n");
for (int i = 0; i < valueAsObjectArray.Length; i++)
{
var child = valueAsObjectArray[i];
string childString = child.Format(paddingLevel + 1);
formatBuilder.Append($"{childString}\n");
}
return formatBuilder.ToString().TrimEnd('\n');
}
// Get the value as a byte array
byte[] valueAsByteArray = this.Value as byte[];
if (valueAsByteArray == null)
{
formatBuilder.Append(", Value: [INVALID DATA TYPE]");
return formatBuilder.ToString();
}
// If we have a primitive type
switch (this.Type)
{
/// <see href="https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-boolean"/>
case ASN1Type.V_ASN1_BOOLEAN:
if (this.Length > 1 || valueAsByteArray.Length > 1)
formatBuilder.Append($" [Expected length of 1]");
bool booleanValue = valueAsByteArray[0] != 0x00;
formatBuilder.Append($", Value: {booleanValue}");
break;
/// <see href="https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-integer"/>
case ASN1Type.V_ASN1_INTEGER:
Array.Reverse(valueAsByteArray);
BigInteger integerValue = new BigInteger(valueAsByteArray);
formatBuilder.Append($", Value: {integerValue}");
break;
/// <see href="https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-bit-string"/>
case ASN1Type.V_ASN1_BIT_STRING:
// TODO: Read into a BitArray and print that out instead?
int unusedBits = valueAsByteArray[0];
formatBuilder.Append($", Value with {unusedBits} unused bits: {BitConverter.ToString(valueAsByteArray.Skip(1).ToArray()).Replace('-', ' ')}");
break;
/// <see href="https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-octet-string"/>
case ASN1Type.V_ASN1_OCTET_STRING:
formatBuilder.Append($", Value: {BitConverter.ToString(valueAsByteArray).Replace('-', ' ')}");
break;
/// <see href="https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-object-identifier"/>
/// <see href="http://snmpsharpnet.com/index.php/2009/03/02/ber-encoding-and-decoding-oid-values/"/>
case ASN1Type.V_ASN1_OBJECT:
// Derive array of values
ulong[] objectNodes = ObjectIdentifier.ParseDERIntoArray(valueAsByteArray, this.Length);
// Append the dot and modified OID-IRI notations
string dotNotationString = ObjectIdentifier.ParseOIDToDotNotation(objectNodes);
string oidIriString = ObjectIdentifier.ParseOIDToOIDIRINotation(objectNodes);
formatBuilder.Append($", Value: {dotNotationString} ({oidIriString})");
break;
/// <see href="https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-utf8string"/>
case ASN1Type.V_ASN1_UTF8STRING:
formatBuilder.Append($", Value: {Encoding.UTF8.GetString(valueAsByteArray)}");
break;
/// <see href="https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-printablestring"/>
case ASN1Type.V_ASN1_PRINTABLESTRING:
formatBuilder.Append($", Value: {Encoding.ASCII.GetString(valueAsByteArray)}");
break;
//case ASN1Type.V_ASN1_T61STRING:
case ASN1Type.V_ASN1_TELETEXSTRING:
formatBuilder.Append($", Value: {Encoding.ASCII.GetString(valueAsByteArray)}");
break;
/// <see href="https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-ia5string"/>
case ASN1Type.V_ASN1_IA5STRING:
formatBuilder.Append($", Value: {Encoding.ASCII.GetString(valueAsByteArray)}");
break;
case ASN1Type.V_ASN1_UTCTIME:
string utctimeString = Encoding.ASCII.GetString(valueAsByteArray);
if (DateTime.TryParse(utctimeString, out DateTime utctimeDateTime))
formatBuilder.Append($", Value: {utctimeDateTime}");
else
formatBuilder.Append($", Value: {utctimeString}");
break;
/// <see href="https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-bmpstring"/>
case ASN1Type.V_ASN1_BMPSTRING:
formatBuilder.Append($", Value: {Encoding.Unicode.GetString(valueAsByteArray)}");
break;
default:
formatBuilder.Append($", Value (Unknown Format): {BitConverter.ToString(this.Value as byte[]).Replace('-', ' ')}");
break;
}
// Return the formatted string
return formatBuilder.ToString();
}
/// <summary>
/// Reads the length field for a type
/// </summary>
/// <param name="data">Byte array representing data to read</param>
/// <param name="index">Index within the array to read at</param>
/// <returns>The length value read from the array</returns>
private static ulong ReadLength(byte[] data, ref int index)
{
// If we have invalid data, throw an exception
if (data == null || index < 0 || index >= data.Length)
throw new ArgumentException();
// Read the first byte, assuming it's the length
byte length = data[index++];
// If bit 7 is not set, the byte is the length itself (short form)
if ((length & 0x80) == 0)
return length;
// Otherwise, use the value as the number of remaining bytes to read
int bytesToRead = length & ~0x80;
byte[] bytesRead = data.ReadBytes(ref index, bytesToRead);
// TODO: Write extensions to read big-endian
// Reverse the bytes to be in big-endian order
Array.Reverse(bytesRead);
switch (bytesRead.Length)
{
case 1:
return bytesRead[0];
case 2:
return BitConverter.ToUInt16(bytesRead, 0);
case 3:
Array.Resize(ref bytesRead, 4);
goto case 4;
case 4:
return BitConverter.ToUInt32(bytesRead, 0);
case 5:
case 6:
case 7:
Array.Resize(ref bytesRead, 8);
goto case 8;
case 8:
return BitConverter.ToUInt64(bytesRead, 0);
default:
throw new InvalidOperationException();
}
}
}
}
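A minimal usage sketch for the class above (the input path is hypothetical): each constructor call consumes one top-level TLV and advances the shared index, so a simple loop walks the whole blob.
// Walk the top-level TLVs in a DER blob and pretty-print each one
byte[] der = System.IO.File.ReadAllBytes("signature.der"); // hypothetical path
int index = 0;
while (index < der.Length)
{
var tlv = new TypeLengthValue(der, ref index);
System.Console.WriteLine(tlv.Format(paddingLevel: 0));
}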

View File

@@ -0,0 +1,470 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
using BurnOutSharp.Models.AACS;
using BurnOutSharp.Utilities;
namespace BurnOutSharp.Builders
{
public class AACS
{
#region Byte Data
/// <summary>
/// Parse a byte array into an AACS media key block
/// </summary>
/// <param name="data">Byte array to parse</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>Filled media key block on success, null on error</returns>
public static MediaKeyBlock ParseMediaKeyBlock(byte[] data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return ParseMediaKeyBlock(dataStream);
}
#endregion
#region Stream Data
/// <summary>
/// Parse a Stream into an AACS media key block
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled media key block on success, null on error</returns>
public static MediaKeyBlock ParseMediaKeyBlock(Stream data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
return null;
// If the offset is out of bounds
if (data.Position < 0 || data.Position >= data.Length)
return null;
// Cache the current offset
int initialOffset = (int)data.Position;
// Create a new media key block to fill
var mediaKeyBlock = new MediaKeyBlock();
#region Records
// Create the records list
var records = new List<Record>();
// Try to parse the records
while (data.Position < data.Length)
{
// Try to parse the record
var record = ParseRecord(data);
if (record == null)
return null;
// Add the record
records.Add(record);
// If we have an end of media key block record
if (record.RecordType == RecordType.EndOfMediaKeyBlock)
break;
// Align to the 4-byte boundary if we're not at the end
if (data.Position != data.Length)
{
while ((data.Position % 4) != 0)
_ = data.ReadByteValue();
}
else
{
break;
}
}
// Set the records
mediaKeyBlock.Records = records.ToArray();
#endregion
return mediaKeyBlock;
}
/// <summary>
/// Parse a Stream into a record
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled record on success, null on error</returns>
private static Record ParseRecord(Stream data)
{
// TODO: Use marshalling here instead of building
// The first 4 bytes make up the type and length
byte[] typeAndLength = data.ReadBytes(4);
RecordType type = (RecordType)typeAndLength[0];
// Zero out the type byte and parse the remaining three as a big-endian length
typeAndLength[0] = 0x00;
Array.Reverse(typeAndLength);
uint length = BitConverter.ToUInt32(typeAndLength, 0);
// Create a record based on the type
switch (type)
{
// Recognized record types
case RecordType.EndOfMediaKeyBlock: return ParseEndOfMediaKeyBlockRecord(data, type, length);
case RecordType.ExplicitSubsetDifference: return ParseExplicitSubsetDifferenceRecord(data, type, length);
case RecordType.MediaKeyData: return ParseMediaKeyDataRecord(data, type, length);
case RecordType.SubsetDifferenceIndex: return ParseSubsetDifferenceIndexRecord(data, type, length);
case RecordType.TypeAndVersion: return ParseTypeAndVersionRecord(data, type, length);
case RecordType.DriveRevocationList: return ParseDriveRevocationListRecord(data, type, length);
case RecordType.HostRevocationList: return ParseHostRevocationListRecord(data, type, length);
case RecordType.VerifyMediaKey: return ParseVerifyMediaKeyRecord(data, type, length);
case RecordType.Copyright: return ParseCopyrightRecord(data, type, length);
// Unrecognized record type
default:
return null;
}
}
/// <summary>
/// Parse a Stream into an end of media key block record
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled end of media key block record on success, null on error</returns>
private static EndOfMediaKeyBlockRecord ParseEndOfMediaKeyBlockRecord(Stream data, RecordType type, uint length)
{
// Verify we're calling the right parser
if (type != RecordType.EndOfMediaKeyBlock)
return null;
// TODO: Use marshalling here instead of building
var record = new EndOfMediaKeyBlockRecord();
record.RecordType = type;
record.RecordLength = length;
if (length > 4)
record.SignatureData = data.ReadBytes((int)(length - 4));
return record;
}
/// <summary>
/// Parse a Stream into an explicit subset-difference record
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled explicit subset-difference record on success, null on error</returns>
private static ExplicitSubsetDifferenceRecord ParseExplicitSubsetDifferenceRecord(Stream data, RecordType type, uint length)
{
// Verify we're calling the right parser
if (type != RecordType.ExplicitSubsetDifference)
return null;
// TODO: Use marshalling here instead of building
var record = new ExplicitSubsetDifferenceRecord();
record.RecordType = type;
record.RecordLength = length;
// Cache the offset of the record start (type and length already read)
long initialOffset = data.Position - 4;
// Create the subset difference list
var subsetDifferences = new List<SubsetDifference>();
// Try to parse the subset differences
while (data.Position < initialOffset + length - 5)
{
var subsetDifference = new SubsetDifference();
subsetDifference.Mask = data.ReadByteValue();
subsetDifference.Number = data.ReadUInt32BE();
subsetDifferences.Add(subsetDifference);
}
// Set the subset differences
record.SubsetDifferences = subsetDifferences.ToArray();
// If there's any data left, discard it
if (data.Position < initialOffset + length)
_ = data.ReadBytes((int)(initialOffset + length - data.Position));
return record;
}
/// <summary>
/// Parse a Stream into a media key data record
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled media key data record on success, null on error</returns>
private static MediaKeyDataRecord ParseMediaKeyDataRecord(Stream data, RecordType type, uint length)
{
// Verify we're calling the right parser
if (type != RecordType.MediaKeyData)
return null;
// TODO: Use marshalling here instead of building
var record = new MediaKeyDataRecord();
record.RecordType = type;
record.RecordLength = length;
// Cache the offset of the record start (type and length already read)
long initialOffset = data.Position - 4;
// Create the media key list
var mediaKeys = new List<byte[]>();
// Try to parse the media keys
while (data.Position < initialOffset + length)
{
byte[] mediaKey = data.ReadBytes(0x10);
mediaKeys.Add(mediaKey);
}
// Set the media keys
record.MediaKeyData = mediaKeys.ToArray();
return record;
}
/// <summary>
/// Parse a Stream into a subset-difference index record
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled subset-difference index record on success, null on error</returns>
private static SubsetDifferenceIndexRecord ParseSubsetDifferenceIndexRecord(Stream data, RecordType type, uint length)
{
// Verify we're calling the right parser
if (type != RecordType.SubsetDifferenceIndex)
return null;
// TODO: Use marshalling here instead of building
var record = new SubsetDifferenceIndexRecord();
record.RecordType = type;
record.RecordLength = length;
// Cache the offset of the record start (type and length already read)
long initialOffset = data.Position - 4;
record.Span = data.ReadUInt32BE();
// Create the offset list
var offsets = new List<uint>();
// Try to parse the offsets
while (data.Position < initialOffset + length)
{
uint offset = data.ReadUInt32BE();
offsets.Add(offset);
}
// Set the offsets
record.Offsets = offsets.ToArray();
return record;
}
/// <summary>
/// Parse a Stream into a type and version record
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled type and version record on success, null on error</returns>
private static TypeAndVersionRecord ParseTypeAndVersionRecord(Stream data, RecordType type, uint length)
{
// Verify we're calling the right parser
if (type != RecordType.TypeAndVersion)
return null;
// TODO: Use marshalling here instead of building
var record = new TypeAndVersionRecord();
record.RecordType = type;
record.RecordLength = length;
record.MediaKeyBlockType = (MediaKeyBlockType)data.ReadUInt32BE();
record.VersionNumber = data.ReadUInt32BE();
return record;
}
/// <summary>
/// Parse a Stream into a drive revocation list record
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled drive revocation list record on success, null on error</returns>
private static DriveRevocationListRecord ParseDriveRevocationListRecord(Stream data, RecordType type, uint length)
{
// Verify we're calling the right parser
if (type != RecordType.DriveRevocationList)
return null;
// TODO: Use marshalling here instead of building
var record = new DriveRevocationListRecord();
record.RecordType = type;
record.RecordLength = length;
// Cache the offset of the record start (type and length already read)
long initialOffset = data.Position - 4;
record.TotalNumberOfEntries = data.ReadUInt32BE();
// Create the signature blocks list
var blocks = new List<DriveRevocationSignatureBlock>();
// Try to parse the signature blocks
int entryCount = 0;
while (entryCount < record.TotalNumberOfEntries && data.Position < initialOffset + length)
{
var block = new DriveRevocationSignatureBlock();
block.NumberOfEntries = data.ReadUInt32BE();
block.EntryFields = new DriveRevocationListEntry[block.NumberOfEntries];
for (int i = 0; i < block.EntryFields.Length; i++)
{
var entry = new DriveRevocationListEntry();
entry.Range = data.ReadUInt16BE();
entry.DriveID = data.ReadBytes(6);
block.EntryFields[i] = entry;
entryCount++;
}
blocks.Add(block);
// If we have an empty block
if (block.NumberOfEntries == 0)
break;
}
// Set the signature blocks
record.SignatureBlocks = blocks.ToArray();
// If there's any data left, discard it
if (data.Position < initialOffset + length)
_ = data.ReadBytes((int)(initialOffset + length - data.Position));
return record;
}
/// <summary>
/// Parse a Stream into a host revocation list record
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled host revocation list record on success, null on error</returns>
private static HostRevocationListRecord ParseHostRevocationListRecord(Stream data, RecordType type, uint length)
{
// Verify we're calling the right parser
if (type != RecordType.HostRevocationList)
return null;
// TODO: Use marshalling here instead of building
var record = new HostRevocationListRecord();
record.RecordType = type;
record.RecordLength = length;
// Cache the offset of the record start (type and length already read)
long initialOffset = data.Position - 4;
record.TotalNumberOfEntries = data.ReadUInt32BE();
// Create the signature blocks list
var blocks = new List<HostRevocationSignatureBlock>();
// Try to parse the signature blocks
int entryCount = 0;
while (entryCount < record.TotalNumberOfEntries && data.Position < initialOffset + length)
{
var block = new HostRevocationSignatureBlock();
block.NumberOfEntries = data.ReadUInt32BE();
block.EntryFields = new HostRevocationListEntry[block.NumberOfEntries];
for (int i = 0; i < block.EntryFields.Length; i++)
{
var entry = new HostRevocationListEntry();
entry.Range = data.ReadUInt16BE();
entry.HostID = data.ReadBytes(6);
block.EntryFields[i] = entry;
entryCount++;
}
blocks.Add(block);
// If we have an empty block
if (block.NumberOfEntries == 0)
break;
}
// Set the signature blocks
record.SignatureBlocks = blocks.ToArray();
// If there's any data left, discard it
if (data.Position < initialOffset + length)
_ = data.ReadBytes((int)(initialOffset + length - data.Position));
return record;
}
/// <summary>
/// Parse a Stream into a verify media key record
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled verify media key record on success, null on error</returns>
private static VerifyMediaKeyRecord ParseVerifyMediaKeyRecord(Stream data, RecordType type, uint length)
{
// Verify we're calling the right parser
if (type != RecordType.VerifyMediaKey)
return null;
// TODO: Use marshalling here instead of building
var record = new VerifyMediaKeyRecord();
record.RecordType = type;
record.RecordLength = length;
record.CiphertextValue = data.ReadBytes(0x10);
return record;
}
/// <summary>
/// Parse a Stream into a copyright record
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled copyright record on success, null on error</returns>
private static CopyrightRecord ParseCopyrightRecord(Stream data, RecordType type, uint length)
{
// Verify we're calling the right parser
if (type != RecordType.Copyright)
return null;
// TODO: Use marshalling here instead of building
var record = new CopyrightRecord();
record.RecordType = type;
record.RecordLength = length;
if (length > 4)
{
byte[] copyright = data.ReadBytes((int)(length - 4));
record.Copyright = Encoding.ASCII.GetString(copyright).TrimEnd('\0');
}
return record;
}
#endregion
}
}
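A minimal usage sketch for the builder above (the file name is hypothetical; on a real disc the media key block typically lives at AACS\MKB_RO.inf):
// Parse a media key block and report its type and version record, if any
byte[] mkbData = System.IO.File.ReadAllBytes("MKB_RO.inf"); // hypothetical path
var mkb = AACS.ParseMediaKeyBlock(mkbData, 0);
if (mkb?.Records != null)
{
foreach (var record in mkb.Records)
{
if (record is TypeAndVersionRecord tav)
System.Console.WriteLine($"MKB type {tav.MediaKeyBlockType}, version {tav.VersionNumber}");
}
}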

View File

@@ -0,0 +1,95 @@
using System.IO;
using System.Text;
using BurnOutSharp.Models.BDPlus;
using BurnOutSharp.Utilities;
using static BurnOutSharp.Models.BDPlus.Constants;
namespace BurnOutSharp.Builders
{
public class BDPlus
{
#region Byte Data
/// <summary>
/// Parse a byte array into a BD+ SVM
/// </summary>
/// <param name="data">Byte array to parse</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>Filled BD+ SVM on success, null on error</returns>
public static SVM ParseSVM(byte[] data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return ParseSVM(dataStream);
}
#endregion
#region Stream Data
/// <summary>
/// Parse a Stream into a BD+ SVM
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled BD+ SVM on success, null on error</returns>
public static SVM ParseSVM(Stream data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
return null;
// If the offset is out of bounds
if (data.Position < 0 || data.Position >= data.Length)
return null;
// Cache the current offset
int initialOffset = (int)data.Position;
// Try to parse the SVM
return ParseSVMData(data);
}
/// <summary>
/// Parse a Stream into an SVM
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled SVM on success, null on error</returns>
private static SVM ParseSVMData(Stream data)
{
// TODO: Use marshalling here instead of building
var svm = new SVM();
byte[] signature = data.ReadBytes(8);
svm.Signature = Encoding.ASCII.GetString(signature);
if (svm.Signature != SignatureString)
return null;
svm.Unknown1 = data.ReadBytes(5);
svm.Year = data.ReadUInt16BE();
svm.Month = data.ReadByteValue();
if (svm.Month < 1 || svm.Month > 12)
return null;
svm.Day = data.ReadByteValue();
if (svm.Day < 1 || svm.Day > 31)
return null;
svm.Unknown2 = data.ReadBytes(4);
svm.Length = data.ReadUInt32();
// if (svm.Length > 0)
// svm.Data = data.ReadBytes((int)svm.Length);
return svm;
}
#endregion
}
}
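A minimal usage sketch for the builder above (the path is hypothetical; SVM data normally lives under BDSVM/ on a BD+ disc):
// Parse a BD+ SVM and print its embedded generation date
byte[] svmData = System.IO.File.ReadAllBytes("00000.svm"); // hypothetical path
var svm = BDPlus.ParseSVM(svmData, 0);
if (svm != null)
System.Console.WriteLine($"SVM dated {svm.Year:D4}-{svm.Month:D2}-{svm.Day:D2}");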

View File

@@ -0,0 +1,150 @@
using System.IO;
using System.Text;
using BurnOutSharp.Models.BFPK;
using BurnOutSharp.Utilities;
using static BurnOutSharp.Models.BFPK.Constants;
namespace BurnOutSharp.Builders
{
public class BFPK
{
#region Byte Data
/// <summary>
/// Parse a byte array into a BFPK archive
/// </summary>
/// <param name="data">Byte array to parse</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>Filled archive on success, null on error</returns>
public static Archive ParseArchive(byte[] data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return ParseArchive(dataStream);
}
#endregion
#region Stream Data
/// <summary>
/// Parse a Stream into a BFPK archive
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled archive on success, null on error</returns>
public static Archive ParseArchive(Stream data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
return null;
// If the offset is out of bounds
if (data.Position < 0 || data.Position >= data.Length)
return null;
// Cache the current offset
int initialOffset = (int)data.Position;
// Create a new archive to fill
var archive = new Archive();
#region Header
// Try to parse the header
var header = ParseHeader(data);
if (header == null)
return null;
// Set the archive header
archive.Header = header;
#endregion
#region Files
// If we have any files
if (header.Files > 0)
{
var files = new FileEntry[header.Files];
// Read all entries in turn
for (int i = 0; i < header.Files; i++)
{
var file = ParseFileEntry(data);
if (file == null)
return null;
files[i] = file;
}
// Set the files
archive.Files = files;
}
#endregion
return archive;
}
/// <summary>
/// Parse a Stream into a header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled header on success, null on error</returns>
private static Header ParseHeader(Stream data)
{
// TODO: Use marshalling here instead of building
Header header = new Header();
byte[] magic = data.ReadBytes(4);
header.Magic = Encoding.ASCII.GetString(magic);
if (header.Magic != SignatureString)
return null;
header.Version = data.ReadInt32();
header.Files = data.ReadInt32();
return header;
}
/// <summary>
/// Parse a Stream into a file entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled file entry on success, null on error</returns>
private static FileEntry ParseFileEntry(Stream data)
{
// TODO: Use marshalling here instead of building
FileEntry fileEntry = new FileEntry();
fileEntry.NameSize = data.ReadInt32();
if (fileEntry.NameSize > 0)
{
byte[] name = data.ReadBytes(fileEntry.NameSize);
fileEntry.Name = Encoding.ASCII.GetString(name);
}
fileEntry.UncompressedSize = data.ReadInt32();
fileEntry.Offset = data.ReadInt32();
if (fileEntry.Offset > 0)
{
long currentOffset = data.Position;
data.Seek(fileEntry.Offset, SeekOrigin.Begin);
fileEntry.CompressedSize = data.ReadInt32();
data.Seek(currentOffset, SeekOrigin.Begin);
}
return fileEntry;
}
#endregion
}
}
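A minimal usage sketch for the builder above (the archive name is hypothetical):
// List the entries in a BFPK archive with their sizes and offsets
byte[] bfpkData = System.IO.File.ReadAllBytes("data.bfpk"); // hypothetical path
var archive = BFPK.ParseArchive(bfpkData, 0);
if (archive?.Files != null)
{
foreach (var entry in archive.Files)
System.Console.WriteLine($"{entry.Name}: {entry.UncompressedSize} bytes at offset {entry.Offset}");
}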

View File

@@ -0,0 +1,250 @@
using System.IO;
using System.Linq;
using System.Text;
using BurnOutSharp.Models.BSP;
using BurnOutSharp.Utilities;
using static BurnOutSharp.Models.BSP.Constants;
namespace BurnOutSharp.Builders
{
public static class BSP
{
#region Byte Data
/// <summary>
/// Parse a byte array into a Half-Life Level
/// </summary>
/// <param name="data">Byte array to parse</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>Filled Half-Life Level on success, null on error</returns>
public static Models.BSP.File ParseFile(byte[] data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return ParseFile(dataStream);
}
#endregion
#region Stream Data
/// <summary>
/// Parse a Stream into a Half-Life Level
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Level on success, null on error</returns>
public static Models.BSP.File ParseFile(Stream data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
return null;
// If the offset is out of bounds
if (data.Position < 0 || data.Position >= data.Length)
return null;
// Cache the current offset
int initialOffset = (int)data.Position;
// Create a new Half-Life Level to fill
var file = new Models.BSP.File();
#region Header
// Try to parse the header
var header = ParseHeader(data);
if (header == null)
return null;
// Set the level header
file.Header = header;
#endregion
#region Lumps
// Create the lump array
file.Lumps = new Lump[HL_BSP_LUMP_COUNT];
// Try to parse the lumps
for (int i = 0; i < HL_BSP_LUMP_COUNT; i++)
{
var lump = ParseLump(data);
file.Lumps[i] = lump;
}
#endregion
#region Texture header
// Try to get the texture header lump
var textureDataLump = file.Lumps[HL_BSP_LUMP_TEXTUREDATA];
if (textureDataLump.Offset == 0 || textureDataLump.Length == 0)
return null;
// Seek to the texture header
data.Seek(textureDataLump.Offset, SeekOrigin.Begin);
// Try to parse the texture header
var textureHeader = ParseTextureHeader(data);
if (textureHeader == null)
return null;
// Set the texture header
file.TextureHeader = textureHeader;
#endregion
#region Textures
// Create the texture array
file.Textures = new Texture[textureHeader.TextureCount];
// Try to parse the textures
for (int i = 0; i < textureHeader.TextureCount; i++)
{
// Get the texture offset
int offset = (int)(textureHeader.Offsets[i] + file.Lumps[HL_BSP_LUMP_TEXTUREDATA].Offset);
if (offset < 0 || offset >= data.Length)
continue;
// Seek to the texture
data.Seek(offset, SeekOrigin.Begin);
var texture = ParseTexture(data);
file.Textures[i] = texture;
}
#endregion
return file;
}
/// <summary>
/// Parse a Stream into a Half-Life Level header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Level header on success, null on error</returns>
private static Header ParseHeader(Stream data)
{
// TODO: Use marshalling here instead of building
Header header = new Header();
// Only recognized versions are 29 and 30
header.Version = data.ReadUInt32();
if (header.Version != 29 && header.Version != 30)
return null;
return header;
}
/// <summary>
/// Parse a Stream into a lump
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled lump on success, null on error</returns>
private static Lump ParseLump(Stream data)
{
// TODO: Use marshalling here instead of building
Lump lump = new Lump();
lump.Offset = data.ReadUInt32();
lump.Length = data.ReadUInt32();
return lump;
}
/// <summary>
/// Parse a Stream into a Half-Life Level texture header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Level texture header on success, null on error</returns>
private static TextureHeader ParseTextureHeader(Stream data)
{
// TODO: Use marshalling here instead of building
TextureHeader textureHeader = new TextureHeader();
textureHeader.TextureCount = data.ReadUInt32();
var offsets = new uint[textureHeader.TextureCount];
for (int i = 0; i < textureHeader.TextureCount; i++)
{
offsets[i] = data.ReadUInt32();
}
textureHeader.Offsets = offsets;
return textureHeader;
}
/// <summary>
/// Parse a Stream into a texture
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="mipmap">Mipmap level</param>
/// <returns>Filled texture on success, null on error</returns>
private static Texture ParseTexture(Stream data, uint mipmap = 0)
{
// TODO: Use marshalling here instead of building
Texture texture = new Texture();
byte[] name = data.ReadBytes(16).TakeWhile(c => c != '\0').ToArray();
texture.Name = Encoding.ASCII.GetString(name);
texture.Width = data.ReadUInt32();
texture.Height = data.ReadUInt32();
texture.Offsets = new uint[4];
for (int i = 0; i < 4; i++)
{
texture.Offsets[i] = data.ReadUInt32();
}
// Get the size of the pixel data
uint pixelSize = 0;
for (int i = 0; i < HL_BSP_MIPMAP_COUNT; i++)
{
if (texture.Offsets[i] != 0)
{
pixelSize += (texture.Width >> i) * (texture.Height >> i);
}
}
// If we have no pixel data
if (pixelSize == 0)
return texture;
texture.TextureData = data.ReadBytes((int)pixelSize);
texture.PaletteSize = data.ReadUInt16();
texture.PaletteData = data.ReadBytes((int)(texture.PaletteSize * 3));
// Adjust the dimensions based on mipmap level
switch (mipmap)
{
case 1:
texture.Width /= 2;
texture.Height /= 2;
break;
case 2:
texture.Width /= 4;
texture.Height /= 4;
break;
case 3:
texture.Width /= 8;
texture.Height /= 8;
break;
}
return texture;
}
#endregion
}
}
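A minimal usage sketch for the builder above (the map name is hypothetical):
// Parse a GoldSrc-era level and list its embedded texture dimensions
byte[] bspData = System.IO.File.ReadAllBytes("map.bsp"); // hypothetical path
var level = BSP.ParseFile(bspData, 0);
if (level?.Textures != null)
{
foreach (var texture in level.Textures)
System.Console.WriteLine($"{texture.Name}: {texture.Width}x{texture.Height}");
}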

View File

@@ -0,0 +1,28 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFrameworks>net48;net6.0</TargetFrameworks>
<RuntimeIdentifiers>win-x86;win-x64;linux-x64;osx-x64</RuntimeIdentifiers>
<Title>BurnOutSharp.Builders</Title>
<AssemblyName>BurnOutSharp.Builders</AssemblyName>
<Authors>Matt Nadareski</Authors>
<Product>BurnOutSharp</Product>
<Copyright>Copyright (c)2022 Matt Nadareski</Copyright>
<RepositoryUrl>https://github.com/mnadareski/BurnOutSharp</RepositoryUrl>
<Version>2.7</Version>
<AssemblyVersion>2.7</AssemblyVersion>
<FileVersion>2.7</FileVersion>
<IncludeSource>true</IncludeSource>
<IncludeSymbols>true</IncludeSymbols>
</PropertyGroup>
<PropertyGroup>
<AllowUnsafeBlocks>true</AllowUnsafeBlocks>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\BurnOutSharp.Models\BurnOutSharp.Models.csproj" />
<ProjectReference Include="..\BurnOutSharp.Utilities\BurnOutSharp.Utilities.csproj" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,391 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
using BurnOutSharp.Models.CFB;
using BurnOutSharp.Utilities;
using static BurnOutSharp.Models.CFB.Constants;
namespace BurnOutSharp.Builders
{
public class CFB
{
#region Byte Data
/// <summary>
/// Parse a byte array into a Compound File Binary
/// </summary>
/// <param name="data">Byte array to parse</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>Filled Compound File Binary on success, null on error</returns>
public static Binary ParseBinary(byte[] data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return ParseBinary(dataStream);
}
#endregion
#region Stream Data
/// <summary>
/// Parse a Stream into a Compound File Binary
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Compound File Binary on success, null on error</returns>
public static Binary ParseBinary(Stream data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
return null;
// If the offset is out of bounds
if (data.Position < 0 || data.Position >= data.Length)
return null;
// Cache the current offset
int initialOffset = (int)data.Position;
// Create a new binary to fill
var binary = new Binary();
#region Header
// Try to parse the file header
var fileHeader = ParseFileHeader(data);
if (fileHeader == null)
return null;
// Set the file header
binary.Header = fileHeader;
#endregion
#region DIFAT Sector Numbers
// Create a DIFAT sector table
var difatSectors = new List<SectorNumber>();
// Add the sectors from the header
difatSectors.AddRange(fileHeader.DIFAT);
// Loop through and add the DIFAT sectors
SectorNumber currentSector = (SectorNumber)fileHeader.FirstDIFATSectorLocation;
for (int i = 0; i < fileHeader.NumberOfDIFATSectors; i++)
{
// If we have a readable sector
if (currentSector <= SectorNumber.MAXREGSECT)
{
// Get the new next sector information
long sectorOffset = (long)((long)(currentSector + 1) * Math.Pow(2, fileHeader.SectorShift));
if (sectorOffset < 0 || sectorOffset >= data.Length)
return null;
// Seek to the next sector
data.Seek(sectorOffset, SeekOrigin.Begin);
// Try to parse the sectors
var sectorNumbers = ParseSectorNumbers(data, fileHeader.SectorShift);
if (sectorNumbers == null)
return null;
// Add the sector shifts
difatSectors.AddRange(sectorNumbers);
}
// Get the next sector from the DIFAT
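// TODO: Per MS-CFB, the next DIFAT sector location is the final field
// of the DIFAT sector just read, not an index into this list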
currentSector = difatSectors[i];
}
// Assign the DIFAT sectors table
binary.DIFATSectorNumbers = difatSectors.ToArray();
#endregion
#region FAT Sector Numbers
// Create a FAT sector table
var fatSectors = new List<SectorNumber>();
// Loop through and add the FAT sectors
currentSector = binary.DIFATSectorNumbers[0];
for (int i = 0; i < fileHeader.NumberOfFATSectors; i++)
{
// If we have a readable sector
if (currentSector <= SectorNumber.MAXREGSECT)
{
// Get the new next sector information
long sectorOffset = (long)((long)(currentSector + 1) * Math.Pow(2, fileHeader.SectorShift));
if (sectorOffset < 0 || sectorOffset >= data.Length)
return null;
// Seek to the next sector
data.Seek(sectorOffset, SeekOrigin.Begin);
// Try to parse the sectors
var sectorNumbers = ParseSectorNumbers(data, fileHeader.SectorShift);
if (sectorNumbers == null)
return null;
// Add the sector shifts
fatSectors.AddRange(sectorNumbers);
}
// Get the location of the next FAT sector from the DIFAT
// (entry i was just processed, so advance to entry i + 1)
if (i + 1 < binary.DIFATSectorNumbers.Length)
currentSector = binary.DIFATSectorNumbers[i + 1];
}
// Assign the FAT sectors table
binary.FATSectorNumbers = fatSectors.ToArray();
#endregion
#region Mini FAT Sector Numbers
// Create a mini FAT sector table
var miniFatSectors = new List<SectorNumber>();
// Loop through and add the mini FAT sectors
currentSector = (SectorNumber)fileHeader.FirstMiniFATSectorLocation;
for (int i = 0; i < fileHeader.NumberOfMiniFATSectors; i++)
{
// If we have a readable sector
if (currentSector <= SectorNumber.MAXREGSECT)
{
// Get the new next sector information
long sectorOffset = (long)((long)(currentSector + 1) * Math.Pow(2, fileHeader.SectorShift));
if (sectorOffset < 0 || sectorOffset >= data.Length)
return null;
// Seek to the next sector
data.Seek(sectorOffset, SeekOrigin.Begin);
// Try to parse the sectors
var sectorNumbers = ParseSectorNumbers(data, fileHeader.SectorShift);
if (sectorNumbers == null)
return null;
// Add the sector shifts
miniFatSectors.AddRange(sectorNumbers);
}
// Get the next sector from the DIFAT
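// TODO: Per MS-CFB, the mini FAT chain should be walked through the
// FAT (next = FAT[currentSector]) rather than by indexing the DIFAT list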
currentSector = binary.DIFATSectorNumbers[i];
}
// Assign the mini FAT sectors table
binary.MiniFATSectorNumbers = miniFatSectors.ToArray();
#endregion
#region Directory Entries
// Get the offset of the first directory sector (sector n starts at (n + 1) << SectorShift)
long firstDirectoryOffset = (long)((fileHeader.FirstDirectorySectorLocation + 1) * Math.Pow(2, fileHeader.SectorShift));
if (firstDirectoryOffset < 0 || firstDirectoryOffset >= data.Length)
return null;
// Seek to the first directory sector
data.Seek(firstDirectoryOffset, SeekOrigin.Begin);
// Create a directory sector table
var directorySectors = new List<DirectoryEntry>();
// Get the number of directory sectors
uint directorySectorCount = 0;
switch (fileHeader.MajorVersion)
{
case 3:
directorySectorCount = int.MaxValue;
break;
case 4:
directorySectorCount = fileHeader.NumberOfDirectorySectors;
break;
}
// Loop through and add the directory sectors
currentSector = (SectorNumber)fileHeader.FirstDirectorySectorLocation;
for (int i = 0; i < directorySectorCount; i++)
{
// If we have an end of chain
if (currentSector == SectorNumber.ENDOFCHAIN)
break;
// If we have a readable sector
if (currentSector <= SectorNumber.MAXREGSECT)
{
// Get the new next sector information
long sectorOffset = (long)((long)(currentSector + 1) * Math.Pow(2, fileHeader.SectorShift));
if (sectorOffset < 0 || sectorOffset >= data.Length)
return null;
// Seek to the next sector
data.Seek(sectorOffset, SeekOrigin.Begin);
// Try to parse the sectors
var directoryEntries = ParseDirectoryEntries(data, fileHeader.SectorShift, fileHeader.MajorVersion);
if (directoryEntries == null)
return null;
// Add the sector shifts
directorySectors.AddRange(directoryEntries);
}
// Get the next sector from the DIFAT
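// TODO: Per MS-CFB, the directory chain should be walked through the
// FAT (next = FAT[currentSector]) rather than by indexing the DIFAT list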
currentSector = binary.DIFATSectorNumbers[i];
}
// Assign the Directory sectors table
binary.DirectoryEntries = directorySectors.ToArray();
#endregion
return binary;
}
/// <summary>
/// Parse a Stream into a file header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled file header on success, null on error</returns>
private static FileHeader ParseFileHeader(Stream data)
{
// TODO: Use marshalling here instead of building
FileHeader header = new FileHeader();
header.Signature = data.ReadUInt64();
if (header.Signature != SignatureUInt64)
return null;
header.CLSID = data.ReadGuid();
header.MinorVersion = data.ReadUInt16();
header.MajorVersion = data.ReadUInt16();
header.ByteOrder = data.ReadUInt16();
if (header.ByteOrder != 0xFFFE)
return null;
header.SectorShift = data.ReadUInt16();
if (header.MajorVersion == 3 && header.SectorShift != 0x0009)
return null;
else if (header.MajorVersion == 4 && header.SectorShift != 0x000C)
return null;
header.MiniSectorShift = data.ReadUInt16();
header.Reserved = data.ReadBytes(6);
header.NumberOfDirectorySectors = data.ReadUInt32();
if (header.MajorVersion == 3 && header.NumberOfDirectorySectors != 0)
return null;
header.NumberOfFATSectors = data.ReadUInt32();
header.FirstDirectorySectorLocation = data.ReadUInt32();
header.TransactionSignatureNumber = data.ReadUInt32();
header.MiniStreamCutoffSize = data.ReadUInt32();
if (header.MiniStreamCutoffSize != 0x00001000)
return null;
header.FirstMiniFATSectorLocation = data.ReadUInt32();
header.NumberOfMiniFATSectors = data.ReadUInt32();
header.FirstDIFATSectorLocation = data.ReadUInt32();
header.NumberOfDIFATSectors = data.ReadUInt32();
header.DIFAT = new SectorNumber[109];
for (int i = 0; i < header.DIFAT.Length; i++)
{
header.DIFAT[i] = (SectorNumber)data.ReadUInt32();
}
// Skip rest of sector for version 4
if (header.MajorVersion == 4)
_ = data.ReadBytes(3584);
return header;
}
/// <summary>
/// Parse a Stream into a sector full of sector numbers
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="sectorShift">Sector shift from the header</param>
/// <returns>Filled sector full of sector numbers on success, null on error</returns>
private static SectorNumber[] ParseSectorNumbers(Stream data, ushort sectorShift)
{
// TODO: Use marshalling here instead of building
int sectorCount = (int)(Math.Pow(2, sectorShift) / sizeof(uint));
SectorNumber[] sectorNumbers = new SectorNumber[sectorCount];
for (int i = 0; i < sectorNumbers.Length; i++)
{
sectorNumbers[i] = (SectorNumber)data.ReadUInt32();
}
return sectorNumbers;
}
/// <summary>
/// Parse a Stream into a sector full of directory entries
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="sectorShift">Sector shift from the header</param>
/// <param name="majorVersion">Major version from the header</param>
/// <returns>Filled sector full of directory entries on success, null on error</returns>
private static DirectoryEntry[] ParseDirectoryEntries(Stream data, ushort sectorShift, ushort majorVersion)
{
// TODO: Use marshalling here instead of building
const int directoryEntrySize = 64 + 2 + 1 + 1 + 4 + 4 + 4 + 16 + 4 + 8 + 8 + 4 + 8;
int sectorCount = (int)(Math.Pow(2, sectorShift) / directoryEntrySize);
DirectoryEntry[] directoryEntries = new DirectoryEntry[sectorCount];
for (int i = 0; i < directoryEntries.Length; i++)
{
var directoryEntry = ParseDirectoryEntry(data, majorVersion);
if (directoryEntry == null)
return null;
directoryEntries[i] = directoryEntry;
}
return directoryEntries;
}
/// <summary>
/// Parse a Stream into a directory entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="majorVersion">Major version from the header</param>
/// <returns>Filled directory entry on success, null on error</returns>
private static DirectoryEntry ParseDirectoryEntry(Stream data, ushort majorVersion)
{
// TODO: Use marshalling here instead of building
DirectoryEntry directoryEntry = new DirectoryEntry();
byte[] name = data.ReadBytes(64);
directoryEntry.Name = Encoding.Unicode.GetString(name).TrimEnd('\0');
directoryEntry.NameLength = data.ReadUInt16();
directoryEntry.ObjectType = (ObjectType)data.ReadByteValue();
directoryEntry.ColorFlag = (ColorFlag)data.ReadByteValue();
directoryEntry.LeftSiblingID = (StreamID)data.ReadUInt32();
directoryEntry.RightSiblingID = (StreamID)data.ReadUInt32();
directoryEntry.ChildID = (StreamID)data.ReadUInt32();
directoryEntry.CLSID = data.ReadGuid();
directoryEntry.StateBits = data.ReadUInt32();
directoryEntry.CreationTime = data.ReadUInt64();
directoryEntry.ModifiedTime = data.ReadUInt64();
directoryEntry.StartingSectorLocation = data.ReadUInt32();
directoryEntry.StreamSize = data.ReadUInt64();
// Per MS-CFB, version 3 files only use the low 32 bits of the stream size
if (majorVersion == 3)
directoryEntry.StreamSize &= 0xFFFFFFFF;
return directoryEntry;
}
#endregion
}
}
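A minimal usage sketch for the builder above (the file name is hypothetical; any OLE compound file, such as an MSI installer, exercises the same paths):
// Parse a Compound File Binary and list its directory entries
byte[] cfbData = System.IO.File.ReadAllBytes("setup.msi"); // hypothetical path
var binary = CFB.ParseBinary(cfbData, 0);
if (binary?.DirectoryEntries != null)
{
foreach (var entry in binary.DirectoryEntries)
System.Console.WriteLine($"{entry.Name} ({entry.ObjectType}): {entry.StreamSize} bytes");
}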

File diff suppressed because it is too large

View File

@@ -0,0 +1,775 @@
using System.Collections.Generic;
using System.IO;
using System.Text;
using BurnOutSharp.Models.GCF;
using BurnOutSharp.Utilities;
namespace BurnOutSharp.Builders
{
public static class GCF
{
#region Byte Data
/// <summary>
/// Parse a byte array into a Half-Life Game Cache
/// </summary>
/// <param name="data">Byte array to parse</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>Filled Half-Life Game Cache on success, null on error</returns>
public static Models.GCF.File ParseFile(byte[] data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return ParseFile(dataStream);
}
#endregion
#region Stream Data
/// <summary>
/// Parse a Stream into a Half-Life Game Cache
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache on success, null on error</returns>
public static Models.GCF.File ParseFile(Stream data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
return null;
// If the offset is out of bounds
if (data.Position < 0 || data.Position >= data.Length)
return null;
// Cache the current offset
long initialOffset = data.Position;
// Create a new Half-Life Game Cache to fill
var file = new Models.GCF.File();
#region Header
// Try to parse the header
var header = ParseHeader(data);
if (header == null)
return null;
// Set the game cache header
file.Header = header;
#endregion
#region Block Entry Header
// Try to parse the block entry header
var blockEntryHeader = ParseBlockEntryHeader(data);
if (blockEntryHeader == null)
return null;
// Set the game cache block entry header
file.BlockEntryHeader = blockEntryHeader;
#endregion
#region Block Entries
// Create the block entry array
file.BlockEntries = new BlockEntry[blockEntryHeader.BlockCount];
// Try to parse the block entries
for (int i = 0; i < blockEntryHeader.BlockCount; i++)
{
var blockEntry = ParseBlockEntry(data);
file.BlockEntries[i] = blockEntry;
}
#endregion
#region Fragmentation Map Header
// Try to parse the fragmentation map header
var fragmentationMapHeader = ParseFragmentationMapHeader(data);
if (fragmentationMapHeader == null)
return null;
// Set the game cache fragmentation map header
file.FragmentationMapHeader = fragmentationMapHeader;
#endregion
#region Fragmentation Maps
// Create the fragmentation map array
file.FragmentationMaps = new FragmentationMap[fragmentationMapHeader.BlockCount];
// Try to parse the fragmentation maps
for (int i = 0; i < fragmentationMapHeader.BlockCount; i++)
{
var fragmentationMap = ParseFragmentationMap(data);
file.FragmentationMaps[i] = fragmentationMap;
}
#endregion
#region Block Entry Map Header
if (header.MinorVersion < 6)
{
// Try to parse the block entry map header
var blockEntryMapHeader = ParseBlockEntryMapHeader(data);
if (blockEntryMapHeader == null)
return null;
// Set the game cache block entry map header
file.BlockEntryMapHeader = blockEntryMapHeader;
}
#endregion
#region Block Entry Maps
if (header.MinorVersion < 6)
{
// Create the block entry map array
file.BlockEntryMaps = new BlockEntryMap[file.BlockEntryMapHeader.BlockCount];
// Try to parse the block entry maps
for (int i = 0; i < file.BlockEntryMapHeader.BlockCount; i++)
{
var blockEntryMap = ParseBlockEntryMap(data);
file.BlockEntryMaps[i] = blockEntryMap;
}
}
#endregion
// Cache the current offset
initialOffset = data.Position;
#region Directory Header
// Try to parse the directory header
var directoryHeader = ParseDirectoryHeader(data);
if (directoryHeader == null)
return null;
// Set the game cache directory header
file.DirectoryHeader = directoryHeader;
#endregion
#region Directory Entries
// Create the directory entry array
file.DirectoryEntries = new DirectoryEntry[directoryHeader.ItemCount];
// Try to parse the directory entries
for (int i = 0; i < directoryHeader.ItemCount; i++)
{
var directoryEntry = ParseDirectoryEntry(data);
file.DirectoryEntries[i] = directoryEntry;
}
#endregion
#region Directory Names
if (directoryHeader.NameSize > 0)
{
// Get the current offset for adjustment
long directoryNamesStart = data.Position;
// Get the ending offset
long directoryNamesEnd = data.Position + directoryHeader.NameSize;
// Create the string dictionary
file.DirectoryNames = new Dictionary<long, string>();
// Loop and read the null-terminated strings
while (data.Position < directoryNamesEnd)
{
long nameOffset = data.Position - directoryNamesStart;
string directoryName = data.ReadString(Encoding.ASCII);
// If the string ran past the end of the name block, seek back to the
// start of the string and re-read only the bytes inside the block
// (seeking back by the string length alone would miss the null terminator)
if (data.Position > directoryNamesEnd)
{
data.Seek(directoryNamesStart + nameOffset, SeekOrigin.Begin);
byte[] endingData = data.ReadBytes((int)(directoryNamesEnd - data.Position));
if (endingData != null)
directoryName = Encoding.ASCII.GetString(endingData);
else
directoryName = null;
}
file.DirectoryNames[nameOffset] = directoryName;
}
// Loop and assign names to entries, skipping any entry whose
// offset does not land on a parsed name
foreach (var directoryEntry in file.DirectoryEntries)
{
if (file.DirectoryNames.TryGetValue(directoryEntry.NameOffset, out string name))
directoryEntry.Name = name;
}
}
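// Editor's illustration (values hypothetical): with NameSize = 17 and the
// name block "root\0maps\0hl.wad\0", DirectoryNames would contain
// { 0: "root", 5: "maps", 10: "hl.wad" }, and each entry's NameOffset
// indexes into that map.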
#endregion
#region Directory Info 1 Entries
// Create the directory info 1 entry array
file.DirectoryInfo1Entries = new DirectoryInfo1Entry[directoryHeader.Info1Count];
// Try to parse the directory info 1 entries
for (int i = 0; i < directoryHeader.Info1Count; i++)
{
var directoryInfo1Entry = ParseDirectoryInfo1Entry(data);
file.DirectoryInfo1Entries[i] = directoryInfo1Entry;
}
#endregion
#region Directory Info 2 Entries
// Create the directory info 2 entry array
file.DirectoryInfo2Entries = new DirectoryInfo2Entry[directoryHeader.ItemCount];
// Try to parse the directory info 2 entries
for (int i = 0; i < directoryHeader.ItemCount; i++)
{
var directoryInfo2Entry = ParseDirectoryInfo2Entry(data);
file.DirectoryInfo2Entries[i] = directoryInfo2Entry;
}
#endregion
#region Directory Copy Entries
// Create the directory copy entry array
file.DirectoryCopyEntries = new DirectoryCopyEntry[directoryHeader.CopyCount];
// Try to parse the directory copy entries
for (int i = 0; i < directoryHeader.CopyCount; i++)
{
var directoryCopyEntry = ParseDirectoryCopyEntry(data);
file.DirectoryCopyEntries[i] = directoryCopyEntry;
}
#endregion
#region Directory Local Entries
// Create the directory local entry array
file.DirectoryLocalEntries = new DirectoryLocalEntry[directoryHeader.LocalCount];
// Try to parse the directory local entries
for (int i = 0; i < directoryHeader.LocalCount; i++)
{
var directoryLocalEntry = ParseDirectoryLocalEntry(data);
file.DirectoryLocalEntries[i] = directoryLocalEntry;
}
#endregion
// Seek to end of directory section, just in case
data.Seek(initialOffset + directoryHeader.DirectorySize, SeekOrigin.Begin);
#region Directory Map Header
if (header.MinorVersion >= 5)
{
// Try to parse the directory map header
var directoryMapHeader = ParseDirectoryMapHeader(data);
if (directoryMapHeader == null)
return null;
// Set the game cache directory map header
file.DirectoryMapHeader = directoryMapHeader;
}
#endregion
#region Directory Map Entries
// Create the directory map entry array
file.DirectoryMapEntries = new DirectoryMapEntry[directoryHeader.ItemCount];
// Try to parse the directory map entries
for (int i = 0; i < directoryHeader.ItemCount; i++)
{
var directoryMapEntry = ParseDirectoryMapEntry(data);
file.DirectoryMapEntries[i] = directoryMapEntry;
}
#endregion
#region Checksum Header
// Try to parse the checksum header
var checksumHeader = ParseChecksumHeader(data);
if (checksumHeader == null)
return null;
// Set the game cache checksum header
file.ChecksumHeader = checksumHeader;
#endregion
// Cache the current offset
initialOffset = data.Position;
#region Checksum Map Header
// Try to parse the checksum map header
var checksumMapHeader = ParseChecksumMapHeader(data);
if (checksumMapHeader == null)
return null;
// Set the game cache checksum map header
file.ChecksumMapHeader = checksumMapHeader;
#endregion
#region Checksum Map Entries
// Create the checksum map entry array
file.ChecksumMapEntries = new ChecksumMapEntry[checksumMapHeader.ItemCount];
// Try to parse the checksum map entries
for (int i = 0; i < checksumMapHeader.ItemCount; i++)
{
var checksumMapEntry = ParseChecksumMapEntry(data);
file.ChecksumMapEntries[i] = checksumMapEntry;
}
#endregion
#region Checksum Entries
// Create the checksum entry array
file.ChecksumEntries = new ChecksumEntry[checksumMapHeader.ChecksumCount];
// Try to parse the checksum entries
for (int i = 0; i < checksumMapHeader.ChecksumCount; i++)
{
var checksumEntry = ParseChecksumEntry(data);
file.ChecksumEntries[i] = checksumEntry;
}
#endregion
// Seek to end of checksum section, just in case
data.Seek(initialOffset + checksumHeader.ChecksumSize, SeekOrigin.Begin);
#region Data Block Header
// Try to parse the data block header
var dataBlockHeader = ParseDataBlockHeader(data, header.MinorVersion);
if (dataBlockHeader == null)
return null;
// Set the game cache data block header
file.DataBlockHeader = dataBlockHeader;
#endregion
return file;
}
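// Recap of the on-disk order handled above: Header, BlockEntryHeader,
// BlockEntries, FragmentationMapHeader, FragmentationMaps,
// (minor version < 6) BlockEntryMapHeader and BlockEntryMaps,
// DirectoryHeader, DirectoryEntries, DirectoryNames, Info1/Info2/Copy/Local
// entries, (minor version >= 5) DirectoryMapHeader, DirectoryMapEntries,
// ChecksumHeader, ChecksumMapHeader, ChecksumMapEntries, ChecksumEntries,
// and finally DataBlockHeader.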
/// <summary>
/// Parse a Stream into a Half-Life Game Cache header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache header on success, null on error</returns>
private static Header ParseHeader(Stream data)
{
// TODO: Use marshalling here instead of building
Header header = new Header();
header.Dummy0 = data.ReadUInt32();
if (header.Dummy0 != 0x00000001)
return null;
header.MajorVersion = data.ReadUInt32();
if (header.MajorVersion != 0x00000001)
return null;
header.MinorVersion = data.ReadUInt32();
if (header.MinorVersion != 3 && header.MinorVersion != 5 && header.MinorVersion != 6)
return null;
header.CacheID = data.ReadUInt32();
header.LastVersionPlayed = data.ReadUInt32();
header.Dummy1 = data.ReadUInt32();
header.Dummy2 = data.ReadUInt32();
header.FileSize = data.ReadUInt32();
header.BlockSize = data.ReadUInt32();
header.BlockCount = data.ReadUInt32();
header.Dummy3 = data.ReadUInt32();
return header;
}
/// <summary>
/// Parse a Stream into a Half-Life Game Cache block entry header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache block entry header on success, null on error</returns>
private static BlockEntryHeader ParseBlockEntryHeader(Stream data)
{
// TODO: Use marshalling here instead of building
BlockEntryHeader blockEntryHeader = new BlockEntryHeader();
blockEntryHeader.BlockCount = data.ReadUInt32();
blockEntryHeader.BlocksUsed = data.ReadUInt32();
blockEntryHeader.Dummy0 = data.ReadUInt32();
blockEntryHeader.Dummy1 = data.ReadUInt32();
blockEntryHeader.Dummy2 = data.ReadUInt32();
blockEntryHeader.Dummy3 = data.ReadUInt32();
blockEntryHeader.Dummy4 = data.ReadUInt32();
blockEntryHeader.Checksum = data.ReadUInt32();
return blockEntryHeader;
}
/// <summary>
/// Parse a Stream into a Half-Life Game Cache block entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache block entry on success, null on error</returns>
private static BlockEntry ParseBlockEntry(Stream data)
{
// TODO: Use marshalling here instead of building
BlockEntry blockEntry = new BlockEntry();
blockEntry.EntryFlags = data.ReadUInt32();
blockEntry.FileDataOffset = data.ReadUInt32();
blockEntry.FileDataSize = data.ReadUInt32();
blockEntry.FirstDataBlockIndex = data.ReadUInt32();
blockEntry.NextBlockEntryIndex = data.ReadUInt32();
blockEntry.PreviousBlockEntryIndex = data.ReadUInt32();
blockEntry.DirectoryIndex = data.ReadUInt32();
return blockEntry;
}
/// <summary>
/// Parse a Stream into a Half-Life Game Cache fragmentation map header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache fragmentation map header on success, null on error</returns>
private static FragmentationMapHeader ParseFragmentationMapHeader(Stream data)
{
// TODO: Use marshalling here instead of building
FragmentationMapHeader fragmentationMapHeader = new FragmentationMapHeader();
fragmentationMapHeader.BlockCount = data.ReadUInt32();
fragmentationMapHeader.FirstUnusedEntry = data.ReadUInt32();
fragmentationMapHeader.Terminator = data.ReadUInt32();
fragmentationMapHeader.Checksum = data.ReadUInt32();
return fragmentationMapHeader;
}
/// <summary>
/// Parse a Stream into a Half-Life Game Cache fragmentation map
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache fragmentation map on success, null on error</returns>
private static FragmentationMap ParseFragmentationMap(Stream data)
{
// TODO: Use marshalling here instead of building
FragmentationMap fragmentationMap = new FragmentationMap();
fragmentationMap.NextDataBlockIndex = data.ReadUInt32();
return fragmentationMap;
}
/// <summary>
/// Parse a Stream into a Half-Life Game Cache block entry map header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache block entry map header on success, null on error</returns>
private static BlockEntryMapHeader ParseBlockEntryMapHeader(Stream data)
{
// TODO: Use marshalling here instead of building
BlockEntryMapHeader blockEntryMapHeader = new BlockEntryMapHeader();
blockEntryMapHeader.BlockCount = data.ReadUInt32();
blockEntryMapHeader.FirstBlockEntryIndex = data.ReadUInt32();
blockEntryMapHeader.LastBlockEntryIndex = data.ReadUInt32();
blockEntryMapHeader.Dummy0 = data.ReadUInt32();
blockEntryMapHeader.Checksum = data.ReadUInt32();
return blockEntryMapHeader;
}
/// <summary>
/// Parse a Stream into a Half-Life Game Cache block entry map
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache block entry map on success, null on error</returns>
private static BlockEntryMap ParseBlockEntryMap(Stream data)
{
// TODO: Use marshalling here instead of building
BlockEntryMap blockEntryMap = new BlockEntryMap();
blockEntryMap.PreviousBlockEntryIndex = data.ReadUInt32();
blockEntryMap.NextBlockEntryIndex = data.ReadUInt32();
return blockEntryMap;
}
/// <summary>
/// Parse a Stream into a Half-Life Game Cache directory header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache directory header on success, null on error</returns>
private static DirectoryHeader ParseDirectoryHeader(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryHeader directoryHeader = new DirectoryHeader();
directoryHeader.Dummy0 = data.ReadUInt32();
directoryHeader.CacheID = data.ReadUInt32();
directoryHeader.LastVersionPlayed = data.ReadUInt32();
directoryHeader.ItemCount = data.ReadUInt32();
directoryHeader.FileCount = data.ReadUInt32();
directoryHeader.Dummy1 = data.ReadUInt32();
directoryHeader.DirectorySize = data.ReadUInt32();
directoryHeader.NameSize = data.ReadUInt32();
directoryHeader.Info1Count = data.ReadUInt32();
directoryHeader.CopyCount = data.ReadUInt32();
directoryHeader.LocalCount = data.ReadUInt32();
directoryHeader.Dummy2 = data.ReadUInt32();
directoryHeader.Dummy3 = data.ReadUInt32();
directoryHeader.Checksum = data.ReadUInt32();
return directoryHeader;
}
/// <summary>
/// Parse a Stream into a Half-Life Game Cache directory entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache directory entry on success, null on error</returns>
private static DirectoryEntry ParseDirectoryEntry(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryEntry directoryEntry = new DirectoryEntry();
directoryEntry.NameOffset = data.ReadUInt32();
directoryEntry.ItemSize = data.ReadUInt32();
directoryEntry.ChecksumIndex = data.ReadUInt32();
directoryEntry.DirectoryFlags = (HL_GCF_FLAG)data.ReadUInt32();
directoryEntry.ParentIndex = data.ReadUInt32();
directoryEntry.NextIndex = data.ReadUInt32();
directoryEntry.FirstIndex = data.ReadUInt32();
return directoryEntry;
}
/// <summary>
/// Parse a Stream into a Half-Life Game Cache directory info 1 entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache directory info 1 entry on success, null on error</returns>
private static DirectoryInfo1Entry ParseDirectoryInfo1Entry(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryInfo1Entry directoryInfo1Entry = new DirectoryInfo1Entry();
directoryInfo1Entry.Dummy0 = data.ReadUInt32();
return directoryInfo1Entry;
}
/// <summary>
/// Parse a Stream into a Half-Life Game Cache directory info 2 entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache directory info 2 entry on success, null on error</returns>
private static DirectoryInfo2Entry ParseDirectoryInfo2Entry(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryInfo2Entry directoryInfo2Entry = new DirectoryInfo2Entry();
directoryInfo2Entry.Dummy0 = data.ReadUInt32();
return directoryInfo2Entry;
}
/// <summary>
/// Parse a Stream into a Half-Life Game Cache directory copy entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache directory copy entry on success, null on error</returns>
private static DirectoryCopyEntry ParseDirectoryCopyEntry(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryCopyEntry directoryCopyEntry = new DirectoryCopyEntry();
directoryCopyEntry.DirectoryIndex = data.ReadUInt32();
return directoryCopyEntry;
}
/// <summary>
/// Parse a Stream into a Half-Life Game Cache directory local entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache directory local entry on success, null on error</returns>
private static DirectoryLocalEntry ParseDirectoryLocalEntry(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryLocalEntry directoryLocalEntry = new DirectoryLocalEntry();
directoryLocalEntry.DirectoryIndex = data.ReadUInt32();
return directoryLocalEntry;
}
/// <summary>
/// Parse a Stream into a Half-Life Game Cache directory map header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache directory map header on success, null on error</returns>
private static DirectoryMapHeader ParseDirectoryMapHeader(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryMapHeader directoryMapHeader = new DirectoryMapHeader();
directoryMapHeader.Dummy0 = data.ReadUInt32();
if (directoryMapHeader.Dummy0 != 0x00000001)
return null;
directoryMapHeader.Dummy1 = data.ReadUInt32();
if (directoryMapHeader.Dummy1 != 0x00000000)
return null;
return directoryMapHeader;
}
/// <summary>
/// Parse a Stream into a Half-Life Game Cache directory map entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache directory map entry on success, null on error</returns>
private static DirectoryMapEntry ParseDirectoryMapEntry(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryMapEntry directoryMapEntry = new DirectoryMapEntry();
directoryMapEntry.FirstBlockIndex = data.ReadUInt32();
return directoryMapEntry;
}
/// <summary>
/// Parse a Stream into a Half-Life Game Cache checksum header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache checksum header on success, null on error</returns>
private static ChecksumHeader ParseChecksumHeader(Stream data)
{
// TODO: Use marshalling here instead of building
ChecksumHeader checksumHeader = new ChecksumHeader();
checksumHeader.Dummy0 = data.ReadUInt32();
if (checksumHeader.Dummy0 != 0x00000001)
return null;
checksumHeader.ChecksumSize = data.ReadUInt32();
return checksumHeader;
}
/// <summary>
/// Parse a Stream into a Half-Life Game Cache checksum map header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache checksum map header on success, null on error</returns>
private static ChecksumMapHeader ParseChecksumMapHeader(Stream data)
{
// TODO: Use marshalling here instead of building
ChecksumMapHeader checksumMapHeader = new ChecksumMapHeader();
checksumMapHeader.Dummy0 = data.ReadUInt32();
if (checksumMapHeader.Dummy0 != 0x14893721)
return null;
checksumMapHeader.Dummy1 = data.ReadUInt32();
if (checksumMapHeader.Dummy1 != 0x00000001)
return null;
checksumMapHeader.ItemCount = data.ReadUInt32();
checksumMapHeader.ChecksumCount = data.ReadUInt32();
return checksumMapHeader;
}
/// <summary>
/// Parse a Stream into a Half-Life Game Cache checksum map entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache checksum map entry on success, null on error</returns>
private static ChecksumMapEntry ParseChecksumMapEntry(Stream data)
{
// TODO: Use marshalling here instead of building
ChecksumMapEntry checksumMapEntry = new ChecksumMapEntry();
checksumMapEntry.ChecksumCount = data.ReadUInt32();
checksumMapEntry.FirstChecksumIndex = data.ReadUInt32();
return checksumMapEntry;
}
/// <summary>
/// Parse a Stream into a Half-Life Game Cache checksum entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache checksum entry on success, null on error</returns>
private static ChecksumEntry ParseChecksumEntry(Stream data)
{
// TODO: Use marshalling here instead of building
ChecksumEntry checksumEntry = new ChecksumEntry();
checksumEntry.Checksum = data.ReadUInt32();
return checksumEntry;
}
/// <summary>
/// Parse a Stream into a Half-Life Game Cache data block header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="minorVersion">Minor version field from the header</param>
/// <returns>Filled Half-Life Game Cache data block header on success, null on error</returns>
private static DataBlockHeader ParseDataBlockHeader(Stream data, uint minorVersion)
{
// TODO: Use marshalling here instead of building
DataBlockHeader dataBlockHeader = new DataBlockHeader();
// In version 3 the DataBlockHeader is missing the LastVersionPlayed field.
if (minorVersion >= 5)
dataBlockHeader.LastVersionPlayed = data.ReadUInt32();
dataBlockHeader.BlockCount = data.ReadUInt32();
dataBlockHeader.BlockSize = data.ReadUInt32();
dataBlockHeader.FirstBlockOffset = data.ReadUInt32();
dataBlockHeader.BlocksUsed = data.ReadUInt32();
dataBlockHeader.Checksum = data.ReadUInt32();
return dataBlockHeader;
}
#endregion
}
}


@@ -0,0 +1,808 @@
using System.Collections.Generic;
using System.IO;
using System.Text;
using BurnOutSharp.Models.InstallShieldCabinet;
using BurnOutSharp.Utilities;
using static BurnOutSharp.Models.InstallShieldCabinet.Constants;
namespace BurnOutSharp.Builders
{
// TODO: Add multi-cabinet reading
public class InstallShieldCabinet
{
#region Byte Data
/// <summary>
/// Parse a byte array into an InstallShield Cabinet file
/// </summary>
/// <param name="data">Byte array to parse</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>Filled cabinet on success, null on error</returns>
public static Cabinet ParseCabinet(byte[] data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return ParseCabinet(dataStream);
}
#endregion
#region Stream Data
/// <summary>
/// Parse a Stream into an InstallShield Cabinet file
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled cabinet on success, null on error</returns>
public static Cabinet ParseCabinet(Stream data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
return null;
// If the offset is out of bounds
if (data.Position < 0 || data.Position >= data.Length)
return null;
// Cache the current offset
int initialOffset = (int)data.Position;
// Create a new cabinet to fill
var cabinet = new Cabinet();
#region Common Header
// Try to parse the cabinet header
var commonHeader = ParseCommonHeader(data);
if (commonHeader == null)
return null;
// Set the cabinet header
cabinet.CommonHeader = commonHeader;
#endregion
#region Volume Header
// Try to parse the volume header
var volumeHeader = ParseVolumeHeader(data, GetMajorVersion(commonHeader));
if (volumeHeader == null)
return null;
// Set the volume header
cabinet.VolumeHeader = volumeHeader;
#endregion
#region Descriptor
// Get the descriptor offset
uint descriptorOffset = commonHeader.DescriptorOffset;
if (descriptorOffset < 0 || descriptorOffset >= data.Length)
return null;
// Seek to the descriptor
data.Seek(descriptorOffset, SeekOrigin.Begin);
// Try to parse the descriptor
var descriptor = ParseDescriptor(data);
if (descriptor == null)
return null;
// Set the descriptor
cabinet.Descriptor = descriptor;
#endregion
#region File Descriptor Offsets
// Get the file table offset
uint fileTableOffset = commonHeader.DescriptorOffset + descriptor.FileTableOffset;
if (fileTableOffset < 0 || fileTableOffset >= data.Length)
return null;
// Seek to the file table
data.Seek(fileTableOffset, SeekOrigin.Begin);
// Get the number of file table items
uint fileTableItems;
if (GetMajorVersion(commonHeader) <= 5)
fileTableItems = descriptor.DirectoryCount + descriptor.FileCount;
else
fileTableItems = descriptor.DirectoryCount;
// Create and fill the file table
cabinet.FileDescriptorOffsets = new uint[fileTableItems];
for (int i = 0; i < cabinet.FileDescriptorOffsets.Length; i++)
{
cabinet.FileDescriptorOffsets[i] = data.ReadUInt32();
}
#endregion
#region Directory Descriptors
// Create and fill the directory descriptors
cabinet.DirectoryNames = new string[descriptor.DirectoryCount];
for (int i = 0; i < descriptor.DirectoryCount; i++)
{
// Get the directory descriptor offset
uint offset = descriptorOffset
+ descriptor.FileTableOffset
+ cabinet.FileDescriptorOffsets[i];
// If we have an invalid offset
if (offset < 0 || offset >= data.Length)
continue;
// Seek to the file descriptor offset
data.Seek(offset, SeekOrigin.Begin);
// Create and add the file descriptor
string directoryName = ParseDirectoryName(data, GetMajorVersion(commonHeader));
cabinet.DirectoryNames[i] = directoryName;
}
#endregion
#region File Descriptors
// Create and fill the file descriptors
cabinet.FileDescriptors = new FileDescriptor[descriptor.FileCount];
for (int i = 0; i < descriptor.FileCount; i++)
{
// Get the file descriptor offset
uint offset;
if (GetMajorVersion(commonHeader) <= 5)
{
offset = descriptorOffset
+ descriptor.FileTableOffset
+ cabinet.FileDescriptorOffsets[descriptor.DirectoryCount + i];
}
else
{
offset = descriptorOffset
+ descriptor.FileTableOffset
+ descriptor.FileTableOffset2
+ (uint)(i * 0x57);
}
// If we have an invalid offset
if (offset < 0 || offset >= data.Length)
continue;
// Seek to the file descriptor offset
data.Seek(offset, SeekOrigin.Begin);
// Create and add the file descriptor
FileDescriptor fileDescriptor = ParseFileDescriptor(data, GetMajorVersion(commonHeader), descriptorOffset + descriptor.FileTableOffset);
cabinet.FileDescriptors[i] = fileDescriptor;
}
#endregion
#region File Group Offsets
// Create and fill the file group offsets
cabinet.FileGroupOffsets = new Dictionary<long, OffsetList>();
for (int i = 0; i < descriptor.FileGroupOffsets.Length; i++)
{
// Get the file group offset
uint offset = descriptor.FileGroupOffsets[i];
if (offset == 0)
continue;
// Adjust the file group offset
offset += commonHeader.DescriptorOffset;
if (offset < 0 || offset >= data.Length)
continue;
// Seek to the file group offset
data.Seek(offset, SeekOrigin.Begin);
// Create and add the offset
OffsetList offsetList = ParseOffsetList(data, GetMajorVersion(commonHeader), descriptorOffset);
cabinet.FileGroupOffsets[descriptor.FileGroupOffsets[i]] = offsetList;
// If we have a nonzero next offset
uint nextOffset = offsetList.NextOffset;
while (nextOffset != 0)
{
// Get the next offset to read
uint internalOffset = nextOffset + commonHeader.DescriptorOffset;
// Seek to the file group offset
data.Seek(internalOffset, SeekOrigin.Begin);
// Create and add the offset
offsetList = ParseOffsetList(data, GetMajorVersion(commonHeader), descriptorOffset);
cabinet.FileGroupOffsets[nextOffset] = offsetList;
// Set the next offset
nextOffset = offsetList.NextOffset;
}
}
#endregion
#region File Groups
// Create the file groups array
cabinet.FileGroups = new FileGroup[cabinet.FileGroupOffsets.Count];
// Create and fill the file groups
int fileGroupId = 0;
foreach (var kvp in cabinet.FileGroupOffsets)
{
// Get the offset
OffsetList list = kvp.Value;
if (list == null)
{
fileGroupId++;
continue;
}
// If we have an invalid offset
if (list.DescriptorOffset <= 0)
{
fileGroupId++;
continue;
}
// Seek to the file group
data.Seek(list.DescriptorOffset + descriptorOffset, SeekOrigin.Begin);
// Try to parse the file group
var fileGroup = ParseFileGroup(data, GetMajorVersion(commonHeader), descriptorOffset);
if (fileGroup == null)
return null;
// Add the file group
cabinet.FileGroups[fileGroupId++] = fileGroup;
}
#endregion
#region Component Offsets
// Create and fill the component offsets
cabinet.ComponentOffsets = new Dictionary<long, OffsetList>();
for (int i = 0; i < descriptor.ComponentOffsets.Length; i++)
{
// Get the component offset
uint offset = descriptor.ComponentOffsets[i];
if (offset == 0)
continue;
// Adjust the component offset
offset += commonHeader.DescriptorOffset;
if (offset < 0 || offset >= data.Length)
continue;
// Seek to the component offset
data.Seek(offset, SeekOrigin.Begin);
// Create and add the offset
OffsetList offsetList = ParseOffsetList(data, GetMajorVersion(commonHeader), descriptorOffset);
cabinet.ComponentOffsets[descriptor.ComponentOffsets[i]] = offsetList;
// If we have a nonzero next offset
uint nextOffset = offsetList.NextOffset;
while (nextOffset != 0)
{
// Get the next offset to read
uint internalOffset = nextOffset + commonHeader.DescriptorOffset;
// Seek to the file group offset
data.Seek(internalOffset, SeekOrigin.Begin);
// Create and add the offset
offsetList = ParseOffsetList(data, GetMajorVersion(commonHeader), descriptorOffset);
cabinet.ComponentOffsets[nextOffset] = offsetList;
// Set the next offset
nextOffset = offsetList.NextOffset;
}
}
#endregion
#region Components
// Create the components array
cabinet.Components = new Component[cabinet.ComponentOffsets.Count];
// Create and fill the components
int componentId = 0;
foreach (KeyValuePair<long, OffsetList> kvp in cabinet.ComponentOffsets)
{
// Get the offset
OffsetList list = kvp.Value;
if (list == null)
{
componentId++;
continue;
}
// If we have an invalid offset
if (list.DescriptorOffset <= 0)
{
componentId++;
continue;
}
// Seek to the component
data.Seek(list.DescriptorOffset + descriptorOffset, SeekOrigin.Begin);
// Try to parse the component
var component = ParseComponent(data, GetMajorVersion(commonHeader), descriptorOffset);
if (component == null)
return null;
// Add the component
cabinet.Components[componentId++] = component;
}
#endregion
// TODO: Parse setup types
return cabinet;
}
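// Note: the file group and component sections above are stored as linked
// lists on disk. Each slot in the descriptor's offset array points at an
// OffsetList node, and each node's NextOffset points at the next node in
// the chain; a NextOffset of 0 terminates the list.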
/// <summary>
/// Parse a Stream into a common header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled common header on success, null on error</returns>
private static CommonHeader ParseCommonHeader(Stream data)
{
CommonHeader commonHeader = new CommonHeader();
byte[] signature = data.ReadBytes(4);
commonHeader.Signature = Encoding.ASCII.GetString(signature);
if (commonHeader.Signature != SignatureString)
return null;
commonHeader.Version = data.ReadUInt32();
commonHeader.VolumeInfo = data.ReadUInt32();
commonHeader.DescriptorOffset = data.ReadUInt32();
commonHeader.DescriptorSize = data.ReadUInt32();
return commonHeader;
}
/// <summary>
/// Parse a Stream into a volume header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="majorVersion">Major version of the cabinet</param>
/// <returns>Filled volume header on success, null on error</returns>
private static VolumeHeader ParseVolumeHeader(Stream data, int majorVersion)
{
VolumeHeader volumeHeader = new VolumeHeader();
// Read the descriptor based on version
if (majorVersion <= 5)
{
volumeHeader.DataOffset = data.ReadUInt32();
_ = data.ReadBytes(0x04); // Skip 0x04 bytes, unknown data?
volumeHeader.FirstFileIndex = data.ReadUInt32();
volumeHeader.LastFileIndex = data.ReadUInt32();
volumeHeader.FirstFileOffset = data.ReadUInt32();
volumeHeader.FirstFileSizeExpanded = data.ReadUInt32();
volumeHeader.FirstFileSizeCompressed = data.ReadUInt32();
volumeHeader.LastFileOffset = data.ReadUInt32();
volumeHeader.LastFileSizeExpanded = data.ReadUInt32();
volumeHeader.LastFileSizeCompressed = data.ReadUInt32();
}
else
{
// TODO: Should standard and high values be combined?
volumeHeader.DataOffset = data.ReadUInt32();
volumeHeader.DataOffsetHigh = data.ReadUInt32();
volumeHeader.FirstFileIndex = data.ReadUInt32();
volumeHeader.LastFileIndex = data.ReadUInt32();
volumeHeader.FirstFileOffset = data.ReadUInt32();
volumeHeader.FirstFileOffsetHigh = data.ReadUInt32();
volumeHeader.FirstFileSizeExpanded = data.ReadUInt32();
volumeHeader.FirstFileSizeExpandedHigh = data.ReadUInt32();
volumeHeader.FirstFileSizeCompressed = data.ReadUInt32();
volumeHeader.FirstFileSizeCompressedHigh = data.ReadUInt32();
volumeHeader.LastFileOffset = data.ReadUInt32();
volumeHeader.LastFileOffsetHigh = data.ReadUInt32();
volumeHeader.LastFileSizeExpanded = data.ReadUInt32();
volumeHeader.LastFileSizeExpandedHigh = data.ReadUInt32();
volumeHeader.LastFileSizeCompressed = data.ReadUInt32();
volumeHeader.LastFileSizeCompressedHigh = data.ReadUInt32();
}
return volumeHeader;
}
/// <summary>
/// Parse a Stream into a descriptor
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled descriptor on success, null on error</returns>
private static Descriptor ParseDescriptor(Stream data)
{
Descriptor descriptor = new Descriptor();
descriptor.StringsOffset = data.ReadUInt32();
descriptor.Reserved0 = data.ReadBytes(4);
descriptor.ComponentListOffset = data.ReadUInt32();
descriptor.FileTableOffset = data.ReadUInt32();
descriptor.Reserved1 = data.ReadBytes(4);
descriptor.FileTableSize = data.ReadUInt32();
descriptor.FileTableSize2 = data.ReadUInt32();
descriptor.DirectoryCount = data.ReadUInt16();
descriptor.Reserved2 = data.ReadBytes(4);
descriptor.Reserved3 = data.ReadBytes(2);
descriptor.Reserved4 = data.ReadBytes(4);
descriptor.FileCount = data.ReadUInt32();
descriptor.FileTableOffset2 = data.ReadUInt32();
descriptor.ComponentTableInfoCount = data.ReadUInt16();
descriptor.ComponentTableOffset = data.ReadUInt32();
descriptor.Reserved5 = data.ReadBytes(4);
descriptor.Reserved6 = data.ReadBytes(4);
descriptor.FileGroupOffsets = new uint[MAX_FILE_GROUP_COUNT];
for (int i = 0; i < descriptor.FileGroupOffsets.Length; i++)
{
descriptor.FileGroupOffsets[i] = data.ReadUInt32();
}
descriptor.ComponentOffsets = new uint[MAX_COMPONENT_COUNT];
for (int i = 0; i < descriptor.ComponentOffsets.Length; i++)
{
descriptor.ComponentOffsets[i] = data.ReadUInt32();
}
descriptor.SetupTypesOffset = data.ReadUInt32();
descriptor.SetupTableOffset = data.ReadUInt32();
descriptor.Reserved7 = data.ReadBytes(4);
descriptor.Reserved8 = data.ReadBytes(4);
return descriptor;
}
/// <summary>
/// Parse a Stream into an offset list
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="majorVersion">Major version of the cabinet</param>
/// <param name="descriptorOffset">Offset of the cabinet descriptor</param>
/// <returns>Filled offset list on success, null on error</returns>
private static OffsetList ParseOffsetList(Stream data, int majorVersion, uint descriptorOffset)
{
OffsetList offsetList = new OffsetList();
offsetList.NameOffset = data.ReadUInt32();
offsetList.DescriptorOffset = data.ReadUInt32();
offsetList.NextOffset = data.ReadUInt32();
// Cache the current offset
long currentOffset = data.Position;
// Seek to the name offset
data.Seek(offsetList.NameOffset + descriptorOffset, SeekOrigin.Begin);
// Read the string
if (majorVersion >= 17)
offsetList.Name = data.ReadString(Encoding.Unicode);
else
offsetList.Name = data.ReadString(Encoding.ASCII);
// Seek back to the correct offset
data.Seek(currentOffset, SeekOrigin.Begin);
return offsetList;
}
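/// <summary>
/// Read a string at a given position, then restore the stream position
/// </summary>
/// <remarks>
/// Editor's sketch, not part of the original file: the save/seek/read/restore
/// idiom above is repeated by several parsers below and could be factored
/// into a helper like this one (ReadStringAt is a hypothetical name).
/// </remarks>
private static string ReadStringAt(Stream data, long position, Encoding encoding)
{
// Cache the current offset
long saved = data.Position;
// Seek to the requested position and read the string
data.Seek(position, SeekOrigin.Begin);
string value = data.ReadString(encoding);
// Seek back to the cached offset
data.Seek(saved, SeekOrigin.Begin);
return value;
}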
/// <summary>
/// Parse a Stream into a file group
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="majorVersion">Major version of the cabinet</param>
/// <param name="descriptorOffset">Offset of the cabinet descriptor</param>
/// <returns>Filled file group on success, null on error</returns>
private static FileGroup ParseFileGroup(Stream data, int majorVersion, uint descriptorOffset)
{
FileGroup fileGroup = new FileGroup();
fileGroup.NameOffset = data.ReadUInt32();
fileGroup.ExpandedSize = data.ReadUInt32();
fileGroup.Reserved0 = data.ReadBytes(4);
fileGroup.CompressedSize = data.ReadUInt32();
fileGroup.Reserved1 = data.ReadBytes(4);
fileGroup.Reserved2 = data.ReadBytes(2);
fileGroup.Attribute1 = data.ReadUInt16();
fileGroup.Attribute2 = data.ReadUInt16();
// TODO: Figure out what data lives in this area for V5 and below
if (majorVersion <= 5)
data.Seek(0x36, SeekOrigin.Current);
fileGroup.FirstFile = data.ReadUInt32();
fileGroup.LastFile = data.ReadUInt32();
fileGroup.UnknownOffset = data.ReadUInt32();
fileGroup.Var4Offset = data.ReadUInt32();
fileGroup.Var1Offset = data.ReadUInt32();
fileGroup.HTTPLocationOffset = data.ReadUInt32();
fileGroup.FTPLocationOffset = data.ReadUInt32();
fileGroup.MiscOffset = data.ReadUInt32();
fileGroup.Var2Offset = data.ReadUInt32();
fileGroup.TargetDirectoryOffset = data.ReadUInt32();
fileGroup.Reserved3 = data.ReadBytes(2);
fileGroup.Reserved4 = data.ReadBytes(2);
fileGroup.Reserved5 = data.ReadBytes(2);
fileGroup.Reserved6 = data.ReadBytes(2);
fileGroup.Reserved7 = data.ReadBytes(2);
// Cache the current position
long currentPosition = data.Position;
// Read the name, if possible
if (fileGroup.NameOffset != 0)
{
// Seek to the name
data.Seek(fileGroup.NameOffset + descriptorOffset, SeekOrigin.Begin);
// Read the string
if (majorVersion >= 17)
fileGroup.Name = data.ReadString(Encoding.Unicode);
else
fileGroup.Name = data.ReadString(Encoding.ASCII);
}
// Seek back to the correct offset
data.Seek(currentPosition, SeekOrigin.Begin);
return fileGroup;
}
/// <summary>
/// Parse a Stream into a component
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="majorVersion">Major version of the cabinet</param>
/// <param name="descriptorOffset">Offset of the cabinet descriptor</param>
/// <returns>Filled component on success, null on error</returns>
private static Component ParseComponent(Stream data, int majorVersion, uint descriptorOffset)
{
Component component = new Component();
component.IdentifierOffset = data.ReadUInt32();
component.DescriptorOffset = data.ReadUInt32();
component.DisplayNameOffset = data.ReadUInt32();
component.Reserved0 = data.ReadBytes(2);
component.ReservedOffset0 = data.ReadUInt32();
component.ReservedOffset1 = data.ReadUInt32();
component.ComponentIndex = data.ReadUInt16();
component.NameOffset = data.ReadUInt32();
component.ReservedOffset2 = data.ReadUInt32();
component.ReservedOffset3 = data.ReadUInt32();
component.ReservedOffset4 = data.ReadUInt32();
component.Reserved1 = data.ReadBytes(32);
component.CLSIDOffset = data.ReadUInt32();
component.Reserved2 = data.ReadBytes(28);
component.Reserved3 = data.ReadBytes(majorVersion <= 5 ? 2 : 1);
component.DependsCount = data.ReadUInt16();
component.DependsOffset = data.ReadUInt32();
component.FileGroupCount = data.ReadUInt16();
component.FileGroupNamesOffset = data.ReadUInt32();
component.X3Count = data.ReadUInt16();
component.X3Offset = data.ReadUInt32();
component.SubComponentsCount = data.ReadUInt16();
component.SubComponentsOffset = data.ReadUInt32();
component.NextComponentOffset = data.ReadUInt32();
component.ReservedOffset5 = data.ReadUInt32();
component.ReservedOffset6 = data.ReadUInt32();
component.ReservedOffset7 = data.ReadUInt32();
component.ReservedOffset8 = data.ReadUInt32();
// Cache the current position
long currentPosition = data.Position;
// Read the identifier, if possible
if (component.IdentifierOffset != 0)
{
// Seek to the identifier
data.Seek(component.IdentifierOffset + descriptorOffset, SeekOrigin.Begin);
// Read the string
if (majorVersion >= 17)
component.Identifier = data.ReadString(Encoding.Unicode);
else
component.Identifier = data.ReadString(Encoding.ASCII);
}
// Read the display name, if possible
if (component.DisplayNameOffset != 0)
{
// Seek to the name
data.Seek(component.DisplayNameOffset + descriptorOffset, SeekOrigin.Begin);
// Read the string
if (majorVersion >= 17)
component.DisplayName = data.ReadString(Encoding.Unicode);
else
component.DisplayName = data.ReadString(Encoding.ASCII);
}
// Read the name, if possible
if (component.NameOffset != 0)
{
// Seek to the name
data.Seek(component.NameOffset + descriptorOffset, SeekOrigin.Begin);
// Read the string
if (majorVersion >= 17)
component.Name = data.ReadString(Encoding.Unicode);
else
component.Name = data.ReadString(Encoding.ASCII);
}
// Read the CLSID, if possible
if (component.CLSIDOffset != 0)
{
// Seek to the CLSID
data.Seek(component.CLSIDOffset + descriptorOffset, SeekOrigin.Begin);
// Read the GUID
component.CLSID = data.ReadGuid();
}
// Read the file group names, if possible
if (component.FileGroupCount != 0 && component.FileGroupNamesOffset != 0)
{
// Seek to the file group table offset
data.Seek(component.FileGroupNamesOffset + descriptorOffset, SeekOrigin.Begin);
// Read the file group names table
component.FileGroupNames = new string[component.FileGroupCount];
for (int j = 0; j < component.FileGroupCount; j++)
{
// Get the name offset
uint nameOffset = data.ReadUInt32();
// Cache the current offset
long preNameOffset = data.Position;
// Seek to the name offset
data.Seek(nameOffset + descriptorOffset, SeekOrigin.Begin);
if (majorVersion >= 17)
component.FileGroupNames[j] = data.ReadString(Encoding.Unicode);
else
component.FileGroupNames[j] = data.ReadString(Encoding.ASCII);
// Seek back to the original position
data.Seek(preNameOffset, SeekOrigin.Begin);
}
}
// Seek back to the correct offset
data.Seek(currentPosition, SeekOrigin.Begin);
return component;
}
/// <summary>
/// Parse a Stream into a directory name
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="majorVersion">Major version of the cabinet</param>
/// <returns>Filled directory name on success, null on error</returns>
private static string ParseDirectoryName(Stream data, int majorVersion)
{
// Read the string
if (majorVersion >= 17)
return data.ReadString(Encoding.Unicode);
else
return data.ReadString(Encoding.ASCII);
}
/// <summary>
/// Parse a Stream into a file descriptor
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="majorVersion">Major version of the cabinet</param>
/// <param name="descriptorOffset">Offset of the cabinet descriptor</param>
/// <returns>Filled file descriptor on success, null on error</returns>
private static FileDescriptor ParseFileDescriptor(Stream data, int majorVersion, uint descriptorOffset)
{
FileDescriptor fileDescriptor = new FileDescriptor();
// Read the descriptor based on version
if (majorVersion <= 5)
{
fileDescriptor.Volume = 0xFFFF; // Set by the header index
fileDescriptor.NameOffset = data.ReadUInt32();
fileDescriptor.DirectoryIndex = data.ReadUInt32();
fileDescriptor.Flags = (FileFlags)data.ReadUInt16();
fileDescriptor.ExpandedSize = data.ReadUInt32();
fileDescriptor.CompressedSize = data.ReadUInt32();
_ = data.ReadBytes(0x14); // Skip 0x14 bytes, unknown data?
fileDescriptor.DataOffset = data.ReadUInt32();
if (majorVersion == 5)
fileDescriptor.MD5 = data.ReadBytes(0x10);
}
else
{
fileDescriptor.Flags = (FileFlags)data.ReadUInt16();
fileDescriptor.ExpandedSize = data.ReadUInt64();
fileDescriptor.CompressedSize = data.ReadUInt64();
fileDescriptor.DataOffset = data.ReadUInt64();
fileDescriptor.MD5 = data.ReadBytes(0x10);
_ = data.ReadBytes(0x10); // Skip 0x10 bytes, unknown data?
fileDescriptor.NameOffset = data.ReadUInt32();
fileDescriptor.DirectoryIndex = data.ReadUInt16();
_ = data.ReadBytes(0x0C); // Skip 0x0C bytes, unknown data?
fileDescriptor.LinkPrevious = data.ReadUInt32();
fileDescriptor.LinkNext = data.ReadUInt32();
fileDescriptor.LinkFlags = (LinkFlags)data.ReadByteValue();
fileDescriptor.Volume = data.ReadUInt16();
}
// Cache the current position
long currentPosition = data.Position;
// Read the name, if possible
if (fileDescriptor.NameOffset != 0)
{
// Seek to the name
data.Seek(fileDescriptor.NameOffset + descriptorOffset, SeekOrigin.Begin);
// Read the string
if (majorVersion >= 17)
fileDescriptor.Name = data.ReadString(Encoding.Unicode);
else
fileDescriptor.Name = data.ReadString(Encoding.ASCII);
}
// Seek back to the correct offset
data.Seek(currentPosition, SeekOrigin.Begin);
return fileDescriptor;
}
#endregion
#region Helpers
/// <summary>
/// Get the major version of the cabinet
/// </summary>
/// <remarks>This should live in the wrapper but is needed during parsing</remarks>
private static int GetMajorVersion(CommonHeader commonHeader)
{
uint majorVersion = commonHeader.Version;
if (majorVersion >> 24 == 1)
{
majorVersion = (majorVersion >> 12) & 0x0F;
}
else if (majorVersion >> 24 == 2 || majorVersion >> 24 == 4)
{
majorVersion = majorVersion & 0xFFFF;
if (majorVersion != 0)
majorVersion /= 100;
}
return (int)majorVersion;
}
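// Editor's worked examples (hypothetical version values) for the decoding above:
//   0x01005000: top byte is 1, so (0x01005000 >> 12) & 0x0F = 5   -> major version 5
//   0x040006A4: top byte is 4, so 0x040006A4 & 0xFFFF = 1700; 1700 / 100 = 17 -> major version 17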
#endregion
}
}


@@ -0,0 +1,943 @@
using System.Collections.Generic;
using System.IO;
using System.Text;
using BurnOutSharp.Models.LinearExecutable;
using BurnOutSharp.Utilities;
using static BurnOutSharp.Models.LinearExecutable.Constants;
namespace BurnOutSharp.Builders
{
public static class LinearExecutable
{
#region Byte Data
/// <summary>
/// Parse a byte array into a Linear Executable
/// </summary>
/// <param name="data">Byte array to parse</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>Filled executable on success, null on error</returns>
public static Executable ParseExecutable(byte[] data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return ParseExecutable(dataStream);
}
#endregion
#region Stream Data
/// <summary>
/// Parse a Stream into a Linear Executable
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled executable on success, null on error</returns>
public static Executable ParseExecutable(Stream data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
return null;
// If the offset is out of bounds
if (data.Position < 0 || data.Position >= data.Length)
return null;
// Cache the current offset
int initialOffset = (int)data.Position;
// Create a new executable to fill
var executable = new Executable();
#region MS-DOS Stub
// Parse the MS-DOS stub
var stub = MSDOS.ParseExecutable(data);
if (stub?.Header == null || stub.Header.NewExeHeaderAddr == 0)
return null;
// Set the MS-DOS stub
executable.Stub = stub;
#endregion
#region Information Block
// Try to parse the executable header
data.Seek(initialOffset + stub.Header.NewExeHeaderAddr, SeekOrigin.Begin);
var informationBlock = ParseInformationBlock(data);
if (informationBlock == null)
return null;
// Set the executable header
executable.InformationBlock = informationBlock;
#endregion
#region Object Table
// Get the object table offset
long offset = informationBlock.ObjectTableOffset + stub.Header.NewExeHeaderAddr;
if (offset > stub.Header.NewExeHeaderAddr && offset < data.Length)
{
// Seek to the object table
data.Seek(offset, SeekOrigin.Begin);
// Create the object table
executable.ObjectTable = new ObjectTableEntry[informationBlock.ObjectTableCount];
// Try to parse the object table
for (int i = 0; i < executable.ObjectTable.Length; i++)
{
var entry = ParseObjectTableEntry(data);
if (entry == null)
return null;
executable.ObjectTable[i] = entry;
}
}
#endregion
#region Object Page Map
// Get the object page map offset
offset = informationBlock.ObjectPageMapOffset + stub.Header.NewExeHeaderAddr;
if (offset > stub.Header.NewExeHeaderAddr && offset < data.Length)
{
// Seek to the object page map
data.Seek(offset, SeekOrigin.Begin);
// Create the object page map, which has one entry per page in the module
executable.ObjectPageMap = new ObjectPageMapEntry[informationBlock.ModuleNumberPages];
// Try to parse the object page map
for (int i = 0; i < executable.ObjectPageMap.Length; i++)
{
var entry = ParseObjectPageMapEntry(data);
if (entry == null)
return null;
executable.ObjectPageMap[i] = entry;
}
}
#endregion
#region Object Iterate Data Map
offset = informationBlock.ObjectIterateDataMapOffset + stub.Header.NewExeHeaderAddr;
if (offset > stub.Header.NewExeHeaderAddr && offset < data.Length)
{
// Seek to the object page map
data.Seek(offset, SeekOrigin.Begin);
// TODO: Implement when model found
// No model has been found in the documentation about what
// each of the entries looks like for this map.
}
#endregion
#region Resource Table
// Get the resource table offset
offset = informationBlock.ResourceTableOffset + stub.Header.NewExeHeaderAddr;
if (offset > stub.Header.NewExeHeaderAddr && offset < data.Length)
{
// Seek to the resource table
data.Seek(offset, SeekOrigin.Begin);
// Create the resource table
executable.ResourceTable = new ResourceTableEntry[informationBlock.ResourceTableCount];
// Try to parse the resource table
for (int i = 0; i < executable.ResourceTable.Length; i++)
{
var entry = ParseResourceTableEntry(data);
if (entry == null)
return null;
executable.ResourceTable[i] = entry;
}
}
#endregion
#region Resident Names Table
// Get the resident names table offset
offset = informationBlock.ResidentNamesTableOffset + stub.Header.NewExeHeaderAddr;
if (offset > stub.Header.NewExeHeaderAddr && offset < data.Length)
{
// Seek to the resident names table
data.Seek(offset, SeekOrigin.Begin);
// Create the resident names table
var residentNamesTable = new List<ResidentNamesTableEntry>();
// Try to parse the resident names table
while (true)
{
var entry = ParseResidentNamesTableEntry(data);
residentNamesTable.Add(entry);
// If we have a 0-length entry
if (entry.Length == 0)
break;
}
// Assign the resident names table
executable.ResidentNamesTable = residentNamesTable.ToArray();
}
#endregion
#region Entry Table
// Get the entry table offset
offset = informationBlock.EntryTableOffset + stub.Header.NewExeHeaderAddr;
if (offset > stub.Header.NewExeHeaderAddr && offset < data.Length)
{
// Seek to the entry table
data.Seek(offset, SeekOrigin.Begin);
// Create the entry table
var entryTable = new List<EntryTableBundle>();
// Try to parse the entry table
while (true)
{
var bundle = ParseEntryTableBundle(data);
entryTable.Add(bundle);
// If we have a 0-length entry
if (bundle.Entries == 0)
break;
}
// Assign the entry table
executable.EntryTable = entryTable.ToArray();
}
#endregion
#region Module Format Directives Table
// Get the module format directives table offset
offset = informationBlock.ModuleDirectivesTableOffset + stub.Header.NewExeHeaderAddr;
if (offset > stub.Header.NewExeHeaderAddr && offset < data.Length)
{
// Seek to the module format directives table
data.Seek(offset, SeekOrigin.Begin);
// Create the module format directives table
executable.ModuleFormatDirectivesTable = new ModuleFormatDirectivesTableEntry[informationBlock.ModuleDirectivesCount];
// Try to parse the module format directives table
for (int i = 0; i < executable.ModuleFormatDirectivesTable.Length; i++)
{
var entry = ParseModuleFormatDirectivesTableEntry(data);
if (entry == null)
return null;
executable.ModuleFormatDirectivesTable[i] = entry;
}
}
#endregion
#region Verify Record Directive Table
// TODO: Figure out where the offset to this table is stored
// The documentation suggests it's either part of or immediately following
// the Module Format Directives Table
#endregion
#region Fix-up Page Table
// Get the fix-up page table offset
offset = informationBlock.FixupPageTableOffset + stub.Header.NewExeHeaderAddr;
if (offset > stub.Header.NewExeHeaderAddr && offset < data.Length)
{
// Seek to the fix-up page table
data.Seek(offset, SeekOrigin.Begin);
// Create the fix-up page table, which has one entry per page plus a
// final entry marking the end of the fix-up record table
executable.FixupPageTable = new FixupPageTableEntry[informationBlock.ModuleNumberPages + 1];
// Try to parse the fix-up page table
for (int i = 0; i < executable.FixupPageTable.Length; i++)
{
var entry = ParseFixupPageTableEntry(data);
if (entry == null)
return null;
executable.FixupPageTable[i] = entry;
}
}
#endregion
#region Fix-up Record Table
// Get the fix-up record table offset
offset = informationBlock.FixupRecordTableOffset + stub.Header.NewExeHeaderAddr;
if (offset > stub.Header.NewExeHeaderAddr && offset < data.Length)
{
// Seek to the fix-up record table
data.Seek(offset, SeekOrigin.Begin);
// Create the fix-up record table
// TODO: Records are variable-length; sizing by page count is an approximation
executable.FixupRecordTable = new FixupRecordTableEntry[informationBlock.ModuleNumberPages + 1];
// Try to parse the fix-up record table
for (int i = 0; i < executable.FixupRecordTable.Length; i++)
{
var entry = ParseFixupRecordTableEntry(data);
if (entry == null)
return null;
executable.FixupRecordTable[i] = entry;
}
}
#endregion
#region Imported Module Name Table
// Get the imported module name table offset
offset = informationBlock.ImportedModulesNameTableOffset + stub.Header.NewExeHeaderAddr;
if (offset > stub.Header.NewExeHeaderAddr && offset < data.Length)
{
// Seek to the imported module name table
data.Seek(offset, SeekOrigin.Begin);
// Create the imported module name table
executable.ImportModuleNameTable = new ImportModuleNameTableEntry[informationBlock.ImportedModulesCount];
// Try to parse the imported module name table
for (int i = 0; i < executable.ImportModuleNameTable.Length; i++)
{
var entry = ParseImportModuleNameTableEntry(data);
if (entry == null)
return null;
executable.ImportModuleNameTable[i] = entry;
}
}
#endregion
#region Imported Module Procedure Name Table
// Get the imported module procedure name table offset
offset = informationBlock.ImportProcedureNameTableOffset + stub.Header.NewExeHeaderAddr;
if (offset > stub.Header.NewExeHeaderAddr && offset < data.Length)
{
// Seek to the imported module procedure name table
data.Seek(offset, SeekOrigin.Begin);
// Get the size of the imported module procedure name table
long tableSize = informationBlock.FixupPageTableOffset
+ informationBlock.FixupSectionSize
- informationBlock.ImportProcedureNameTableOffset;
// Create the imported module procedure name table
var importModuleProcedureNameTable = new List<ImportModuleProcedureNameTableEntry>();
// Try to parse the imported module procedure name table
while (data.Position < offset + tableSize)
{
var entry = ParseImportModuleProcedureNameTableEntry(data);
if (entry == null)
return null;
importModuleProcedureNameTable.Add(entry);
}
// Assign the imported module procedure name table
executable.ImportModuleProcedureNameTable = importModuleProcedureNameTable.ToArray();
}
#endregion
#region Per-Page Checksum Table
// Get the per-page checksum table offset
offset = informationBlock.PerPageChecksumTableOffset + stub.Header.NewExeHeaderAddr;
if (offset > stub.Header.NewExeHeaderAddr && offset < data.Length)
{
// Seek to the per-page checksum name table
data.Seek(offset, SeekOrigin.Begin);
// Create the per-page checksum name table
executable.PerPageChecksumTable = new PerPageChecksumTableEntry[informationBlock.ModuleNumberPages];
// Try to parse the per-page checksum name table
for (int i = 0; i < executable.PerPageChecksumTable.Length; i++)
{
var entry = ParsePerPageChecksumTableEntry(data);
if (entry == null)
return null;
executable.PerPageChecksumTable[i] = entry;
}
}
#endregion
#region Non-Resident Names Table
// Get the non-resident names table offset
offset = informationBlock.NonResidentNamesTableOffset + stub.Header.NewExeHeaderAddr;
if (offset > stub.Header.NewExeHeaderAddr && offset < data.Length)
{
// Seek to the non-resident names table
data.Seek(offset, SeekOrigin.Begin);
// Create the non-resident names table
var nonResidentNamesTable = new List<NonResidentNamesTableEntry>();
// Try to parse the non-resident names table
while (true)
{
var entry = ParseNonResidentNameTableEntry(data);
nonResidentNamesTable.Add(entry);
// If we have a 0-length entry
if (entry.Length == 0)
break;
}
// Assign the non-resident names table
executable.NonResidentNamesTable = nonResidentNamesTable.ToArray();
}
#endregion
#region Debug Information
// Get the debug information offset
offset = informationBlock.DebugInformationOffset + stub.Header.NewExeHeaderAddr;
if (offset > stub.Header.NewExeHeaderAddr && offset < data.Length)
{
// Seek to the debug information
data.Seek(offset, SeekOrigin.Begin);
// Try to parse the debug information
var debugInformation = ParseDebugInformation(data, informationBlock.DebugInformationLength);
if (debugInformation == null)
return null;
// Set the debug information
executable.DebugInformation = debugInformation;
}
#endregion
return executable;
}
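// Note: every table offset in the information block is relative to the
// start of the LE/LX header, which is why each section above adds
// stub.Header.NewExeHeaderAddr before seeking.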
/// <summary>
/// Parse a Stream into an information block
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled information block on success, null on error</returns>
private static InformationBlock ParseInformationBlock(Stream data)
{
// TODO: Use marshalling here instead of building
var informationBlock = new InformationBlock();
byte[] magic = data.ReadBytes(2);
informationBlock.Signature = Encoding.ASCII.GetString(magic);
if (informationBlock.Signature != LESignatureString && informationBlock.Signature != LXSignatureString)
return null;
informationBlock.ByteOrder = (ByteOrder)data.ReadByteValue();
informationBlock.WordOrder = (WordOrder)data.ReadByteValue();
informationBlock.ExecutableFormatLevel = data.ReadUInt32();
informationBlock.CPUType = (CPUType)data.ReadUInt16();
informationBlock.ModuleOS = (OperatingSystem)data.ReadUInt16();
informationBlock.ModuleVersion = data.ReadUInt32();
informationBlock.ModuleTypeFlags = (ModuleFlags)data.ReadUInt32();
informationBlock.ModuleNumberPages = data.ReadUInt32();
informationBlock.InitialObjectCS = data.ReadUInt32();
informationBlock.InitialEIP = data.ReadUInt32();
informationBlock.InitialObjectSS = data.ReadUInt32();
informationBlock.InitialESP = data.ReadUInt32();
informationBlock.MemoryPageSize = data.ReadUInt32();
informationBlock.BytesOnLastPage = data.ReadUInt32();
informationBlock.FixupSectionSize = data.ReadUInt32();
informationBlock.FixupSectionChecksum = data.ReadUInt32();
informationBlock.LoaderSectionSize = data.ReadUInt32();
informationBlock.LoaderSectionChecksum = data.ReadUInt32();
informationBlock.ObjectTableOffset = data.ReadUInt32();
informationBlock.ObjectTableCount = data.ReadUInt32();
informationBlock.ObjectPageMapOffset = data.ReadUInt32();
informationBlock.ObjectIterateDataMapOffset = data.ReadUInt32();
informationBlock.ResourceTableOffset = data.ReadUInt32();
informationBlock.ResourceTableCount = data.ReadUInt32();
informationBlock.ResidentNamesTableOffset = data.ReadUInt32();
informationBlock.EntryTableOffset = data.ReadUInt32();
informationBlock.ModuleDirectivesTableOffset = data.ReadUInt32();
informationBlock.ModuleDirectivesCount = data.ReadUInt32();
informationBlock.FixupPageTableOffset = data.ReadUInt32();
informationBlock.FixupRecordTableOffset = data.ReadUInt32();
informationBlock.ImportedModulesNameTableOffset = data.ReadUInt32();
informationBlock.ImportedModulesCount = data.ReadUInt32();
informationBlock.ImportProcedureNameTableOffset = data.ReadUInt32();
informationBlock.PerPageChecksumTableOffset = data.ReadUInt32();
informationBlock.DataPagesOffset = data.ReadUInt32();
informationBlock.PreloadPageCount = data.ReadUInt32();
informationBlock.NonResidentNamesTableOffset = data.ReadUInt32();
informationBlock.NonResidentNamesTableLength = data.ReadUInt32();
informationBlock.NonResidentNamesTableChecksum = data.ReadUInt32();
informationBlock.AutomaticDataObject = data.ReadUInt32();
informationBlock.DebugInformationOffset = data.ReadUInt32();
informationBlock.DebugInformationLength = data.ReadUInt32();
informationBlock.PreloadInstancePagesNumber = data.ReadUInt32();
informationBlock.DemandInstancePagesNumber = data.ReadUInt32();
informationBlock.ExtraHeapAllocation = data.ReadUInt32();
return informationBlock;
}
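// Usage sketch (illustrative, not part of the original file; it assumes this
// builder is exposed as LinearExecutable, matching the naming of the other
// builders in this diff, and "SAMPLE.VXD" is a hypothetical input path):
//
//   var exe = LinearExecutable.ParseExecutable(File.ReadAllBytes("SAMPLE.VXD"), 0);
//   if (exe != null)
//       Console.WriteLine($"{exe.InformationBlock.Signature} for {exe.InformationBlock.CPUType}");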
/// <summary>
/// Parse a Stream into an object table entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled object table entry on success, null on error</returns>
private static ObjectTableEntry ParseObjectTableEntry(Stream data)
{
// TODO: Use marshalling here instead of building
var entry = new ObjectTableEntry();
entry.VirtualSegmentSize = data.ReadUInt32();
entry.RelocationBaseAddress = data.ReadUInt32();
entry.ObjectFlags = (ObjectFlags)data.ReadUInt16();
entry.PageTableIndex = data.ReadUInt32();
entry.PageTableEntries = data.ReadUInt32();
entry.Reserved = data.ReadUInt32();
return entry;
}
/// <summary>
/// Parse a Stream into an object page map entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled object page map entry on success, null on error</returns>
private static ObjectPageMapEntry ParseObjectPageMapEntry(Stream data)
{
// TODO: Use marshalling here instead of building
var entry = new ObjectPageMapEntry();
entry.PageDataOffset = data.ReadUInt32();
entry.DataSize = data.ReadUInt16();
entry.Flags = (ObjectPageFlags)data.ReadUInt16();
return entry;
}
/// <summary>
/// Parse a Stream into a resource table entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled resource table entry on success, null on error</returns>
private static ResourceTableEntry ParseResourceTableEntry(Stream data)
{
// TODO: Use marshalling here instead of building
var entry = new ResourceTableEntry();
entry.TypeID = (ResourceTableEntryType)data.ReadUInt32();
entry.NameID = data.ReadUInt16();
entry.ResourceSize = data.ReadUInt32();
entry.ObjectNumber = data.ReadUInt16();
entry.Offset = data.ReadUInt32();
return entry;
}
/// <summary>
/// Parse a Stream into a resident names table entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled resident names table entry on success, null on error</returns>
private static ResidentNamesTableEntry ParseResidentNamesTableEntry(Stream data)
{
// TODO: Use marshalling here instead of building
var entry = new ResidentNamesTableEntry();
entry.Length = data.ReadByteValue();
if (entry.Length > 0)
{
byte[] name = data.ReadBytes(entry.Length);
entry.Name = Encoding.ASCII.GetString(name).TrimEnd('\0');
}
entry.OrdinalNumber = data.ReadUInt16();
return entry;
}
/// <summary>
/// Parse a Stream into an entry table bundle
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled entry table bundle on success, null on error</returns>
private static EntryTableBundle ParseEntryTableBundle(Stream data)
{
// TODO: Use marshalling here instead of building
var bundle = new EntryTableBundle();
bundle.Entries = data.ReadByteValue();
if (bundle.Entries == 0)
return bundle;
bundle.BundleType = (BundleType)data.ReadByteValue();
bundle.TableEntries = new EntryTableEntry[bundle.Entries];
for (int i = 0; i < bundle.Entries; i++)
{
var entry = new EntryTableEntry();
switch (bundle.BundleType & ~BundleType.ParameterTypingInformationPresent)
{
case BundleType.UnusedEntry:
// Empty entry with no information
break;
case BundleType.SixteenBitEntry:
entry.SixteenBitObjectNumber = data.ReadUInt16();
entry.SixteenBitEntryFlags = (EntryFlags)data.ReadByteValue();
entry.SixteenBitOffset = data.ReadUInt16();
break;
case BundleType.TwoEightySixCallGateEntry:
entry.TwoEightySixObjectNumber = data.ReadUInt16();
entry.TwoEightySixEntryFlags = (EntryFlags)data.ReadByteValue();
entry.TwoEightySixOffset = data.ReadUInt16();
entry.TwoEightySixCallgate = data.ReadUInt16();
break;
case BundleType.ThirtyTwoBitEntry:
entry.ThirtyTwoBitObjectNumber = data.ReadUInt16();
entry.ThirtyTwoBitEntryFlags = (EntryFlags)data.ReadByteValue();
entry.ThirtyTwoBitOffset = data.ReadUInt32();
break;
case BundleType.ForwarderEntry:
entry.ForwarderReserved = data.ReadUInt16();
entry.ForwarderFlags = (ForwarderFlags)data.ReadByteValue();
entry.ForwarderModuleOrdinalNumber = data.ReadUInt16();
entry.ProcedureNameOffset = data.ReadUInt32();
entry.ImportOrdinalNumber = data.ReadUInt32();
break;
default:
return null;
}
bundle.TableEntries[i] = entry;
}
return bundle;
}
/// <summary>
/// Parse a Stream into a module format directives table entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled module format directives table entry on success, null on error</returns>
private static ModuleFormatDirectivesTableEntry ParseModuleFormatDirectivesTableEntry(Stream data)
{
// TODO: Use marshalling here instead of building
var entry = new ModuleFormatDirectivesTableEntry();
entry.DirectiveNumber = (DirectiveNumber)data.ReadUInt16();
entry.DirectiveDataLength = data.ReadUInt16();
entry.DirectiveDataOffset = data.ReadUInt32();
return entry;
}
/// <summary>
/// Parse a Stream into a verify record directive table entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled verify record directive table entry on success, null on error</returns>
private static VerifyRecordDirectiveTableEntry ParseVerifyRecordDirectiveTableEntry(Stream data)
{
// TODO: Use marshalling here instead of building
var entry = new VerifyRecordDirectiveTableEntry();
entry.EntryCount = data.ReadUInt16();
entry.OrdinalIndex = data.ReadUInt16();
entry.Version = data.ReadUInt16();
entry.ObjectEntriesCount = data.ReadUInt16();
entry.ObjectNumberInModule = data.ReadUInt16();
entry.ObjectLoadBaseAddress = data.ReadUInt16();
entry.ObjectVirtualAddressSize = data.ReadUInt16();
return entry;
}
/// <summary>
/// Parse a Stream into a fix-up page table entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled fix-up page table entry on success, null on error</returns>
private static FixupPageTableEntry ParseFixupPageTableEntry(Stream data)
{
// TODO: Use marshalling here instead of building
var entry = new FixupPageTableEntry();
entry.Offset = data.ReadUInt32();
return entry;
}
/// <summary>
/// Parse a Stream into a fix-up record table entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled fix-up record table entry on success, null on error</returns>
private static FixupRecordTableEntry ParseFixupRecordTableEntry(Stream data)
{
// TODO: Use marshalling here instead of building
var entry = new FixupRecordTableEntry();
entry.SourceType = (FixupRecordSourceType)data.ReadByteValue();
entry.TargetFlags = (FixupRecordTargetFlags)data.ReadByteValue();
// Source list flag
if (entry.SourceType.HasFlag(FixupRecordSourceType.SourceListFlag))
entry.SourceOffsetListCount = data.ReadByteValue();
else
entry.SourceOffset = data.ReadUInt16();
// OBJECT / TRGOFF
if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.InternalReference))
{
// 16-bit Object Number/Module Ordinal Flag
if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.SixteenBitObjectNumberModuleOrdinalFlag))
entry.TargetObjectNumberWORD = data.ReadUInt16();
else
entry.TargetObjectNumberByte = data.ReadByteValue();
// A 16-bit selector fixup carries no target offset
if (!entry.SourceType.HasFlag(FixupRecordSourceType.SixteenBitSelectorFixup))
{
// 32-bit Target Offset Flag
if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.ThirtyTwoBitTargetOffsetFlag))
entry.TargetOffsetDWORD = data.ReadUInt32();
else
entry.TargetOffsetWORD = data.ReadUInt16();
}
}
// MOD ORD# / IMPORT ORD / ADDITIVE
else if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.ImportedReferenceByOrdinal))
{
// 16-bit Object Number/Module Ordinal Flag
if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.SixteenBitObjectNumberModuleOrdinalFlag))
entry.OrdinalIndexImportModuleNameTableWORD = data.ReadUInt16();
else
entry.OrdinalIndexImportModuleNameTableByte = data.ReadByteValue();
// 8-bit Ordinal Flag & 32-bit Target Offset Flag
if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.EightBitOrdinalFlag))
entry.ImportedOrdinalNumberByte = data.ReadByteValue();
else if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.ThirtyTwoBitTargetOffsetFlag))
entry.ImportedOrdinalNumberDWORD = data.ReadUInt32();
else
entry.ImportedOrdinalNumberWORD = data.ReadUInt16();
// Additive Fixup Flag
if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.AdditiveFixupFlag))
{
// 32-bit Additive Flag
if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.ThirtyTwoBitAdditiveFixupFlag))
entry.AdditiveFixupValueDWORD = data.ReadUInt32();
else
entry.AdditiveFixupValueWORD = data.ReadUInt16();
}
}
// MOD ORD# / PROCEDURE NAME OFFSET / ADDITIVE
else if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.ImportedReferenceByName))
{
// 16-bit Object Number/Module Ordinal Flag
if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.SixteenBitObjectNumberModuleOrdinalFlag))
entry.OrdinalIndexImportModuleNameTableWORD = data.ReadUInt16();
else
entry.OrdinalIndexImportModuleNameTableByte = data.ReadByteValue();
// 32-bit Target Offset Flag
if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.ThirtyTwoBitTargetOffsetFlag))
entry.OffsetImportProcedureNameTableDWORD = data.ReadUInt32();
else
entry.OffsetImportProcedureNameTableWORD = data.ReadUInt16();
// Additive Fixup Flag
if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.AdditiveFixupFlag))
{
// 32-bit Additive Flag
if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.ThirtyTwoBitAdditiveFixupFlag))
entry.AdditiveFixupValueDWORD = data.ReadUInt32();
else
entry.AdditiveFixupValueWORD = data.ReadUInt16();
}
}
// ORD # / ADDITIVE
else if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.InternalReferenceViaEntryTable))
{
// 16-bit Object Number/Module Ordinal Flag
if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.SixteenBitObjectNumberModuleOrdinalFlag))
entry.OrdinalIndexImportModuleNameTableWORD = data.ReadUInt16();
else
entry.OrdinalIndexImportModuleNameTableByte = data.ReadByteValue();
// Additive Fixup Flag
if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.AdditiveFixupFlag))
{
// 32-bit Additive Flag
if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.ThirtyTwoBitAdditiveFixupFlag))
entry.AdditiveFixupValueDWORD = data.ReadUInt32();
else
entry.AdditiveFixupValueWORD = data.ReadUInt16();
}
}
// No other top-level flags recognized
else
{
return null;
}
#region SCROFFn
if (entry.SourceType.HasFlag(FixupRecordSourceType.SourceListFlag))
{
entry.SourceOffsetList = new ushort[entry.SourceOffsetListCount];
for (int i = 0; i < entry.SourceOffsetList.Length; i++)
{
entry.SourceOffsetList[i] = data.ReadUInt16();
}
}
#endregion
return entry;
}
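// Worked example (hypothetical values, assuming the enum values follow the
// LE/LX spec: source list flag 0x20, 16-bit selector fixup 0x02, internal
// reference 0x00): SourceType = 0x05 (16-bit offset fixup) with
// TargetFlags = 0x00 reads a WORD source offset, a BYTE target object
// number, and a WORD target offset; no additive value or offset list follows.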
/// <summary>
/// Parse a Stream into an import module name table entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled import module name table entry on success, null on error</returns>
private static ImportModuleNameTableEntry ParseImportModuleNameTableEntry(Stream data)
{
// TODO: Use marshalling here instead of building
var entry = new ImportModuleNameTableEntry();
entry.Length = data.ReadByteValue();
if (entry.Length > 0)
{
byte[] name = data.ReadBytes(entry.Length);
entry.Name = Encoding.ASCII.GetString(name).TrimEnd('\0');
}
return entry;
}
/// <summary>
/// Parse a Stream into an import module procedure name table entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled import module procedure name table entry on success, null on error</returns>
private static ImportModuleProcedureNameTableEntry ParseImportModuleProcedureNameTableEntry(Stream data)
{
// TODO: Use marshalling here instead of building
var entry = new ImportModuleProcedureNameTableEntry();
entry.Length = data.ReadByteValue();
if (entry.Length > 0)
{
byte[] name = data.ReadBytes(entry.Length);
entry.Name = Encoding.ASCII.GetString(name).TrimEnd('\0');
}
return entry;
}
/// <summary>
/// Parse a Stream into a per-page checksum table entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled per-page checksum table entry on success, null on error</returns>
private static PerPageChecksumTableEntry ParsePerPageChecksumTableEntry(Stream data)
{
// TODO: Use marshalling here instead of building
var entry = new PerPageChecksumTableEntry();
entry.Checksum = data.ReadUInt32();
return entry;
}
/// <summary>
/// Parse a Stream into a non-resident names table entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled non-resident names table entry on success, null on error</returns>
private static NonResidentNamesTableEntry ParseNonResidentNameTableEntry(Stream data)
{
// TODO: Use marshalling here instead of building
var entry = new NonResidentNamesTableEntry();
entry.Length = data.ReadByteValue();
if (entry.Length > 0)
{
byte[] name = data.ReadBytes(entry.Length);
entry.Name = Encoding.ASCII.GetString(name).TrimEnd('\0');
}
entry.OrdinalNumber = data.ReadUInt16();
return entry;
}
/// <summary>
/// Parse a Stream into debug information
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="size">Total size of the debug information</param>
/// <returns>Filled debug information on success, null on error</returns>
private static DebugInformation ParseDebugInformation(Stream data, long size)
{
// TODO: Use marshalling here instead of building
var debugInformation = new DebugInformation();
byte[] signature = data.ReadBytes(3);
debugInformation.Signature = Encoding.ASCII.GetString(signature);
if (debugInformation.Signature != DebugInformationSignatureString)
return null;
debugInformation.FormatType = (DebugFormatType)data.ReadByteValue();
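// Four header bytes were just consumed (3-byte signature + 1-byte format type), hence size - 4 bytes of debugger data remain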
debugInformation.DebuggerData = data.ReadBytes((int)(size - 4));
return debugInformation;
}
#endregion
}
}

View File

@@ -0,0 +1,175 @@
using System.IO;
using System.Text;
using BurnOutSharp.Models.MSDOS;
using BurnOutSharp.Utilities;
using static BurnOutSharp.Models.MSDOS.Constants;
namespace BurnOutSharp.Builders
{
public static class MSDOS
{
#region Byte Data
/// <summary>
/// Parse a byte array into an MS-DOS executable
/// </summary>
/// <param name="data">Byte array to parse</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>Filled executable on success, null on error</returns>
public static Executable ParseExecutable(byte[] data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return ParseExecutable(dataStream);
}
#endregion
#region Stream Data
/// <summary>
/// Parse a Stream into an MS-DOS executable
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled executable on success, null on error</returns>
public static Executable ParseExecutable(Stream data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
return null;
// If the offset is out of bounds
if (data.Position < 0 || data.Position >= data.Length)
return null;
// Cache the current offset
int initialOffset = (int)data.Position;
// Create a new executable to fill
var executable = new Executable();
#region Executable Header
// Try to parse the executable header
var executableHeader = ParseExecutableHeader(data);
if (executableHeader == null)
return null;
// Set the executable header
executable.Header = executableHeader;
#endregion
#region Relocation Table
// If the offset for the relocation table doesn't exist
int tableAddress = initialOffset + executableHeader.RelocationTableAddr;
if (tableAddress >= data.Length)
return executable;
// Try to parse the relocation table
data.Seek(tableAddress, SeekOrigin.Begin);
var relocationTable = ParseRelocationTable(data, executableHeader.RelocationItems);
if (relocationTable == null)
return null;
// Set the relocation table
executable.RelocationTable = relocationTable;
#endregion
// Return the executable
return executable;
}
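// Usage sketch (illustrative), where "GAME.EXE" is a hypothetical input path:
//
//   var dos = MSDOS.ParseExecutable(System.IO.File.ReadAllBytes("GAME.EXE"), 0);
//   if (dos?.RelocationTable != null)
//       System.Console.WriteLine($"{dos.RelocationTable.Length} relocation entries");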
/// <summary>
/// Parse a Stream into an MS-DOS executable header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled executable header on success, null on error</returns>
private static ExecutableHeader ParseExecutableHeader(Stream data)
{
// TODO: Use marshalling here instead of building
var header = new ExecutableHeader();
#region Standard Fields
byte[] magic = data.ReadBytes(2);
header.Magic = Encoding.ASCII.GetString(magic);
if (header.Magic != SignatureString)
return null;
header.LastPageBytes = data.ReadUInt16();
header.Pages = data.ReadUInt16();
header.RelocationItems = data.ReadUInt16();
header.HeaderParagraphSize = data.ReadUInt16();
header.MinimumExtraParagraphs = data.ReadUInt16();
header.MaximumExtraParagraphs = data.ReadUInt16();
header.InitialSSValue = data.ReadUInt16();
header.InitialSPValue = data.ReadUInt16();
header.Checksum = data.ReadUInt16();
header.InitialIPValue = data.ReadUInt16();
header.InitialCSValue = data.ReadUInt16();
header.RelocationTableAddr = data.ReadUInt16();
header.OverlayNumber = data.ReadUInt16();
#endregion
// If we don't have enough data for PE extensions
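// (4 + 10 reserved words and 2 OEM words = 32 bytes, plus the 4-byte new header address = 36)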
if (data.Position >= data.Length || data.Length - data.Position < 36)
return header;
#region PE Extensions
header.Reserved1 = new ushort[4];
for (int i = 0; i < header.Reserved1.Length; i++)
{
header.Reserved1[i] = data.ReadUInt16();
}
header.OEMIdentifier = data.ReadUInt16();
header.OEMInformation = data.ReadUInt16();
header.Reserved2 = new ushort[10];
for (int i = 0; i < header.Reserved2.Length; i++)
{
header.Reserved2[i] = data.ReadUInt16();
}
header.NewExeHeaderAddr = data.ReadUInt32();
#endregion
return header;
}
/// <summary>
/// Parse a Stream into a relocation table
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="count">Number of relocation table entries to read</param>
/// <returns>Filled relocation table on success, null on error</returns>
private static RelocationEntry[] ParseRelocationTable(Stream data, int count)
{
// TODO: Use marshalling here instead of building
var relocationTable = new RelocationEntry[count];
for (int i = 0; i < count; i++)
{
var entry = new RelocationEntry();
entry.Offset = data.ReadUInt16();
entry.Segment = data.ReadUInt16();
relocationTable[i] = entry;
}
return relocationTable;
}
#endregion
}
}

View File

@@ -0,0 +1,258 @@
using System.IO;
using System.Text;
using BurnOutSharp.Models.MicrosoftCabinet;
using BurnOutSharp.Utilities;
using static BurnOutSharp.Models.MicrosoftCabinet.Constants;
namespace BurnOutSharp.Builders
{
// TODO: Add multi-cabinet reading
public class MicrosoftCabinet
{
#region Byte Data
/// <summary>
/// Parse a byte array into a Microsoft Cabinet file
/// </summary>
/// <param name="data">Byte array to parse</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>Filled cabinet on success, null on error</returns>
public static Cabinet ParseCabinet(byte[] data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return ParseCabinet(dataStream);
}
#endregion
#region Stream Data
/// <summary>
/// Parse a Stream into a Microsoft Cabinet file
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled cabinet on success, null on error</returns>
public static Cabinet ParseCabinet(Stream data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
return null;
// If the offset is out of bounds
if (data.Position < 0 || data.Position >= data.Length)
return null;
// Cache the current offset
int initialOffset = (int)data.Position;
// Create a new cabinet to fill
var cabinet = new Cabinet();
#region Cabinet Header
// Try to parse the cabinet header
var cabinetHeader = ParseCabinetHeader(data);
if (cabinetHeader == null)
return null;
// Set the cabinet header
cabinet.Header = cabinetHeader;
#endregion
#region Folders
// Set the folder array
cabinet.Folders = new CFFOLDER[cabinetHeader.FolderCount];
// Try to parse each folder, if we have any
for (int i = 0; i < cabinetHeader.FolderCount; i++)
{
var folder = ParseFolder(data, cabinetHeader);
if (folder == null)
return null;
// Set the folder
cabinet.Folders[i] = folder;
}
#endregion
#region Files
// Get the files offset
int filesOffset = (int)cabinetHeader.FilesOffset + initialOffset;
if (filesOffset > data.Length)
return null;
// Seek to the offset
data.Seek(filesOffset, SeekOrigin.Begin);
// Set the file array
cabinet.Files = new CFFILE[cabinetHeader.FileCount];
// Try to parse each file, if we have any
for (int i = 0; i < cabinetHeader.FileCount; i++)
{
var file = ParseFile(data);
if (file == null)
return null;
// Set the file
cabinet.Files[i] = file;
}
#endregion
return cabinet;
}
/// <summary>
/// Parse a Stream into a cabinet header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled cabinet header on success, null on error</returns>
private static CFHEADER ParseCabinetHeader(Stream data)
{
CFHEADER header = new CFHEADER();
byte[] signature = data.ReadBytes(4);
header.Signature = Encoding.ASCII.GetString(signature);
if (header.Signature != SignatureString)
return null;
header.Reserved1 = data.ReadUInt32();
header.CabinetSize = data.ReadUInt32();
header.Reserved2 = data.ReadUInt32();
header.FilesOffset = data.ReadUInt32();
header.Reserved3 = data.ReadUInt32();
header.VersionMinor = data.ReadByteValue();
header.VersionMajor = data.ReadByteValue();
header.FolderCount = data.ReadUInt16();
header.FileCount = data.ReadUInt16();
header.Flags = (HeaderFlags)data.ReadUInt16();
header.SetID = data.ReadUInt16();
header.CabinetIndex = data.ReadUInt16();
if (header.Flags.HasFlag(HeaderFlags.RESERVE_PRESENT))
{
header.HeaderReservedSize = data.ReadUInt16();
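// The MS-CAB specification caps the reserved header area (cbCFHeader) at 60,000 bytes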
if (header.HeaderReservedSize > 60_000)
return null;
header.FolderReservedSize = data.ReadByteValue();
header.DataReservedSize = data.ReadByteValue();
if (header.HeaderReservedSize > 0)
header.ReservedData = data.ReadBytes(header.HeaderReservedSize);
}
if (header.Flags.HasFlag(HeaderFlags.PREV_CABINET))
{
header.CabinetPrev = data.ReadString(Encoding.ASCII);
header.DiskPrev = data.ReadString(Encoding.ASCII);
}
if (header.Flags.HasFlag(HeaderFlags.NEXT_CABINET))
{
header.CabinetNext = data.ReadString(Encoding.ASCII);
header.DiskNext = data.ReadString(Encoding.ASCII);
}
return header;
}
/// <summary>
/// Parse a Stream into a folder
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="header">Cabinet header to get flags and sizes from</param>
/// <returns>Filled folder on success, null on error</returns>
private static CFFOLDER ParseFolder(Stream data, CFHEADER header)
{
CFFOLDER folder = new CFFOLDER();
folder.CabStartOffset = data.ReadUInt32();
folder.DataCount = data.ReadUInt16();
folder.CompressionType = (CompressionType)data.ReadUInt16();
if (header.FolderReservedSize > 0)
folder.ReservedData = data.ReadBytes(header.FolderReservedSize);
if (folder.CabStartOffset > 0)
{
long currentPosition = data.Position;
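// CabStartOffset is relative to the start of the cabinet, so this assumes the cabinet begins at stream offset 0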
data.Seek(folder.CabStartOffset, SeekOrigin.Begin);
folder.DataBlocks = new CFDATA[folder.DataCount];
for (int i = 0; i < folder.DataCount; i++)
{
CFDATA dataBlock = ParseDataBlock(data, header.DataReservedSize);
folder.DataBlocks[i] = dataBlock;
}
data.Seek(currentPosition, SeekOrigin.Begin);
}
return folder;
}
/// <summary>
/// Parse a Stream into a data block
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="dataReservedSize">Reserved byte size for data blocks</param>
/// <returns>Filled folder on success, null on error</returns>
private static CFDATA ParseDataBlock(Stream data, byte dataReservedSize)
{
CFDATA dataBlock = new CFDATA();
dataBlock.Checksum = data.ReadUInt32();
dataBlock.CompressedSize = data.ReadUInt16();
dataBlock.UncompressedSize = data.ReadUInt16();
if (dataReservedSize > 0)
dataBlock.ReservedData = data.ReadBytes(dataReservedSize);
if (dataBlock.CompressedSize > 0)
dataBlock.CompressedData = data.ReadBytes(dataBlock.CompressedSize);
return dataBlock;
}
/// <summary>
/// Parse a Stream into a file
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled file on success, null on error</returns>
private static CFFILE ParseFile(Stream data)
{
CFFILE file = new CFFILE();
file.FileSize = data.ReadUInt32();
file.FolderStartOffset = data.ReadUInt32();
file.FolderIndex = (FolderIndex)data.ReadUInt16();
file.Date = data.ReadUInt16();
file.Time = data.ReadUInt16();
file.Attributes = (Models.MicrosoftCabinet.FileAttributes)data.ReadUInt16();
if (file.Attributes.HasFlag(Models.MicrosoftCabinet.FileAttributes.NAME_IS_UTF))
file.Name = data.ReadString(Encoding.Unicode);
else
file.Name = data.ReadString(Encoding.ASCII);
return file;
}
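// Usage sketch (illustrative): list the file names in a cabinet, where
// "setup.cab" is a hypothetical input path:
//
//   var cab = MicrosoftCabinet.ParseCabinet(System.IO.File.ReadAllBytes("setup.cab"), 0);
//   foreach (var file in cab?.Files ?? new CFFILE[0])
//       System.Console.WriteLine(file.Name);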
#endregion
}
}

View File

@@ -0,0 +1,651 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
using BurnOutSharp.Models.MoPaQ;
using BurnOutSharp.Utilities;
using static BurnOutSharp.Models.MoPaQ.Constants;
namespace BurnOutSharp.Builders
{
public class MoPaQ
{
#region Byte Data
/// <summary>
/// Parse a byte array into a MoPaQ archive
/// </summary>
/// <param name="data">Byte array to parse</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>Filled archive on success, null on error</returns>
public static Archive ParseArchive(byte[] data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return ParseArchive(dataStream);
}
#endregion
#region Stream Data
/// <summary>
/// Parse a Stream into a MoPaQ archive
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled archive on success, null on error</returns>
public static Archive ParseArchive(Stream data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
return null;
// If the offset is out of bounds
if (data.Position < 0 || data.Position >= data.Length)
return null;
// Cache the current offset
int initialOffset = (int)data.Position;
// Create a new archive to fill
var archive = new Archive();
#region User Data
// Check for User Data
uint possibleSignature = data.ReadUInt32();
data.Seek(-4, SeekOrigin.Current);
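// "MPQ\x1B" as a little-endian DWORD: the user data signature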
if (possibleSignature == 0x1B51504D)
{
// Save the current position for offset correction
long basePtr = data.Position;
// Deserialize the user data, returning null if invalid
var userData = ParseUserData(data);
if (userData == null)
return null;
// Set the user data
archive.UserData = userData;
// Set the starting position according to the header offset
data.Seek(basePtr + (int)archive.UserData.HeaderOffset, SeekOrigin.Begin);
}
#endregion
#region Archive Header
// Check for the Header
possibleSignature = data.ReadUInt32();
data.Seek(-4, SeekOrigin.Current);
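// "MPQ\x1A" as a little-endian DWORD: the archive header signature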
if (possibleSignature == 0x1A51504D)
{
// Try to parse the archive header
var archiveHeader = ParseArchiveHeader(data);
if (archiveHeader == null)
return null;
// Set the archive header
archive.ArchiveHeader = archiveHeader;
}
else
{
return null;
}
#endregion
#region Hash Table
// TODO: The hash table has to be decrypted before reading
// Version 1
if (archive.ArchiveHeader.FormatVersion == FormatVersion.Format1)
{
// If we have a hash table
long hashTableOffset = archive.ArchiveHeader.HashTablePosition;
if (hashTableOffset != 0)
{
// Seek to the offset
data.Seek(hashTableOffset, SeekOrigin.Begin);
// Find the ending offset based on size
long hashTableEnd = hashTableOffset + archive.ArchiveHeader.HashTableSize;
// Read in the hash table
var hashTable = new List<HashEntry>();
while (data.Position < hashTableEnd)
{
var hashEntry = ParseHashEntry(data);
if (hashEntry == null)
return null;
hashTable.Add(hashEntry);
}
archive.HashTable = hashTable.ToArray();
}
}
// Version 2 and 3
else if (archive.ArchiveHeader.FormatVersion == FormatVersion.Format2 || archive.ArchiveHeader.FormatVersion == FormatVersion.Format3)
{
// If we have a hash table
long hashTableOffset = ((long)archive.ArchiveHeader.HashTablePositionHi << 32) | archive.ArchiveHeader.HashTablePosition;
if (hashTableOffset != 0)
{
// Seek to the offset
data.Seek(hashTableOffset, SeekOrigin.Begin);
// Find the ending offset based on size
long hashTableEnd = hashTableOffset + archive.ArchiveHeader.HashTableSize;
// Read in the hash table
var hashTable = new List<HashEntry>();
while (data.Position < hashTableEnd)
{
var hashEntry = ParseHashEntry(data);
if (hashEntry == null)
return null;
hashTable.Add(hashEntry);
}
archive.HashTable = hashTable.ToArray();
}
}
// Version 4
else if (archive.ArchiveHeader.FormatVersion == FormatVersion.Format4)
{
// If we have a hash table
long hashTableOffset = ((long)archive.ArchiveHeader.HashTablePositionHi << 32) | archive.ArchiveHeader.HashTablePosition;
if (hashTableOffset != 0)
{
// Seek to the offset
data.Seek(hashTableOffset, SeekOrigin.Begin);
// Find the ending offset based on size
long hashTableEnd = hashTableOffset + (long)archive.ArchiveHeader.HashTableSizeLong;
// Read in the hash table
var hashTable = new List<HashEntry>();
while (data.Position < hashTableEnd)
{
var hashEntry = ParseHashEntry(data);
if (hashEntry == null)
return null;
hashTable.Add(hashEntry);
}
archive.HashTable = hashTable.ToArray();
}
}
#endregion
#region Block Table
// Version 1
if (archive.ArchiveHeader.FormatVersion == FormatVersion.Format1)
{
// If we have a block table
long blockTableOffset = archive.ArchiveHeader.BlockTablePosition;
if (blockTableOffset != 0)
{
// Seek to the offset
data.Seek(blockTableOffset, SeekOrigin.Begin);
// Find the ending offset based on size
long blockTableEnd = blockTableOffset + archive.ArchiveHeader.BlockTableSize;
// Read in the block table
var blockTable = new List<BlockEntry>();
while (data.Position < blockTableEnd)
{
var blockEntry = ParseBlockEntry(data);
if (blockEntry == null)
return null;
blockTable.Add(blockEntry);
}
archive.BlockTable = blockTable.ToArray();
}
}
// Version 2 and 3
else if (archive.ArchiveHeader.FormatVersion == FormatVersion.Format2 || archive.ArchiveHeader.FormatVersion == FormatVersion.Format3)
{
// If we have a block table
long blockTableOffset = ((long)archive.ArchiveHeader.BlockTablePositionHi << 32) | archive.ArchiveHeader.BlockTablePosition;
if (blockTableOffset != 0)
{
// Seek to the offset
data.Seek(blockTableOffset, SeekOrigin.Begin);
// Find the ending offset based on size
long blockTableEnd = blockTableOffset + archive.ArchiveHeader.BlockTableSize;
// Read in the block table
var blockTable = new List<BlockEntry>();
while (data.Position < blockTableEnd)
{
var blockEntry = ParseBlockEntry(data);
if (blockEntry == null)
return null;
blockTable.Add(blockEntry);
}
archive.BlockTable = blockTable.ToArray();
}
}
// Version 4
else if (archive.ArchiveHeader.FormatVersion == FormatVersion.Format4)
{
// If we have a block table
long blockTableOffset = ((long)archive.ArchiveHeader.BlockTablePositionHi << 32) | archive.ArchiveHeader.BlockTablePosition;
if (blockTableOffset != 0)
{
// Seek to the offset
data.Seek(blockTableOffset, SeekOrigin.Begin);
// Find the ending offset based on size
long blockTableEnd = blockTableOffset + (long)archive.ArchiveHeader.BlockTableSizeLong;
// Read in the block table
var blockTable = new List<BlockEntry>();
while (data.Position < blockTableEnd)
{
var blockEntry = ParseBlockEntry(data);
if (blockEntry == null)
return null;
blockTable.Add(blockEntry);
}
archive.BlockTable = blockTable.ToArray();
}
}
#endregion
#region Hi-Block Table
// Version 2, 3, and 4
if (archive.ArchiveHeader.FormatVersion >= FormatVersion.Format2)
{
// If we have a hi-block table
long hiBlockTableOffset = (long)archive.ArchiveHeader.HiBlockTablePosition;
if (hiBlockTableOffset != 0)
{
// Seek to the offset
data.Seek(hiBlockTableOffset, SeekOrigin.Begin);
// Read in the hi-block table
var hiBlockTable = new List<short>();
for (int i = 0; i < archive.BlockTable.Length; i++)
{
short hiBlockEntry = data.ReadInt16();
hiBlockTable.Add(hiBlockEntry);
}
archive.HiBlockTable = hiBlockTable.ToArray();
}
}
#endregion
#region BET Table
// Version 3 and 4
if (archive.ArchiveHeader.FormatVersion >= FormatVersion.Format3)
{
// If we have a BET table
long betTableOffset = (long)archive.ArchiveHeader.BetTablePosition;
if (betTableOffset != 0)
{
// Seek to the offset
data.Seek(betTableOffset, SeekOrigin.Begin);
// Read in the BET table
var betTable = ParseBetTable(data);
if (betTable == null)
return null;
archive.BetTable = betTable;
}
}
#endregion
#region HET Table
// Version 3 and 4
if (archive.ArchiveHeader.FormatVersion >= FormatVersion.Format3)
{
// If we have a HET table
long hetTableOffset = (long)archive.ArchiveHeader.HetTablePosition;
if (hetTableOffset != 0)
{
// Seek to the offset
data.Seek(hetTableOffset, SeekOrigin.Begin);
// Read in the HET table
var hetTable = ParseHetTable(data);
if (hetTable == null)
return null;
archive.HetTable = hetTable;
}
}
#endregion
return archive;
}
/// <summary>
/// Parse a Stream into an archive header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled archive header on success, null on error</returns>
private static ArchiveHeader ParseArchiveHeader(Stream data)
{
ArchiveHeader archiveHeader = new ArchiveHeader();
// V1 - Common
byte[] signature = data.ReadBytes(4);
archiveHeader.Signature = Encoding.ASCII.GetString(signature);
if (archiveHeader.Signature != ArchiveHeaderSignatureString)
return null;
archiveHeader.HeaderSize = data.ReadUInt32();
archiveHeader.ArchiveSize = data.ReadUInt32();
archiveHeader.FormatVersion = (FormatVersion)data.ReadUInt16();
archiveHeader.BlockSize = data.ReadUInt16();
archiveHeader.HashTablePosition = data.ReadUInt32();
archiveHeader.BlockTablePosition = data.ReadUInt32();
archiveHeader.HashTableSize = data.ReadUInt32();
archiveHeader.BlockTableSize = data.ReadUInt32();
// V2
if (archiveHeader.FormatVersion >= FormatVersion.Format2)
{
archiveHeader.HiBlockTablePosition = data.ReadUInt64();
archiveHeader.HashTablePositionHi = data.ReadUInt16();
archiveHeader.BlockTablePositionHi = data.ReadUInt16();
}
// V3
if (archiveHeader.FormatVersion >= FormatVersion.Format3)
{
archiveHeader.ArchiveSizeLong = data.ReadUInt64();
archiveHeader.BetTablePosition = data.ReadUInt64();
archiveHeader.HetTablePosition = data.ReadUInt64();
}
// V4
if (archiveHeader.FormatVersion >= FormatVersion.Format4)
{
archiveHeader.HashTableSizeLong = data.ReadUInt64();
archiveHeader.BlockTableSizeLong = data.ReadUInt64();
archiveHeader.HiBlockTableSize = data.ReadUInt64();
archiveHeader.HetTableSize = data.ReadUInt64();
archiveHeader.BetTablesize = data.ReadUInt64();
archiveHeader.RawChunkSize = data.ReadUInt32();
archiveHeader.BlockTableMD5 = data.ReadBytes(0x10);
archiveHeader.HashTableMD5 = data.ReadBytes(0x10);
archiveHeader.HiBlockTableMD5 = data.ReadBytes(0x10);
archiveHeader.BetTableMD5 = data.ReadBytes(0x10);
archiveHeader.HetTableMD5 = data.ReadBytes(0x10);
// The last 16 bytes of the v4 header are the MD5 of the MPQ header itself;
// read them to keep the stream aligned (the model is assumed to have no field for them)
_ = data.ReadBytes(0x10);
}
return archiveHeader;
}
/// <summary>
/// Parse a Stream into a user data object
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled user data on success, null on error</returns>
private static UserData ParseUserData(Stream data)
{
UserData userData = new UserData();
byte[] signature = data.ReadBytes(4);
userData.Signature = Encoding.ASCII.GetString(signature);
if (userData.Signature != UserDataSignatureString)
return null;
userData.UserDataSize = data.ReadUInt32();
userData.HeaderOffset = data.ReadUInt32();
userData.UserDataHeaderSize = data.ReadUInt32();
return userData;
}
/// <summary>
/// Parse a Stream into a HET table
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled HET table on success, null on error</returns>
private static HetTable ParseHetTable(Stream data)
{
HetTable hetTable = new HetTable();
// Common Headers
byte[] signature = data.ReadBytes(4);
hetTable.Signature = Encoding.ASCII.GetString(signature);
if (hetTable.Signature != HetTableSignatureString)
return null;
hetTable.Version = data.ReadUInt32();
hetTable.DataSize = data.ReadUInt32();
// HET-Specific
hetTable.TableSize = data.ReadUInt32();
hetTable.MaxFileCount = data.ReadUInt32();
hetTable.HashTableSize = data.ReadUInt32();
hetTable.TotalIndexSize = data.ReadUInt32();
hetTable.IndexSizeExtra = data.ReadUInt32();
hetTable.IndexSize = data.ReadUInt32();
hetTable.BlockTableSize = data.ReadUInt32();
hetTable.HashTable = data.ReadBytes((int)hetTable.HashTableSize);
// TODO: Populate the file indexes array
hetTable.FileIndexes = new byte[(int)hetTable.HashTableSize][];
return hetTable;
}
/// <summary>
/// Parse a Stream into a BET table
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled BET table on success, null on error</returns>
private static BetTable ParseBetTable(Stream data)
{
BetTable betTable = new BetTable();
// Common Headers
byte[] signature = data.ReadBytes(4);
betTable.Signature = Encoding.ASCII.GetString(signature);
if (betTable.Signature != BetTableSignatureString)
return null;
betTable.Version = data.ReadUInt32();
betTable.DataSize = data.ReadUInt32();
// BET-Specific
betTable.TableSize = data.ReadUInt32();
betTable.FileCount = data.ReadUInt32();
betTable.Unknown = data.ReadUInt32();
betTable.TableEntrySize = data.ReadUInt32();
betTable.FilePositionBitIndex = data.ReadUInt32();
betTable.FileSizeBitIndex = data.ReadUInt32();
betTable.CompressedSizeBitIndex = data.ReadUInt32();
betTable.FlagIndexBitIndex = data.ReadUInt32();
betTable.UnknownBitIndex = data.ReadUInt32();
betTable.FilePositionBitCount = data.ReadUInt32();
betTable.FileSizeBitCount = data.ReadUInt32();
betTable.CompressedSizeBitCount = data.ReadUInt32();
betTable.FlagIndexBitCount = data.ReadUInt32();
betTable.UnknownBitCount = data.ReadUInt32();
betTable.TotalBetHashSize = data.ReadUInt32();
betTable.BetHashSizeExtra = data.ReadUInt32();
betTable.BetHashSize = data.ReadUInt32();
betTable.BetHashArraySize = data.ReadUInt32();
betTable.FlagCount = data.ReadUInt32();
betTable.FlagsArray = new uint[betTable.FlagCount];
byte[] flagsArray = data.ReadBytes((int)betTable.FlagCount * 4);
Buffer.BlockCopy(flagsArray, 0, betTable.FlagsArray, 0, (int)betTable.FlagCount * 4);
// TODO: Populate the file table
// TODO: Populate the hash table
return betTable;
}
/// <summary>
/// Parse a Stream into a hash entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled hash entry on success, null on error</returns>
private static HashEntry ParseHashEntry(Stream data)
{
// TODO: Use marshalling here instead of building
HashEntry hashEntry = new HashEntry();
hashEntry.NameHashPartA = data.ReadUInt32();
hashEntry.NameHashPartB = data.ReadUInt32();
hashEntry.Locale = (Locale)data.ReadUInt16();
hashEntry.Platform = data.ReadUInt16();
hashEntry.BlockIndex = data.ReadUInt32();
return hashEntry;
}
/// <summary>
/// Parse a Stream into a block entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled block entry on success, null on error</returns>
private static BlockEntry ParseBlockEntry(Stream data)
{
BlockEntry blockEntry = new BlockEntry();
blockEntry.FilePosition = data.ReadUInt32();
blockEntry.CompressedSize = data.ReadUInt32();
blockEntry.UncompressedSize = data.ReadUInt32();
blockEntry.Flags = (FileFlags)data.ReadUInt32();
return blockEntry;
}
/// <summary>
/// Parse a Stream into patch info
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled patch info on success, null on error</returns>
private static PatchInfo ParsePatchInfo(Stream data)
{
// TODO: Use marshalling here instead of building
PatchInfo patchInfo = new PatchInfo();
patchInfo.Length = data.ReadUInt32();
patchInfo.Flags = data.ReadUInt32();
patchInfo.DataSize = data.ReadUInt32();
patchInfo.MD5 = data.ReadBytes(0x10);
// TODO: Fill the sector offset table
return patchInfo;
}
#endregion
#region Helpers
/// <summary>
/// Buffer for encryption and decryption
/// </summary>
private uint[] _stormBuffer = new uint[STORM_BUFFER_SIZE];
/// <summary>
/// Prepare the encryption table
/// </summary>
private void PrepareCryptTable()
{
uint seed = 0x00100001;
for (uint index1 = 0; index1 < 0x100; index1++)
{
for (uint index2 = index1, i = 0; i < 5; i++, index2 += 0x100)
{
seed = (seed * 125 + 3) % 0x2AAAAB;
uint temp1 = (seed & 0xFFFF) << 0x10;
seed = (seed * 125 + 3) % 0x2AAAAB;
uint temp2 = (seed & 0xFFFF);
_stormBuffer[index2] = (temp1 | temp2);
}
}
}
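// The loops above fill five 0x100-entry segments (indexes 0x000-0x4FF), one
// per hash type, which assumes STORM_BUFFER_SIZE is 0x500 in Constants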
/// <summary>
/// Decrypt a single block of data
/// </summary>
private unsafe byte[] DecryptBlock(byte[] block, uint length, uint key)
{
uint seed = 0xEEEEEEEE;
uint[] castBlock = new uint[length / 4];
Buffer.BlockCopy(block, 0, castBlock, 0, (int)length);
int castBlockPtr = 0;
// Round down to whole uints and decrypt in place
uint wordCount = length >> 2;
while (wordCount-- > 0)
{
seed += _stormBuffer[MPQ_HASH_KEY2_MIX + (key & 0xFF)];
uint ch = castBlock[castBlockPtr] ^ (key + seed);
key = ((~key << 0x15) + 0x11111111) | (key >> 0x0B);
seed = ch + seed + (seed << 5) + 3;
castBlock[castBlockPtr++] = ch;
}
// Copy the decrypted words back into the byte buffer
Buffer.BlockCopy(castBlock, 0, block, 0, castBlock.Length * 4);
return block;
}
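// Usage note (an assumption based on the standard MoPaQ scheme, not shown in
// this file): the encrypted hash and block tables are decrypted with
// DecryptBlock using keys derived from the literal strings "(hash table)"
// and "(block table)"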
#endregion
}
}

File diff suppressed because it is too large

View File

@@ -0,0 +1,544 @@
using System.Collections.Generic;
using System.IO;
using System.Text;
using BurnOutSharp.Models.NCF;
using BurnOutSharp.Utilities;
namespace BurnOutSharp.Builders
{
public static class NCF
{
#region Byte Data
/// <summary>
/// Parse a byte array into a Half-Life No Cache
/// </summary>
/// <param name="data">Byte array to parse</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>Filled Half-Life No Cache on success, null on error</returns>
public static Models.NCF.File ParseFile(byte[] data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return ParseFile(dataStream);
}
#endregion
#region Stream Data
/// <summary>
/// Parse a Stream into a Half-Life No Cache
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life No Cache on success, null on error</returns>
public static Models.NCF.File ParseFile(Stream data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
return null;
// If the offset is out of bounds
if (data.Position < 0 || data.Position >= data.Length)
return null;
// Cache the current offset
long initialOffset = data.Position;
// Create a new Half-Life No Cache to fill
var file = new Models.NCF.File();
#region Header
// Try to parse the header
var header = ParseHeader(data);
if (header == null)
return null;
// Set the no cache header
file.Header = header;
#endregion
// Cache the current offset
initialOffset = data.Position;
#region Directory Header
// Try to parse the directory header
var directoryHeader = ParseDirectoryHeader(data);
if (directoryHeader == null)
return null;
// Set the game cache directory header
file.DirectoryHeader = directoryHeader;
#endregion
#region Directory Entries
// Create the directory entry array
file.DirectoryEntries = new DirectoryEntry[directoryHeader.ItemCount];
// Try to parse the directory entries
for (int i = 0; i < directoryHeader.ItemCount; i++)
{
var directoryEntry = ParseDirectoryEntry(data);
file.DirectoryEntries[i] = directoryEntry;
}
#endregion
#region Directory Names
if (directoryHeader.NameSize > 0)
{
// Get the current offset for adjustment
long directoryNamesStart = data.Position;
// Get the ending offset
long directoryNamesEnd = data.Position + directoryHeader.NameSize;
// Create the string dictionary
file.DirectoryNames = new Dictionary<long, string>();
// Loop and read the null-terminated strings
while (data.Position < directoryNamesEnd)
{
long nameOffset = data.Position - directoryNamesStart;
string directoryName = data.ReadString(Encoding.ASCII);
if (data.Position > directoryNamesEnd)
{
data.Seek(-directoryName.Length, SeekOrigin.Current);
byte[] endingData = data.ReadBytes((int)(directoryNamesEnd - data.Position));
if (endingData != null)
directoryName = Encoding.ASCII.GetString(endingData);
else
directoryName = null;
}
file.DirectoryNames[nameOffset] = directoryName;
}
// Loop and assign to entries
foreach (var directoryEntry in file.DirectoryEntries)
{
directoryEntry.Name = file.DirectoryNames[directoryEntry.NameOffset];
}
}
#endregion
#region Directory Info 1 Entries
// Create the directory info 1 entry array
file.DirectoryInfo1Entries = new DirectoryInfo1Entry[directoryHeader.Info1Count];
// Try to parse the directory info 1 entries
for (int i = 0; i < directoryHeader.Info1Count; i++)
{
var directoryInfo1Entry = ParseDirectoryInfo1Entry(data);
file.DirectoryInfo1Entries[i] = directoryInfo1Entry;
}
#endregion
#region Directory Info 2 Entries
// Create the directory info 2 entry array
file.DirectoryInfo2Entries = new DirectoryInfo2Entry[directoryHeader.ItemCount];
// Try to parse the directory info 2 entries
for (int i = 0; i < directoryHeader.ItemCount; i++)
{
var directoryInfo2Entry = ParseDirectoryInfo2Entry(data);
file.DirectoryInfo2Entries[i] = directoryInfo2Entry;
}
#endregion
#region Directory Copy Entries
// Create the directory copy entry array
file.DirectoryCopyEntries = new DirectoryCopyEntry[directoryHeader.CopyCount];
// Try to parse the directory copy entries
for (int i = 0; i < directoryHeader.CopyCount; i++)
{
var directoryCopyEntry = ParseDirectoryCopyEntry(data);
file.DirectoryCopyEntries[i] = directoryCopyEntry;
}
#endregion
#region Directory Local Entries
// Create the directory local entry array
file.DirectoryLocalEntries = new DirectoryLocalEntry[directoryHeader.LocalCount];
// Try to parse the directory local entries
for (int i = 0; i < directoryHeader.LocalCount; i++)
{
var directoryLocalEntry = ParseDirectoryLocalEntry(data);
file.DirectoryLocalEntries[i] = directoryLocalEntry;
}
#endregion
// Seek to end of directory section, just in case
data.Seek(initialOffset + directoryHeader.DirectorySize, SeekOrigin.Begin);
#region Unknown Header
// Try to parse the unknown header
var unknownHeader = ParseUnknownHeader(data);
if (unknownHeader == null)
return null;
// Set the game cache unknown header
file.UnknownHeader = unknownHeader;
#endregion
#region Unknown Entries
// Create the unknown entry array
file.UnknownEntries = new UnknownEntry[directoryHeader.ItemCount];
// Try to parse the unknown entries
for (int i = 0; i < directoryHeader.ItemCount; i++)
{
var unknownEntry = ParseUnknownEntry(data);
file.UnknownEntries[i] = unknownEntry;
}
#endregion
#region Checksum Header
// Try to parse the checksum header
var checksumHeader = ParseChecksumHeader(data);
if (checksumHeader == null)
return null;
// Set the game cache checksum header
file.ChecksumHeader = checksumHeader;
#endregion
// Cache the current offset
initialOffset = data.Position;
#region Checksum Map Header
// Try to parse the checksum map header
var checksumMapHeader = ParseChecksumMapHeader(data);
if (checksumMapHeader == null)
return null;
// Set the game cache checksum map header
file.ChecksumMapHeader = checksumMapHeader;
#endregion
#region Checksum Map Entries
// Create the checksum map entry array
file.ChecksumMapEntries = new ChecksumMapEntry[checksumMapHeader.ItemCount];
// Try to parse the checksum map entries
for (int i = 0; i < checksumMapHeader.ItemCount; i++)
{
var checksumMapEntry = ParseChecksumMapEntry(data);
file.ChecksumMapEntries[i] = checksumMapEntry;
}
#endregion
#region Checksum Entries
// Create the checksum entry array
file.ChecksumEntries = new ChecksumEntry[checksumMapHeader.ChecksumCount];
// Try to parse the checksum entries
for (int i = 0; i < checksumMapHeader.ChecksumCount; i++)
{
var checksumEntry = ParseChecksumEntry(data);
file.ChecksumEntries[i] = checksumEntry;
}
#endregion
// Seek to end of checksum section, just in case
data.Seek(initialOffset + checksumHeader.ChecksumSize, SeekOrigin.Begin);
return file;
}
/// <summary>
/// Parse a Stream into a Half-Life No Cache header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life No Cache header on success, null on error</returns>
private static Header ParseHeader(Stream data)
{
// TODO: Use marshalling here instead of building
Header header = new Header();
header.Dummy0 = data.ReadUInt32();
if (header.Dummy0 != 0x00000001)
return null;
header.MajorVersion = data.ReadUInt32();
if (header.MajorVersion != 0x00000002)
return null;
header.MinorVersion = data.ReadUInt32();
if (header.MinorVersion != 1)
return null;
header.CacheID = data.ReadUInt32();
header.LastVersionPlayed = data.ReadUInt32();
header.Dummy1 = data.ReadUInt32();
header.Dummy2 = data.ReadUInt32();
header.FileSize = data.ReadUInt32();
header.BlockSize = data.ReadUInt32();
header.BlockCount = data.ReadUInt32();
header.Dummy3 = data.ReadUInt32();
return header;
}
/// <summary>
/// Parse a Stream into a Half-Life No Cache directory header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life No Cache directory header on success, null on error</returns>
private static DirectoryHeader ParseDirectoryHeader(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryHeader directoryHeader = new DirectoryHeader();
directoryHeader.Dummy0 = data.ReadUInt32();
if (directoryHeader.Dummy0 != 0x00000004)
return null;
directoryHeader.CacheID = data.ReadUInt32();
directoryHeader.LastVersionPlayed = data.ReadUInt32();
directoryHeader.ItemCount = data.ReadUInt32();
directoryHeader.FileCount = data.ReadUInt32();
directoryHeader.ChecksumDataLength = data.ReadUInt32();
directoryHeader.DirectorySize = data.ReadUInt32();
directoryHeader.NameSize = data.ReadUInt32();
directoryHeader.Info1Count = data.ReadUInt32();
directoryHeader.CopyCount = data.ReadUInt32();
directoryHeader.LocalCount = data.ReadUInt32();
directoryHeader.Dummy1 = data.ReadUInt32();
directoryHeader.Dummy2 = data.ReadUInt32();
directoryHeader.Checksum = data.ReadUInt32();
return directoryHeader;
}
/// <summary>
/// Parse a Stream into a Half-Life No Cache directory entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life No Cache directory entry on success, null on error</returns>
private static DirectoryEntry ParseDirectoryEntry(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryEntry directoryEntry = new DirectoryEntry();
directoryEntry.NameOffset = data.ReadUInt32();
directoryEntry.ItemSize = data.ReadUInt32();
directoryEntry.ChecksumIndex = data.ReadUInt32();
directoryEntry.DirectoryFlags = (HL_NCF_FLAG)data.ReadUInt32();
directoryEntry.ParentIndex = data.ReadUInt32();
directoryEntry.NextIndex = data.ReadUInt32();
directoryEntry.FirstIndex = data.ReadUInt32();
return directoryEntry;
}
/// <summary>
/// Parse a Stream into a Half-Life No Cache directory info 1 entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life No Cache directory info 1 entry on success, null on error</returns>
private static DirectoryInfo1Entry ParseDirectoryInfo1Entry(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryInfo1Entry directoryInfo1Entry = new DirectoryInfo1Entry();
directoryInfo1Entry.Dummy0 = data.ReadUInt32();
return directoryInfo1Entry;
}
/// <summary>
/// Parse a Stream into a Half-Life No Cache directory info 2 entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life No Cache directory info 2 entry on success, null on error</returns>
private static DirectoryInfo2Entry ParseDirectoryInfo2Entry(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryInfo2Entry directoryInfo2Entry = new DirectoryInfo2Entry();
directoryInfo2Entry.Dummy0 = data.ReadUInt32();
return directoryInfo2Entry;
}
/// <summary>
/// Parse a Stream into a Half-Life No Cache directory copy entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life No Cache directory copy entry on success, null on error</returns>
private static DirectoryCopyEntry ParseDirectoryCopyEntry(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryCopyEntry directoryCopyEntry = new DirectoryCopyEntry();
directoryCopyEntry.DirectoryIndex = data.ReadUInt32();
return directoryCopyEntry;
}
/// <summary>
/// Parse a Stream into a Half-Life No Cache directory local entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life No Cache directory local entry on success, null on error</returns>
private static DirectoryLocalEntry ParseDirectoryLocalEntry(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryLocalEntry directoryLocalEntry = new DirectoryLocalEntry();
directoryLocalEntry.DirectoryIndex = data.ReadUInt32();
return directoryLocalEntry;
}
/// <summary>
/// Parse a Stream into a Half-Life No Cache unknown header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life No Cache unknown header on success, null on error</returns>
private static UnknownHeader ParseUnknownHeader(Stream data)
{
// TODO: Use marshalling here instead of building
UnknownHeader unknownHeader = new UnknownHeader();
unknownHeader.Dummy0 = data.ReadUInt32();
if (unknownHeader.Dummy0 != 0x00000001)
return null;
unknownHeader.Dummy1 = data.ReadUInt32();
if (unknownHeader.Dummy1 != 0x00000000)
return null;
return unknownHeader;
}
/// <summary>
/// Parse a Stream into a Half-Life No Cache unknown entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life No Cache unknown entry on success, null on error</returns>
private static UnknownEntry ParseUnknownEntry(Stream data)
{
// TODO: Use marshalling here instead of building
UnknownEntry unknownEntry = new UnknownEntry();
unknownEntry.Dummy0 = data.ReadUInt32();
return unknownEntry;
}
/// <summary>
/// Parse a Stream into a Half-Life No Cache checksum header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life No Cache checksum header on success, null on error</returns>
private static ChecksumHeader ParseChecksumHeader(Stream data)
{
// TODO: Use marshalling here instead of building
ChecksumHeader checksumHeader = new ChecksumHeader();
checksumHeader.Dummy0 = data.ReadUInt32();
if (checksumHeader.Dummy0 != 0x00000001)
return null;
checksumHeader.ChecksumSize = data.ReadUInt32();
return checksumHeader;
}
/// <summary>
/// Parse a Stream into a Half-Life No Cache checksum map header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life No Cache checksum map header on success, null on error</returns>
private static ChecksumMapHeader ParseChecksumMapHeader(Stream data)
{
// TODO: Use marshalling here instead of building
ChecksumMapHeader checksumMapHeader = new ChecksumMapHeader();
checksumMapHeader.Dummy0 = data.ReadUInt32();
if (checksumMapHeader.Dummy0 != 0x14893721)
return null;
checksumMapHeader.Dummy1 = data.ReadUInt32();
if (checksumMapHeader.Dummy1 != 0x00000001)
return null;
checksumMapHeader.ItemCount = data.ReadUInt32();
checksumMapHeader.ChecksumCount = data.ReadUInt32();
return checksumMapHeader;
}
/// <summary>
/// Parse a Stream into a Half-Life No Cache checksum map entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life No Cache checksum map entry on success, null on error</returns>
private static ChecksumMapEntry ParseChecksumMapEntry(Stream data)
{
// TODO: Use marshalling here instead of building
ChecksumMapEntry checksumMapEntry = new ChecksumMapEntry();
checksumMapEntry.ChecksumCount = data.ReadUInt32();
checksumMapEntry.FirstChecksumIndex = data.ReadUInt32();
return checksumMapEntry;
}
/// <summary>
/// Parse a Stream into a Half-Life No Cache checksum entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life No Cache checksum entry on success, null on error</returns>
private static ChecksumEntry ParseChecksumEntry(Stream data)
{
// TODO: Use marshalling here instead of building
ChecksumEntry checksumEntry = new ChecksumEntry();
checksumEntry.Checksum = data.ReadUInt32();
return checksumEntry;
}
#endregion
}
}
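The public entry point for this builder sits above this hunk; assuming it mirrors the other builders' surface (a static ParseFile over byte[]/Stream) and that the class is named NCF, a minimal caller sketch might look like the following. Both the class name and the file name are assumptions, not shown in this diff.
// Hypothetical usage sketch (class name NCF and ParseFile surface are assumed)
byte[] ncfData = System.IO.File.ReadAllBytes("half-life base content.ncf");
var ncfFile = BurnOutSharp.Builders.NCF.ParseFile(ncfData, 0);
if (ncfFile != null)
System.Console.WriteLine("Parsed Half-Life No Cache file");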

View File

@@ -0,0 +1,508 @@
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using BurnOutSharp.Models.NewExecutable;
using BurnOutSharp.Utilities;
using static BurnOutSharp.Models.NewExecutable.Constants;
namespace BurnOutSharp.Builders
{
public static class NewExecutable
{
#region Byte Data
/// <summary>
/// Parse a byte array into a New Executable
/// </summary>
/// <param name="data">Byte array to parse</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>Filled executable on success, null on error</returns>
public static Executable ParseExecutable(byte[] data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return ParseExecutable(dataStream);
}
#endregion
#region Stream Data
/// <summary>
/// Parse a Stream into a New Executable
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled executable on success, null on error</returns>
public static Executable ParseExecutable(Stream data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
return null;
// If the offset is out of bounds
if (data.Position < 0 || data.Position >= data.Length)
return null;
// Cache the current offset
int initialOffset = (int)data.Position;
// Create a new executable to fill
var executable = new Executable();
#region MS-DOS Stub
// Parse the MS-DOS stub
var stub = MSDOS.ParseExecutable(data);
if (stub?.Header == null || stub.Header.NewExeHeaderAddr == 0)
return null;
// Set the MS-DOS stub
executable.Stub = stub;
#endregion
#region Executable Header
// Try to parse the executable header
data.Seek(initialOffset + stub.Header.NewExeHeaderAddr, SeekOrigin.Begin);
var executableHeader = ParseExecutableHeader(data);
if (executableHeader == null)
return null;
// Set the executable header
executable.Header = executableHeader;
#endregion
#region Segment Table
// If the offset for the segment table doesn't exist
int tableAddress = initialOffset
+ (int)stub.Header.NewExeHeaderAddr
+ executableHeader.SegmentTableOffset;
if (tableAddress >= data.Length)
return executable;
// Try to parse the segment table
data.Seek(tableAddress, SeekOrigin.Begin);
var segmentTable = ParseSegmentTable(data, executableHeader.FileSegmentCount);
if (segmentTable == null)
return null;
// Set the segment table
executable.SegmentTable = segmentTable;
#endregion
#region Resource Table
// If the offset for the resource table doesn't exist
tableAddress = initialOffset
+ (int)stub.Header.NewExeHeaderAddr
+ executableHeader.ResourceTableOffset;
if (tableAddress >= data.Length)
return executable;
// Try to parse the resource table
data.Seek(tableAddress, SeekOrigin.Begin);
var resourceTable = ParseResourceTable(data, executableHeader.ResourceEntriesCount);
if (resourceTable == null)
return null;
// Set the resource table
executable.ResourceTable = resourceTable;
#endregion
#region Resident-Name Table
// If the offset for the resident-name table doesn't exist
tableAddress = initialOffset
+ (int)stub.Header.NewExeHeaderAddr
+ executableHeader.ResidentNameTableOffset;
int endOffset = initialOffset
+ (int)stub.Header.NewExeHeaderAddr
+ executableHeader.ModuleReferenceTableOffset;
if (tableAddress >= data.Length)
return executable;
// Try to parse the resident-name table
data.Seek(tableAddress, SeekOrigin.Begin);
var residentNameTable = ParseResidentNameTable(data, endOffset);
if (residentNameTable == null)
return null;
// Set the resident-name table
executable.ResidentNameTable = residentNameTable;
#endregion
#region Module-Reference Table
// If the offset for the module-reference table doesn't exist
tableAddress = initialOffset
+ (int)stub.Header.NewExeHeaderAddr
+ executableHeader.ModuleReferenceTableOffset;
if (tableAddress >= data.Length)
return executable;
// Try to parse the module-reference table
data.Seek(tableAddress, SeekOrigin.Begin);
var moduleReferenceTable = ParseModuleReferenceTable(data, executableHeader.ModuleReferenceTableSize);
if (moduleReferenceTable == null)
return null;
// Set the module-reference table
executable.ModuleReferenceTable = moduleReferenceTable;
#endregion
#region Imported-Name Table
// If the offset for the imported-name table doesn't exist
tableAddress = initialOffset
+ (int)stub.Header.NewExeHeaderAddr
+ executableHeader.ImportedNamesTableOffset;
endOffset = initialOffset
+ (int)stub.Header.NewExeHeaderAddr
+ executableHeader.EntryTableOffset;
if (tableAddress >= data.Length)
return executable;
// Try to parse the imported-name table
data.Seek(tableAddress, SeekOrigin.Begin);
var importedNameTable = ParseImportedNameTable(data, endOffset);
if (importedNameTable == null)
return null;
// Set the imported-name table
executable.ImportedNameTable = importedNameTable;
#endregion
#region Entry Table
// If the offset for the entry table doesn't exist
tableAddress = initialOffset
+ (int)stub.Header.NewExeHeaderAddr
+ executableHeader.EntryTableOffset;
endOffset = initialOffset
+ (int)stub.Header.NewExeHeaderAddr
+ executableHeader.EntryTableOffset
+ executableHeader.EntryTableSize;
if (tableAddress >= data.Length)
return executable;
// Try to parse the entry table
data.Seek(tableAddress, SeekOrigin.Begin);
var entryTable = ParseEntryTable(data, endOffset);
if (entryTable == null)
return null;
// Set the entry table
executable.EntryTable = entryTable;
#endregion
#region Nonresident-Name Table
// If the offset for the nonresident-name table doesn't exist
tableAddress = initialOffset
+ (int)executableHeader.NonResidentNamesTableOffset;
endOffset = initialOffset
+ (int)executableHeader.NonResidentNamesTableOffset
+ executableHeader.NonResidentNameTableSize;
if (tableAddress >= data.Length)
return executable;
// Try to parse the nonresident-name table
data.Seek(tableAddress, SeekOrigin.Begin);
var nonResidentNameTable = ParseNonResidentNameTable(data, endOffset);
if (nonResidentNameTable == null)
return null;
// Set the nonresident-name table
executable.NonResidentNameTable = nonResidentNameTable;
#endregion
return executable;
}
/// <summary>
/// Parse a Stream into a New Executable header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled executable header on success, null on error</returns>
private static ExecutableHeader ParseExecutableHeader(Stream data)
{
// TODO: Use marshalling here instead of building
var header = new ExecutableHeader();
byte[] magic = data.ReadBytes(2);
header.Magic = Encoding.ASCII.GetString(magic);
if (header.Magic != SignatureString)
return null;
header.LinkerVersion = data.ReadByteValue();
header.LinkerRevision = data.ReadByteValue();
header.EntryTableOffset = data.ReadUInt16();
header.EntryTableSize = data.ReadUInt16();
header.CrcChecksum = data.ReadUInt32();
header.FlagWord = (HeaderFlag)data.ReadUInt16();
header.AutomaticDataSegmentNumber = data.ReadUInt16();
header.InitialHeapAlloc = data.ReadUInt16();
header.InitialStackAlloc = data.ReadUInt16();
header.InitialCSIPSetting = data.ReadUInt32();
header.InitialSSSPSetting = data.ReadUInt32();
header.FileSegmentCount = data.ReadUInt16();
header.ModuleReferenceTableSize = data.ReadUInt16();
header.NonResidentNameTableSize = data.ReadUInt16();
header.SegmentTableOffset = data.ReadUInt16();
header.ResourceTableOffset = data.ReadUInt16();
header.ResidentNameTableOffset = data.ReadUInt16();
header.ModuleReferenceTableOffset = data.ReadUInt16();
header.ImportedNamesTableOffset = data.ReadUInt16();
header.NonResidentNamesTableOffset = data.ReadUInt32();
header.MovableEntriesCount = data.ReadUInt16();
header.SegmentAlignmentShiftCount = data.ReadUInt16();
header.ResourceEntriesCount = data.ReadUInt16();
header.TargetOperatingSystem = (OperatingSystem)data.ReadByteValue();
header.AdditionalFlags = (OS2Flag)data.ReadByteValue();
header.ReturnThunkOffset = data.ReadUInt16();
header.SegmentReferenceThunkOffset = data.ReadUInt16();
header.MinCodeSwapAreaSize = data.ReadUInt16();
header.WindowsSDKRevision = data.ReadByteValue();
header.WindowsSDKVersion = data.ReadByteValue();
return header;
}
/// <summary>
/// Parse a Stream into a segment table
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="count">Number of segment table entries to read</param>
/// <returns>Filled segment table on success, null on error</returns>
private static SegmentTableEntry[] ParseSegmentTable(Stream data, int count)
{
// TODO: Use marshalling here instead of building
var segmentTable = new SegmentTableEntry[count];
for (int i = 0; i < count; i++)
{
var entry = new SegmentTableEntry();
entry.Offset = data.ReadUInt16();
entry.Length = data.ReadUInt16();
entry.FlagWord = (SegmentTableEntryFlag)data.ReadUInt16();
entry.MinimumAllocationSize = data.ReadUInt16();
segmentTable[i] = entry;
}
return segmentTable;
}
/// <summary>
/// Parse a Stream into a resource table
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="count">Number of resource table entries to read</param>
/// <returns>Filled resource table on success, null on error</returns>
private static ResourceTable ParseResourceTable(Stream data, int count)
{
long initialOffset = data.Position;
// TODO: Use marshalling here instead of building
var resourceTable = new ResourceTable();
resourceTable.AlignmentShiftCount = data.ReadUInt16();
resourceTable.ResourceTypes = new ResourceTypeInformationEntry[count];
for (int i = 0; i < resourceTable.ResourceTypes.Length; i++)
{
var entry = new ResourceTypeInformationEntry();
entry.TypeID = data.ReadUInt16();
entry.ResourceCount = data.ReadUInt16();
entry.Reserved = data.ReadUInt32();
entry.Resources = new ResourceTypeResourceEntry[entry.ResourceCount];
for (int j = 0; j < entry.ResourceCount; j++)
{
// TODO: Should we read and store the resource data?
var resource = new ResourceTypeResourceEntry();
resource.Offset = data.ReadUInt16();
resource.Length = data.ReadUInt16();
resource.FlagWord = (ResourceTypeResourceFlag)data.ReadUInt16();
resource.ResourceID = data.ReadUInt16();
resource.Reserved = data.ReadUInt32();
entry.Resources[j] = resource;
}
resourceTable.ResourceTypes[i] = entry;
}
// Get the full list of unique string offsets
var stringOffsets = resourceTable.ResourceTypes
.Where(rt => rt.IsIntegerType() == false)
.Select(rt => rt.TypeID)
.Union(resourceTable.ResourceTypes
.SelectMany(rt => rt.Resources)
.Where(r => r.IsIntegerType() == false)
.Select(r => r.ResourceID))
.Distinct()
.OrderBy(o => o)
.ToList();
// Populate the type and name string dictionary
resourceTable.TypeAndNameStrings = new Dictionary<ushort, ResourceTypeAndNameString>();
for (int i = 0; i < stringOffsets.Count; i++)
{
int stringOffset = (int)(stringOffsets[i] + initialOffset);
data.Seek(stringOffset, SeekOrigin.Begin);
var str = new ResourceTypeAndNameString();
str.Length = data.ReadByteValue();
str.Text = data.ReadBytes(str.Length);
resourceTable.TypeAndNameStrings[stringOffsets[i]] = str;
}
return resourceTable;
}
/// <summary>
/// Parse a Stream into a resident-name table
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="endOffset">First address not part of the resident-name table</param>
/// <returns>Filled resident-name table on success, null on error</returns>
private static ResidentNameTableEntry[] ParseResidentNameTable(Stream data, int endOffset)
{
// TODO: Use marshalling here instead of building
var residentNameTable = new List<ResidentNameTableEntry>();
while (data.Position < endOffset)
{
var entry = new ResidentNameTableEntry();
entry.Length = data.ReadByteValue();
entry.NameString = data.ReadBytes(entry.Length);
entry.OrdinalNumber = data.ReadUInt16();
residentNameTable.Add(entry);
}
return residentNameTable.ToArray();
}
/// <summary>
/// Parse a Stream into a module-reference table
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="count">Number of module-reference table entries to read</param>
/// <returns>Filled module-reference table on success, null on error</returns>
private static ModuleReferenceTableEntry[] ParseModuleReferenceTable(Stream data, int count)
{
// TODO: Use marshalling here instead of building
var moduleReferenceTable = new ModuleReferenceTableEntry[count];
for (int i = 0; i < count; i++)
{
var entry = new ModuleReferenceTableEntry();
entry.Offset = data.ReadUInt16();
moduleReferenceTable[i] = entry;
}
return moduleReferenceTable;
}
/// <summary>
/// Parse a Stream into an imported-name table
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="endOffset">First address not part of the imported-name table</param>
/// <returns>Filled imported-name table on success, null on error</returns>
private static Dictionary<ushort, ImportedNameTableEntry> ParseImportedNameTable(Stream data, int endOffset)
{
// TODO: Use marshalling here instead of building
var importedNameTable = new Dictionary<ushort, ImportedNameTableEntry>();
while (data.Position < endOffset)
{
ushort currentOffset = (ushort)data.Position;
var entry = new ImportedNameTableEntry();
entry.Length = data.ReadByteValue();
entry.NameString = data.ReadBytes(entry.Length);
importedNameTable[currentOffset] = entry;
}
return importedNameTable;
}
/// <summary>
/// Parse a Stream into an entry table
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="endOffset">First address not part of the entry table</param>
/// <returns>Filled entry table on success, null on error</returns>
private static EntryTableBundle[] ParseEntryTable(Stream data, int endOffset)
{
// TODO: Use marshalling here instead of building
var entryTable = new List<EntryTableBundle>();
while (data.Position < endOffset)
{
var entry = new EntryTableBundle();
entry.EntryCount = data.ReadByteValue();
entry.SegmentIndicator = data.ReadByteValue();
switch (entry.GetEntryType())
{
case SegmentEntryType.Unused:
break;
case SegmentEntryType.FixedSegment:
entry.FixedFlagWord = (FixedSegmentEntryFlag)data.ReadByteValue();
entry.FixedOffset = data.ReadUInt16();
break;
case SegmentEntryType.MoveableSegment:
entry.MoveableFlagWord = (MoveableSegmentEntryFlag)data.ReadByteValue();
entry.MoveableReserved = data.ReadUInt16();
entry.MoveableSegmentNumber = data.ReadByteValue();
entry.MoveableOffset = data.ReadUInt16();
break;
}
entryTable.Add(entry);
}
return entryTable.ToArray();
}
/// <summary>
/// Parse a Stream into a nonresident-name table
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="endOffset">First address not part of the nonresident-name table</param>
/// <returns>Filled nonresident-name table on success, null on error</returns>
private static NonResidentNameTableEntry[] ParseNonResidentNameTable(Stream data, int endOffset)
{
// TODO: Use marshalling here instead of building
var nonResidentNameTable = new List<NonResidentNameTableEntry>();
while (data.Position < endOffset)
{
var entry = new NonResidentNameTableEntry();
entry.Length = data.ReadByteValue();
entry.NameString = data.ReadBytes(entry.Length);
entry.OrdinalNumber = data.ReadUInt16();
nonResidentNameTable.Add(entry);
}
return nonResidentNameTable.ToArray();
}
#endregion
}
}
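A minimal usage sketch for the builder above, using only the public ParseExecutable surface shown here; the file name is illustrative:
// Hypothetical usage: parse an NE file from disk and inspect its header
byte[] fileData = System.IO.File.ReadAllBytes("SAMPLE.EXE");
var nex = BurnOutSharp.Builders.NewExecutable.ParseExecutable(fileData, 0);
if (nex?.Header != null)
System.Console.WriteLine($"Linker {nex.Header.LinkerVersion}.{nex.Header.LinkerRevision}, {nex.Header.FileSegmentCount} segments");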

View File

@@ -0,0 +1,393 @@
using System.Collections.Generic;
using System.IO;
using System.Text;
using BurnOutSharp.Models.Nitro;
using BurnOutSharp.Utilities;
namespace BurnOutSharp.Builders
{
public static class Nitro
{
#region Byte Data
/// <summary>
/// Parse a byte array into an NDS cart image
/// </summary>
/// <param name="data">Byte array to parse</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>Filled cart image on success, null on error</returns>
public static Cart ParseCart(byte[] data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return ParseCart(dataStream);
}
#endregion
#region Stream Data
/// <summary>
/// Parse a Stream into an NDS cart image
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled cart image on success, null on error</returns>
public static Cart ParseCart(Stream data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
return null;
// If the offset is out of bounds
if (data.Position < 0 || data.Position >= data.Length)
return null;
// Cache the current offset
int initialOffset = (int)data.Position;
// Create a new cart image to fill
var cart = new Cart();
#region Header
// Try to parse the header
var header = ParseCommonHeader(data);
if (header == null)
return null;
// Set the cart image header
cart.CommonHeader = header;
#endregion
#region Extended DSi Header
// If we have a DSi-compatible cartridge
if (header.UnitCode == Unitcode.NDSPlusDSi || header.UnitCode == Unitcode.DSi)
{
var extendedDSiHeader = ParseExtendedDSiHeader(data);
if (extendedDSiHeader == null)
return null;
cart.ExtendedDSiHeader = extendedDSiHeader;
}
#endregion
#region Secure Area
// Try to get the secure area offset
long secureAreaOffset = 0x4000;
if (secureAreaOffset > data.Length)
return null;
// Seek to the secure area
data.Seek(secureAreaOffset, SeekOrigin.Begin);
// Read the secure area without processing
cart.SecureArea = data.ReadBytes(0x800);
#endregion
#region Name Table
// Try to get the name table offset
long nameTableOffset = header.FileNameTableOffset;
if (nameTableOffset < 0 || nameTableOffset >= data.Length)
return null;
// Seek to the name table
data.Seek(nameTableOffset, SeekOrigin.Begin);
// Try to parse the name table
var nameTable = ParseNameTable(data);
if (nameTable == null)
return null;
// Set the name table
cart.NameTable = nameTable;
#endregion
#region File Allocation Table
// Try to get the file allocation table offset
long fileAllocationTableOffset = header.FileAllocationTableOffset;
if (fileAllocationTableOffset < 0 || fileAllocationTableOffset >= data.Length)
return null;
// Seek to the file allocation table
data.Seek(fileAllocationTableOffset, SeekOrigin.Begin);
// Create the file allocation table
var fileAllocationTable = new List<FileAllocationTableEntry>();
// Try to parse the file allocation table
while (data.Position - fileAllocationTableOffset < header.FileAllocationTableLength)
{
var entry = ParseFileAllocationTableEntry(data);
fileAllocationTable.Add(entry);
}
// Set the file allocation table
cart.FileAllocationTable = fileAllocationTable.ToArray();
#endregion
// TODO: Read and optionally parse out the other areas
// Look for offsets and lengths in the header pieces
return cart;
}
/// <summary>
/// Parse a Stream into a common header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled common header on success, null on error</returns>
private static CommonHeader ParseCommonHeader(Stream data)
{
// TODO: Use marshalling here instead of building
CommonHeader commonHeader = new CommonHeader();
byte[] gameTitle = data.ReadBytes(12);
commonHeader.GameTitle = Encoding.ASCII.GetString(gameTitle).TrimEnd('\0');
commonHeader.GameCode = data.ReadUInt32();
byte[] makerCode = data.ReadBytes(2);
commonHeader.MakerCode = Encoding.ASCII.GetString(makerCode).TrimEnd('\0');
commonHeader.UnitCode = (Unitcode)data.ReadByteValue();
commonHeader.EncryptionSeedSelect = data.ReadByteValue();
commonHeader.DeviceCapacity = data.ReadByteValue();
commonHeader.Reserved1 = data.ReadBytes(7);
commonHeader.GameRevision = data.ReadUInt16();
commonHeader.RomVersion = data.ReadByteValue();
commonHeader.InternalFlags = data.ReadByteValue();
commonHeader.ARM9RomOffset = data.ReadUInt32();
commonHeader.ARM9EntryAddress = data.ReadUInt32();
commonHeader.ARM9LoadAddress = data.ReadUInt32();
commonHeader.ARM9Size = data.ReadUInt32();
commonHeader.ARM7RomOffset = data.ReadUInt32();
commonHeader.ARM7EntryAddress = data.ReadUInt32();
commonHeader.ARM7LoadAddress = data.ReadUInt32();
commonHeader.ARM7Size = data.ReadUInt32();
commonHeader.FileNameTableOffset = data.ReadUInt32();
commonHeader.FileNameTableLength = data.ReadUInt32();
commonHeader.FileAllocationTableOffset = data.ReadUInt32();
commonHeader.FileAllocationTableLength = data.ReadUInt32();
commonHeader.ARM9OverlayOffset = data.ReadUInt32();
commonHeader.ARM9OverlayLength = data.ReadUInt32();
commonHeader.ARM7OverlayOffset = data.ReadUInt32();
commonHeader.ARM7OverlayLength = data.ReadUInt32();
commonHeader.NormalCardControlRegisterSettings = data.ReadUInt32();
commonHeader.SecureCardControlRegisterSettings = data.ReadUInt32();
commonHeader.IconBannerOffset = data.ReadUInt32();
commonHeader.SecureAreaCRC = data.ReadUInt16();
commonHeader.SecureTransferTimeout = data.ReadUInt16();
commonHeader.ARM9Autoload = data.ReadUInt32();
commonHeader.ARM7Autoload = data.ReadUInt32();
commonHeader.SecureDisable = data.ReadBytes(8);
commonHeader.NTRRegionRomSize = data.ReadUInt32();
commonHeader.HeaderSize = data.ReadUInt32();
commonHeader.Reserved2 = data.ReadBytes(56);
commonHeader.NintendoLogo = data.ReadBytes(156);
commonHeader.NintendoLogoCRC = data.ReadUInt16();
commonHeader.HeaderCRC = data.ReadUInt16();
commonHeader.DebuggerReserved = data.ReadBytes(0x20);
return commonHeader;
}
/// <summary>
/// Parse a Stream into an extended DSi header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled extended DSi header on success, null on error</returns>
private static ExtendedDSiHeader ParseExtendedDSiHeader(Stream data)
{
// TODO: Use marshalling here instead of building
ExtendedDSiHeader extendedDSiHeader = new ExtendedDSiHeader();
extendedDSiHeader.GlobalMBK15Settings = new uint[5];
for (int i = 0; i < 5; i++)
{
extendedDSiHeader.GlobalMBK15Settings[i] = data.ReadUInt32();
}
extendedDSiHeader.LocalMBK68SettingsARM9 = new uint[3];
for (int i = 0; i < 3; i++)
{
extendedDSiHeader.LocalMBK68SettingsARM9[i] = data.ReadUInt32();
}
extendedDSiHeader.LocalMBK68SettingsARM7 = new uint[3];
for (int i = 0; i < 3; i++)
{
extendedDSiHeader.LocalMBK68SettingsARM7[i] = data.ReadUInt32();
}
extendedDSiHeader.GlobalMBK9Setting = data.ReadUInt32();
extendedDSiHeader.RegionFlags = data.ReadUInt32();
extendedDSiHeader.AccessControl = data.ReadUInt32();
extendedDSiHeader.ARM7SCFGEXTMask = data.ReadUInt32();
extendedDSiHeader.ReservedFlags = data.ReadUInt32();
extendedDSiHeader.ARM9iRomOffset = data.ReadUInt32();
extendedDSiHeader.Reserved3 = data.ReadUInt32();
extendedDSiHeader.ARM9iLoadAddress = data.ReadUInt32();
extendedDSiHeader.ARM9iSize = data.ReadUInt32();
extendedDSiHeader.ARM7iRomOffset = data.ReadUInt32();
extendedDSiHeader.Reserved4 = data.ReadUInt32();
extendedDSiHeader.ARM7iLoadAddress = data.ReadUInt32();
extendedDSiHeader.ARM7iSize = data.ReadUInt32();
extendedDSiHeader.DigestNTRRegionOffset = data.ReadUInt32();
extendedDSiHeader.DigestNTRRegionLength = data.ReadUInt32();
extendedDSiHeader.DigestTWLRegionOffset = data.ReadUInt32();
extendedDSiHeader.DigestTWLRegionLength = data.ReadUInt32();
extendedDSiHeader.DigestSectorHashtableRegionOffset = data.ReadUInt32();
extendedDSiHeader.DigestSectorHashtableRegionLength = data.ReadUInt32();
extendedDSiHeader.DigestBlockHashtableRegionOffset = data.ReadUInt32();
extendedDSiHeader.DigestBlockHashtableRegionLength = data.ReadUInt32();
extendedDSiHeader.DigestSectorSize = data.ReadUInt32();
extendedDSiHeader.DigestBlockSectorCount = data.ReadUInt32();
extendedDSiHeader.IconBannerSize = data.ReadUInt32();
extendedDSiHeader.Unknown1 = data.ReadUInt32();
extendedDSiHeader.ModcryptArea1Offset = data.ReadUInt32();
extendedDSiHeader.ModcryptArea1Size = data.ReadUInt32();
extendedDSiHeader.ModcryptArea2Offset = data.ReadUInt32();
extendedDSiHeader.ModcryptArea2Size = data.ReadUInt32();
extendedDSiHeader.TitleID = data.ReadBytes(8);
extendedDSiHeader.DSiWarePublicSavSize = data.ReadUInt32();
extendedDSiHeader.DSiWarePrivateSavSize = data.ReadUInt32();
extendedDSiHeader.ReservedZero = data.ReadBytes(176);
extendedDSiHeader.Unknown2 = data.ReadBytes(0x10);
extendedDSiHeader.ARM9WithSecureAreaSHA1HMACHash = data.ReadBytes(20);
extendedDSiHeader.ARM7SHA1HMACHash = data.ReadBytes(20);
extendedDSiHeader.DigestMasterSHA1HMACHash = data.ReadBytes(20);
extendedDSiHeader.BannerSHA1HMACHash = data.ReadBytes(20);
extendedDSiHeader.ARM9iDecryptedSHA1HMACHash = data.ReadBytes(20);
extendedDSiHeader.ARM7iDecryptedSHA1HMACHash = data.ReadBytes(20);
extendedDSiHeader.Reserved5 = data.ReadBytes(40);
extendedDSiHeader.ARM9NoSecureAreaSHA1HMACHash = data.ReadBytes(20);
extendedDSiHeader.Reserved6 = data.ReadBytes(2636);
extendedDSiHeader.ReservedAndUnchecked = data.ReadBytes(0x180);
extendedDSiHeader.RSASignature = data.ReadBytes(0x80);
return extendedDSiHeader;
}
/// <summary>
/// Parse a Stream into a name table
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled name table on success, null on error</returns>
private static NameTable ParseNameTable(Stream data)
{
// TODO: Use marshalling here instead of building
NameTable nameTable = new NameTable();
// Create a variable-length table
var folderAllocationTable = new List<FolderAllocationTableEntry>();
int entryCount = int.MaxValue;
while (entryCount > 0)
{
var entry = ParseFolderAllocationTableEntry(data);
folderAllocationTable.Add(entry);
// If we have the root entry
if (entryCount == int.MaxValue)
entryCount = (entry.Unknown << 8) | entry.ParentFolderIndex;
// Decrement the entry count
entryCount--;
}
// Assign the folder allocation table
nameTable.FolderAllocationTable = folderAllocationTable.ToArray();
// Create a variable-length table
var nameList = new List<NameListEntry>();
while (true)
{
var entry = ParseNameListEntry(data);
if (entry == null)
break;
nameList.Add(entry);
}
// Assign the name list
nameTable.NameList = nameList.ToArray();
return nameTable;
}
/// <summary>
/// Parse a Stream into a folder allocation table entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled folder allocation table entry on success, null on error</returns>
private static FolderAllocationTableEntry ParseFolderAllocationTableEntry(Stream data)
{
// TODO: Use marshalling here instead of building
FolderAllocationTableEntry entry = new FolderAllocationTableEntry();
entry.StartOffset = data.ReadUInt32();
entry.FirstFileIndex = data.ReadUInt16();
entry.ParentFolderIndex = data.ReadByteValue();
entry.Unknown = data.ReadByteValue();
return entry;
}
/// <summary>
/// Parse a Stream into a name list entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled name list entry on success, null on error</returns>
private static NameListEntry ParseNameListEntry(Stream data)
{
// TODO: Use marshalling here instead of building
NameListEntry entry = new NameListEntry();
byte flagAndSize = data.ReadByteValue();
if (flagAndSize == 0xFF)
return null;
entry.Folder = (flagAndSize & 0x80) != 0;
byte size = (byte)(flagAndSize & ~0x80);
if (size > 0)
{
byte[] name = data.ReadBytes(size);
entry.Name = Encoding.UTF8.GetString(name);
}
if (entry.Folder)
entry.Index = data.ReadUInt16();
return entry;
}
/// <summary>
/// Parse a Stream into a file allocation table entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled file allocation table entry on success, null on error</returns>
private static FileAllocationTableEntry ParseFileAllocationTableEntry(Stream data)
{
// TODO: Use marshalling here instead of building
FileAllocationTableEntry entry = new FileAllocationTableEntry();
entry.StartOffset = data.ReadUInt32();
entry.EndOffset = data.ReadUInt32();
return entry;
}
#endregion
}
}
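A quick caller sketch for the cart parser above, again using only the public surface shown; the file name is illustrative:
// Hypothetical usage: parse an NDS dump and print common header fields
byte[] rom = System.IO.File.ReadAllBytes("game.nds");
var cart = BurnOutSharp.Builders.Nitro.ParseCart(rom, 0);
if (cart?.CommonHeader != null)
System.Console.WriteLine($"{cart.CommonHeader.GameTitle} ({cart.CommonHeader.MakerCode})");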

View File

@@ -0,0 +1,137 @@
using System.IO;
using System.Text;
using BurnOutSharp.Models.PAK;
using BurnOutSharp.Utilities;
using static BurnOutSharp.Models.PAK.Constants;
namespace BurnOutSharp.Builders
{
public static class PAK
{
#region Byte Data
/// <summary>
/// Parse a byte array into a Half-Life Package
/// </summary>
/// <param name="data">Byte array to parse</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>Filled Half-Life Package on success, null on error</returns>
public static Models.PAK.File ParseFile(byte[] data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return ParseFile(dataStream);
}
#endregion
#region Stream Data
/// <summary>
/// Parse a Stream into a Half-Life Package
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Package on success, null on error</returns>
public static Models.PAK.File ParseFile(Stream data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
return null;
// If the offset is out of bounds
if (data.Position < 0 || data.Position >= data.Length)
return null;
// Cache the current offset
long initialOffset = data.Position;
// Create a new Half-Life Package to fill
var file = new Models.PAK.File();
#region Header
// Try to parse the header
var header = ParseHeader(data);
if (header == null)
return null;
// Set the package header
file.Header = header;
#endregion
#region Directory Items
// Get the directory items offset
uint directoryItemsOffset = header.DirectoryOffset;
if (directoryItemsOffset >= data.Length)
return null;
// Seek to the directory items
data.Seek(directoryItemsOffset, SeekOrigin.Begin);
// Create the directory item array
file.DirectoryItems = new DirectoryItem[header.DirectoryLength / 64];
// Try to parse the directory items
for (int i = 0; i < file.DirectoryItems.Length; i++)
{
var directoryItem = ParseDirectoryItem(data);
file.DirectoryItems[i] = directoryItem;
}
#endregion
return file;
}
/// <summary>
/// Parse a Stream into a Half-Life Package header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Package header on success, null on error</returns>
private static Header ParseHeader(Stream data)
{
// TODO: Use marshalling here instead of building
Header header = new Header();
byte[] signature = data.ReadBytes(4);
header.Signature = Encoding.ASCII.GetString(signature);
if (header.Signature != SignatureString)
return null;
header.DirectoryOffset = data.ReadUInt32();
header.DirectoryLength = data.ReadUInt32();
return header;
}
/// <summary>
/// Parse a Stream into a Half-Life Package directory item
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Package directory item on success, null on error</returns>
private static DirectoryItem ParseDirectoryItem(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryItem directoryItem = new DirectoryItem();
byte[] itemName = data.ReadBytes(56);
directoryItem.ItemName = Encoding.ASCII.GetString(itemName).TrimEnd('\0');
directoryItem.ItemOffset = data.ReadUInt32();
directoryItem.ItemLength = data.ReadUInt32();
return directoryItem;
}
#endregion
}
}
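A minimal sketch of driving the package parser above; the archive name is illustrative:
// Hypothetical usage: list the directory items in a Half-Life PAK
byte[] pakData = System.IO.File.ReadAllBytes("pak0.pak");
var pakFile = BurnOutSharp.Builders.PAK.ParseFile(pakData, 0);
if (pakFile?.DirectoryItems != null)
foreach (var item in pakFile.DirectoryItems)
System.Console.WriteLine($"{item.ItemName} @ {item.ItemOffset} ({item.ItemLength} bytes)");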

View File

@@ -0,0 +1,211 @@
using System.IO;
using System.Text;
using BurnOutSharp.Models.PFF;
using BurnOutSharp.Utilities;
using static BurnOutSharp.Models.PFF.Constants;
namespace BurnOutSharp.Builders
{
public static class PFF
{
#region Byte Data
/// <summary>
/// Parse a byte array into a PFF archive
/// </summary>
/// <param name="data">Byte array to parse</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>Filled archive on success, null on error</returns>
public static Archive ParseArchive(byte[] data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return ParseArchive(dataStream);
}
#endregion
#region Stream Data
/// <summary>
/// Parse a Stream into a PFF archive
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled archive on success, null on error</returns>
public static Archive ParseArchive(Stream data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
return null;
// If the offset is out of bounds
if (data.Position < 0 || data.Position >= data.Length)
return null;
// Cache the current offset
int initialOffset = (int)data.Position;
// Create a new archive to fill
var archive = new Archive();
#region Header
// Try to parse the header
var header = ParseHeader(data);
if (header == null)
return null;
// Set the archive header
archive.Header = header;
#endregion
#region Segments
// Get the segments
long offset = header.FileListOffset;
if (offset < 0 || offset >= data.Length)
return null;
// Seek to the segments
data.Seek(offset, SeekOrigin.Begin);
// Create the segments array
archive.Segments = new Segment[header.NumberOfFiles];
// Read all segments in turn
for (int i = 0; i < header.NumberOfFiles; i++)
{
var file = ParseSegment(data, header.FileSegmentSize);
if (file == null)
return null;
archive.Segments[i] = file;
}
#endregion
#region Footer
// Get the footer offset
offset = header.FileListOffset + (header.FileSegmentSize * header.NumberOfFiles);
if (offset < 0 || offset >= data.Length)
return null;
// Seek to the footer
data.Seek(offset, SeekOrigin.Begin);
// Try to parse the footer
var footer = ParseFooter(data);
if (footer == null)
return null;
// Set the archive footer
archive.Footer = footer;
#endregion
return archive;
}
/// <summary>
/// Parse a Stream into a header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled header on success, null on error</returns>
private static Header ParseHeader(Stream data)
{
// TODO: Use marshalling here instead of building
Header header = new Header();
header.HeaderSize = data.ReadUInt32();
byte[] signature = data.ReadBytes(4);
header.Signature = Encoding.ASCII.GetString(signature);
header.NumberOfFiles = data.ReadUInt32();
header.FileSegmentSize = data.ReadUInt32();
switch (header.Signature)
{
case Version0SignatureString:
if (header.FileSegmentSize != Version0HSegmentSize)
return null;
break;
case Version2SignatureString:
if (header.FileSegmentSize != Version2SegmentSize)
return null;
break;
// Version 3 can sometimes have Version 2 segment sizes
case Version3SignatureString:
if (header.FileSegmentSize != Version2SegmentSize && header.FileSegmentSize != Version3SegmentSize)
return null;
break;
case Version4SignatureString:
if (header.FileSegmentSize != Version4SegmentSize)
return null;
break;
default:
return null;
}
header.FileListOffset = data.ReadUInt32();
return header;
}
/// <summary>
/// Parse a Stream into a footer
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled footer on success, null on error</returns>
private static Footer ParseFooter(Stream data)
{
// TODO: Use marshalling here instead of building
Footer footer = new Footer();
footer.SystemIP = data.ReadUInt32();
footer.Reserved = data.ReadUInt32();
byte[] kingTag = data.ReadBytes(4);
footer.KingTag = Encoding.ASCII.GetString(kingTag);
return footer;
}
/// <summary>
/// Parse a Stream into a file segment
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="segmentSize">PFF segment size</param>
/// <returns>Filled file segment on success, null on error</returns>
private static Segment ParseSegment(Stream data, uint segmentSize)
{
// TODO: Use marshalling here instead of building
Segment segment = new Segment();
segment.Deleted = data.ReadUInt32();
segment.FileLocation = data.ReadUInt32();
segment.FileSize = data.ReadUInt32();
segment.PackedDate = data.ReadUInt32();
byte[] fileName = data.ReadBytes(0x10);
segment.FileName = Encoding.ASCII.GetString(fileName).TrimEnd('\0');
if (segmentSize > Version2SegmentSize)
segment.ModifiedDate = data.ReadUInt32();
if (segmentSize > Version3SegmentSize)
segment.CompressionLevel = data.ReadUInt32();
return segment;
}
#endregion
}
}
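A caller sketch for the archive parser above; the file name is illustrative, and treating Deleted == 0 as a live entry is an assumption about the format, not something this hunk establishes:
// Hypothetical usage: enumerate segments in a PFF archive
byte[] pffData = System.IO.File.ReadAllBytes("data.pff");
var pffArchive = BurnOutSharp.Builders.PFF.ParseArchive(pffData, 0);
if (pffArchive?.Segments != null)
foreach (var segment in pffArchive.Segments)
if (segment.Deleted == 0) // assumption: 0 marks a live (non-deleted) entry
System.Console.WriteLine($"{segment.FileName}: {segment.FileSize} bytes at {segment.FileLocation}");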

View File

@@ -0,0 +1,463 @@
using System.IO;
using System.Text;
using BurnOutSharp.Models.PlayJ;
using BurnOutSharp.Utilities;
using static BurnOutSharp.Models.PlayJ.Constants;
namespace BurnOutSharp.Builders
{
public static class PlayJ
{
#region Byte Data
/// <summary>
/// Parse a byte array into a PlayJ playlist
/// </summary>
/// <param name="data">Byte array to parse</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>Filled playlist on success, null on error</returns>
public static Playlist ParsePlaylist(byte[] data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return ParsePlaylist(dataStream);
}
/// <summary>
/// Parse a byte array into a PlayJ audio file
/// </summary>
/// <param name="data">Byte array to parse</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>Filled audio file on success, null on error</returns>
public static AudioFile ParseAudioFile(byte[] data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return ParseAudioFile(dataStream);
}
#endregion
#region Stream Data
/// <summary>
/// Parse a Stream into a PlayJ playlist
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled playlist on success, null on error</returns>
public static Playlist ParsePlaylist(Stream data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
return null;
// If the offset is out of bounds
if (data.Position < 0 || data.Position >= data.Length)
return null;
// Cache the current offset
int initialOffset = (int)data.Position;
// Create a new playlist to fill
var playlist = new Playlist();
#region Playlist Header
// Try to parse the playlist header
var playlistHeader = ParsePlaylistHeader(data);
if (playlistHeader == null)
return null;
// Set the playlist header
playlist.Header = playlistHeader;
#endregion
#region Audio Files
// Create the audio files array
playlist.AudioFiles = new AudioFile[playlistHeader.TrackCount];
// Try to parse the audio files
for (int i = 0; i < playlist.AudioFiles.Length; i++)
{
long currentOffset = data.Position;
var audioFile = ParseAudioFile(data, currentOffset);
if (audioFile == null)
return null;
playlist.AudioFiles[i] = audioFile;
}
#endregion
return playlist;
}
/// <summary>
/// Parse a Stream into a PlayJ audio file
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="adjust">Offset to adjust all seeking by</param>
/// <returns>Filled audio file on success, null on error</returns>
public static AudioFile ParseAudioFile(Stream data, long adjust = 0)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
return null;
// If the offset is out of bounds
if (data.Position < 0 || data.Position >= data.Length)
return null;
// Cache the current offset
int initialOffset = (int)data.Position;
// Create a new audio file to fill
var audioFile = new AudioFile();
#region Audio Header
// Try to parse the audio header
var audioHeader = ParseAudioHeader(data);
if (audioHeader == null)
return null;
// Set the audio header
audioFile.Header = audioHeader;
#endregion
#region Unknown Block 1
uint unknownOffset1 = (audioHeader.Version == 0x00000000)
? (audioHeader as AudioHeaderV1).UnknownOffset1
: (audioHeader as AudioHeaderV2).UnknownOffset1 + 0x54;
// If we have an unknown block 1 offset
if (unknownOffset1 > 0)
{
// Get the unknown block 1 offset
long offset = unknownOffset1 + adjust;
if (offset < 0 || offset >= data.Length)
return null;
// Seek to the unknown block 1
data.Seek(offset, SeekOrigin.Begin);
}
// Try to parse the unknown block 1
var unknownBlock1 = ParseUnknownBlock1(data);
if (unknownBlock1 == null)
return null;
// Set the unknown block 1
audioFile.UnknownBlock1 = unknownBlock1;
#endregion
#region V1 Only
// If we have a V1 file
if (audioHeader.Version == 0x00000000)
{
#region Unknown Value 2
// Get the V1 unknown offset 2
uint? unknownOffset2 = (audioHeader as AudioHeaderV1)?.UnknownOffset2;
// If we have an unknown value 2 offset
if (unknownOffset2 != null && unknownOffset2 > 0)
{
// Get the unknown value 2 offset
long offset = unknownOffset2.Value + adjust;
if (offset < 0 || offset >= data.Length)
return null;
// Seek to the unknown value 2
data.Seek(offset, SeekOrigin.Begin);
}
// Set the unknown value 2
audioFile.UnknownValue2 = data.ReadUInt32();
#endregion
#region Unknown Block 3
// Get the V1 unknown offset 3
uint? unknownOffset3 = (audioHeader as AudioHeaderV1)?.UnknownOffset3;
// If we have an unknown block 3 offset
if (unknownOffset3 != null && unknownOffset3 > 0)
{
// Get the unknown block 3 offset
long offset = unknownOffset3.Value + adjust;
if (offset < 0 || offset >= data.Length)
return null;
// Seek to the unknown block 3
data.Seek(offset, SeekOrigin.Begin);
}
// Try to parse the unknown block 3
var unknownBlock3 = ParseUnknownBlock3(data);
if (unknownBlock3 == null)
return null;
// Set the unknown block 3
audioFile.UnknownBlock3 = unknownBlock3;
#endregion
}
#endregion
#region V2 Only
// If we have a V2 file
if (audioHeader.Version == 0x0000000A)
{
#region Data Files Count
// Set the data files count
audioFile.DataFilesCount = data.ReadUInt32();
#endregion
#region Data Files
// Create the data files array
audioFile.DataFiles = new DataFile[audioFile.DataFilesCount];
// Try to parse the data files
for (int i = 0; i < audioFile.DataFiles.Length; i++)
{
var dataFile = ParseDataFile(data);
if (dataFile == null)
return null;
audioFile.DataFiles[i] = dataFile;
}
#endregion
}
#endregion
return audioFile;
}
/// <summary>
/// Parse a Stream into a playlist header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled playlist header on success, null on error</returns>
private static PlaylistHeader ParsePlaylistHeader(Stream data)
{
// TODO: Use marshalling here instead of building
PlaylistHeader playlistHeader = new PlaylistHeader();
playlistHeader.TrackCount = data.ReadUInt32();
playlistHeader.Data = data.ReadBytes(52);
return playlistHeader;
}
/// <summary>
/// Parse a Stream into an audio header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled audio header on success, null on error</returns>
private static AudioHeader ParseAudioHeader(Stream data)
{
// Cache the current offset
long initialOffset = data.Position;
// TODO: Use marshalling here instead of building
AudioHeader audioHeader;
// Get the common header pieces
uint signature = data.ReadUInt32();
if (signature != SignatureUInt32)
return null;
uint version = data.ReadUInt32();
// Build the header according to version
uint unknownOffset1;
switch (version)
{
// Version 1
case 0x00000000:
AudioHeaderV1 v1 = new AudioHeaderV1();
v1.Signature = signature;
v1.Version = version;
v1.TrackID = data.ReadUInt32();
v1.UnknownOffset1 = data.ReadUInt32();
v1.UnknownOffset2 = data.ReadUInt32();
v1.UnknownOffset3 = data.ReadUInt32();
v1.Unknown1 = data.ReadUInt32();
v1.Unknown2 = data.ReadUInt32();
v1.Year = data.ReadUInt32();
v1.TrackNumber = data.ReadByteValue();
v1.Subgenre = (Subgenre)data.ReadByteValue();
v1.Duration = data.ReadUInt32();
audioHeader = v1;
unknownOffset1 = v1.UnknownOffset1;
break;
// Version 2
case 0x0000000A:
AudioHeaderV2 v2 = new AudioHeaderV2();
v2.Signature = signature;
v2.Version = version;
v2.Unknown1 = data.ReadUInt32();
v2.Unknown2 = data.ReadUInt32();
v2.Unknown3 = data.ReadUInt32();
v2.Unknown4 = data.ReadUInt32();
v2.Unknown5 = data.ReadUInt32();
v2.Unknown6 = data.ReadUInt32();
v2.UnknownOffset1 = data.ReadUInt32();
v2.Unknown7 = data.ReadUInt32();
v2.Unknown8 = data.ReadUInt32();
v2.Unknown9 = data.ReadUInt32();
v2.UnknownOffset2 = data.ReadUInt32();
v2.Unknown10 = data.ReadUInt32();
v2.Unknown11 = data.ReadUInt32();
v2.Unknown12 = data.ReadUInt32();
v2.Unknown13 = data.ReadUInt32();
v2.Unknown14 = data.ReadUInt32();
v2.Unknown15 = data.ReadUInt32();
v2.Unknown16 = data.ReadUInt32();
v2.Unknown17 = data.ReadUInt32();
v2.TrackID = data.ReadUInt32();
v2.Year = data.ReadUInt32();
v2.TrackNumber = data.ReadUInt32();
v2.Unknown18 = data.ReadUInt32();
audioHeader = v2;
unknownOffset1 = v2.UnknownOffset1 + 0x54;
break;
// No other versions are recognized
default:
return null;
}
audioHeader.TrackLength = data.ReadUInt16();
byte[] track = data.ReadBytes(audioHeader.TrackLength);
if (track != null)
audioHeader.Track = Encoding.ASCII.GetString(track);
audioHeader.ArtistLength = data.ReadUInt16();
byte[] artist = data.ReadBytes(audioHeader.ArtistLength);
if (artist != null)
audioHeader.Artist = Encoding.ASCII.GetString(artist);
audioHeader.AlbumLength = data.ReadUInt16();
byte[] album = data.ReadBytes(audioHeader.AlbumLength);
if (album != null)
audioHeader.Album = Encoding.ASCII.GetString(album);
audioHeader.WriterLength = data.ReadUInt16();
byte[] writer = data.ReadBytes(audioHeader.WriterLength);
if (writer != null)
audioHeader.Writer = Encoding.ASCII.GetString(writer);
audioHeader.PublisherLength = data.ReadUInt16();
byte[] publisher = data.ReadBytes(audioHeader.PublisherLength);
if (publisher != null)
audioHeader.Publisher = Encoding.ASCII.GetString(publisher);
audioHeader.LabelLength = data.ReadUInt16();
byte[] label = data.ReadBytes(audioHeader.LabelLength);
if (label != null)
audioHeader.Label = Encoding.ASCII.GetString(label);
if (data.Position - initialOffset < unknownOffset1)
{
audioHeader.CommentsLength = data.ReadUInt16();
byte[] comments = data.ReadBytes(audioHeader.CommentsLength);
if (comments != null)
audioHeader.Comments = Encoding.ASCII.GetString(comments);
}
return audioHeader;
}
/// <summary>
/// Parse a Stream into an unknown block 1
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled unknown block 1 on success, null on error</returns>
private static UnknownBlock1 ParseUnknownBlock1(Stream data)
{
// TODO: Use marshalling here instead of building
UnknownBlock1 unknownBlock1 = new UnknownBlock1();
unknownBlock1.Length = data.ReadUInt32();
unknownBlock1.Data = data.ReadBytes((int)unknownBlock1.Length);
return unknownBlock1;
}
/// <summary>
/// Parse a Stream into an unknown block 3
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled unknown block 3 on success, null on error</returns>
private static UnknownBlock3 ParseUnknownBlock3(Stream data)
{
// TODO: Use marshalling here instead of building
UnknownBlock3 unknownBlock3 = new UnknownBlock3();
// No-op because we don't even know the length
return unknownBlock3;
}
/// <summary>
/// Parse a Stream into a data file
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled data file on success, null on error</returns>
private static DataFile ParseDataFile(Stream data)
{
// TODO: Use marshalling here instead of building
DataFile dataFile = new DataFile();
dataFile.FileNameLength = data.ReadUInt16();
byte[] fileName = data.ReadBytes(dataFile.FileNameLength);
if (fileName != null)
dataFile.FileName = Encoding.ASCII.GetString(fileName);
dataFile.DataLength = data.ReadUInt32();
dataFile.Data = data.ReadBytes((int)dataFile.DataLength);
return dataFile;
}
#endregion
}
}
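A minimal sketch for the audio-file path above; the file name is illustrative:
// Hypothetical usage: parse a standalone PlayJ audio file and print its tags
byte[] pljData = System.IO.File.ReadAllBytes("track.plj");
var audio = BurnOutSharp.Builders.PlayJ.ParseAudioFile(pljData, 0);
if (audio?.Header != null)
System.Console.WriteLine($"{audio.Header.Artist} - {audio.Header.Track} ({audio.Header.Album})");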

File diff suppressed because it is too large

View File

@@ -0,0 +1,184 @@
using System.IO;
using System.Text;
using BurnOutSharp.Models.Quantum;
using BurnOutSharp.Utilities;
using static BurnOutSharp.Models.Quantum.Constants;
namespace BurnOutSharp.Builders
{
public static class Quantum
{
#region Byte Data
/// <summary>
/// Parse a byte array into a Quantum archive
/// </summary>
/// <param name="data">Byte array to parse</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>Filled archive on success, null on error</returns>
public static Archive ParseArchive(byte[] data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return ParseArchive(dataStream);
}
#endregion
#region Stream Data
/// <summary>
/// Parse a Stream into a Quantum archive
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled archive on success, null on error</returns>
public static Archive ParseArchive(Stream data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
return null;
// If the offset is out of bounds
if (data.Position < 0 || data.Position >= data.Length)
return null;
// Cache the current offset
int initialOffset = (int)data.Position;
// Create a new archive to fill
var archive = new Archive();
#region Header
// Try to parse the header
var header = ParseHeader(data);
if (header == null)
return null;
// Set the archive header
archive.Header = header;
#endregion
#region File List
// If we have any files
if (header.FileCount > 0)
{
var fileDescriptors = new FileDescriptor[header.FileCount];
// Read all entries in turn
for (int i = 0; i < header.FileCount; i++)
{
var file = ParseFileDescriptor(data, header.MinorVersion);
if (file == null)
return null;
fileDescriptors[i] = file;
}
// Set the file list
archive.FileList = fileDescriptors;
}
#endregion
// Cache the compressed data offset
archive.CompressedDataOffset = data.Position;
return archive;
}
/// <summary>
/// Parse a Stream into a header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled header on success, null on error</returns>
private static Header ParseHeader(Stream data)
{
// TODO: Use marshalling here instead of building
Header header = new Header();
byte[] signature = data.ReadBytes(2);
header.Signature = Encoding.ASCII.GetString(signature);
if (header.Signature != SignatureString)
return null;
header.MajorVersion = data.ReadByteValue();
header.MinorVersion = data.ReadByteValue();
header.FileCount = data.ReadUInt16();
header.TableSize = data.ReadByteValue();
header.CompressionFlags = data.ReadByteValue();
return header;
}
/// <summary>
/// Parse a Stream into a file descriptor
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="minorVersion">Minor version of the archive</param>
/// <returns>Filled file descriptor on success, null on error</returns>
private static FileDescriptor ParseFileDescriptor(Stream data, byte minorVersion)
{
// TODO: Use marshalling here instead of building
FileDescriptor fileDescriptor = new FileDescriptor();
fileDescriptor.FileNameSize = ReadVariableLength(data);
if (fileDescriptor.FileNameSize > 0)
{
byte[] fileName = data.ReadBytes(fileDescriptor.FileNameSize);
fileDescriptor.FileName = Encoding.ASCII.GetString(fileName);
}
fileDescriptor.CommentFieldSize = ReadVariableLength(data);
if (fileDescriptor.CommentFieldSize > 0)
{
byte[] commentField = data.ReadBytes(fileDescriptor.CommentFieldSize);
fileDescriptor.CommentField = Encoding.ASCII.GetString(commentField);
}
fileDescriptor.ExpandedFileSize = data.ReadUInt32();
fileDescriptor.FileTime = data.ReadUInt16();
fileDescriptor.FileDate = data.ReadUInt16();
// Hack for unknown format data
if (minorVersion == 22)
fileDescriptor.Unknown = data.ReadUInt16();
return fileDescriptor;
}
/// <summary>
/// Parse a Stream into a variable-length size prefix
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Variable-length size prefix</returns>
/// <remarks>
/// Strings are prefixed with their length. If the length is less than 128
/// then it is stored directly in one byte. If it is greater than 127 then
/// the high bit of the first byte is set to 1 and the remaining fifteen bits
/// contain the actual length in big-endian format.
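/// For example, 0x05 encodes length 5 in a single byte, while the pair
/// 0x81 0x02 has the high bit set and encodes (0x01 * 256) + 0x02 = 258.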
/// </remarks>
private static int ReadVariableLength(Stream data)
{
byte b0 = data.ReadByteValue();
if ((b0 & 0x80) == 0)
return b0;
b0 &= 0x7F;
byte b1 = data.ReadByteValue();
return (b0 << 8) | b1;
}
#endregion
}
}
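A caller sketch for the archive parser above; the file name is illustrative:
// Hypothetical usage: list the file descriptors in a Quantum archive
byte[] quantumData = System.IO.File.ReadAllBytes("archive.q");
var quantumArchive = BurnOutSharp.Builders.Quantum.ParseArchive(quantumData, 0);
if (quantumArchive?.FileList != null)
foreach (var descriptor in quantumArchive.FileList)
System.Console.WriteLine($"{descriptor.FileName}: {descriptor.ExpandedFileSize} bytes");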

View File

@@ -0,0 +1,732 @@
using System.Collections.Generic;
using System.IO;
using System.Text;
using BurnOutSharp.Models.SGA;
using BurnOutSharp.Utilities;
using static BurnOutSharp.Models.SGA.Constants;
namespace BurnOutSharp.Builders
{
public static class SGA
{
#region Byte Data
/// <summary>
/// Parse a byte array into an SGA
/// </summary>
/// <param name="data">Byte array to parse</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>Filled SGA on success, null on error</returns>
public static Models.SGA.File ParseFile(byte[] data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return ParseFile(dataStream);
}
#endregion
#region Stream Data
/// <summary>
/// Parse a Stream into an SGA
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled SGA on success, null on error</returns>
public static Models.SGA.File ParseFile(Stream data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
return null;
// If the offset is out of bounds
if (data.Position < 0 || data.Position >= data.Length)
return null;
// Cache the current offset
long initialOffset = data.Position;
// Create a new SGA to fill
var file = new Models.SGA.File();
#region Header
// Try to parse the header
var header = ParseHeader(data);
if (header == null)
return null;
// Set the SGA header
file.Header = header;
#endregion
#region Directory
// Try to parse the directory
var directory = ParseDirectory(data, header.MajorVersion);
if (directory == null)
return null;
// Set the SGA directory
file.Directory = directory;
#endregion
return file;
}
/// <summary>
/// Parse a Stream into an SGA header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled SGA header on success, null on error</returns>
private static Header ParseHeader(Stream data)
{
// TODO: Use marshalling here instead of building
byte[] signatureBytes = data.ReadBytes(8);
string signature = Encoding.ASCII.GetString(signatureBytes);
if (signature != SignatureString)
return null;
ushort majorVersion = data.ReadUInt16();
ushort minorVersion = data.ReadUInt16();
if (minorVersion != 0)
return null;
switch (majorVersion)
{
// Versions 4 and 5 share the same header
case 4:
case 5:
Header4 header4 = new Header4();
header4.Signature = signature;
header4.MajorVersion = majorVersion;
header4.MinorVersion = minorVersion;
header4.FileMD5 = data.ReadBytes(0x10);
byte[] header4Name = data.ReadBytes(128);
header4.Name = Encoding.Unicode.GetString(header4Name).TrimEnd('\0');
header4.HeaderMD5 = data.ReadBytes(0x10);
header4.HeaderLength = data.ReadUInt32();
header4.FileDataOffset = data.ReadUInt32();
header4.Dummy0 = data.ReadUInt32();
return header4;
// Versions 6 and 7 share the same header
case 6:
case 7:
Header6 header6 = new Header6();
header6.Signature = signature;
header6.MajorVersion = majorVersion;
header6.MinorVersion = minorVersion;
byte[] header6Name = data.ReadBytes(count: 128);
header6.Name = Encoding.Unicode.GetString(header6Name).TrimEnd('\0');
header6.HeaderLength = data.ReadUInt32();
header6.FileDataOffset = data.ReadUInt32();
header6.Dummy0 = data.ReadUInt32();
return header6;
// No other major versions are recognized
default:
return null;
}
}
/// <summary>
/// Parse a Stream into an SGA directory
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="majorVersion">SGA major version</param>
/// <returns>Filled SGA directory on success, null on error</returns>
private static Models.SGA.Directory ParseDirectory(Stream data, ushort majorVersion)
{
#region Directory
// Create the appropriate type of directory
Models.SGA.Directory directory;
switch (majorVersion)
{
case 4: directory = new Directory4(); break;
case 5: directory = new Directory5(); break;
case 6: directory = new Directory6(); break;
case 7: directory = new Directory7(); break;
default: return null;
}
#endregion
// Cache the current offset
long currentOffset = data.Position;
#region Directory Header
// Try to parse the directory header
var directoryHeader = ParseDirectoryHeader(data, majorVersion);
if (directoryHeader == null)
return null;
// Set the directory header
switch (majorVersion)
{
case 4: (directory as Directory4).DirectoryHeader = directoryHeader as DirectoryHeader4; break;
case 5: (directory as Directory5).DirectoryHeader = directoryHeader as DirectoryHeader5; break;
case 6: (directory as Directory6).DirectoryHeader = directoryHeader as DirectoryHeader5; break;
case 7: (directory as Directory7).DirectoryHeader = directoryHeader as DirectoryHeader7; break;
default: return null;
}
#endregion
#region Sections
// Get the sections offset
long sectionOffset;
switch (majorVersion)
{
case 4: sectionOffset = (directoryHeader as DirectoryHeader4).SectionOffset; break;
case 5:
case 6: sectionOffset = (directoryHeader as DirectoryHeader5).SectionOffset; break;
case 7: sectionOffset = (directoryHeader as DirectoryHeader7).SectionOffset; break;
default: return null;
}
// Adjust the sections offset based on the directory
sectionOffset += currentOffset;
// Validate the offset
if (sectionOffset < 0 || sectionOffset >= data.Length)
return null;
// Seek to the sections
data.Seek(sectionOffset, SeekOrigin.Begin);
// Get the section count
uint sectionCount;
switch (majorVersion)
{
case 4: sectionCount = (directoryHeader as DirectoryHeader4).SectionCount; break;
case 5:
case 6: sectionCount = (directoryHeader as DirectoryHeader5).SectionCount; break;
case 7: sectionCount = (directoryHeader as DirectoryHeader7).SectionCount; break;
default: return null;
}
// Create the sections array
object[] sections;
switch (majorVersion)
{
case 4: sections = new Section4[sectionCount]; break;
case 5:
case 6:
case 7: sections = new Section5[sectionCount]; break;
default: return null;
}
// Try to parse the sections
for (int i = 0; i < sections.Length; i++)
{
switch (majorVersion)
{
case 4: sections[i] = ParseSection4(data); break;
case 5:
case 6:
case 7: sections[i] = ParseSection5(data); break;
default: return null;
}
}
// Assign the sections
switch (majorVersion)
{
case 4: (directory as Directory4).Sections = sections as Section4[]; break;
case 5: (directory as Directory5).Sections = sections as Section5[]; break;
case 6: (directory as Directory6).Sections = sections as Section5[]; break;
case 7: (directory as Directory7).Sections = sections as Section5[]; break;
default: return null;
}
#endregion
#region Folders
// Get the folders offset
long folderOffset;
switch (majorVersion)
{
case 4: folderOffset = (directoryHeader as DirectoryHeader4).FolderOffset; break;
case 5: folderOffset = (directoryHeader as DirectoryHeader5).FolderOffset; break;
case 6: folderOffset = (directoryHeader as DirectoryHeader5).FolderOffset; break;
case 7: folderOffset = (directoryHeader as DirectoryHeader7).FolderOffset; break;
default: return null;
}
// Adjust the folders offset based on the directory
folderOffset += currentOffset;
// Validate the offset
if (folderOffset < 0 || folderOffset >= data.Length)
return null;
// Seek to the folders
data.Seek(folderOffset, SeekOrigin.Begin);
// Get the folder count
uint folderCount;
switch (majorVersion)
{
case 4: folderCount = (directoryHeader as DirectoryHeader4).FolderCount; break;
case 5: folderCount = (directoryHeader as DirectoryHeader5).FolderCount; break;
case 6: folderCount = (directoryHeader as DirectoryHeader5).FolderCount; break;
case 7: folderCount = (directoryHeader as DirectoryHeader7).FolderCount; break;
default: return null;
}
// Create the folders array
object[] folders;
switch (majorVersion)
{
case 4: folders = new Folder4[folderCount]; break;
case 5: folders = new Folder5[folderCount]; break;
case 6: folders = new Folder5[folderCount]; break;
case 7: folders = new Folder5[folderCount]; break;
default: return null;
}
// Try to parse the folders
for (int i = 0; i < folders.Length; i++)
{
switch (majorVersion)
{
case 4: folders[i] = ParseFolder4(data); break;
case 5: folders[i] = ParseFolder5(data); break;
case 6: folders[i] = ParseFolder5(data); break;
case 7: folders[i] = ParseFolder5(data); break;
default: return null;
}
}
// Assign the folders
switch (majorVersion)
{
case 4: (directory as Directory4).Folders = folders as Folder4[]; break;
case 5: (directory as Directory5).Folders = folders as Folder5[]; break;
case 6: (directory as Directory6).Folders = folders as Folder5[]; break;
case 7: (directory as Directory7).Folders = folders as Folder5[]; break;
default: return null;
}
#endregion
#region Files
// Get the files offset
long fileOffset;
switch (majorVersion)
{
case 4: fileOffset = (directoryHeader as DirectoryHeader4).FileOffset; break;
case 5: fileOffset = (directoryHeader as DirectoryHeader5).FileOffset; break;
case 6: fileOffset = (directoryHeader as DirectoryHeader5).FileOffset; break;
case 7: fileOffset = (directoryHeader as DirectoryHeader7).FileOffset; break;
default: return null;
}
// Adjust the files offset based on the directory
fileOffset += currentOffset;
// Validate the offset
if (fileOffset < 0 || fileOffset >= data.Length)
return null;
// Seek to the files
data.Seek(fileOffset, SeekOrigin.Begin);
// Get the file count
uint fileCount;
switch (majorVersion)
{
case 4: fileCount = (directoryHeader as DirectoryHeader4).FileCount; break;
case 5: fileCount = (directoryHeader as DirectoryHeader5).FileCount; break;
case 6: fileCount = (directoryHeader as DirectoryHeader5).FileCount; break;
case 7: fileCount = (directoryHeader as DirectoryHeader7).FileCount; break;
default: return null;
}
// Create the files array
object[] files;
switch (majorVersion)
{
case 4: files = new File4[fileCount]; break;
case 5: files = new File4[fileCount]; break;
case 6: files = new File6[fileCount]; break;
case 7: files = new File7[fileCount]; break;
default: return null;
}
// Try to parse the files
for (int i = 0; i < files.Length; i++)
{
switch (majorVersion)
{
case 4: files[i] = ParseFile4(data); break;
case 5: files[i] = ParseFile4(data); break;
case 6: files[i] = ParseFile6(data); break;
case 7: files[i] = ParseFile7(data); break;
default: return null;
}
}
// Assign the files
switch (majorVersion)
{
case 4: (directory as Directory4).Files = files as File4[]; break;
case 5: (directory as Directory5).Files = files as File4[]; break;
case 6: (directory as Directory6).Files = files as File6[]; break;
case 7: (directory as Directory7).Files = files as File7[]; break;
default: return null;
}
#endregion
#region String Table
// Get the string table offset
long stringTableOffset;
switch (majorVersion)
{
case 4: stringTableOffset = (directoryHeader as DirectoryHeader4).StringTableOffset; break;
case 5: stringTableOffset = (directoryHeader as DirectoryHeader5).StringTableOffset; break;
case 6: stringTableOffset = (directoryHeader as DirectoryHeader5).StringTableOffset; break;
case 7: stringTableOffset = (directoryHeader as DirectoryHeader7).StringTableOffset; break;
default: return null;
}
// Adjust the string table offset based on the directory
stringTableOffset += currentOffset;
// Validate the offset
if (stringTableOffset < 0 || stringTableOffset >= data.Length)
return null;
// Seek to the string table
data.Seek(stringTableOffset, SeekOrigin.Begin);
// Get the string table count
uint stringCount;
switch (majorVersion)
{
case 4: stringCount = (directoryHeader as DirectoryHeader4).StringTableCount; break;
case 5: stringCount = (directoryHeader as DirectoryHeader5).StringTableCount; break;
case 6: stringCount = (directoryHeader as DirectoryHeader5).StringTableCount; break;
case 7: stringCount = (directoryHeader as DirectoryHeader7).StringTableCount; break;
default: return null;
}
// TODO: Are these strings actually indexed by number and not position?
// TODO: If indexed by position, I think it needs to be adjusted by start of table
// Create the strings dictionary
Dictionary<long, string> strings = new Dictionary<long, string>((int)stringCount);
// Get the current position to adjust the offsets
long stringTableStart = data.Position;
// Try to parse the strings
for (int i = 0; i < stringCount; i++)
{
long currentPosition = data.Position - stringTableStart;
strings[currentPosition] = data.ReadString(Encoding.ASCII);
}
// Assign the string table
switch (majorVersion)
{
case 4: (directory as Directory4).StringTable = strings; break;
case 5: (directory as Directory5).StringTable = strings; break;
case 6: (directory as Directory6).StringTable = strings; break;
case 7: (directory as Directory7).StringTable = strings; break;
default: return null;
}
// Loop through all folders to assign names
for (int i = 0; i < folderCount; i++)
{
switch (majorVersion)
{
case 4: (directory as Directory4).Folders[i].Name = strings[(directory as Directory4).Folders[i].NameOffset]; break;
case 5: (directory as Directory5).Folders[i].Name = strings[(directory as Directory5).Folders[i].NameOffset]; break;
case 6: (directory as Directory6).Folders[i].Name = strings[(directory as Directory6).Folders[i].NameOffset]; break;
case 7: (directory as Directory7).Folders[i].Name = strings[(directory as Directory7).Folders[i].NameOffset]; break;
default: return null;
}
}
// Loop through all files to assign names
for (int i = 0; i < fileCount; i++)
{
switch (majorVersion)
{
case 4: (directory as Directory4).Files[i].Name = strings[(directory as Directory4).Files[i].NameOffset]; break;
case 5: (directory as Directory5).Files[i].Name = strings[(directory as Directory5).Files[i].NameOffset]; break;
case 6: (directory as Directory6).Files[i].Name = strings[(directory as Directory6).Files[i].NameOffset]; break;
case 7: (directory as Directory7).Files[i].Name = strings[(directory as Directory7).Files[i].NameOffset]; break;
default: return null;
}
}
#endregion
return directory;
}
/// <summary>
/// Parse a Stream into an SGA directory header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="majorVersion">SGA major version</param>
/// <returns>Filled SGA directory header on success, null on error</returns>
private static object ParseDirectoryHeader(Stream data, ushort majorVersion)
{
switch (majorVersion)
{
case 4: return ParseDirectory4Header(data);
case 5: return ParseDirectory5Header(data);
case 6: return ParseDirectory5Header(data);
case 7: return ParseDirectory7Header(data);
default: return null;
}
}
/// <summary>
/// Parse a Stream into an SGA directory header version 4
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled SGA directory header version 4 on success, null on error</returns>
private static DirectoryHeader4 ParseDirectory4Header(Stream data)
{
DirectoryHeader4 directoryHeader4 = new DirectoryHeader4();
directoryHeader4.SectionOffset = data.ReadUInt32();
directoryHeader4.SectionCount = data.ReadUInt16();
directoryHeader4.FolderOffset = data.ReadUInt32();
directoryHeader4.FolderCount = data.ReadUInt16();
directoryHeader4.FileOffset = data.ReadUInt32();
directoryHeader4.FileCount = data.ReadUInt16();
directoryHeader4.StringTableOffset = data.ReadUInt32();
directoryHeader4.StringTableCount = data.ReadUInt16();
return directoryHeader4;
}
/// <summary>
/// Parse a Stream into an SGA directory header version 5
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled SGA directory header version 5 on success, null on error</returns>
private static DirectoryHeader5 ParseDirectory5Header(Stream data)
{
DirectoryHeader5 directoryHeader5 = new DirectoryHeader5();
directoryHeader5.SectionOffset = data.ReadUInt32();
directoryHeader5.SectionCount = data.ReadUInt32();
directoryHeader5.FolderOffset = data.ReadUInt32();
directoryHeader5.FolderCount = data.ReadUInt32();
directoryHeader5.FileOffset = data.ReadUInt32();
directoryHeader5.FileCount = data.ReadUInt32();
directoryHeader5.StringTableOffset = data.ReadUInt32();
directoryHeader5.StringTableCount = data.ReadUInt32();
return directoryHeader5;
}
/// <summary>
/// Parse a Stream into an SGA directory header version 7
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled SGA directory header version 7 on success, null on error</returns>
private static DirectoryHeader7 ParseDirectory7Header(Stream data)
{
DirectoryHeader7 directoryHeader7 = new DirectoryHeader7();
directoryHeader7.SectionOffset = data.ReadUInt32();
directoryHeader7.SectionCount = data.ReadUInt32();
directoryHeader7.FolderOffset = data.ReadUInt32();
directoryHeader7.FolderCount = data.ReadUInt32();
directoryHeader7.FileOffset = data.ReadUInt32();
directoryHeader7.FileCount = data.ReadUInt32();
directoryHeader7.StringTableOffset = data.ReadUInt32();
directoryHeader7.StringTableCount = data.ReadUInt32();
directoryHeader7.HashTableOffset = data.ReadUInt32();
directoryHeader7.BlockSize = data.ReadUInt32();
return directoryHeader7;
}
/// <summary>
/// Parse a Stream into an SGA section version 4
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="majorVersion">SGA major version</param>
/// <returns>Filled SGA section version 4 on success, null on error</returns>
private static Section4 ParseSection4(Stream data)
{
Section4 section4 = new Section4();
byte[] section4Alias = data.ReadBytes(count: 64);
section4.Alias = Encoding.ASCII.GetString(section4Alias).TrimEnd('\0');
byte[] section4Name = data.ReadBytes(64);
section4.Name = Encoding.ASCII.GetString(section4Name).TrimEnd('\0');
section4.FolderStartIndex = data.ReadUInt16();
section4.FolderEndIndex = data.ReadUInt16();
section4.FileStartIndex = data.ReadUInt16();
section4.FileEndIndex = data.ReadUInt16();
section4.FolderRootIndex = data.ReadUInt16();
return section4;
}
/// <summary>
/// Parse a Stream into an SGA section version 5
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="majorVersion">SGA major version</param>
/// <returns>Filled SGA section version 5 on success, null on error</returns>
private static Section5 ParseSection5(Stream data)
{
Section5 section5 = new Section5();
byte[] section5Alias = data.ReadBytes(count: 64);
section5.Alias = Encoding.ASCII.GetString(section5Alias).TrimEnd('\0');
byte[] section5Name = data.ReadBytes(64);
section5.Name = Encoding.ASCII.GetString(section5Name).TrimEnd('\0');
section5.FolderStartIndex = data.ReadUInt32();
section5.FolderEndIndex = data.ReadUInt32();
section5.FileStartIndex = data.ReadUInt32();
section5.FileEndIndex = data.ReadUInt32();
section5.FolderRootIndex = data.ReadUInt32();
return section5;
}
/// <summary>
/// Parse a Stream into an SGA folder version 4
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="majorVersion">SGA major version</param>
/// <returns>Filled SGA folder version 4 on success, null on error</returns>
private static Folder4 ParseFolder4(Stream data)
{
Folder4 folder4 = new Folder4();
folder4.NameOffset = data.ReadUInt32();
folder4.Name = null; // Read from string table
folder4.FolderStartIndex = data.ReadUInt16();
folder4.FolderEndIndex = data.ReadUInt16();
folder4.FileStartIndex = data.ReadUInt16();
folder4.FileEndIndex = data.ReadUInt16();
return folder4;
}
/// <summary>
/// Parse a Stream into an SGA folder version 5
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="majorVersion">SGA major version</param>
/// <returns>Filled SGA folder version 5 on success, null on error</returns>
private static Folder5 ParseFolder5(Stream data)
{
Folder5 folder5 = new Folder5();
folder5.NameOffset = data.ReadUInt32();
folder5.Name = null; // Read from string table
folder5.FolderStartIndex = data.ReadUInt32();
folder5.FolderEndIndex = data.ReadUInt32();
folder5.FileStartIndex = data.ReadUInt32();
folder5.FileEndIndex = data.ReadUInt32();
return folder5;
}
/// <summary>
/// Parse a Stream into an SGA file version 4
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="majorVersion">SGA major version</param>
/// <returns>Filled SGA file version 4 on success, null on error</returns>
private static File4 ParseFile4(Stream data)
{
File4 file4 = new File4();
file4.NameOffset = data.ReadUInt32();
file4.Name = null; // Read from string table
file4.Offset = data.ReadUInt32();
file4.SizeOnDisk = data.ReadUInt32();
file4.Size = data.ReadUInt32();
file4.TimeModified = data.ReadUInt32();
file4.Dummy0 = data.ReadByteValue();
file4.Type = data.ReadByteValue();
return file4;
}
/// <summary>
/// Parse a Stream into an SGA file version 6
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="majorVersion">SGA major version</param>
/// <returns>Filled SGA file version 6 on success, null on error</returns>
private static File6 ParseFile6(Stream data)
{
File6 file6 = new File6();
file6.NameOffset = data.ReadUInt32();
file6.Name = null; // Read from string table
file6.Offset = data.ReadUInt32();
file6.SizeOnDisk = data.ReadUInt32();
file6.Size = data.ReadUInt32();
file6.TimeModified = data.ReadUInt32();
file6.Dummy0 = data.ReadByteValue();
file6.Type = data.ReadByteValue();
file6.CRC32 = data.ReadUInt32();
return file6;
}
/// <summary>
/// Parse a Stream into an SGA file version 7
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="majorVersion">SGA major version</param>
/// <returns>Filled SGA file version 7 on success, null on error</returns>
private static File7 ParseFile7(Stream data)
{
File7 file7 = new File7();
file7.NameOffset = data.ReadUInt32();
file7.Name = null; // Read from string table
file7.Offset = data.ReadUInt32();
file7.SizeOnDisk = data.ReadUInt32();
file7.Size = data.ReadUInt32();
file7.TimeModified = data.ReadUInt32();
file7.Dummy0 = data.ReadByteValue();
file7.Type = data.ReadByteValue();
file7.CRC32 = data.ReadUInt32();
file7.HashOffset = data.ReadUInt32();
return file7;
}
#endregion
}
}

View File

@@ -0,0 +1,141 @@
using System.IO;
using System.Text;
using BurnOutSharp.Models.VBSP;
using BurnOutSharp.Utilities;
using static BurnOutSharp.Models.VBSP.Constants;
namespace BurnOutSharp.Builders
{
public static class VBSP
{
#region Byte Data
/// <summary>
/// Parse a byte array into a Half-Life 2 Level
/// </summary>
/// <param name="data">Byte array to parse</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>Filled Half-Life 2 Level on success, null on error</returns>
public static Models.VBSP.File ParseFile(byte[] data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return ParseFile(dataStream);
}
#endregion
#region Stream Data
/// <summary>
/// Parse a Stream into a Half-Life 2 Level
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life 2 Level on success, null on error</returns>
public static Models.VBSP.File ParseFile(Stream data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
return null;
// If the offset is out of bounds
if (data.Position < 0 || data.Position >= data.Length)
return null;
// Cache the current offset
long initialOffset = data.Position;
// Create a new Half-Life 2 Level to fill
var file = new Models.VBSP.File();
#region Header
// Try to parse the header
var header = ParseHeader(data);
if (header == null)
return null;
// Set the package header
file.Header = header;
#endregion
return file;
}
/// <summary>
/// Parse a Stream into a Half-Life 2 Level header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life 2 Level header on success, null on error</returns>
private static Header ParseHeader(Stream data)
{
// TODO: Use marshalling here instead of building
Header header = new Header();
byte[] signature = data.ReadBytes(4);
header.Signature = Encoding.ASCII.GetString(signature);
if (header.Signature != SignatureString)
return null;
header.Version = data.ReadInt32();
if ((header.Version < 19 || header.Version > 22) && header.Version != 0x00040014)
return null;
header.Lumps = new Lump[HL_VBSP_LUMP_COUNT];
for (int i = 0; i < HL_VBSP_LUMP_COUNT; i++)
{
header.Lumps[i] = ParseLump(data, header.Version);
}
header.MapRevision = data.ReadInt32();
return header;
}
/// <summary>
/// Parse a Stream into a Half-Life 2 Level lump
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="version">VBSP version</param>
/// <returns>Filled Half-Life 2 Level lump on success, null on error</returns>
private static Lump ParseLump(Stream data, int version)
{
// TODO: Use marshalling here instead of building
Lump lump = new Lump();
lump.Offset = data.ReadUInt32();
lump.Length = data.ReadUInt32();
lump.Version = data.ReadUInt32();
lump.FourCC = new char[4];
for (int i = 0; i < 4; i++)
{
lump.FourCC[i] = (char)data.ReadByte();
}
// This block was commented out because test VBSPs with header
// version 21 already had the values in the "right" order, and
// applying the swap caused decompression issues
//if (version >= 21 && version != 0x00040014)
//{
// uint temp = lump.Version;
// lump.Version = lump.Offset;
// lump.Offset = lump.Length;
// lump.Length = temp;
//}
return lump;
}
#endregion
}
}

View File

@@ -0,0 +1,318 @@
using System.Collections.Generic;
using System.IO;
using System.Text;
using BurnOutSharp.Models.VPK;
using BurnOutSharp.Utilities;
using static BurnOutSharp.Models.VPK.Constants;
namespace BurnOutSharp.Builders
{
public static class VPK
{
#region Byte Data
/// <summary>
/// Parse a byte array into a Valve Package
/// </summary>
/// <param name="data">Byte array to parse</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>Filled Valve Package on success, null on error</returns>
public static Models.VPK.File ParseFile(byte[] data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return ParseFile(dataStream);
}
#endregion
#region Stream Data
/// <summary>
/// Parse a Stream into a Valve Package
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Valve Package on success, null on error</returns>
public static Models.VPK.File ParseFile(Stream data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
return null;
// If the offset is out of bounds
if (data.Position < 0 || data.Position >= data.Length)
return null;
// Cache the current offset
long initialOffset = data.Position;
// Create a new Valve Package to fill
var file = new Models.VPK.File();
#region Header
// Try to parse the header
// The original version had no signature.
var header = ParseHeader(data);
// Set the package header
file.Header = header;
#endregion
#region Extended Header
if (header?.Version == 2)
{
// Try to parse the extended header
var extendedHeader = ParseExtendedHeader(data);
if (extendedHeader == null)
return null;
// Set the package extended header
file.ExtendedHeader = extendedHeader;
}
#endregion
#region Directory Items
// Create the directory items tree
var directoryItems = ParseDirectoryItemTree(data);
// Set the directory items
file.DirectoryItems = directoryItems;
#endregion
#region Archive Hashes
if (header?.Version == 2 && file.ExtendedHeader != null && file.ExtendedHeader.ArchiveHashLength > 0)
{
// Create the archive hashes list
var archiveHashes = new List<ArchiveHash>();
// Cache the current offset
initialOffset = data.Position;
// Try to parse the directory items
while (data.Position < initialOffset + file.ExtendedHeader.ArchiveHashLength)
{
var archiveHash = ParseArchiveHash(data);
archiveHashes.Add(archiveHash);
}
file.ArchiveHashes = archiveHashes.ToArray();
}
#endregion
return file;
}
/// <summary>
/// Parse a Stream into a Valve Package header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Valve Package header on success, null on error</returns>
private static Header ParseHeader(Stream data)
{
// TODO: Use marshalling here instead of building
Header header = new Header();
header.Signature = data.ReadUInt32();
if (header.Signature != SignatureUInt32)
return null;
header.Version = data.ReadUInt32();
if (header.Version > 2)
return null;
header.DirectoryLength = data.ReadUInt32();
return header;
}
/// <summary>
/// Parse a Stream into a Valve Package extended header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Valve Package extended header on success, null on error</returns>
private static ExtendedHeader ParseExtendedHeader(Stream data)
{
// TODO: Use marshalling here instead of building
ExtendedHeader extendedHeader = new ExtendedHeader();
extendedHeader.Dummy0 = data.ReadUInt32();
extendedHeader.ArchiveHashLength = data.ReadUInt32();
extendedHeader.ExtraLength = data.ReadUInt32();
extendedHeader.Dummy1 = data.ReadUInt32();
return extendedHeader;
}
/// <summary>
/// Parse a Stream into a Valve Package archive hash
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Valve Package archive hash on success, null on error</returns>
private static ArchiveHash ParseArchiveHash(Stream data)
{
// TODO: Use marshalling here instead of building
ArchiveHash archiveHash = new ArchiveHash();
archiveHash.ArchiveIndex = data.ReadUInt32();
archiveHash.ArchiveOffset = data.ReadUInt32();
archiveHash.Length = data.ReadUInt32();
archiveHash.Hash = data.ReadBytes(0x10);
return archiveHash;
}
/// <summary>
/// Parse a Stream into a Valve Package directory item tree
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Valve Package directory item tree on success, null on error</returns>
private static DirectoryItem[] ParseDirectoryItemTree(Stream data)
{
// Create the directory items list
var directoryItems = new List<DirectoryItem>();
while (true)
{
// Get the extension
string extensionString = data.ReadString(Encoding.ASCII);
if (string.IsNullOrEmpty(extensionString))
break;
// Sanitize the extension
for (int i = 0; i < 0x20; i++)
{
extensionString = extensionString.Replace($"{(char)i}", string.Empty);
}
while (true)
{
// Get the path
string pathString = data.ReadString(Encoding.ASCII);
if (string.IsNullOrEmpty(pathString))
break;
// Sanitize the path
for (int i = 0; i < 0x20; i++)
{
pathString = pathString.Replace($"{(char)i}", string.Empty);
}
while (true)
{
// Get the name
string nameString = data.ReadString(Encoding.ASCII);
if (string.IsNullOrEmpty(nameString))
break;
// Sanitize the name
for (int i = 0; i < 0x20; i++)
{
nameString = nameString.Replace($"{(char)i}", string.Empty);
}
// Get the directory item
var directoryItem = ParseDirectoryItem(data, extensionString, pathString, nameString);
// Add the directory item
directoryItems.Add(directoryItem);
}
}
}
return directoryItems.ToArray();
}
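// Illustrative on-disk layout implied by the loops above (the names are
// hypothetical): each level is a run of null-terminated strings ended by an
// empty string.
//
// "txt\0" // extension level
// "scripts\0" // path level
// "config\0" <entry> // name level, followed by a directory entry
// "\0" // end of names for this path
// "\0" // end of paths for this extension
// "\0" // end of extensions; terminates the tree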
/// <summary>
/// Parse a Stream into a Valve Package directory item
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Valve Package directory item on success, null on error</returns>
private static DirectoryItem ParseDirectoryItem(Stream data, string extension, string path, string name)
{
DirectoryItem directoryItem = new DirectoryItem();
directoryItem.Extension = extension;
directoryItem.Path = path;
directoryItem.Name = name;
// Get the directory entry
var directoryEntry = ParseDirectoryEntry(data);
// Set the directory entry
directoryItem.DirectoryEntry = directoryEntry;
// Get the preload data pointer
long preloadDataPointer = -1; int preloadDataLength = -1;
if (directoryEntry.ArchiveIndex == HL_VPK_NO_ARCHIVE && directoryEntry.EntryLength > 0)
{
preloadDataPointer = directoryEntry.EntryOffset;
preloadDataLength = (int)directoryEntry.EntryLength;
}
else if (directoryEntry.PreloadBytes > 0)
{
preloadDataPointer = data.Position;
preloadDataLength = directoryEntry.PreloadBytes;
}
// If we had a valid preload data pointer
byte[] preloadData = null;
if (preloadDataPointer >= 0 && preloadDataLength > 0)
{
// Cache the current offset
long initialOffset = data.Position;
// Seek to the preload data offset
data.Seek(preloadDataPointer, SeekOrigin.Begin);
// Read the preload data
preloadData = data.ReadBytes(preloadDataLength);
// Seek back to the original offset
data.Seek(initialOffset, SeekOrigin.Begin);
}
// Set the preload data
directoryItem.PreloadData = preloadData;
return directoryItem;
}
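// Note (an assumption based on HLLib-derived VPK handling, not stated in
// these sources): an ArchiveIndex of HL_VPK_NO_ARCHIVE marks entries whose
// data lives in the directory file itself rather than in a numbered archive,
// which is why EntryOffset/EntryLength are treated as preload data above;
// otherwise only PreloadBytes of data follow the entry inline.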
/// <summary>
/// Parse a Stream into a Valve Package directory entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Valve Package directory entry on success, null on error</returns>
private static DirectoryEntry ParseDirectoryEntry(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryEntry directoryEntry = new DirectoryEntry();
directoryEntry.CRC = data.ReadUInt32();
directoryEntry.PreloadBytes = data.ReadUInt16();
directoryEntry.ArchiveIndex = data.ReadUInt16();
directoryEntry.EntryOffset = data.ReadUInt32();
directoryEntry.EntryLength = data.ReadUInt32();
directoryEntry.Dummy0 = data.ReadUInt16();
return directoryEntry;
}
#endregion
}
}

View File

@@ -0,0 +1,266 @@
using System.IO;
using System.Text;
using BurnOutSharp.Models.WAD;
using BurnOutSharp.Utilities;
using static BurnOutSharp.Models.WAD.Constants;
namespace BurnOutSharp.Builders
{
public static class WAD
{
#region Byte Data
/// <summary>
/// Parse a byte array into a Half-Life Texture Package
/// </summary>
/// <param name="data">Byte array to parse</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>Filled Half-Life Texture Package on success, null on error</returns>
public static Models.WAD.File ParseFile(byte[] data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return ParseFile(dataStream);
}
#endregion
#region Stream Data
/// <summary>
/// Parse a Stream into a Half-Life Texture Package
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Texture Package on success, null on error</returns>
public static Models.WAD.File ParseFile(Stream data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
return null;
// If the offset is out of bounds
if (data.Position < 0 || data.Position >= data.Length)
return null;
// Cache the current offset
long initialOffset = data.Position;
// Create a new Half-Life Texture Package to fill
var file = new Models.WAD.File();
#region Header
// Try to parse the header
var header = ParseHeader(data);
if (header == null)
return null;
// Set the package header
file.Header = header;
#endregion
#region Lumps
// Get the lump offset
uint lumpOffset = header.LumpOffset;
if (lumpOffset >= data.Length)
return null;
// Seek to the lump offset
data.Seek(lumpOffset, SeekOrigin.Begin);
// Create the lump array
file.Lumps = new Lump[header.LumpCount];
for (int i = 0; i < header.LumpCount; i++)
{
var lump = ParseLump(data);
file.Lumps[i] = lump;
}
#endregion
#region Lump Infos
// Create the lump info array
file.LumpInfos = new LumpInfo[header.LumpCount];
for (int i = 0; i < header.LumpCount; i++)
{
var lump = file.Lumps[i];
if (lump.Compression != 0)
{
file.LumpInfos[i] = null;
continue;
}
// Get the lump info offset
uint lumpInfoOffset = lump.Offset;
if (lumpInfoOffset >= data.Length)
{
file.LumpInfos[i] = null;
continue;
}
// Seek to the lump info offset
data.Seek(lumpInfoOffset, SeekOrigin.Begin);
// Try to parse the lump info -- TODO: Do we ever set the mipmap level?
var lumpInfo = ParseLumpInfo(data, lump.Type);
file.LumpInfos[i] = lumpInfo;
}
#endregion
return file;
}
/// <summary>
/// Parse a Stream into a Half-Life Texture Package header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Texture Package header on success, null on error</returns>
private static Header ParseHeader(Stream data)
{
// TODO: Use marshalling here instead of building
Header header = new Header();
byte[] signature = data.ReadBytes(4);
header.Signature = Encoding.ASCII.GetString(signature);
if (header.Signature != SignatureString)
return null;
header.LumpCount = data.ReadUInt32();
header.LumpOffset = data.ReadUInt32();
return header;
}
/// <summary>
/// Parse a Stream into a Half-Life Texture Package lump
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Texture Package lump on success, null on error</returns>
private static Lump ParseLump(Stream data)
{
// TODO: Use marshalling here instead of building
Lump lump = new Lump();
lump.Offset = data.ReadUInt32();
lump.DiskLength = data.ReadUInt32();
lump.Length = data.ReadUInt32();
lump.Type = data.ReadByteValue();
lump.Compression = data.ReadByteValue();
lump.Padding0 = data.ReadByteValue();
lump.Padding1 = data.ReadByteValue();
byte[] name = data.ReadBytes(16);
lump.Name = Encoding.ASCII.GetString(name).TrimEnd('\0');
return lump;
}
/// <summary>
/// Parse a Stream into a Half-Life Texture Package lump info
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="type">Lump type</param>
/// <param name="mipmap">Mipmap level</param>
/// <returns>Filled Half-Life Texture Package lump info on success, null on error</returns>
private static LumpInfo ParseLumpInfo(Stream data, byte type, uint mipmap = 0)
{
// TODO: Use marshalling here instead of building
LumpInfo lumpInfo = new LumpInfo();
// Cache the initial offset
long initialOffset = data.Position;
// Type 0x42 has no name, type 0x43 does. Are these flags?
if (type == 0x42)
{
if (mipmap > 0)
return null;
lumpInfo.Width = data.ReadUInt32();
lumpInfo.Height = data.ReadUInt32();
lumpInfo.PixelData = data.ReadBytes((int)(lumpInfo.Width * lumpInfo.Height));
lumpInfo.PaletteSize = data.ReadUInt16();
}
else if (type == 0x43)
{
if (mipmap > 3)
return null;
byte[] name = data.ReadBytes(16);
lumpInfo.Name = Encoding.ASCII.GetString(name);
lumpInfo.Width = data.ReadUInt32();
lumpInfo.Height = data.ReadUInt32();
lumpInfo.PixelOffset = data.ReadUInt32();
_ = data.ReadBytes(12); // Unknown data
// Cache the current offset
long currentOffset = data.Position;
// Seek to the pixel data
data.Seek(initialOffset + lumpInfo.PixelOffset, SeekOrigin.Begin);
// Read the pixel data
lumpInfo.PixelData = data.ReadBytes((int)(lumpInfo.Width * lumpInfo.Height));
// Seek back to the offset
data.Seek(currentOffset, SeekOrigin.Begin);
uint pixelSize = lumpInfo.Width * lumpInfo.Height;
// Mipmap data -- TODO: How do we determine this during initial parsing?
switch (mipmap)
{
case 0: break; // Full-resolution level; no mipmap data to skip
case 1: _ = data.ReadBytes((int)pixelSize); break;
case 2: _ = data.ReadBytes((int)(pixelSize + (pixelSize / 4))); break;
case 3: _ = data.ReadBytes((int)(pixelSize + (pixelSize / 4) + (pixelSize / 16))); break;
default: return null;
}
_ = data.ReadBytes((int)(pixelSize + (pixelSize / 4) + (pixelSize / 16) + (pixelSize / 64))); // Pixel data
lumpInfo.PaletteSize = data.ReadUInt16();
lumpInfo.PaletteData = data.ReadBytes((int)lumpInfo.PaletteSize * 3);
}
else
{
return null;
}
// Adjust based on mipmap level
switch (mipmap)
{
case 0:
break;
case 1:
lumpInfo.Width /= 2;
lumpInfo.Height /= 2;
break;
case 2:
lumpInfo.Width /= 4;
lumpInfo.Height /= 4;
break;
case 3:
lumpInfo.Width /= 8;
lumpInfo.Height /= 8;
break;
default:
return null;
}
return lumpInfo;
}
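// Worked example (a hypothetical 128x128 type-0x43 texture): the full image
// is 128 * 128 = 16384 bytes of pixel data, and the mipmap levels that follow
// are 4096 (/4), 1024 (/16), and 256 (/64) bytes, matching the pixelSize
// fractions skipped above; level N also halves the stored width and height N
// times, as in the adjustment switch.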
#endregion
}
}

View File

@@ -0,0 +1,274 @@
using System.IO;
using System.Text;
using BurnOutSharp.Models.XZP;
using BurnOutSharp.Utilities;
using static BurnOutSharp.Models.XZP.Constants;
namespace BurnOutSharp.Builders
{
public static class XZP
{
#region Byte Data
/// <summary>
/// Parse a byte array into an XBox Package File
/// </summary>
/// <param name="data">Byte array to parse</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>Filled XBox Package File on success, null on error</returns>
public static Models.XZP.File ParseFile(byte[] data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return ParseFile(dataStream);
}
#endregion
#region Stream Data
/// <summary>
/// Parse a Stream into an XBox Package File
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled XBox Package File on success, null on error</returns>
public static Models.XZP.File ParseFile(Stream data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
return null;
// If the offset is out of bounds
if (data.Position < 0 || data.Position >= data.Length)
return null;
// Cache the current offset
long initialOffset = data.Position;
// Create a new XBox Package File to fill
var file = new Models.XZP.File();
#region Header
// Try to parse the header
var header = ParseHeader(data);
if (header == null)
return null;
// Set the package header
file.Header = header;
#endregion
#region Directory Entries
// Create the directory entry array
file.DirectoryEntries = new DirectoryEntry[header.DirectoryEntryCount];
// Try to parse the directory entries
for (int i = 0; i < header.DirectoryEntryCount; i++)
{
var directoryEntry = ParseDirectoryEntry(data);
file.DirectoryEntries[i] = directoryEntry;
}
#endregion
#region Preload Directory Entries
if (header.PreloadBytes > 0)
{
// Create the preload directory entry array
file.PreloadDirectoryEntries = new DirectoryEntry[header.PreloadDirectoryEntryCount];
// Try to parse the preload directory entries
for (int i = 0; i < header.PreloadDirectoryEntryCount; i++)
{
var directoryEntry = ParseDirectoryEntry(data);
file.PreloadDirectoryEntries[i] = directoryEntry;
}
}
#endregion
#region Preload Directory Mappings
if (header.PreloadBytes > 0)
{
// Create the preload directory mapping array
file.PreloadDirectoryMappings = new DirectoryMapping[header.PreloadDirectoryEntryCount];
// Try to parse the preload directory mappings
for (int i = 0; i < header.PreloadDirectoryEntryCount; i++)
{
var directoryMapping = ParseDirectoryMapping(data);
file.PreloadDirectoryMappings[i] = directoryMapping;
}
}
#endregion
#region Directory Items
if (header.DirectoryItemCount > 0)
{
// Get the directory item offset
uint directoryItemOffset = header.DirectoryItemOffset;
if (directoryItemOffset >= data.Length)
return null;
// Seek to the directory items
data.Seek(directoryItemOffset, SeekOrigin.Begin);
// Create the directory item array
file.DirectoryItems = new DirectoryItem[header.DirectoryItemCount];
// Try to parse the directory items
for (int i = 0; i < header.DirectoryItemCount; i++)
{
var directoryItem = ParseDirectoryItem(data);
file.DirectoryItems[i] = directoryItem;
}
}
#endregion
#region Footer
// Seek to the footer
data.Seek(-8, SeekOrigin.End);
// Try to parse the footer
var footer = ParseFooter(data);
if (footer == null)
return null;
// Set the package footer
file.Footer = footer;
#endregion
return file;
}
/// <summary>
/// Parse a Stream into an XBox Package File header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled XBox Package File header on success, null on error</returns>
private static Header ParseHeader(Stream data)
{
// TODO: Use marshalling here instead of building
Header header = new Header();
byte[] signature = data.ReadBytes(4);
header.Signature = Encoding.ASCII.GetString(signature);
if (header.Signature != HeaderSignatureString)
return null;
header.Version = data.ReadUInt32();
if (header.Version != 6)
return null;
header.PreloadDirectoryEntryCount = data.ReadUInt32();
header.DirectoryEntryCount = data.ReadUInt32();
header.PreloadBytes = data.ReadUInt32();
header.HeaderLength = data.ReadUInt32();
header.DirectoryItemCount = data.ReadUInt32();
header.DirectoryItemOffset = data.ReadUInt32();
header.DirectoryItemLength = data.ReadUInt32();
return header;
}
/// <summary>
/// Parse a Stream into an XBox Package File directory entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled XBox Package File directory entry on success, null on error</returns>
private static DirectoryEntry ParseDirectoryEntry(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryEntry directoryEntry = new DirectoryEntry();
directoryEntry.FileNameCRC = data.ReadUInt32();
directoryEntry.EntryLength = data.ReadUInt32();
directoryEntry.EntryOffset = data.ReadUInt32();
return directoryEntry;
}
/// <summary>
/// Parse a Stream into an XBox Package File directory mapping
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled XBox Package File directory mapping on success, null on error</returns>
private static DirectoryMapping ParseDirectoryMapping(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryMapping directoryMapping = new DirectoryMapping();
directoryMapping.PreloadDirectoryEntryIndex = data.ReadUInt16();
return directoryMapping;
}
/// <summary>
/// Parse a Stream into an XBox Package File directory item
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled XBox Package File directory item on success, null on error</returns>
private static DirectoryItem ParseDirectoryItem(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryItem directoryItem = new DirectoryItem();
directoryItem.FileNameCRC = data.ReadUInt32();
directoryItem.NameOffset = data.ReadUInt32();
directoryItem.TimeCreated = data.ReadUInt32();
// Cache the current offset
long currentPosition = data.Position;
// Seek to the name offset
data.Seek(directoryItem.NameOffset, SeekOrigin.Begin);
// Read the name
directoryItem.Name = data.ReadString(Encoding.ASCII);
// Seek back to the right position
data.Seek(currentPosition, SeekOrigin.Begin);
return directoryItem;
}
/// <summary>
/// Parse a Stream into an XBox Package File footer
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled XBox Package File footer on success, null on error</returns>
private static Footer ParseFooter(Stream data)
{
// TODO: Use marshalling here instead of building
Footer footer = new Footer();
footer.FileLength = data.ReadUInt32();
byte[] signature = data.ReadBytes(4);
footer.Signature = Encoding.ASCII.GetString(signature);
if (footer.Signature != FooterSignatureString)
return null;
return footer;
}
#endregion
}
}

View File

@@ -0,0 +1,12 @@
namespace BurnOutSharp.Compression.ADPCM
{
/// <see href="https://github.com/ladislav-zezula/StormLib/blob/master/src/adpcm/adpcm.cpp"/>
public unsafe struct ADPCM_DATA
{
public uint[] pValues;
public int BitCount;
public int field_8;
public int field_C;
public int field_10;
}
}

View File

@@ -0,0 +1,131 @@
using static BurnOutSharp.Compression.ADPCM.Constants;
using static BurnOutSharp.Compression.ADPCM.Helper;
namespace BurnOutSharp.Compression.ADPCM
{
public unsafe class Compressor
{
/// <summary>
/// Compression routine
/// </summary>
/// <see href="https://github.com/ladislav-zezula/StormLib/blob/master/src/adpcm/adpcm.cpp"/>
public int CompressADPCM(void* pvOutBuffer, int cbOutBuffer, void* pvInBuffer, int cbInBuffer, int ChannelCount, int CompressionLevel)
{
TADPCMStream os = new TADPCMStream(pvOutBuffer, cbOutBuffer); // The output stream
TADPCMStream @is = new TADPCMStream(pvInBuffer, cbInBuffer); // The input stream
byte BitShift = (byte)(CompressionLevel - 1);
short[] PredictedSamples = new short[MAX_ADPCM_CHANNEL_COUNT];// Predicted samples for each channel
short[] StepIndexes = new short[MAX_ADPCM_CHANNEL_COUNT]; // Step indexes for each channel
short InputSample = 0; // Input sample for the current channel
int TotalStepSize;
int ChannelIndex;
int AbsDifference;
int Difference;
int MaxBitMask;
int StepSize;
// First byte in the output stream contains zero. The second one contains the compression level
os.WriteByteSample(0);
if (!os.WriteByteSample(BitShift))
return 2;
// Set the initial predicted sample and step index for each channel
PredictedSamples[0] = PredictedSamples[1] = 0;
StepIndexes[0] = StepIndexes[1] = INITIAL_ADPCM_STEP_INDEX;
// Next, InitialSample value for each channel follows
for (int i = 0; i < ChannelCount; i++)
{
// Get the initial sample from the input stream
if (!@is.ReadWordSample(ref InputSample))
return os.LengthProcessed(pvOutBuffer);
// Store the initial sample to our sample array
PredictedSamples[i] = InputSample;
// Also store the loaded sample to the output stream
if (!os.WriteWordSample(InputSample))
return os.LengthProcessed(pvOutBuffer);
}
// Get the initial index
ChannelIndex = ChannelCount - 1;
// Now keep reading the input data as long as there is something in the input buffer
while (@is.ReadWordSample(ref InputSample))
{
int EncodedSample = 0;
// If we have two channels, we need to flip the channel index
ChannelIndex = (ChannelIndex + 1) % ChannelCount;
// Get the difference from the previous sample.
// If the difference is negative, set the sign bit in the encoded sample
AbsDifference = InputSample - PredictedSamples[ChannelIndex];
if (AbsDifference < 0)
{
AbsDifference = -AbsDifference;
EncodedSample |= 0x40;
}
// If the difference is too low (below the difference threshold),
// write a step index modifier marker
StepSize = StepSizeTable[StepIndexes[ChannelIndex]];
if (AbsDifference < (StepSize >> CompressionLevel))
{
if (StepIndexes[ChannelIndex] != 0)
StepIndexes[ChannelIndex]--;
os.WriteByteSample(0x80);
}
else
{
// If the difference is too high, write marker that
// indicates increase in step size
while (AbsDifference > (StepSize << 1))
{
if (StepIndexes[ChannelIndex] >= 0x58)
break;
// Modify the step index
StepIndexes[ChannelIndex] += 8;
if (StepIndexes[ChannelIndex] > 0x58)
StepIndexes[ChannelIndex] = 0x58;
// Write the "modify step index" marker
StepSize = StepSizeTable[StepIndexes[ChannelIndex]];
os.WriteByteSample(0x81);
}
// Get the limit bit value
MaxBitMask = (1 << (BitShift - 1));
MaxBitMask = (MaxBitMask > 0x20) ? 0x20 : MaxBitMask;
Difference = StepSize >> BitShift;
TotalStepSize = 0;
for (int BitVal = 0x01; BitVal <= MaxBitMask; BitVal <<= 1)
{
if ((TotalStepSize + StepSize) <= AbsDifference)
{
TotalStepSize += StepSize;
EncodedSample |= BitVal;
}
StepSize >>= 1;
}
PredictedSamples[ChannelIndex] = (short)UpdatePredictedSample(PredictedSamples[ChannelIndex],
EncodedSample,
Difference + TotalStepSize);
// Write the encoded sample to the output stream
if (!os.WriteByteSample((byte)EncodedSample))
break;
// Calculate the step index to use for the next sample
StepIndexes[ChannelIndex] = GetNextStepIndex(StepIndexes[ChannelIndex], (uint)EncodedSample);
}
}
return os.LengthProcessed(pvOutBuffer);
}
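// Usage sketch (illustrative; the buffer sizing and compression level are
// assumptions, not part of the original sources). Callers need an unsafe
// context to pin the buffers:
//
// byte[] input = GetPcmBytes(); // hypothetical 16-bit PCM source
// byte[] output = new byte[input.Length]; // assumed large enough for ADPCM
// fixed (byte* pIn = input)
// fixed (byte* pOut = output)
// {
// int written = new Compressor().CompressADPCM(
// pOut, output.Length, pIn, input.Length,
// ChannelCount: 1, CompressionLevel: 5);
// }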
}
}

View File

@@ -0,0 +1,51 @@
namespace BurnOutSharp.Compression.ADPCM
{
/// <see href="https://github.com/ladislav-zezula/StormLib/blob/master/src/adpcm/adpcm.h"/>
public static class Constants
{
public const int MAX_ADPCM_CHANNEL_COUNT = 2;
public const byte INITIAL_ADPCM_STEP_INDEX = 0x2C;
#region Tables necessary for decompression
public static readonly int[] NextStepTable =
{
-1, 0, -1, 4, -1, 2, -1, 6,
-1, 1, -1, 5, -1, 3, -1, 7,
-1, 1, -1, 5, -1, 3, -1, 7,
-1, 2, -1, 4, -1, 6, -1, 8
};
public static readonly int[] StepSizeTable =
{
7, 8, 9, 10, 11, 12, 13, 14,
16, 17, 19, 21, 23, 25, 28, 31,
34, 37, 41, 45, 50, 55, 60, 66,
73, 80, 88, 97, 107, 118, 130, 143,
157, 173, 190, 209, 230, 253, 279, 307,
337, 371, 408, 449, 494, 544, 598, 658,
724, 796, 876, 963, 1060, 1166, 1282, 1411,
1552, 1707, 1878, 2066, 2272, 2499, 2749, 3024,
3327, 3660, 4026, 4428, 4871, 5358, 5894, 6484,
7132, 7845, 8630, 9493, 10442, 11487, 12635, 13899,
15289, 16818, 18500, 20350, 22385, 24623, 27086, 29794,
32767
};
#endregion
#region ADPCM decompression present in Starcraft I BETA
public static readonly uint[] adpcm_values_2 = { 0x33, 0x66 };
public static readonly uint[] adpcm_values_3 = { 0x3A, 0x3A, 0x50, 0x70 };
public static readonly uint[] adpcm_values_4 = { 0x3A, 0x3A, 0x3A, 0x3A, 0x4D, 0x66, 0x80, 0x9A };
public static readonly uint[] adpcm_values_6 =
{
0x3A, 0x3A, 0x3A, 0x3A, 0x3A, 0x3A, 0x3A, 0x3A, 0x3A, 0x3A, 0x3A, 0x3A, 0x3A, 0x3A, 0x3A, 0x3A,
0x46, 0x53, 0x60, 0x6D, 0x7A, 0x86, 0x93, 0xA0, 0xAD, 0xBA, 0xC6, 0xD3, 0xE0, 0xED, 0xFA, 0x106
};
#endregion
}
}

View File

@@ -0,0 +1,205 @@
using static BurnOutSharp.Compression.ADPCM.Constants;
using static BurnOutSharp.Compression.ADPCM.Helper;
namespace BurnOutSharp.Compression.ADPCM
{
public unsafe class Decompressor
{
/// <summary>
/// Decompression routine
/// </summary>
/// <see href="https://github.com/ladislav-zezula/StormLib/blob/master/src/adpcm/adpcm.cpp"/>
public int DecompressADPCM(void* pvOutBuffer, int cbOutBuffer, void* pvInBuffer, int cbInBuffer, int ChannelCount)
{
TADPCMStream os = new TADPCMStream(pvOutBuffer, cbOutBuffer); // Output stream
TADPCMStream @is = new TADPCMStream(pvInBuffer, cbInBuffer); // Input stream
byte EncodedSample = 0;
byte BitShift = 0;
short[] PredictedSamples = new short[MAX_ADPCM_CHANNEL_COUNT]; // Predicted sample for each channel
short[] StepIndexes = new short[MAX_ADPCM_CHANNEL_COUNT]; // Predicted step index for each channel
int ChannelIndex; // Current channel index
// Initialize the predicted sample and step index for each channel
PredictedSamples[0] = PredictedSamples[1] = 0;
StepIndexes[0] = StepIndexes[1] = INITIAL_ADPCM_STEP_INDEX;
// The first byte is always zero, the second one contains bit shift (compression level - 1)
@is.ReadByteSample(ref BitShift);
@is.ReadByteSample(ref BitShift);
// Next, InitialSample value for each channel follows
for (int i = 0; i < ChannelCount; i++)
{
// Get the initial sample from the input stream
short InitialSample = 0;
// Attempt to read the initial sample
if (!@is.ReadWordSample(ref InitialSample))
return os.LengthProcessed(pvOutBuffer);
// Store the initial sample to our sample array
PredictedSamples[i] = InitialSample;
// Also store the loaded sample to the output stream
if (!os.WriteWordSample(InitialSample))
return os.LengthProcessed(pvOutBuffer);
}
// Get the initial index
ChannelIndex = ChannelCount - 1;
// Keep reading as long as there is something in the input buffer
while (@is.ReadByteSample(ref EncodedSample))
{
// If we have two channels, we need to flip the channel index
ChannelIndex = (ChannelIndex + 1) % ChannelCount;
if (EncodedSample == 0x80)
{
if (StepIndexes[ChannelIndex] != 0)
StepIndexes[ChannelIndex]--;
if (!os.WriteWordSample(PredictedSamples[ChannelIndex]))
return os.LengthProcessed(pvOutBuffer);
}
else if (EncodedSample == 0x81)
{
// Modify the step index
StepIndexes[ChannelIndex] += 8;
if (StepIndexes[ChannelIndex] > 0x58)
StepIndexes[ChannelIndex] = 0x58;
// Next pass, keep going on the same channel
ChannelIndex = (ChannelIndex + 1) % ChannelCount;
}
else
{
int StepIndex = StepIndexes[ChannelIndex];
int StepSize = StepSizeTable[StepIndex];
// Encode one sample
PredictedSamples[ChannelIndex] = (short)DecodeSample(PredictedSamples[ChannelIndex],
EncodedSample,
StepSize,
StepSize >> BitShift);
// Write the decoded sample to the output stream
if (!os.WriteWordSample(PredictedSamples[ChannelIndex]))
break;
// Calculate the step index to use for the next sample
StepIndexes[ChannelIndex] = GetNextStepIndex(StepIndex, EncodedSample);
}
}
// Return total bytes written since beginning of the output buffer
return os.LengthProcessed(pvOutBuffer);
}
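// Usage sketch (illustrative; the 4x expansion factor is an assumption, not
// a guarantee from the original sources):
//
// byte[] compressed = GetAdpcmBytes(); // hypothetical compressed input
// byte[] pcm = new byte[compressed.Length * 4];
// fixed (byte* pIn = compressed)
// fixed (byte* pOut = pcm)
// {
// int written = new Decompressor().DecompressADPCM(
// pOut, pcm.Length, pIn, compressed.Length, ChannelCount: 2);
// }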
/// <summary>
/// ADPCM decompression present in Starcraft I BETA
/// </summary>
/// <see href="https://github.com/ladislav-zezula/StormLib/blob/master/src/adpcm/adpcm.cpp"/>
public int DecompressADPCM_SC1B(void* pvOutBuffer, int cbOutBuffer, void* pvInBuffer, int cbInBuffer, int ChannelCount)
{
TADPCMStream os = new TADPCMStream(pvOutBuffer, cbOutBuffer); // Output stream
TADPCMStream @is = new TADPCMStream(pvInBuffer, cbInBuffer); // Input stream
ADPCM_DATA AdpcmData = new ADPCM_DATA();
int[] LowBitValues = new int[MAX_ADPCM_CHANNEL_COUNT];
int[] UpperBits = new int[MAX_ADPCM_CHANNEL_COUNT];
int[] BitMasks = new int[MAX_ADPCM_CHANNEL_COUNT];
int[] PredictedSamples = new int[MAX_ADPCM_CHANNEL_COUNT];
int ChannelIndex;
int ChannelIndexMax;
int OutputSample;
byte BitCount = 0;
byte EncodedSample = 0;
short InputValue16 = 0;
int reg_eax;
int Difference;
// The first byte contains number of bits
if (!@is.ReadByteSample(ref BitCount))
return os.LengthProcessed(pvOutBuffer);
if (InitAdpcmData(ref AdpcmData, BitCount) == null)
return os.LengthProcessed(pvOutBuffer);
//assert(AdpcmData.pValues != NULL);
// Init bit values
for (int i = 0; i < ChannelCount; i++)
{
byte OneByte = 0;
if (!@is.ReadByteSample(ref OneByte))
return os.LengthProcessed(pvOutBuffer);
LowBitValues[i] = OneByte & 0x01;
UpperBits[i] = OneByte >> 1;
}
// Next, the initial bit mask for each channel follows
for (int i = 0; i < ChannelCount; i++)
{
if (!@is.ReadWordSample(ref InputValue16))
return os.LengthProcessed(pvOutBuffer);
BitMasks[i] = InputValue16 << AdpcmData.BitCount;
}
// Next, InitialSample value for each channel follows
for (int i = 0; i < ChannelCount; i++)
{
if (!@is.ReadWordSample(ref InputValue16))
return os.LengthProcessed(pvOutBuffer);
PredictedSamples[i] = InputValue16;
os.WriteWordSample(InputValue16);
}
// Get the initial index
ChannelIndexMax = ChannelCount - 1;
ChannelIndex = 0;
// Keep reading as long as there is something in the input buffer
while (@is.ReadByteSample(ref EncodedSample))
{
reg_eax = ((PredictedSamples[ChannelIndex] * 3) << 3) - PredictedSamples[ChannelIndex];
PredictedSamples[ChannelIndex] = ((reg_eax * 10) + 0x80) >> 8;
Difference = (((EncodedSample >> 1) + 1) * BitMasks[ChannelIndex] + AdpcmData.field_10) >> AdpcmData.BitCount;
PredictedSamples[ChannelIndex] = UpdatePredictedSample(PredictedSamples[ChannelIndex], EncodedSample, Difference, 0x01);
BitMasks[ChannelIndex] = (int)((AdpcmData.pValues[EncodedSample >> 1] * BitMasks[ChannelIndex] + 0x80) >> 6);
if (BitMasks[ChannelIndex] < AdpcmData.field_8)
BitMasks[ChannelIndex] = AdpcmData.field_8;
if (BitMasks[ChannelIndex] > AdpcmData.field_C)
BitMasks[ChannelIndex] = AdpcmData.field_C;
reg_eax = (cbInBuffer - @is.LengthProcessed(pvInBuffer)) >> ChannelIndexMax;
OutputSample = PredictedSamples[ChannelIndex];
if (reg_eax < UpperBits[ChannelIndex])
{
if (LowBitValues[ChannelIndex] != 0)
{
OutputSample += (UpperBits[ChannelIndex] - reg_eax);
if (OutputSample > 32767)
OutputSample = 32767;
}
else
{
OutputSample += (reg_eax - UpperBits[ChannelIndex]);
if (OutputSample < -32768)
OutputSample = -32768;
}
}
// Write the word sample and swap channel
os.WriteWordSample((short)(OutputSample));
ChannelIndex = (ChannelIndex + 1) % ChannelCount;
}
return os.LengthProcessed(pvOutBuffer);
}
}
}

View File

@@ -0,0 +1,104 @@
using static BurnOutSharp.Compression.ADPCM.Constants;
namespace BurnOutSharp.Compression.ADPCM
{
/// <see href="https://github.com/ladislav-zezula/StormLib/blob/master/src/adpcm/adpcm.cpp"/>
internal static unsafe class Helper
{
#region Local functions
public static short GetNextStepIndex(int StepIndex, uint EncodedSample)
{
// Get the next step index
StepIndex = StepIndex + NextStepTable[EncodedSample & 0x1F];
// Don't make the step index overflow
if (StepIndex < 0)
StepIndex = 0;
else if (StepIndex > 88)
StepIndex = 88;
return (short)StepIndex;
}
public static int UpdatePredictedSample(int PredictedSample, int EncodedSample, int Difference, int BitMask = 0x40)
{
// Is the sign bit set?
if ((EncodedSample & BitMask) != 0)
{
PredictedSample -= Difference;
if (PredictedSample <= -32768)
PredictedSample = -32768;
}
else
{
PredictedSample += Difference;
if (PredictedSample >= 32767)
PredictedSample = 32767;
}
return PredictedSample;
}
public static int DecodeSample(int PredictedSample, int EncodedSample, int StepSize, int Difference)
{
if ((EncodedSample & 0x01) != 0)
Difference += (StepSize >> 0);
if ((EncodedSample & 0x02) != 0)
Difference += (StepSize >> 1);
if ((EncodedSample & 0x04) != 0)
Difference += (StepSize >> 2);
if ((EncodedSample & 0x08) != 0)
Difference += (StepSize >> 3);
if ((EncodedSample & 0x10) != 0)
Difference += (StepSize >> 4);
if ((EncodedSample & 0x20) != 0)
Difference += (StepSize >> 5);
return UpdatePredictedSample(PredictedSample, EncodedSample, Difference);
}
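// Illustrative example: for EncodedSample == 0b000101 and StepSize == 64, the
// difference accumulates (64 >> 0) + (64 >> 2) = 80, and the sign bit (0x40)
// checked in UpdatePredictedSample then decides whether 80 is added to or
// subtracted from the predicted sample.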
#endregion
#region ADPCM decompression present in Starcraft I BETA
public static uint[] InitAdpcmData(ADPCM_DATA pData, byte BitCount)
{
switch (BitCount)
{
case 2:
pData.pValues = adpcm_values_2;
break;
case 3:
pData.pValues = adpcm_values_3;
break;
case 4:
pData.pValues = adpcm_values_4;
break;
case 6:
pData.pValues = adpcm_values_6;
break;
default:
pData.pValues = null;
break;
}
pData.BitCount = BitCount;
pData.field_C = 0x20000;
pData.field_8 = 1 << BitCount;
pData.field_10 = (1 << BitCount) / 2;
return pData.pValues;
}
#endregion
}
}
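As a rough illustration of what InitAdpcmData derives, assuming a 4-bit stream (the values follow directly from the assignments above):
// Hypothetical check of the derived fields for BitCount == 4:
var data = new ADPCM_DATA();
uint[] values = Helper.InitAdpcmData(data, 4);
// data.pValues == adpcm_values_4 (the 4-bit step table)
// data.field_8 == 1 << 4 == 16 (lower clamp for the bit mask)
// data.field_C == 0x20000 (upper clamp for the bit mask)
// data.field_10 == (1 << 4) / 2 == 8 (rounding term for the difference)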


@@ -0,0 +1,67 @@
namespace BurnOutSharp.Compression.ADPCM
{
/// <summary>
/// Helper class for reading and writing ADPCM sample data
/// </summary>
/// <see href="https://github.com/ladislav-zezula/StormLib/blob/master/src/adpcm/adpcm.cpp"/>
public unsafe class TADPCMStream
{
private byte* pbBufferEnd;
private byte* pbBuffer;
public TADPCMStream(void* pvBuffer, int cbBuffer)
{
pbBufferEnd = (byte*)pvBuffer + cbBuffer;
pbBuffer = (byte*)pvBuffer;
}
public bool ReadByteSample(ref byte ByteSample)
{
// Check if there is any data left in the buffer
if (pbBuffer >= pbBufferEnd)
return false;
ByteSample = *pbBuffer++;
return true;
}
public bool WriteByteSample(byte ByteSample)
{
// Check if there is enough space in the buffer
if (pbBuffer >= pbBufferEnd)
return false;
*pbBuffer++ = ByteSample;
return true;
}
public bool ReadWordSample(ref short OneSample)
{
// Check if there is enough data left in the input buffer
if ((int)(pbBufferEnd - pbBuffer) < sizeof(short))
return false;
// Read the sample
OneSample = (short)(pbBuffer[0] + ((pbBuffer[1]) << 0x08));
pbBuffer += sizeof(short);
return true;
}
public bool WriteWordSample(short OneSample)
{
// Check if we have enough space in the output buffer
if ((int)(pbBufferEnd - pbBuffer) < sizeof(short))
return false;
// Write the sample
*pbBuffer++ = (byte)(OneSample & 0xFF);
*pbBuffer++ = (byte)(OneSample >> 0x08);
return true;
}
public int LengthProcessed(void* pvOutBuffer)
{
return (int)((byte*)pbBuffer - (byte*)pvOutBuffer);
}
}
}
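A small round-trip sketch of the stream wrapper, showing the little-endian word layout it assumes (this must run in an unsafe context; the buffer size is arbitrary):
byte[] buffer = new byte[4];
fixed (byte* pv = buffer)
{
var writer = new TADPCMStream(pv, buffer.Length);
writer.WriteWordSample(0x1234); // stored as 0x34, 0x12
var reader = new TADPCMStream(pv, buffer.Length);
short sample = 0;
reader.ReadWordSample(ref sample); // sample == 0x1234
}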


@@ -0,0 +1,33 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFrameworks>net48;net6.0</TargetFrameworks>
<RuntimeIdentifiers>win-x86;win-x64;linux-x64;osx-x64</RuntimeIdentifiers>
<Title>BurnOutSharp.Compression</Title>
<AssemblyName>BurnOutSharp.Compression</AssemblyName>
<Authors>Matt Nadareski</Authors>
<Product>BurnOutSharp</Product>
<Copyright>Copyright (c)2022 Matt Nadareski</Copyright>
<RepositoryUrl>https://github.com/mnadareski/BurnOutSharp</RepositoryUrl>
<Version>2.7</Version>
<AssemblyVersion>2.7</AssemblyVersion>
<FileVersion>2.7</FileVersion>
<IncludeSource>true</IncludeSource>
<IncludeSymbols>true</IncludeSymbols>
</PropertyGroup>
<PropertyGroup>
<AllowUnsafeBlocks>true</AllowUnsafeBlocks>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="SharpCompress" Version="0.32.2" />
<PackageReference Include="SharpZipLib" Version="1.4.1" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\BurnOutSharp.Models\BurnOutSharp.Models.csproj" />
<ProjectReference Include="..\BurnOutSharp.Utilities\BurnOutSharp.Utilities.csproj" />
</ItemGroup>
</Project>
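Given the multi-targeting and unsafe blocks above, a local build of just this project would look something like the following (standard dotnet CLI; nothing project-specific is assumed):
dotnet build BurnOutSharp.Compression.csproj -c Release -f net6.0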


@@ -0,0 +1,525 @@
using System.IO;
using System.Linq;
using System.Text;
using BurnOutSharp.Models.Compression.LZ;
using BurnOutSharp.Utilities;
using static BurnOutSharp.Models.Compression.LZ.Constants;
namespace BurnOutSharp.Compression
{
/// <see href="https://github.com/wine-mirror/wine/blob/master/dlls/kernel32/lzexpand.c"/>
public class LZ
{
#region Constructors
/// <summary>
/// Constructor
/// </summary>
public LZ() { }
#endregion
#region Static Methods
/// <summary>
/// Decompress LZ-compressed data
/// </summary>
/// <param name="compressed">Byte array representing the compressed data</param>
/// <returns>Decompressed data as a byte array, null on error</returns>
public static byte[] Decompress(byte[] compressed)
{
// If we have an invalid input
if (compressed == null || compressed.Length == 0)
return null;
// Create a memory stream for the input and decompress that
var compressedStream = new MemoryStream(compressed);
return Decompress(compressedStream);
}
/// <summary>
/// Decompress LZ-compressed data
/// </summary>
/// <param name="compressed">Stream representing the compressed data</param>
/// <returns>Decompressed data as a byte array, null on error</returns>
public static byte[] Decompress(Stream compressed)
{
// If we have an invalid input
if (compressed == null || compressed.Length == 0)
return null;
// Create a new LZ for decompression
var lz = new LZ();
// Open the input data
var sourceState = lz.Open(compressed, out _);
if (sourceState?.Window == null)
return null;
// Create the output data and open it
var decompressedStream = new MemoryStream();
var destState = lz.Open(decompressedStream, out _);
if (destState == null)
return null;
// Decompress the data by copying
long read = lz.CopyTo(sourceState, destState, out LZERROR error);
// Copy the data to the buffer
byte[] decompressed;
if (read == 0 || (error != LZERROR.LZERROR_OK && error != LZERROR.LZERROR_NOT_LZ))
{
decompressed = null;
}
else
{
int dataEnd = (int)decompressedStream.Position;
decompressedStream.Seek(0, SeekOrigin.Begin);
decompressed = decompressedStream.ReadBytes(dataEnd);
}
// Close the streams
lz.Close(sourceState);
lz.Close(destState);
return decompressed;
}
/// <summary>
/// Reconstructs the full filename of the compressed file
/// </summary>
public static string GetExpandedName(string input, out LZERROR error)
{
// Try to open the file as a compressed stream
var fileStream = File.Open(input, FileMode.Open, FileAccess.Read, FileShare.ReadWrite);
var lz = new LZ();
var state = lz.Open(fileStream, out error);
if (state?.Window == null)
{
fileStream.Close();
return null;
}
// Only the last header character is needed, so release the stream early
char lastChar = state.LastChar;
lz.Close(state);
// Get the extension for modification
string inputExtension = Path.GetExtension(input).TrimStart('.');
// If we have no extension
if (string.IsNullOrWhiteSpace(inputExtension))
return Path.GetFileNameWithoutExtension(input);
// If we have an extension of length 1
if (inputExtension.Length == 1)
{
if (inputExtension == "_")
return $"{Path.GetFileNameWithoutExtension(input)}.{char.ToLower(lastChar)}";
else
return Path.GetFileNameWithoutExtension(input);
}
// If we have an extension that doesn't end in an underscore
if (!inputExtension.EndsWith("_"))
return Path.GetFileNameWithoutExtension(input);
// Build the new filename
bool isLowerCase = char.IsLower(input[0]);
char replacementChar = isLowerCase ? char.ToLower(lastChar) : char.ToUpper(lastChar);
string outputExtension = inputExtension.Substring(0, inputExtension.Length - 1) + replacementChar;
return $"{Path.GetFileNameWithoutExtension(input)}.{outputExtension}";
}
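// Illustrative results of the logic above, assuming the header's LastChar is 'E':
// "SETUP.EX_" -> "SETUP.EXE" (underscore replaced, case matched to the filename)
// "README._" -> "README.e" (a bare "_" extension always lowercases LastChar)
// "SETUP.TXT" -> "SETUP" (no trailing underscore: extension dropped entirely)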
#endregion
#region State Management
/// <summary>
/// Opens a stream and creates a state from it
/// </summary>
/// <param name="stream">Source stream to create a state from</stream>
/// <param name="error">Output representing the last error</param>
/// <returns>An initialized State, null on error</returns>
/// <remarks>Uncompressed streams are represented by a State with no buffer</remarks>
public State Open(Stream stream, out LZERROR error)
{
State lzs = Init(stream, out error);
if (error == LZERROR.LZERROR_OK || error == LZERROR.LZERROR_NOT_LZ)
return lzs;
return null;
}
/// <summary>
/// Closes a state by invalidating the source
/// </summary>
/// <param name="stream">State object to close</stream>
public void Close(State state)
{
try
{
state?.Source?.Close();
}
catch { }
}
/// <summary>
/// Initializes internal decompression buffers
/// </summary>
/// <param name="source">Input stream to create a state from</param>
/// <param name="error">Output representing the last error</param>
/// <returns>An initialized State, null on error</returns>
/// <remarks>Uncompressed streams are represented by a State with no buffer</remarks>
public State Init(Stream source, out LZERROR error)
{
// If we have an invalid source
if (source == null)
{
error = LZERROR.LZERROR_BADVALUE;
return null;
}
// Attempt to read the header
var fileHeader = ParseFileHeader(source, out error);
// If we had a valid but uncompressed stream
if (error == LZERROR.LZERROR_NOT_LZ)
{
source.Seek(0, SeekOrigin.Begin);
return new State { Source = source };
}
// If we had any error
else if (error != LZERROR.LZERROR_OK)
{
source.Seek(0, SeekOrigin.Begin);
return null;
}
// Initialize the table with all spaces
byte[] table = Enumerable.Repeat((byte)' ', LZ_TABLE_SIZE).ToArray();
// Build the state
var state = new State
{
Source = source,
LastChar = fileHeader.LastChar,
RealLength = fileHeader.RealLength,
Window = new byte[GETLEN],
WindowLength = 0,
WindowCurrent = 0,
Table = table,
CurrentTableEntry = 0xff0,
};
// Return the state
return state;
}
#endregion
#region Stream Functionality
/// <summary>
/// Attempt to read the specified number of bytes from the State
/// </summary>
/// <param name="source">Source State to read from</param>
/// <param name="buffer">Byte buffer to read into</param>
/// <param name="offset">Offset within the buffer to read</param>
/// <param name="count">Number of bytes to read</param>
/// <param name="error">Output representing the last error</param>
/// <returns>The number of bytes read, if possible</returns>
/// <remarks>
/// If the source data is compressed, this will decompress the data.
/// If the source data is uncompressed, it is copied directly
/// </remarks>
public int Read(State source, byte[] buffer, int offset, int count, out LZERROR error)
{
// If we have an uncompressed input
if (source.Window == null)
{
error = LZERROR.LZERROR_NOT_LZ;
return source.Source.Read(buffer, offset, count);
}
// If seeking has occurred, we need to perform the seek
if (source.RealCurrent != source.RealWanted)
{
// If the requested position is before the current, we need to reset
if (source.RealCurrent > source.RealWanted)
{
// Reset the decompressor state
source.Source.Seek(LZ_HEADER_LEN, SeekOrigin.Begin);
FlushWindow(source);
source.RealCurrent = 0;
source.ByteType = 0;
source.StringLength = 0;
source.Table = Enumerable.Repeat((byte)' ', LZ_TABLE_SIZE).ToArray();
source.CurrentTableEntry = 0xFF0;
}
// While we are not at the right offset
while (source.RealCurrent < source.RealWanted)
{
_ = DecompressByte(source, out error);
if (error != LZERROR.LZERROR_OK)
return 0;
}
}
int bytesRemaining = count;
while (bytesRemaining > 0)
{
byte b = DecompressByte(source, out error);
if (error != LZERROR.LZERROR_OK)
return count - bytesRemaining;
source.RealWanted++;
buffer[offset++] = b;
bytesRemaining--;
}
error = LZERROR.LZERROR_OK;
return count;
}
/// <summary>
/// Perform a seek on the source data
/// </summary>
/// <param name="state">State to seek within</param>
/// <param name="offset">Data offset to seek to</state>
/// <param name="seekOrigin">SeekOrigin representing how to seek</state>
/// <param name="error">Output representing the last error</param>
/// <returns>The new position after seeking, -1 on error</returns>
public long Seek(State state, long offset, SeekOrigin seekOrigin, out LZERROR error)
{
// If we have an invalid state
if (state == null)
{
error = LZERROR.LZERROR_BADVALUE;
return -1;
}
// If we have an uncompressed input
if (state.Window == null)
{
error = LZERROR.LZERROR_NOT_LZ;
return state.Source.Seek(offset, seekOrigin);
}
// Otherwise, generate the new offset
long newWanted = state.RealWanted;
switch (seekOrigin)
{
case SeekOrigin.Current:
newWanted += offset;
break;
case SeekOrigin.End:
newWanted = state.RealLength - offset;
break;
default:
newWanted = offset;
break;
}
// If we have an invalid new offset
if (newWanted < 0 || newWanted > state.RealLength)
{
error = LZERROR.LZERROR_BADVALUE;
return -1;
}
error = LZERROR.LZERROR_OK;
state.RealWanted = (uint)newWanted;
return newWanted;
}
/// <summary>
/// Copies all data from the source to the destination
/// </summary>
/// <param name="source">Source State to read from</param>
/// <param name="dest">Destination state to write to</param>
/// <param name="error">Output representing the last error</param>
/// <returns>The number of bytes written, 0 on error</returns>
/// <remarks>
/// If the source data is compressed, this will decompress the data.
/// If the source data is uncompressed, it is copied directly
/// </remarks>
public long CopyTo(State source, State dest, out LZERROR error)
{
error = LZERROR.LZERROR_OK;
// If we have an uncompressed input
if (source.Window == null)
{
source.Source.CopyTo(dest.Source);
return source.Source.Length;
}
// Loop until we have read everything
long length = 0;
while (true)
{
// Read at most 1000 bytes
byte[] buf = new byte[1000];
int read = Read(source, buf, 0, buf.Length, out error);
// If we had an error
if (read == 0)
{
if (error == LZERROR.LZERROR_NOT_LZ)
{
error = LZERROR.LZERROR_OK;
break;
}
else if (error != LZERROR.LZERROR_OK)
{
error = LZERROR.LZERROR_READ;
return 0;
}
}
// Otherwise, append the length read and write the data
length += read;
dest.Source.Write(buf, 0, read);
}
return length;
}
/// <summary>
/// Decompress a single byte of data from the source State
/// </summary>
/// <param name="source">Source State to read from</param>
/// <param name="error">Output representing the last error</param>
/// <returns>The read byte, if possible</returns>
private byte DecompressByte(State source, out LZERROR error)
{
byte b;
if (source.StringLength != 0)
{
b = source.Table[source.StringPosition];
source.StringPosition = (source.StringPosition + 1) & 0xFFF;
source.StringLength--;
}
else
{
if ((source.ByteType & 0x100) == 0)
{
b = ReadByte(source, out error);
if (error != LZERROR.LZERROR_OK)
return 0;
source.ByteType = (ushort)(b | 0xFF00);
}
if ((source.ByteType & 1) != 0)
{
b = ReadByte(source, out error);
if (error != LZERROR.LZERROR_OK)
return 0;
}
else
{
byte b1 = ReadByte(source, out error);
if (error != LZERROR.LZERROR_OK)
return 0;
byte b2 = ReadByte(source, out error);
if (error != LZERROR.LZERROR_OK)
return 0;
// Format:
// b1 b2
// AB CD
// where CAB is the string offset in the table
// and D+3 is the length of the string
source.StringPosition = (uint)(b1 | ((b2 & 0xf0) << 4));
source.StringLength = (byte)((b2 & 0xf) + 2);
// (the format says +3, but one byte of the string is emitted directly below)
b = source.Table[source.StringPosition];
source.StringPosition = (source.StringPosition + 1) & 0xFFF;
}
source.ByteType >>= 1;
}
// Store b in table
source.Table[source.CurrentTableEntry++] = b;
source.CurrentTableEntry &= 0xFFF;
source.RealCurrent++;
error = LZERROR.LZERROR_OK;
return b;
}
/// <summary>
/// Reads one compressed byte, including buffering
/// </summary>
/// <param name="state">State to read using</param>
/// <param name="error">Output representing the last error</param>
/// <returns>Byte value that was read, if possible</returns>
private byte ReadByte(State state, out LZERROR error)
{
// If we have enough data in the buffer
if (state.WindowCurrent < state.WindowLength)
{
error = LZERROR.LZERROR_OK;
return state.Window[state.WindowCurrent++];
}
// Otherwise, read from the source
int ret = state.Source.Read(state.Window, 0, GETLEN);
if (ret == 0)
{
error = LZERROR.LZERROR_NOT_LZ;
return 0;
}
// Reset the window state
state.WindowLength = (uint)ret;
state.WindowCurrent = 1;
error = LZERROR.LZERROR_OK;
return state.Window[0];
}
/// <summary>
/// Reset the current window position to the length
/// </summary>
/// <param name="state">State to flush</param>
private void FlushWindow(State state)
{
state.WindowCurrent = state.WindowLength;
}
/// <summary>
/// Parse a Stream into a file header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="error">Output representing the last error</param>
/// <returns>Filled file header on success, null on error</returns>
private FileHeaader ParseFileHeader(Stream data, out LZERROR error)
{
error = LZERROR.LZERROR_OK;
FileHeaader fileHeader = new FileHeaader();
byte[] magic = data.ReadBytes(LZ_MAGIC_LEN);
fileHeader.Magic = Encoding.ASCII.GetString(magic);
if (fileHeader.Magic != MagicString)
{
error = LZERROR.LZERROR_NOT_LZ;
return null;
}
fileHeader.CompressionType = data.ReadByteValue();
if (fileHeader.CompressionType != (byte)'A')
{
error = LZERROR.LZERROR_UNKNOWNALG;
return null;
}
fileHeader.LastChar = (char)data.ReadByteValue();
fileHeader.RealLength = data.ReadUInt32();
return fileHeader;
}
#endregion
}
}
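End to end, the static helpers above can be exercised with something like this (the file names are illustrative; any SZDD-style compressed file applies):
byte[] compressed = File.ReadAllBytes("SETUP.EX_");
byte[] expanded = LZ.Decompress(compressed);
if (expanded != null)
File.WriteAllBytes("SETUP.EXE", expanded);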


@@ -0,0 +1,12 @@
namespace BurnOutSharp.Compression.LZX
{
/// <see href="https://github.com/wine-mirror/wine/blob/master/dlls/cabinet/cabinet.h"/>
public class Bits
{
public uint BitBuffer;
public int BitsLeft;
public int InputPosition; //byte*
}
}


@@ -0,0 +1,759 @@
using System;
using BurnOutSharp.Models.Compression.LZX;
using static BurnOutSharp.Models.Compression.LZX.Constants;
using static BurnOutSharp.Models.MicrosoftCabinet.Constants;
namespace BurnOutSharp.Compression.LZX
{
/// <see href="https://github.com/wine-mirror/wine/blob/master/dlls/cabinet/fdi.c"/>
public class Decompressor
{
/// <summary>
/// Initialize an LZX decompressor state
/// </summary>
public static bool Init(int window, State state)
{
uint wndsize = (uint)(1 << window);
int posn_slots;
/* LZX supports window sizes of 2^15 (32Kb) through 2^21 (2Mb) */
/* if a previously allocated window is big enough, keep it */
if (window < 15 || window > 21)
return false;
if (state.actual_size < wndsize)
state.window = null;
if (state.window == null)
{
state.window = new byte[wndsize];
state.actual_size = wndsize;
}
state.window_size = wndsize;
/* calculate required position slots */
if (window == 20) posn_slots = 42;
else if (window == 21) posn_slots = 50;
else posn_slots = window << 1;
/*posn_slots=i=0; while (i < wndsize) i += 1 << CAB(extra_bits)[posn_slots++]; */
state.R0 = state.R1 = state.R2 = 1;
state.main_elements = (ushort)(LZX_NUM_CHARS + (posn_slots << 3));
state.header_read = 0;
state.frames_read = 0;
state.block_remaining = 0;
state.block_type = LZX_BLOCKTYPE_INVALID;
state.intel_curpos = 0;
state.intel_started = 0;
state.window_posn = 0;
/* initialize tables to 0 (because deltas will be applied to them) */
Array.Clear(state.tblMAINTREE_len, 0, state.tblMAINTREE_len.Length);
Array.Clear(state.tblLENGTH_len, 0, state.tblLENGTH_len.Length);
return true;
}
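// Illustrative: CAB folders encode the LZX window size in the compression type
// field, so a typical caller derives the parameter as
// int window = (typeCompress >> 8) & 0x1f; // e.g. 21 for LZX:21
// var state = new State();
// if (!Init(window, state)) { /* unsupported window size */ }
// (the bit layout here is the usual CAB convention, assumed rather than taken
// from this file)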
/// <summary>
/// Decompress a byte array using a given State
/// </summary>
public static bool Decompress(State state, int inlen, byte[] inbuf, int outlen, byte[] outbuf)
{
int inpos = 0; // inbuf[0];
int endinp = inpos + inlen;
int window = 0; // state.window[0];
int runsrc, rundest; // byte*
uint window_posn = state.window_posn;
uint window_size = state.window_size;
uint R0 = state.R0;
uint R1 = state.R1;
uint R2 = state.R2;
uint match_offset, i, j, k; /* ijk used in READ_HUFFSYM macro */
Bits lb = new Bits(); /* used in READ_LENGTHS macro */
int togo = outlen, this_run, main_element, aligned_bits;
int match_length, copy_length, length_footer, extra, verbatim_bits;
INIT_BITSTREAM(out int bitsleft, out uint bitbuf);
/* read header if necessary */
if (state.header_read == 0)
{
i = j = 0;
k = READ_BITS(1, inbuf, ref inpos, ref bitsleft, ref bitbuf);
if (k != 0)
{
i = READ_BITS(16, inbuf, ref inpos, ref bitsleft, ref bitbuf);
j = READ_BITS(16, inbuf, ref inpos, ref bitsleft, ref bitbuf);
}
state.intel_filesize = (int)((i << 16) | j); /* or 0 if not encoded */
state.header_read = 1;
}
/* main decoding loop */
while (togo > 0)
{
/* last block finished, new block expected */
if (state.block_remaining == 0)
{
if (state.block_type == LZX_BLOCKTYPE_UNCOMPRESSED)
{
if ((state.block_length & 1) != 0)
inpos++; /* realign bitstream to word */
INIT_BITSTREAM(out bitsleft, out bitbuf);
}
state.block_type = (ushort)READ_BITS(3, inbuf, ref inpos, ref bitsleft, ref bitbuf);
i = READ_BITS(16, inbuf, ref inpos, ref bitsleft, ref bitbuf);
j = READ_BITS(8, inbuf, ref inpos, ref bitsleft, ref bitbuf);
state.block_remaining = state.block_length = (i << 8) | j;
switch (state.block_type)
{
case LZX_BLOCKTYPE_ALIGNED:
for (i = 0; i < 8; i++)
{
j = READ_BITS(3, inbuf, ref inpos, ref bitsleft, ref bitbuf);
state.tblALIGNED_len[i] = (byte)j;
}
make_decode_table(LZX_ALIGNED_MAXSYMBOLS, LZX_ALIGNED_TABLEBITS, state.tblALIGNED_len, state.tblALIGNED_table);
/* rest of aligned header is same as verbatim */
goto case LZX_BLOCKTYPE_VERBATIM;
case LZX_BLOCKTYPE_VERBATIM:
READ_LENGTHS(state.tblMAINTREE_len, 0, 256, lb, state, inbuf, ref inpos, ref bitsleft, ref bitbuf);
READ_LENGTHS(state.tblMAINTREE_len, 256, state.main_elements, lb, state, inbuf, ref inpos, ref bitsleft, ref bitbuf);
make_decode_table(LZX_MAINTREE_MAXSYMBOLS, LZX_MAINTREE_TABLEBITS, state.tblMAINTREE_len, state.tblMAINTREE_table);
if (state.tblMAINTREE_len[0xE8] != 0)
state.intel_started = 1;
READ_LENGTHS(state.tblLENGTH_len, 0, LZX_NUM_SECONDARY_LENGTHS, lb, state, inbuf, ref inpos, ref bitsleft, ref bitbuf);
make_decode_table(LZX_LENGTH_MAXSYMBOLS, LZX_LENGTH_TABLEBITS, state.tblLENGTH_len, state.tblLENGTH_table);
break;
case LZX_BLOCKTYPE_UNCOMPRESSED:
state.intel_started = 1; /* because we can't assume otherwise */
ENSURE_BITS(16, inbuf, ref inpos, ref bitsleft, ref bitbuf); /* get up to 16 pad bits into the buffer */
/* and align the bitstream! */
if (bitsleft > 16)
inpos -= 2;
R0 = (uint)(inbuf[inpos + 0] | (inbuf[inpos + 1] << 8) | (inbuf[inpos + 2] << 16) | (inbuf[inpos + 3] << 24)); inpos += 4;
R1 = (uint)(inbuf[inpos + 0] | (inbuf[inpos + 1] << 8) | (inbuf[inpos + 2] << 16) | (inbuf[inpos + 3] << 24)); inpos += 4;
R2 = (uint)(inbuf[inpos + 0] | (inbuf[inpos + 1] << 8) | (inbuf[inpos + 2] << 16) | (inbuf[inpos + 3] << 24)); inpos += 4;
break;
default:
return false;
}
}
/* buffer exhaustion check */
if (inpos > endinp)
{
/* it's possible to have a file where the next run is less than
* 16 bits in size. In this case, the READ_HUFFSYM() macro used
* in building the tables will exhaust the buffer, so we should
* allow for this, but not allow those accidentally read bits to
* be used (so we check that there are at least 16 bits
* remaining - in this boundary case they aren't really part of
* the compressed data)
*/
if (inpos > (endinp + 2) || bitsleft < 16)
return false;
}
while ((this_run = (int)state.block_remaining) > 0 && togo > 0)
{
if (this_run > togo) this_run = togo;
togo -= this_run;
state.block_remaining -= (uint)this_run;
/* apply 2^x-1 mask */
window_posn &= window_size - 1;
/* runs can't straddle the window wraparound */
if ((window_posn + this_run) > window_size)
return false;
switch (state.block_type)
{
case LZX_BLOCKTYPE_VERBATIM:
while (this_run > 0)
{
main_element = READ_HUFFSYM(state.tblMAINTREE_table, state.tblMAINTREE_len, LZX_MAINTREE_TABLEBITS, LZX_MAINTREE_MAXSYMBOLS, inbuf, ref inpos, ref bitsleft, ref bitbuf);
if (main_element < LZX_NUM_CHARS)
{
/* literal: 0 to LZX_NUM_CHARS-1 */
state.window[window + window_posn++] = (byte)main_element;
this_run--;
}
else
{
/* match: LZX_NUM_CHARS + ((slot<<3) | length_header (3 bits)) */
main_element -= LZX_NUM_CHARS;
match_length = main_element & LZX_NUM_PRIMARY_LENGTHS;
if (match_length == LZX_NUM_PRIMARY_LENGTHS)
{
length_footer = READ_HUFFSYM(state.tblLENGTH_table, state.tblLENGTH_len, LZX_LENGTH_TABLEBITS, LZX_LENGTH_MAXSYMBOLS, inbuf, ref inpos, ref bitsleft, ref bitbuf);
match_length += length_footer;
}
match_length += LZX_MIN_MATCH;
match_offset = (uint)(main_element >> 3);
if (match_offset > 2)
{
/* not repeated offset */
if (match_offset != 3)
{
extra = state.ExtraBits[match_offset];
verbatim_bits = (int)READ_BITS(extra, inbuf, ref inpos, ref bitsleft, ref bitbuf);
match_offset = (uint)(state.PositionSlotBases[match_offset] - 2 + verbatim_bits);
}
else
{
match_offset = 1;
}
/* update repeated offset LRU queue */
R2 = R1; R1 = R0; R0 = match_offset;
}
else if (match_offset == 0)
{
match_offset = R0;
}
else if (match_offset == 1)
{
match_offset = R1;
R1 = R0; R0 = match_offset;
}
else /* match_offset == 2 */
{
match_offset = R2;
R2 = R0; R0 = match_offset;
}
rundest = (int)(window + window_posn);
this_run -= match_length;
/* copy any wrapped around source data */
if (window_posn >= match_offset)
{
/* no wrap */
runsrc = (int)(rundest - match_offset);
}
else
{
runsrc = (int)(rundest + (window_size - match_offset));
copy_length = (int)(match_offset - window_posn);
if (copy_length < match_length)
{
match_length -= copy_length;
window_posn += (uint)copy_length;
while (copy_length-- > 0)
{
state.window[rundest++] = state.window[runsrc++];
}
runsrc = window;
}
}
window_posn += (uint)match_length;
/* copy match data - no worries about destination wraps */
while (match_length-- > 0)
{
state.window[rundest++] = state.window[runsrc++];
}
}
}
break;
case LZX_BLOCKTYPE_ALIGNED:
while (this_run > 0)
{
main_element = READ_HUFFSYM(state.tblMAINTREE_table, state.tblMAINTREE_len, LZX_MAINTREE_TABLEBITS, LZX_MAINTREE_MAXSYMBOLS, inbuf, ref inpos, ref bitsleft, ref bitbuf);
if (main_element < LZX_NUM_CHARS)
{
/* literal: 0 to LZX_NUM_CHARS-1 */
state.window[window + window_posn++] = (byte)main_element;
this_run--;
}
else
{
/* match: LZX_NUM_CHARS + ((slot<<3) | length_header (3 bits)) */
main_element -= LZX_NUM_CHARS;
match_length = main_element & LZX_NUM_PRIMARY_LENGTHS;
if (match_length == LZX_NUM_PRIMARY_LENGTHS)
{
length_footer = READ_HUFFSYM(state.tblLENGTH_table, state.tblLENGTH_len, LZX_LENGTH_TABLEBITS, LZX_LENGTH_MAXSYMBOLS, inbuf, ref inpos, ref bitsleft, ref bitbuf);
match_length += length_footer;
}
match_length += LZX_MIN_MATCH;
match_offset = (uint)(main_element >> 3);
if (match_offset > 2)
{
/* not repeated offset */
extra = state.ExtraBits[match_offset];
match_offset = state.PositionSlotBases[match_offset] - 2;
if (extra > 3)
{
/* verbatim and aligned bits */
extra -= 3;
verbatim_bits = (int)READ_BITS(extra, inbuf, ref inpos, ref bitsleft, ref bitbuf);
match_offset += (uint)(verbatim_bits << 3);
aligned_bits = READ_HUFFSYM(state.tblALIGNED_table, state.tblALIGNED_len, LZX_ALIGNED_TABLEBITS, LZX_ALIGNED_MAXSYMBOLS, inbuf, ref inpos, ref bitsleft, ref bitbuf);
match_offset += (uint)aligned_bits;
}
else if (extra == 3)
{
/* aligned bits only */
aligned_bits = READ_HUFFSYM(state.tblALIGNED_table, state.tblALIGNED_len, LZX_ALIGNED_TABLEBITS, LZX_ALIGNED_MAXSYMBOLS, inbuf, ref inpos, ref bitsleft, ref bitbuf);
match_offset += (uint)aligned_bits;
}
else if (extra > 0)
{
/* extra==1, extra==2 */
/* verbatim bits only */
verbatim_bits = (int)READ_BITS(extra, inbuf, ref inpos, ref bitsleft, ref bitbuf);
match_offset += (uint)verbatim_bits;
}
else /* extra == 0 */
{
/* ??? */
match_offset = 1;
}
/* update repeated offset LRU queue */
R2 = R1; R1 = R0; R0 = match_offset;
}
else if (match_offset == 0)
{
match_offset = R0;
}
else if (match_offset == 1)
{
match_offset = R1;
R1 = R0; R0 = match_offset;
}
else /* match_offset == 2 */
{
match_offset = R2;
R2 = R0; R0 = match_offset;
}
rundest = (int)(window + window_posn);
this_run -= match_length;
/* copy any wrapped around source data */
if (window_posn >= match_offset)
{
/* no wrap */
runsrc = (int)(rundest - match_offset);
}
else
{
runsrc = (int)(rundest + (window_size - match_offset));
copy_length = (int)(match_offset - window_posn);
if (copy_length < match_length)
{
match_length -= copy_length;
window_posn += (uint)copy_length;
while (copy_length-- > 0)
{
state.window[rundest++] = state.window[runsrc++];
}
runsrc = window;
}
}
window_posn += (uint)match_length;
/* copy match data - no worries about destination wraps */
while (match_length-- > 0)
{
state.window[rundest++] = state.window[runsrc++];
}
}
}
break;
case LZX_BLOCKTYPE_UNCOMPRESSED:
if ((inpos + this_run) > endinp)
return false;
Array.Copy(inbuf, inpos, state.window, window + window_posn, this_run);
inpos += this_run;
window_posn += (uint)this_run;
break;
default:
return false; /* might as well */
}
}
}
if (togo != 0)
return false;
Array.Copy(state.window, window + ((window_posn == 0) ? window_size : window_posn) - outlen, outbuf, 0, outlen);
state.window_posn = window_posn;
state.R0 = R0;
state.R1 = R1;
state.R2 = R2;
/* intel E8 decoding */
if ((state.frames_read++ < 32768) && state.intel_filesize != 0)
{
if (outlen <= 6 || state.intel_started == 0)
{
state.intel_curpos += outlen;
}
else
{
int data = 0; // outbuf[0];
int dataend = data + outlen - 10;
int curpos = state.intel_curpos;
int filesize = state.intel_filesize;
int abs_off, rel_off;
state.intel_curpos = curpos + outlen;
while (data < dataend)
{
if (outbuf[data++] != 0xE8)
{
curpos++;
continue;
}
abs_off = outbuf[data + 0] | (outbuf[data + 1] << 8) | (outbuf[data + 2] << 16) | (outbuf[data + 3] << 24);
if ((abs_off >= -curpos) && (abs_off < filesize))
{
rel_off = (abs_off >= 0) ? abs_off - curpos : abs_off + filesize;
outbuf[data + 0] = (byte)rel_off;
outbuf[data + 1] = (byte)(rel_off >> 8);
outbuf[data + 2] = (byte)(rel_off >> 16);
outbuf[data + 3] = (byte)(rel_off >> 24);
}
data += 4;
curpos += 5;
}
}
}
return true;
}
/// <summary>
/// Read and build the Huffman tree from the lengths
/// </summary>
private static int ReadLengths(byte[] lengths, uint first, uint last, Bits lb, State state, byte[] inbuf)
{
uint x, y;
uint bitbuf = lb.BitBuffer;
int bitsleft = lb.BitsLeft;
int inpos = lb.InputPosition;
for (x = 0; x < 20; x++)
{
y = READ_BITS(4, inbuf, ref inpos, ref bitsleft, ref bitbuf);
state.tblPRETREE_len[x] = (byte)y;
}
make_decode_table(LZX_PRETREE_MAXSYMBOLS, LZX_PRETREE_TABLEBITS, state.tblPRETREE_len, state.tblPRETREE_table);
for (x = first; x < last;)
{
int z = READ_HUFFSYM(state.tblPRETREE_table, state.tblPRETREE_len, LZX_PRETREE_TABLEBITS, LZX_PRETREE_MAXSYMBOLS, inbuf, ref inpos, ref bitsleft, ref bitbuf);
if (z == 17)
{
y = READ_BITS(4, inbuf, ref inpos, ref bitsleft, ref bitbuf);
y += 4;
while (y-- > 0)
{
lengths[x++] = 0;
}
}
else if (z == 18)
{
y = READ_BITS(5, inbuf, ref inpos, ref bitsleft, ref bitbuf);
y += 20;
while (y-- > 0)
{
lengths[x++] = 0;
}
}
else if (z == 19)
{
y = READ_BITS(1, inbuf, ref inpos, ref bitsleft, ref bitbuf);
y += 4;
z = READ_HUFFSYM(state.tblPRETREE_table, state.tblPRETREE_len, LZX_PRETREE_TABLEBITS, LZX_PRETREE_MAXSYMBOLS, inbuf, ref inpos, ref bitsleft, ref bitbuf);
z = lengths[x] - z;
if (z < 0)
z += 17;
while (y-- > 0)
{
lengths[x++] = (byte)z;
}
}
else
{
z = lengths[x] - z;
if (z < 0)
z += 17;
lengths[x++] = (byte)z;
}
}
lb.BitBuffer = bitbuf;
lb.BitsLeft = bitsleft;
lb.InputPosition = inpos;
return 0;
}
// Bitstream reading macros (LZX / intel little-endian byte order)
#region Bitstream Reading Macros
/*
* These bit access routines work by using the area beyond the MSB and the
* LSB as a free source of zeroes. This avoids having to mask any bits.
* So we have to know the bit width of the bitbuffer variable.
*/
/// <summary>
/// Should be used first to set up the system
/// </summary>
private static void INIT_BITSTREAM(out int bitsleft, out uint bitbuf)
{
bitsleft = 0;
bitbuf = 0;
}
/// <summary>
/// Ensures there are at least N bits in the bit buffer. It can guarantee
/// up to 17 bits (i.e. it can read in 16 new bits when there is down to
/// 1 bit in the buffer, and it can read 32 bits when there are 0 bits in
/// the buffer).
/// </summary>
/// <remarks>Quantum reads bytes in normal order; LZX is little-endian order</remarks>
private static void ENSURE_BITS(int n, byte[] inbuf, ref int inpos, ref int bitsleft, ref uint bitbuf)
{
while (bitsleft < n)
{
byte b0 = inpos + 0 < inbuf.Length ? inbuf[inpos + 0] : (byte)0;
byte b1 = inpos + 1 < inbuf.Length ? inbuf[inpos + 1] : (byte)0;
bitbuf |= (uint)(((b1 << 8) | b0) << (16 - bitsleft));
bitsleft += 16;
inpos += 2;
}
}
/// <summary>
/// Extracts (without removing) N bits from the bit buffer
/// </summary>
private static uint PEEK_BITS(int n, uint bitbuf)
{
return bitbuf >> (32 - n);
}
/// <summary>
/// Removes N bits from the bit buffer
/// </summary>
private static void REMOVE_BITS(int n, ref int bitsleft, ref uint bitbuf)
{
bitbuf <<= n;
bitsleft -= n;
}
/// <summary>
/// Takes N bits from the buffer and puts them in v.
/// </summary>
private static uint READ_BITS(int n, byte[] inbuf, ref int inpos, ref int bitsleft, ref uint bitbuf)
{
uint v = 0;
if (n > 0)
{
ENSURE_BITS(n, inbuf, ref inpos, ref bitsleft, ref bitbuf);
v = PEEK_BITS(n, bitbuf);
REMOVE_BITS(n, ref bitsleft, ref bitbuf);
}
return v;
}
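// Worked example (illustrative): starting with an empty buffer, READ_BITS(3, ...)
// first calls ENSURE_BITS, which loads two input bytes b0, b1 as the 16-bit value
// (b1 << 8) | b0 into the top half of the 32-bit buffer. PEEK_BITS(3) then returns
// bitbuf >> 29 (the top three bits), and REMOVE_BITS(3) shifts them out.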
#endregion
#region Huffman Methods
/// <summary>
/// This function was coded by David Tritscher. It builds a fast huffman
/// decoding table out of just a canonical huffman code lengths table.
/// </summary>
/// <param name="nsyms">Total number of symbols in this huffman tree.</param>
/// <param name="nbits">
/// Any symbols with a code length of nbits or less can be decoded
/// in one lookup of the table.
/// </param>
/// <param name="length">A table to get code lengths from [0 to syms-1]</param>
/// <param name="table">The table to fill up with decoded symbols and pointers.</param>
/// <returns>
/// OK: 0
/// error: 1
/// </returns>
private static int make_decode_table(uint nsyms, uint nbits, byte[] length, ushort[] table)
{
ushort sym;
uint leaf;
byte bit_num = 1;
uint fill;
uint pos = 0; /* the current position in the decode table */
uint table_mask = (uint)(1 << (int)nbits);
uint bit_mask = table_mask >> 1; /* don't do 0 length codes */
uint next_symbol = bit_mask; /* base of allocation for long codes */
/* fill entries for codes short enough for a direct mapping */
while (bit_num <= nbits)
{
for (sym = 0; sym < nsyms; sym++)
{
if (length[sym] == bit_num)
{
leaf = pos;
if ((pos += bit_mask) > table_mask) return 1; /* table overrun */
/* fill all possible lookups of this symbol with the symbol itself */
fill = bit_mask;
while (fill-- > 0) table[leaf++] = sym;
}
}
bit_mask >>= 1;
bit_num++;
}
/* if there are any codes longer than nbits */
if (pos != table_mask)
{
/* clear the remainder of the table */
for (sym = (ushort)pos; sym < table_mask; sym++) table[sym] = 0;
/* give ourselves room for codes to grow by up to 16 more bits */
pos <<= 16;
table_mask <<= 16;
bit_mask = 1 << 15;
while (bit_num <= 16)
{
for (sym = 0; sym < nsyms; sym++)
{
if (length[sym] == bit_num)
{
leaf = pos >> 16;
for (fill = 0; fill < bit_num - nbits; fill++)
{
/* if this path hasn't been taken yet, 'allocate' two entries */
if (table[leaf] == 0)
{
table[(next_symbol << 1)] = 0;
table[(next_symbol << 1) + 1] = 0;
table[leaf] = (ushort)next_symbol++;
}
/* follow the path and select either left or right for next bit */
leaf = (uint)(table[leaf] << 1);
if (((pos >> (int)(15 - fill)) & 1) != 0) leaf++;
}
table[leaf] = sym;
if ((pos += bit_mask) > table_mask) return 1; /* table overflow */
}
}
bit_mask >>= 1;
bit_num++;
}
}
/* full table? */
if (pos == table_mask) return 0;
/* either erroneous table, or all elements are 0 - let's find out. */
for (sym = 0; sym < nsyms; sym++) if (length[sym] != 0) return 1;
return 0;
}
#endregion
// Huffman macros
#region Huffman Macros
/// <summary>
/// Decodes one huffman symbol from the bitstream using the stated table and
/// puts it in v.
/// </summary>
private static int READ_HUFFSYM(ushort[] hufftbl, byte[] lentable, int tablebits, int maxsymbols, byte[] inbuf, ref int inpos, ref int bitsleft, ref uint bitbuf)
{
int v = 0, i, j = 0;
ENSURE_BITS(16, inbuf, ref inpos, ref bitsleft, ref bitbuf);
if ((i = hufftbl[PEEK_BITS(tablebits, bitbuf)]) >= maxsymbols)
{
j = 1 << (32 - tablebits);
do
{
j >>= 1;
i <<= 1;
i |= (bitbuf & j) != 0 ? 1 : 0;
if (j == 0)
throw new System.Exception("Invalid Huffman symbol in LZX bitstream");
} while ((i = hufftbl[i]) >= maxsymbols);
}
j = lentable[v = i];
REMOVE_BITS(j, ref bitsleft, ref bitbuf);
return v;
}
/// <summary>
/// Reads in code lengths for symbols first to last in the given table. The
/// code lengths are stored in their own special LZX way.
/// </summary>
private static bool READ_LENGTHS(byte[] lentable, uint first, uint last, Bits lb, State state, byte[] inbuf, ref int inpos, ref int bitsleft, ref uint bitbuf)
{
lb.BitBuffer = bitbuf;
lb.BitsLeft = bitsleft;
lb.InputPosition = inpos;
if (ReadLengths(lentable, first, last, lb, state, inbuf) != 0)
return false;
bitbuf = lb.BitBuffer;
bitsleft = lb.BitsLeft;
inpos = lb.InputPosition;
return true;
}
#endregion
}
}
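A rough driver for the two methods above, assuming a single CFDATA frame whose compressed bytes (inbuf) and uncompressed size (outlen) are already known; 21 is simply the largest supported window:
var state = new State();
if (!Decompressor.Init(21, state))
return;
byte[] outbuf = new byte[outlen];
bool ok = Decompressor.Decompress(state, inbuf.Length, inbuf, outlen, outbuf);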


@@ -0,0 +1,119 @@
using static BurnOutSharp.Models.Compression.LZX.Constants;
namespace BurnOutSharp.Compression.LZX
{
/// <see href="https://github.com/wine-mirror/wine/blob/master/dlls/cabinet/cabinet.h"/>
public class State
{
/// <summary>
/// the actual decoding window
/// </summary>
public byte[] window;
/// <summary>
/// window size (32Kb through 2Mb)
/// </summary>
public uint window_size;
/// <summary>
/// window size when it was first allocated
/// </summary>
public uint actual_size;
/// <summary>
/// current offset within the window
/// </summary>
public uint window_posn;
/// <summary>
/// for the LRU offset system
/// </summary>
public uint R0, R1, R2;
/// <summary>
/// number of main tree elements
/// </summary>
public ushort main_elements;
/// <summary>
/// have we started decoding at all yet?
/// </summary>
public int header_read;
/// <summary>
/// type of this block
/// </summary>
public ushort block_type;
/// <summary>
/// uncompressed length of this block
/// </summary>
public uint block_length;
/// <summary>
/// uncompressed bytes still left to decode
/// </summary>
public uint block_remaining;
/// <summary>
/// the number of CFDATA blocks processed
/// </summary>
public uint frames_read;
/// <summary>
/// magic header value used for transform
/// </summary>
public int intel_filesize;
/// <summary>
/// current offset in transform space
/// </summary>
public int intel_curpos;
/// <summary>
/// have we seen any translatable data yet?
/// </summary>
public int intel_started;
public ushort[] tblPRETREE_table = new ushort[(1 << LZX_PRETREE_TABLEBITS) + (LZX_PRETREE_MAXSYMBOLS << 1)];
public byte[] tblPRETREE_len = new byte[LZX_PRETREE_MAXSYMBOLS + LZX_LENTABLE_SAFETY];
public ushort[] tblMAINTREE_table = new ushort[(1 << LZX_MAINTREE_TABLEBITS) + (LZX_MAINTREE_MAXSYMBOLS << 1)];
public byte[] tblMAINTREE_len = new byte[LZX_MAINTREE_MAXSYMBOLS + LZX_LENTABLE_SAFETY];
public ushort[] tblLENGTH_table = new ushort[(1 << LZX_LENGTH_TABLEBITS) + (LZX_LENGTH_MAXSYMBOLS << 1)];
public byte[] tblLENGTH_len = new byte[LZX_LENGTH_MAXSYMBOLS + LZX_LENTABLE_SAFETY];
public ushort[] tblALIGNED_table = new ushort[(1 << LZX_ALIGNED_TABLEBITS) + (LZX_ALIGNED_MAXSYMBOLS << 1)];
public byte[] tblALIGNED_len = new byte[LZX_ALIGNED_MAXSYMBOLS + LZX_LENTABLE_SAFETY];
#region Decompression Tables
/// <summary>
/// An index to the position slot bases
/// </summary>
public uint[] PositionSlotBases = new uint[]
{
0, 1, 2, 3, 4, 6, 8, 12,
16, 24, 32, 48, 64, 96, 128, 192,
256, 384, 512, 768, 1024, 1536, 2048, 3072,
4096, 6144, 8192, 12288, 16384, 24576, 32768, 49152,
65536, 98304, 131072, 196608, 262144, 393216, 524288, 655360,
786432, 917504, 1048576, 1179648, 1310720, 1441792, 1572864, 1703936,
1835008, 1966080, 2097152
};
/// <summary>
/// How many bits of offset-from-base data is needed
/// </summary>
public byte[] ExtraBits = new byte[]
{
0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6,
7, 7, 8, 8, 9, 9, 10, 10, 11, 11, 12, 12, 13, 13, 14, 14,
15, 15, 16, 16, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17,
17, 17, 17
};
#endregion
}
}


@@ -0,0 +1,636 @@
using System;
using System.Runtime.InteropServices;
using BurnOutSharp.Models.Compression.MSZIP;
using static BurnOutSharp.Models.Compression.MSZIP.Constants;
namespace BurnOutSharp.Compression.MSZIP
{
/// <see href="https://github.com/wine-mirror/wine/blob/master/dlls/cabinet/fdi.c"/>
public unsafe class Decompressor
{
/// <summary>
/// Decompress a byte array using a given State
/// </summary>
public static bool Decompress(State state, int inlen, byte[] inbuf, int outlen, byte[] outbuf)
{
fixed (byte* inpos = inbuf)
{
state.inpos = inpos;
state.bb = state.bk = state.window_posn = 0;
if (outlen > ZIPWSIZE)
return false;
// CK = Chris Kirmse, official Microsoft purloiner
if (state.inpos[0] != 0x43 || state.inpos[1] != 0x48)
return false;
state.inpos += 2;
int lastBlockFlag = 0;
do
{
if (InflateBlock(&lastBlockFlag, state, inbuf, outbuf) != 0)
return false;
} while (lastBlockFlag == 0);
// Return success
return true;
}
}
/// <summary>
/// Decompress a deflated block
/// </summary>
private static uint InflateBlock(int* e, State state, byte[] inbuf, byte[] outbuf)
{
// Make local bit buffer
uint b = state.bb;
uint k = state.bk;
// Read the deflate block header
var header = new DeflateBlockHeader();
// Read in last block bit
ZIPNEEDBITS(1, state, ref b, ref k);
header.BFINAL = (*e = (int)b & 1) != 0;
ZIPDUMPBITS(1, ref b, ref k);
// Read in block type
ZIPNEEDBITS(2, state, ref b, ref k);
header.BTYPE = (CompressionType)(b & 3);
ZIPDUMPBITS(2, ref b, ref k);
// Restore the global bit buffer
state.bb = b;
state.bk = k;
// Inflate that block type
switch (header.BTYPE)
{
case CompressionType.NoCompression:
return (uint)DecompressStored(state, inbuf, outbuf);
case CompressionType.FixedHuffman:
return (uint)DecompressFixed(state, inbuf, outbuf);
case CompressionType.DynamicHuffman:
return (uint)DecompressDynamic(state, inbuf, outbuf);
// Bad block type
case CompressionType.Reserved:
default:
return 2;
}
}
/// <summary>
/// "Decompress" a stored block
/// </summary>
private static int DecompressStored(State state, byte[] inbuf, byte[] outbuf)
{
// Make local copies of globals
uint b = state.bb;
uint k = state.bk;
uint w = state.window_posn;
// Go to byte boundary
int n = (int)(k & 7);
ZIPDUMPBITS(n, ref b, ref k);
// Read the stored block header
var header = new NonCompressedBlockHeader();
// Get the length and its complement
ZIPNEEDBITS(16, state, ref b, ref k);
header.LEN = (ushort)(b & 0xffff);
ZIPDUMPBITS(16, ref b, ref k);
ZIPNEEDBITS(16, state, ref b, ref k);
header.NLEN = (ushort)(b & 0xffff);
if (header.LEN != (~header.NLEN & 0xffff))
return 1; // Error in compressed data
ZIPDUMPBITS(16, ref b, ref k);
// Read and output the stored data
n = header.LEN;
while (n-- > 0)
{
ZIPNEEDBITS(8, state, ref b, ref k);
outbuf[w++] = (byte)b;
ZIPDUMPBITS(8, ref b, ref k);
}
// Restore the globals from the locals
state.window_posn = w;
state.bb = b;
state.bk = k;
return 0;
}
/// <summary>
/// Decompress a block originally compressed with fixed Huffman codes
/// </summary>
private static int DecompressFixed(State state, byte[] inbuf, byte[] outbuf)
{
// Create the block header
FixedHuffmanCompressedBlockHeader header = new FixedHuffmanCompressedBlockHeader();
fixed (ushort* Zipcplens = CopyLengths)
fixed (ushort* Zipcplext = LiteralExtraBits)
fixed (ushort* Zipcpdist = CopyOffsets)
fixed (ushort* Zipcpdext = DistanceExtraBits)
{
HuffmanNode* fixed_tl;
int fixed_bl = 7;
int i;
// Build the literal length tree, pinning the fixed literal lengths directly
// (pinning state.ll and then swapping the field would build from stale data)
fixed (uint* l = header.LiteralLengths)
{
i = BuildHuffmanTree(l, 288, 257, Zipcplens, Zipcplext, &fixed_tl, &fixed_bl, state);
}
if (i != 0)
return i;
HuffmanNode* fixed_td;
int fixed_bd = 5;
// Build the distance code tree from the fixed distance codes
fixed (uint* d = header.DistanceCodes)
{
i = BuildHuffmanTree(d, 30, 0, Zipcpdist, Zipcpdext, &fixed_td, &fixed_bd, state);
}
if (i != 0)
return i;
// Decompress until an end-of-block code
return InflateCodes(fixed_tl, fixed_td, fixed_bl, fixed_bd, state, inbuf, outbuf);
}
}
/// <summary>
/// Decompress a block originally compressed with dynamic Huffman codes
/// </summary>
private static int DecompressDynamic(State state, byte[] inbuf, byte[] outbuf)
{
int i; /* temporary variables */
uint j;
uint l; /* last length */
uint m; /* mask for bit lengths table */
uint n; /* number of lengths to get */
HuffmanNode* tl; /* literal/length code table */
HuffmanNode* td; /* distance code table */
int bl; /* lookup bits for tl */
int bd; /* lookup bits for td */
uint nb; /* number of bit length codes */
uint nl; /* number of literal/length codes */
uint nd; /* number of distance codes */
uint b; /* bit buffer */
uint k; /* number of bits in bit buffer */
/* make local bit buffer */
b = state.bb;
k = state.bk;
fixed (uint* ll = state.ll)
{
/* read in table lengths */
ZIPNEEDBITS(5, state, ref b, ref k);
nl = 257 + (b & 0x1f); /* number of literal/length codes */
ZIPDUMPBITS(5, ref b, ref k);
ZIPNEEDBITS(5, state, ref b, ref k);
nd = 1 + (b & 0x1f); /* number of distance codes */
ZIPDUMPBITS(5, ref b, ref k);
ZIPNEEDBITS(4, state, ref b, ref k);
nb = 4 + (b & 0xf); /* number of bit length codes */
ZIPDUMPBITS(4, ref b, ref k);
if (nl > 288 || nd > 32)
return 1; /* bad lengths */
/* read in bit-length-code lengths */
for (j = 0; j < nb; j++)
{
ZIPNEEDBITS(3, state, ref b, ref k);
state.ll[BitLengthOrder[j]] = b & 7;
ZIPDUMPBITS(3, ref b, ref k);
}
for (; j < 19; j++)
state.ll[BitLengthOrder[j]] = 0;
/* build decoding table for trees--single level, 7 bit lookup */
bl = 7;
if ((i = BuildHuffmanTree(ll, 19, 19, null, null, &tl, &bl, state)) != 0)
return i; /* incomplete code set */
/* read in literal and distance code lengths */
n = nl + nd;
m = BitMasks[bl];
i = (int)(l = 0);
while ((uint)i < n)
{
ZIPNEEDBITS(bl, state, ref b, ref k);
j = (td = tl + (b & m))->b;
ZIPDUMPBITS((int)j, ref b, ref k);
j = td->n;
if (j < 16) /* length of code in bits (0..15) */
{
state.ll[i++] = l = j; /* save last length in l */
}
else if (j == 16) /* repeat last length 3 to 6 times */
{
ZIPNEEDBITS(2, state, ref b, ref k);
j = 3 + (b & 3);
ZIPDUMPBITS(2, ref b, ref k);
if ((uint)i + j > n)
return 1;
while (j-- > 0)
{
state.ll[i++] = l;
}
}
else if (j == 17) /* 3 to 10 zero length codes */
{
ZIPNEEDBITS(3, state, ref b, ref k);
j = 3 + (b & 7);
ZIPDUMPBITS(3, ref b, ref k);
if ((uint)i + j > n)
return 1;
while (j-- > 0)
state.ll[i++] = 0;
l = 0;
}
else /* j == 18: 11 to 138 zero length codes */
{
ZIPNEEDBITS(7, state, ref b, ref k);
j = 11 + (b & 0x7f);
ZIPDUMPBITS(7, ref b, ref k);
if ((uint)i + j > n)
return 1;
while (j-- > 0)
state.ll[i++] = 0;
l = 0;
}
}
/* restore the global bit buffer */
state.bb = b;
state.bk = k;
fixed (ushort* Zipcplens = CopyLengths)
fixed (ushort* Zipcplext = LiteralExtraBits)
fixed (ushort* Zipcpdist = CopyOffsets)
fixed (ushort* Zipcpdext = DistanceExtraBits)
{
/* build the decoding tables for literal/length and distance codes */
bl = ZIPLBITS;
if ((i = BuildHuffmanTree(ll, nl, 257, Zipcplens, Zipcplext, &tl, &bl, state)) != 0)
{
return i; /* incomplete code set */
}
bd = ZIPDBITS;
BuildHuffmanTree(ll + nl, nd, 0, Zipcpdist, Zipcpdext, &td, &bd, state);
/* decompress until an end-of-block code */
if (InflateCodes(tl, td, bl, bd, state, inbuf, outbuf) != 0)
return 1;
return 0;
}
}
}
/// <summary>
/// Build a Huffman tree from a set of lengths
/// </summary>
private static int BuildHuffmanTree(uint* b, uint n, uint s, ushort* d, ushort* e, HuffmanNode** t, int* m, State state)
{
uint a; /* counter for codes of length k */
uint el; /* length of EOB code (value 256) */
uint f; /* i repeats in table every f entries */
int g; /* maximum code length */
int h; /* table level */
uint i; /* counter, current code */
uint j; /* counter */
int k; /* number of bits in current code */
int* l; /* stack of bits per table */
uint* p; /* pointer into state.c[],state.b[],state.v[] */
HuffmanNode* q; /* points to current table */
HuffmanNode r = new HuffmanNode(); /* table entry for structure assignment */
int w; /* bits before this table == (l * h) */
uint* xp; /* pointer into x */
int y; /* number of dummy codes added */
uint z; /* number of entries in current table */
fixed (int* state_lx_ptr = state.lx)
{
l = state_lx_ptr + 1;
/* Generate counts for each bit length */
el = n > 256 ? b[256] : ZIPBMAX; /* set length of EOB code, if any */
for (i = 0; i < ZIPBMAX + 1; ++i)
state.c[i] = 0;
p = b; i = n;
do
{
state.c[*p]++; p++; /* assume all entries <= ZIPBMAX */
} while (--i > 0);
if (state.c[0] == n) /* null input--all zero length codes */
{
*t = null;
*m = 0;
return 0;
}
/* Find minimum and maximum length, bound *m by those */
for (j = 1; j <= ZIPBMAX; j++)
{
if (state.c[j] > 0)
break;
}
k = (int)j; /* minimum code length */
if ((uint)*m < j)
*m = (int)j;
for (i = ZIPBMAX; i > 0; i--)
{
if (state.c[i] > 0)
break;
}
g = (int)i; /* maximum code length */
if ((uint)*m > i)
*m = (int)i;
/* Adjust last length count to fill out codes, if needed */
for (y = 1 << (int)j; j < i; j++, y <<= 1)
{
if ((y -= (int)state.c[j]) < 0)
return 2; /* bad input: more codes than bits */
}
if ((y -= (int)state.c[i]) < 0)
return 2;
state.c[i] += (uint)y;
/* Generate starting offsets into the value table for each length */
state.x[1] = j = 0;
fixed (uint* state_c_ptr = state.c)
fixed (uint* state_x_ptr = state.x)
{
p = state_c_ptr + 1;
xp = state_x_ptr + 2;
while (--i > 0)
{
/* note that i == g from above */
*xp++ = (j += *p++);
}
}
/* Make a table of values in order of bit lengths */
p = b; i = 0;
do
{
if ((j = *p++) != 0)
state.v[state.x[j]++] = i;
} while (++i < n);
/* Generate the Huffman codes and for each, make the table entries */
state.x[0] = i = 0; /* first Huffman code is zero */
fixed (uint* state_v_ptr = state.v)
{
p = state_v_ptr; /* grab values in bit order */
h = -1; /* no tables yet--level -1 */
w = l[-1] = 0; /* no bits decoded yet */
state.u[0] = default; /* just to keep compilers happy */
q = null; /* ditto */
z = 0; /* ditto */
/* go through the bit lengths (k already is bits in shortest code) */
for (; k <= g; k++)
{
a = state.c[k];
while (a-- > 0)
{
/* here i is the Huffman code of length k bits for value *p */
/* make tables up to required level */
while (k > w + l[h])
{
w += l[h++]; /* add bits already decoded */
/* compute minimum size table less than or equal to *m bits */
if ((z = (uint)(g - w)) > (uint)*m) /* upper limit */
z = (uint)*m;
if ((f = (uint)(1 << (int)(j = (uint)(k - w)))) > a + 1) /* try a k-w bit table */
{ /* too few codes for k-w bit table */
f -= a + 1; /* deduct codes from patterns left */
fixed (uint* state_c_ptr = state.c)
{
xp = state_c_ptr + k;
while (++j < z) /* try smaller tables up to z bits */
{
if ((f <<= 1) <= *++xp)
break; /* enough codes to use up j bits */
f -= *xp; /* else deduct codes from patterns */
}
}
}
if ((uint)w + j > el && (uint)w < el)
j = (uint)(el - w); /* make EOB code end at table */
z = (uint)(1 << (int)j); /* table entries for j-bit table */
l[h] = (int)j; /* set table size in stack */
/* allocate and link in new table */
q = (HuffmanNode*)Marshal.AllocHGlobal((int)((z + 1) * sizeof(HuffmanNode)));
*t = q + 1; /* link to list for HuffmanNode_free() */
*(t = &(*q).t) = null;
state.u[h] = ++q; /* table starts after link */
/* connect to last table, if there is one */
if (h > 0)
{
state.x[h] = i; /* save pattern for backing up */
r.b = (byte)l[h - 1]; /* bits to dump before this table */
r.e = (byte)(16 + j); /* bits in this table */
r.t = q; /* pointer to this table */
j = (uint)((i & ((1 << w) - 1)) >> (w - l[h - 1]));
state.u[h - 1][j] = r; /* connect to last table */
}
}
/* set up table entry in r */
r.b = (byte)(k - w);
fixed (uint* state_v_ptr_comp = state.v)
{
if (p >= state_v_ptr_comp + n)
{
r.e = 99; /* out of values--invalid code */
}
else if (*p < s)
{
r.e = (byte)(*p < 256 ? 16 : 15); /* 256 is end-of-block code */
r.n = (ushort)*p++; /* simple code is just the value */
}
else
{
r.e = (byte)e[*p - s]; /* non-simple--look up in lists */
r.n = d[*p++ - s];
}
}
/* fill code-like entries with r */
f = (uint)(1 << (k - w));
for (j = i >> w; j < z; j += f)
{
q[j] = r;
}
/* backwards increment the k-bit code i */
for (j = (uint)(1 << (k - 1)); (i & j) != 0; j >>= 1)
{
i ^= j;
}
i ^= j;
/* backup over finished tables */
while ((i & ((1 << w) - 1)) != state.x[h])
w -= l[--h]; /* don't need to update q */
}
}
}
/* return actual size of base table */
*m = l[0];
}
/* Return true (1) if we were given an incomplete table */
return y != 0 && g != 1 ? 1 : 0;
}
/// <summary>
/// Inflate codes into Huffman trees
/// </summary>
private static int InflateCodes(HuffmanNode* tl, HuffmanNode* td, int bl, int bd, State state, byte[] inbuf, byte[] outbuf)
{
uint e; /* table entry flag/number of extra bits */
uint n, d; /* length and index for copy */
uint w; /* current window position */
HuffmanNode* t; /* pointer to table entry */
uint ml, md; /* masks for bl and bd bits */
uint b; /* bit buffer */
uint k; /* number of bits in bit buffer */
/* make local copies of globals */
b = state.bb; /* initialize bit buffer */
k = state.bk;
w = state.window_posn; /* initialize window position */
/* inflate the coded data */
ml = BitMasks[bl]; /* precompute masks for speed */
md = BitMasks[bd];
for (; ; )
{
ZIPNEEDBITS(bl, state, ref b, ref k);
if ((e = (t = tl + (b & ml))->e) > 16)
{
do
{
if (e == 99)
return 1;
ZIPDUMPBITS(t->b, ref b, ref k);
e -= 16;
ZIPNEEDBITS((int)e, state, ref b, ref k);
} while ((e = (*(t = t->t + (b & BitMasks[e]))).e) > 16);
}
ZIPDUMPBITS(t->b, ref b, ref k);
if (e == 16) /* then it's a literal */
{
outbuf[w++] = (byte)t->n;
}
else /* it's an EOB or a length */
{
/* exit if end of block */
if (e == 15)
break;
/* get length of block to copy */
ZIPNEEDBITS((int)e, state, ref b, ref k);
n = t->n + (b & BitMasks[e]);
ZIPDUMPBITS((int)e, ref b, ref k);
/* decode distance of block to copy */
ZIPNEEDBITS(bd, state, ref b, ref k);
if ((e = (*(t = td + (b & md))).e) > 16)
do
{
if (e == 99)
return 1;
ZIPDUMPBITS(t->b, ref b, ref k);
e -= 16;
ZIPNEEDBITS((int)e, state, ref b, ref k);
} while ((e = (*(t = t->t + (b & BitMasks[e]))).e) > 16);
ZIPDUMPBITS(t->b, ref b, ref k);
ZIPNEEDBITS((int)e, state, ref b, ref k);
d = w - t->n - (b & BitMasks[e]);
ZIPDUMPBITS((int)e, ref b, ref k);
do
{
d &= ZIPWSIZE - 1;
e = ZIPWSIZE - Math.Max(d, w);
e = Math.Min(e, n);
n -= e;
do
{
outbuf[w++] = outbuf[d++];
} while (--e > 0);
} while (n > 0);
}
}
/* restore the globals from the locals */
state.window_posn = w; /* restore global window pointer */
state.bb = b; /* restore global bit buffer */
state.bk = k;
/* done */
return 0;
}
#region Macros
private static void ZIPNEEDBITS(int n, State state, ref uint bitBuffer, ref uint bitCount)
{
while (bitCount < n)
{
int c = *state.inpos++;
bitBuffer |= (uint)(c << (int)bitCount);
bitCount += 8;
}
}
private static void ZIPDUMPBITS(int n, ref uint bitBuffer, ref uint bitCount)
{
bitBuffer >>= n;
bitCount -= (uint)n;
}
#endregion
}
}
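As with the LZX decompressor, a caller feeds one "CK"-prefixed MSZIP block at a time; a minimal sketch, where inbuf and uncompressedSize come from the enclosing CFDATA header:
var state = new State();
byte[] outbuf = new byte[uncompressedSize]; // at most ZIPWSIZE (32Kb)
bool ok = Decompressor.Decompress(state, inbuf.Length, inbuf, outbuf.Length, outbuf);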


@@ -0,0 +1,29 @@
namespace BurnOutSharp.Compression.MSZIP
{
public unsafe struct HuffmanNode
{
/// <summary>
/// Number of extra bits or operation
/// </summary>
public byte e;
/// <summary>
/// Number of bits in this code or subcode
/// </summary>
public byte b;
#region v
/// <summary>
/// Literal, length base, or distance base
/// </summary>
public ushort n;
/// <summary>
/// Pointer to next level of table
/// </summary>
public HuffmanNode* t;
#endregion
}
}


@@ -0,0 +1,56 @@
using static BurnOutSharp.Models.Compression.MSZIP.Constants;
namespace BurnOutSharp.Compression.MSZIP
{
/// <see href="https://github.com/wine-mirror/wine/blob/master/dlls/cabinet/cabinet.h"/>
public unsafe class State
{
/// <summary>
/// Current offset within the window
/// </summary>
public uint window_posn;
/// <summary>
/// Bit buffer
/// </summary>
public uint bb;
/// <summary>
/// Bits in bit buffer
/// </summary>
public uint bk;
/// <summary>
/// Literal/length and distance code lengths
/// </summary>
public uint[] ll = new uint[288 + 32];
/// <summary>
/// Bit length count table
/// </summary>
public uint[] c = new uint[ZIPBMAX + 1];
/// <summary>
/// Memory for l[-1..ZIPBMAX-1]
/// </summary>
public int[] lx = new int[ZIPBMAX + 1];
/// <summary>
/// Table stack
/// </summary>
public HuffmanNode*[] u = new HuffmanNode*[ZIPBMAX];
/// <summary>
/// Values in order of bit length
/// </summary>
public uint[] v = new uint[ZIPN_MAX];
/// <summary>
/// Bit offsets, then code stack
/// </summary>
public uint[] x = new uint[ZIPBMAX + 1];
/// <remarks>byte*</remarks>
public byte* inpos;
}
}


@@ -0,0 +1,499 @@
using System;
using System.Linq;
using BurnOutSharp.Models.Compression.Quantum;
using BurnOutSharp.Models.MicrosoftCabinet;
namespace BurnOutSharp.Compression.Quantum
{
/// <see href="https://github.com/wine-mirror/wine/blob/master/dlls/cabinet/cabinet.h"/>
/// <see href="https://github.com/wine-mirror/wine/blob/master/dlls/cabinet/fdi.c"/>
/// <see href="https://github.com/wine-mirror/wine/blob/master/include/fdi.h"/>
/// <see href="http://www.russotto.net/quantumcomp.html"/>
public static class Decompressor
{
/// <summary>
/// Decompress a byte array using a given State
/// </summary>
public static int Decompress(State state, int inlen, byte[] inbuf, int outlen, byte[] outbuf)
{
int inpos = 0, outpos = 0; // inbuf[0], outbuf[0]
int window = 0; // state.Window[0]
int runsrc, rundest;
uint windowPosition = state.WindowPosition;
uint windowSize = state.WindowSize;
int extra, togo = outlen, matchLength = 0, copyLength;
byte selector, sym;
uint matchOffset = 0;
// Make local copies of state variables
uint bitBuffer = state.BitBuffer;
int bitsLeft = state.BitsLeft;
ushort H = 0xFFFF, L = 0;
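// H and L bound the arithmetic coding interval; C is the current code within it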
// Read initial value of C
ushort C = (ushort)Q_READ_BITS(16, inbuf, ref inpos, ref bitsLeft, ref bitBuffer);
// Apply 2^x-1 mask
windowPosition &= windowSize - 1;
while (togo > 0)
{
selector = (byte)GET_SYMBOL(state.SelectorModel, ref H, ref L, ref C, inbuf, ref inpos, ref bitsLeft, ref bitBuffer);
switch (selector)
{
// Selector 0 = literal model, 64 entries, 0x00-0x3F
case 0:
sym = (byte)GET_SYMBOL(state.Model0, ref H, ref L, ref C, inbuf, ref inpos, ref bitsLeft, ref bitBuffer);
state.Window[window + windowPosition++] = sym;
togo--;
break;
// Selector 1 = literal model, 64 entries, 0x40-0x7F
case 1:
sym = (byte)GET_SYMBOL(state.Model1, ref H, ref L, ref C, inbuf, ref inpos, ref bitsLeft, ref bitBuffer);
state.Window[window + windowPosition++] = sym;
togo--;
break;
// Selector 2 = literal model, 64 entries, 0x80-0xBF
case 2:
sym = (byte)GET_SYMBOL(state.Model2, ref H, ref L, ref C, inbuf, ref inpos, ref bitsLeft, ref bitBuffer);
state.Window[window + windowPosition++] = sym;
togo--;
break;
// Selector 3 = literal model, 64 entries, 0xC0-0xFF
case 3:
sym = (byte)GET_SYMBOL(state.Model3, ref H, ref L, ref C, inbuf, ref inpos, ref bitsLeft, ref bitBuffer);
state.Window[window + windowPosition++] = sym;
togo--;
break;
// Selector 4 = fixed length of 3
case 4:
sym = (byte)GET_SYMBOL(state.Model4, ref H, ref L, ref C, inbuf, ref inpos, ref bitsLeft, ref bitBuffer);
extra = (int)Q_READ_BITS(state.ExtraBits[sym], inbuf, ref inpos, ref bitsLeft, ref bitBuffer);
matchOffset = (uint)(state.PositionSlotBases[sym] + extra + 1);
matchLength = 3;
break;
// Selector 5 = fixed length of 4
case 5:
sym = (byte)GET_SYMBOL(state.Model5, ref H, ref L, ref C, inbuf, ref inpos, ref bitsLeft, ref bitBuffer);
extra = (int)Q_READ_BITS(state.ExtraBits[sym], inbuf, ref inpos, ref bitsLeft, ref bitBuffer);
matchOffset = (uint)(state.PositionSlotBases[sym] + extra + 1);
matchLength = 4;
break;
// Selector 6 = variable length
case 6:
sym = (byte)GET_SYMBOL(state.Model6Length, ref H, ref L, ref C, inbuf, ref inpos, ref bitsLeft, ref bitBuffer);
extra = (int)Q_READ_BITS(state.LengthExtraBits[sym], inbuf, ref inpos, ref bitsLeft, ref bitBuffer);
matchLength = state.LengthBases[sym] + extra + 5;
sym = (byte)GET_SYMBOL(state.Model6Position, ref H, ref L, ref C, inbuf, ref inpos, ref bitsLeft, ref bitBuffer);
extra = (int)Q_READ_BITS(state.ExtraBits[sym], inbuf, ref inpos, ref bitsLeft, ref bitBuffer);
matchOffset = (uint)(state.PositionSlotBases[sym] + extra + 1);
break;
default:
return inpos;
}
// If this is a match
if (selector >= 4)
{
rundest = (int)(window + windowPosition);
togo -= matchLength;
// Copy any wrapped around source data
if (windowPosition >= matchOffset)
{
// No wrap
runsrc = (int)(rundest - matchOffset);
}
else
{
runsrc = (int)(rundest + (windowSize - matchOffset));
copyLength = (int)(matchOffset - windowPosition);
if (copyLength < matchLength)
{
matchLength -= copyLength;
windowPosition += (uint)copyLength;
while (copyLength-- > 0)
{
state.Window[rundest++] = state.Window[runsrc++];
}
runsrc = window;
}
}
windowPosition += (uint)matchLength;
// Copy match data - no worries about destination wraps
while (matchLength-- > 0)
{
state.Window[rundest++] = state.Window[runsrc++];
// Handle wraparounds that aren't supposed to happen
if (rundest >= windowSize)
rundest = 0;
if (runsrc >= windowSize)
runsrc = 0;
}
}
// If we hit the end of the window, copy to the output and wrap
if (windowPosition >= windowSize)
{
Array.Copy(state.Window, 0, outbuf, outpos, Math.Min(windowSize, outlen));
outpos += (int)Math.Min(windowSize, outlen);
outlen -= (int)Math.Min(windowSize, outlen);
windowPosition = 0;
}
}
if (togo > 0)
return inpos;
if (outlen > 0)
{
int sourceIndex = (int)((windowPosition == 0 ? windowSize : windowPosition) - outlen);
Array.Copy(state.Window, sourceIndex, outbuf, outpos, outlen);
}
// Cache the decompression state variables
state.BitBuffer = bitBuffer;
state.BitsLeft = bitsLeft;
state.WindowPosition = windowPosition;
return inpos;
}
/// <summary>
/// Initialize a Quantum decompressor state
/// </summary>
public static bool InitState(State state, CFFOLDER folder)
{
int window = ((ushort)folder.CompressionType >> 8) & 0x1f;
int level = ((ushort)folder.CompressionType >> 4) & 0xF;
return InitState(state, window, level);
}
/// <summary>
/// Initialize a Quantum decompressor state
/// </summary>
public static bool InitState(State state, int window, int level)
{
uint windowSize = (uint)(1 << window);
int maxSize = window * 2;
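// Each window bit contributes two position slots (see PositionSlotBases/ExtraBits in State)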
// QTM supports window sizes of 2^10 (1Kb) through 2^21 (2Mb)
// If a previously allocated window is big enough, keep it
if (window < 10 || window > 21)
return false;
// If we don't have the proper window size
if (state.ActualSize < windowSize)
state.Window = null;
// If we have no window
if (state.Window == null)
{
state.Window = new byte[windowSize];
state.ActualSize = windowSize;
}
// Set the window size and position
state.WindowSize = windowSize;
state.WindowPosition = 0;
// Initialize arithmetic coding models
state.SelectorModel = CreateModel(state.SelectorModelSymbols, 7, 0);
state.Model0 = CreateModel(state.Model0Symbols, 0x40, 0x00);
state.Model1 = CreateModel(state.Model1Symbols, 0x40, 0x40);
state.Model2 = CreateModel(state.Model2Symbols, 0x40, 0x80);
state.Model3 = CreateModel(state.Model3Symbols, 0x40, 0xC0);
// Model 4 depends on table size, ranges from 20 to 24
state.Model4 = CreateModel(state.Model4Symbols, (maxSize < 24) ? maxSize : 24, 0);
// Model 5 depends on table size, ranges from 20 to 36
state.Model5 = CreateModel(state.Model5Symbols, (maxSize < 36) ? maxSize : 36, 0);
// Model 6 Position depends on table size, ranges from 20 to 42
state.Model6Position = CreateModel(state.Model6PositionSymbols, (maxSize < 42) ? maxSize : 42, 0);
state.Model6Length = CreateModel(state.Model6LengthSymbols, 27, 0);
return true;
}
/// <summary>
/// Initialize a Quantum model that decodes symbols from s to (s + n - 1)
/// </summary>
private static Model CreateModel(ModelSymbol[] symbols, int entryCount, int initialSymbol)
{
// Set the basic values
Model model = new Model
{
TimeToReorder = 4,
Entries = entryCount,
Symbols = symbols,
};
// Clear out the look-up table
model.LookupTable = Enumerable.Repeat<ushort>(0xFFFF, model.LookupTable.Length).ToArray();
// Loop through and build the look-up table
for (ushort i = 0; i < entryCount; i++)
{
// Set up a look-up entry for symbol
model.LookupTable[i + initialSymbol] = i;
// Create the symbol in the table
model.Symbols[i] = new ModelSymbol
{
Symbol = (ushort)(i + initialSymbol),
CumulativeFrequency = (ushort)(entryCount - i),
};
}
// Set the last symbol frequency to 0
model.Symbols[entryCount] = new ModelSymbol { CumulativeFrequency = 0 };
return model;
}
/// <summary>
/// Update the Quantum model for a particular symbol
/// </summary>
private static void UpdateModel(Model model, int symbol)
{
// Update the cumulative frequency for all symbols less than the provided
for (int i = 0; i < symbol; i++)
{
model.Symbols[i].CumulativeFrequency += 8;
}
// If the first symbol still has a cumulative frequency under 3800
if (model.Symbols[0].CumulativeFrequency <= 3800)
return;
// If we have more than 1 shift left in the model
if (--model.TimeToReorder != 0)
{
// Loop through the entries from highest to lowest,
// performing the shift on the cumulative frequencies
for (int i = model.Entries - 1; i >= 0; i--)
{
// -1, not -2; the 0 entry saves this
model.Symbols[i].CumulativeFrequency >>= 1;
if (model.Symbols[i].CumulativeFrequency <= model.Symbols[i + 1].CumulativeFrequency)
model.Symbols[i].CumulativeFrequency = (ushort)(model.Symbols[i + 1].CumulativeFrequency + 1);
}
}
// If we have no shifts left in the model
else
{
// Reset the shifts left value to 50
model.TimeToReorder = 50;
// Loop through the entries setting the cumulative frequencies
for (int i = 0; i < model.Entries; i++)
{
// No -1, want to include the 0 entry
// This converts cumfreqs into frequencies, then shifts right
model.Symbols[i].CumulativeFrequency -= model.Symbols[i + 1].CumulativeFrequency;
model.Symbols[i].CumulativeFrequency++; // Avoid losing things entirely
model.Symbols[i].CumulativeFrequency >>= 1;
}
// Now sort by frequencies, decreasing order -- this must be an
// inplace selection sort, or a sort with the same (in)stability
// characteristics
for (int i = 0; i < model.Entries - 1; i++)
{
for (int j = i + 1; j < model.Entries; j++)
{
if (model.Symbols[i].CumulativeFrequency < model.Symbols[j].CumulativeFrequency)
{
var temp = model.Symbols[i];
model.Symbols[i] = model.Symbols[j];
model.Symbols[j] = temp;
}
}
}
// Then convert frequencies back to cumfreq
for (int i = model.Entries - 1; i >= 0; i--)
{
model.Symbols[i].CumulativeFrequency += model.Symbols[i + 1].CumulativeFrequency;
}
// Then update the other part of the table
for (ushort i = 0; i < model.Entries; i++)
{
model.LookupTable[model.Symbols[i].Symbol] = i;
}
}
}
// Bitstream reading macros (Quantum / normal byte order)
#region Macros
/*
* These bit access routines work by using the area beyond the MSB and the
* LSB as a free source of zeroes. This avoids having to mask any bits.
* So we have to know the bit width of the bitbuffer variable. This is
* defined as Uint_BITS.
*
* Uint_BITS should be at least 16 bits. Unlike LZX's Huffman decoding,
* Quantum's arithmetic decoding only needs 1 bit at a time, it doesn't
* need an assured number. Retrieving larger bitstrings can be done with
* multiple reads and fills of the bitbuffer. The code should work fine
* for machines where Uint >= 32 bits.
*
* Also note that Quantum reads bytes in normal order; LZX is in
* little-endian order.
*/
/// <summary>
/// Should be used first to set up the system
/// </summary>
private static void Q_INIT_BITSTREAM(out int bitsleft, out uint bitbuf)
{
bitsleft = 0;
bitbuf = 0;
}
/// <summary>
/// Adds more data to the bit buffer, if there is room for another 16 bits.
/// </summary>
private static void Q_FILL_BUFFER(byte[] inbuf, ref int inpos, ref int bitsleft, ref uint bitbuf)
{
if (bitsleft > 8)
return;
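// With at most 8 bits buffered, another 16 bits always fit in the 32-bit buffer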
byte b0 = inpos + 0 < inbuf.Length ? inbuf[inpos + 0] : (byte)0;
byte b1 = inpos + 1 < inbuf.Length ? inbuf[inpos + 1] : (byte)0;
bitbuf |= (uint)(((b0 << 8) | b1) << (16 - bitsleft));
bitsleft += 16;
inpos += 2;
}
/// <summary>
/// Extracts (without removing) N bits from the bit buffer
/// </summary>
private static uint Q_PEEK_BITS(int n, uint bitbuf)
{
return bitbuf >> (32 - n);
}
/// <summary>
/// Removes N bits from the bit buffer
/// </summary>
private static void Q_REMOVE_BITS(int n, ref int bitsleft, ref uint bitbuf)
{
bitbuf <<= n;
bitsleft -= n;
}
/// <summary>
/// Takes N bits from the buffer and puts them in v. Unlike LZX, this can loop
/// several times to get the requisite number of bits.
/// </summary>
private static uint Q_READ_BITS(int n, byte[] inbuf, ref int inpos, ref int bitsleft, ref uint bitbuf)
{
uint v = 0; int bitrun;
for (int bitsneed = n; bitsneed != 0; bitsneed -= bitrun)
{
Q_FILL_BUFFER(inbuf, ref inpos, ref bitsleft, ref bitbuf);
bitrun = (bitsneed > bitsleft) ? bitsleft : bitsneed;
v = (v << bitrun) | Q_PEEK_BITS(bitrun, bitbuf);
Q_REMOVE_BITS(bitrun, ref bitsleft, ref bitbuf);
}
return v;
}
/// <summary>
/// Fetches the next symbol from the stated model and puts it in symbol.
/// It may need to read the bitstream to do this.
/// </summary>
private static ushort GET_SYMBOL(Model model, ref ushort H, ref ushort L, ref ushort C, byte[] inbuf, ref int inpos, ref int bitsleft, ref uint bitbuf)
{
ushort symf = GetFrequency(model.Symbols[0].CumulativeFrequency, H, L, C);
int i;
for (i = 1; i < model.Entries; i++)
{
if (model.Symbols[i].CumulativeFrequency <= symf)
break;
}
ushort symbol = model.Symbols[i - 1].Symbol;
GetCode(model.Symbols[i - 1].CumulativeFrequency,
model.Symbols[i].CumulativeFrequency,
model.Symbols[0].CumulativeFrequency,
ref H, ref L, ref C,
inbuf, ref inpos, ref bitsleft, ref bitbuf);
UpdateModel(model, i);
return symbol;
}
/// <summary>
/// Get the frequency for a given range and total frequency
/// </summary>
private static ushort GetFrequency(ushort totalFrequency, ushort H, ushort L, ushort C)
{
uint range = (uint)(((H - L) & 0xFFFF) + 1);
uint freq = (uint)(((C - L + 1) * totalFrequency - 1) / range);
return (ushort)(freq & 0xFFFF);
}
/// <summary>
/// The decoder renormalization loop
/// </summary>
private static void GetCode(int previousFrequency,
int cumulativeFrequency,
int totalFrequency,
ref ushort H,
ref ushort L,
ref ushort C,
byte[] inbuf,
ref int inpos,
ref int bitsleft,
ref uint bitbuf)
{
uint range = (uint)((H - L) + 1);
H = (ushort)(L + ((previousFrequency * range) / totalFrequency) - 1);
L = (ushort)(L + (cumulativeFrequency * range) / totalFrequency);
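// Renormalize: while the top bits of L and H agree (or straddle the midpoint on underflow), shift them out and pull the next input bit into C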
while (true)
{
if ((L & 0x8000) != (H & 0x8000))
{
if ((L & 0x4000) == 0 || (H & 0x4000) != 0)
break;
// Underflow case
C ^= 0x4000;
L &= 0x3FFF;
H |= 0x4000;
}
L <<= 1;
H = (ushort)((H << 1) | 1);
C = (ushort)((C << 1) | Q_READ_BITS(1, inbuf, ref inpos, ref bitsleft, ref bitbuf));
}
}
#endregion
}
}

View File

@@ -0,0 +1,193 @@
using BurnOutSharp.Models.Compression.Quantum;
namespace BurnOutSharp.Compression.Quantum
{
/// <see href="https://github.com/kyz/libmspack/blob/master/libmspack/mspack/qtmd.c"/>
/// <see href="https://github.com/wine-mirror/wine/blob/master/dlls/cabinet/cabinet.h"/>
public class State
{
/// <summary>
/// The actual decoding window
/// </summary>
public byte[] Window;
/// <summary>
/// Window size (1Kb through 2Mb)
/// </summary>
public uint WindowSize;
/// <summary>
/// Window size when it was first allocated
/// </summary>
public uint ActualSize;
/// <summary>
/// Current offset within the window
/// </summary>
public uint WindowPosition;
#region Models
/// <summary>
/// Symbol table for selector model
/// </summary>
public ModelSymbol[] SelectorModelSymbols = new ModelSymbol[7 + 1];
/// <summary>
/// Model for selector values
/// </summary>
public Model SelectorModel;
/// <summary>
/// Model for Selector 0
/// </summary>
public Model Model0;
/// <summary>
/// Model for Selector 1
/// </summary>
public Model Model1;
/// <summary>
/// Model for Selector 2
/// </summary>
public Model Model2;
/// <summary>
/// Model for Selector 3
/// </summary>
public Model Model3;
/// <summary>
/// Model for Selector 4
/// </summary>
public Model Model4;
/// <summary>
/// Model for Selector 5
/// </summary>
public Model Model5;
/// <summary>
/// Model for Selector 6 Position
/// </summary>
public Model Model6Position;
/// <summary>
/// Model for Selector 6 Length
/// </summary>
public Model Model6Length;
#endregion
#region Symbol Tables
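// Each table has one extra entry used as a sentinel by the model code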
/// <summary>
/// Symbol table for Selector 0
/// </summary>
public ModelSymbol[] Model0Symbols = new ModelSymbol[0x40 + 1];
/// <summary>
/// Symbol table for Selector 1
/// </summary>
public ModelSymbol[] Model1Symbols = new ModelSymbol[0x40 + 1];
/// <summary>
/// Symbol table for Selector 2
/// </summary>
public ModelSymbol[] Model2Symbols = new ModelSymbol[0x40 + 1];
/// <summary>
/// Symbol table for Selector 3
/// </summary>
public ModelSymbol[] Model3Symbols = new ModelSymbol[0x40 + 1];
/// <summary>
/// Symbol table for Selector 4
/// </summary>
public ModelSymbol[] Model4Symbols = new ModelSymbol[0x18 + 1];
/// <summary>
/// Symbol table for Selector 5
/// </summary>
public ModelSymbol[] Model5Symbols = new ModelSymbol[0x24 + 1];
/// <summary>
/// Symbol table for Selector 6 Position
/// </summary>
public ModelSymbol[] Model6PositionSymbols = new ModelSymbol[0x2a + 1];
/// <summary>
/// Symbol table for Selector 6 Length
/// </summary>
public ModelSymbol[] Model6LengthSymbols = new ModelSymbol[0x1b + 1];
#endregion
#region Decompression Tables
/// <summary>
/// An index to the position slot bases
/// </summary>
public uint[] PositionSlotBases = new uint[42]
{
0x00000, 0x00001, 0x00002, 0x00003, 0x00004, 0x00006, 0x00008, 0x0000c,
0x00010, 0x00018, 0x00020, 0x00030, 0x00040, 0x00060, 0x00080, 0x000c0,
0x00100, 0x00180, 0x00200, 0x00300, 0x00400, 0x00600, 0x00800, 0x00c00,
0x01000, 0x01800, 0x02000, 0x03000, 0x04000, 0x06000, 0x08000, 0x0c000,
0x10000, 0x18000, 0x20000, 0x30000, 0x40000, 0x60000, 0x80000, 0xc0000,
0x100000, 0x180000
};
/// <summary>
/// How many bits of offset-from-base data is needed
/// </summary>
public byte[] ExtraBits = new byte[42]
{
0, 0, 0, 0, 1, 1, 2, 2,
3, 3, 4, 4, 5, 5, 6, 6,
7, 7, 8, 8, 9, 9, 10, 10,
11, 11, 12, 12, 13, 13, 14, 14,
15, 15, 16, 16, 17, 17, 18, 18,
19, 19
};
/// <summary>
/// An index to the position slot bases [Selector 6]
/// </summary>
public byte[] LengthBases = new byte[27]
{
0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x08,
0x0a, 0x0c, 0x0e, 0x12, 0x16, 0x1a, 0x1e, 0x26,
0x2e, 0x36, 0x3e, 0x4e, 0x5e, 0x6e, 0x7e, 0x9e,
0xbe, 0xde, 0xfe
};
/// <summary>
/// How many bits of offset-from-base data is needed [Selector 6]
/// </summary>
public byte[] LengthExtraBits = new byte[27]
{
0, 0, 0, 0, 0, 0, 1, 1,
1, 1, 2, 2, 2, 2, 3, 3,
3, 3, 4, 4, 4, 4, 5, 5,
5, 5, 0
};
#endregion
#region Decompression State
/// <summary>
/// Bit buffer to persist between runs
/// </summary>
public uint BitBuffer = 0;
/// <summary>
/// Bits remaining to persist between runs
/// </summary>
public int BitsLeft = 0;
#endregion
}
}

View File

@@ -0,0 +1,304 @@
namespace BurnOutSharp.Compression.bzip2
{
/// <see href="https://github.com/ladislav-zezula/StormLib/blob/master/src/bzip2/bzlib.h"/>
/// <see href="https://github.com/ladislav-zezula/StormLib/blob/master/src/bzip2/bzlib_private.h"/>
/// <see href="https://github.com/ladislav-zezula/StormLib/blob/master/src/bzip2/blocksort.c"/>
/// <see href="https://github.com/ladislav-zezula/StormLib/blob/master/src/bzip2/crctable.c"/>
/// <see href="https://github.com/ladislav-zezula/StormLib/blob/master/src/bzip2/randtable.c"/>
public static class Constants
{
#region bzlib.h
public const int BZ_RUN = 0;
public const int BZ_FLUSH = 1;
public const int BZ_FINISH = 2;
public const int BZ_OK = 0;
public const int BZ_RUN_OK = 1;
public const int BZ_FLUSH_OK = 2;
public const int BZ_FINISH_OK = 3;
public const int BZ_STREAM_END = 4;
public const int BZ_SEQUENCE_ERROR = (-1);
public const int BZ_PARAM_ERROR = (-2);
public const int BZ_MEM_ERROR = (-3);
public const int BZ_DATA_ERROR = (-4);
public const int BZ_DATA_ERROR_MAGIC = (-5);
public const int BZ_IO_ERROR = (-6);
public const int BZ_UNEXPECTED_EOF = (-7);
public const int BZ_OUTBUFF_FULL = (-8);
public const int BZ_CONFIG_ERROR = (-9);
public const int BZ_MAX_UNUSED = 5000;
#endregion
#region bzlib_private.h
internal const string BZ_VERSION = "1.0.5, 10-Dec-2007";
/*-- Header bytes. --*/
internal const byte BZ_HDR_B = 0x42; /* 'B' */
internal const byte BZ_HDR_Z = 0x5a; /* 'Z' */
internal const byte BZ_HDR_h = 0x68; /* 'h' */
internal const byte BZ_HDR_0 = 0x30; /* '0' */
/*-- Constants for the back end. --*/
internal const int BZ_MAX_ALPHA_SIZE = 258;
internal const int BZ_MAX_CODE_LEN = 23;
internal const int BZ_RUNA = 0;
internal const int BZ_RUNB = 1;
internal const int BZ_N_GROUPS = 6;
internal const int BZ_G_SIZE = 50;
internal const int BZ_N_ITERS = 4;
internal const int BZ_MAX_SELECTORS = (2 + (900000 / BZ_G_SIZE));
/*-- States and modes for compression. --*/
internal const int BZ_M_IDLE = 1;
internal const int BZ_M_RUNNING = 2;
internal const int BZ_M_FLUSHING = 3;
internal const int BZ_M_FINISHING = 4;
internal const int BZ_S_OUTPUT = 1;
internal const int BZ_S_INPUT = 2;
internal const int BZ_N_RADIX = 2;
internal const int BZ_N_QSORT = 12;
internal const int BZ_N_SHELL = 18;
internal const int BZ_N_OVERSHOOT = (BZ_N_RADIX + BZ_N_QSORT + BZ_N_SHELL + 2);
/*-- states for decompression. --*/
internal const int BZ_X_IDLE = 1;
internal const int BZ_X_OUTPUT = 2;
internal const int BZ_X_MAGIC_1 = 10;
internal const int BZ_X_MAGIC_2 = 11;
internal const int BZ_X_MAGIC_3 = 12;
internal const int BZ_X_MAGIC_4 = 13;
internal const int BZ_X_BLKHDR_1 = 14;
internal const int BZ_X_BLKHDR_2 = 15;
internal const int BZ_X_BLKHDR_3 = 16;
internal const int BZ_X_BLKHDR_4 = 17;
internal const int BZ_X_BLKHDR_5 = 18;
internal const int BZ_X_BLKHDR_6 = 19;
internal const int BZ_X_BCRC_1 = 20;
internal const int BZ_X_BCRC_2 = 21;
internal const int BZ_X_BCRC_3 = 22;
internal const int BZ_X_BCRC_4 = 23;
internal const int BZ_X_RANDBIT = 24;
internal const int BZ_X_ORIGPTR_1 = 25;
internal const int BZ_X_ORIGPTR_2 = 26;
internal const int BZ_X_ORIGPTR_3 = 27;
internal const int BZ_X_MAPPING_1 = 28;
internal const int BZ_X_MAPPING_2 = 29;
internal const int BZ_X_SELECTOR_1 = 30;
internal const int BZ_X_SELECTOR_2 = 31;
internal const int BZ_X_SELECTOR_3 = 32;
internal const int BZ_X_CODING_1 = 33;
internal const int BZ_X_CODING_2 = 34;
internal const int BZ_X_CODING_3 = 35;
internal const int BZ_X_MTF_1 = 36;
internal const int BZ_X_MTF_2 = 37;
internal const int BZ_X_MTF_3 = 38;
internal const int BZ_X_MTF_4 = 39;
internal const int BZ_X_MTF_5 = 40;
internal const int BZ_X_MTF_6 = 41;
internal const int BZ_X_ENDHDR_2 = 42;
internal const int BZ_X_ENDHDR_3 = 43;
internal const int BZ_X_ENDHDR_4 = 44;
internal const int BZ_X_ENDHDR_5 = 45;
internal const int BZ_X_ENDHDR_6 = 46;
internal const int BZ_X_CCRC_1 = 47;
internal const int BZ_X_CCRC_2 = 48;
internal const int BZ_X_CCRC_3 = 49;
internal const int BZ_X_CCRC_4 = 50;
/*-- Constants for the fast MTF decoder. --*/
internal const int MTFA_SIZE = 4096;
internal const int MTFL_SIZE = 16;
#endregion
#region blocksort.c
internal const int FALLBACK_QSORT_SMALL_THRESH = 10;
internal const int FALLBACK_QSORT_STACK_SIZE = 100;
/*--
Knuth's increments seem to work better
than Incerpi-Sedgewick here. Possibly
because the number of elems to sort is
usually small, typically <= 20.
--*/
internal static readonly int[] incs = new int[14]
{
1, 4, 13, 40, 121, 364, 1093, 3280,
9841, 29524, 88573, 265720, 797161, 2391484
};
/*--
The following is an implementation of
an elegant 3-way quicksort for strings,
described in a paper "Fast Algorithms for
Sorting and Searching Strings", by Robert
Sedgewick and Jon L. Bentley.
--*/
internal const int MAIN_QSORT_SMALL_THRESH = 20;
internal const int MAIN_QSORT_DEPTH_THRESH = (BZ_N_RADIX + BZ_N_QSORT);
internal const int MAIN_QSORT_STACK_SIZE = 100;
internal const uint SETMASK = 1 << 21;
internal const uint CLEARMASK = ~SETMASK;
#endregion
#region crctable.c
/// <summary>
/// Table for doing CRCs
/// </summary>
internal static readonly uint[] BZ2_crc32Table = new uint[256]
{
0x00000000, 0x04c11db7, 0x09823b6e, 0x0d4326d9,
0x130476dc, 0x17c56b6b, 0x1a864db2, 0x1e475005,
0x2608edb8, 0x22c9f00f, 0x2f8ad6d6, 0x2b4bcb61,
0x350c9b64, 0x31cd86d3, 0x3c8ea00a, 0x384fbdbd,
0x4c11db70, 0x48d0c6c7, 0x4593e01e, 0x4152fda9,
0x5f15adac, 0x5bd4b01b, 0x569796c2, 0x52568b75,
0x6a1936c8, 0x6ed82b7f, 0x639b0da6, 0x675a1011,
0x791d4014, 0x7ddc5da3, 0x709f7b7a, 0x745e66cd,
0x9823b6e0, 0x9ce2ab57, 0x91a18d8e, 0x95609039,
0x8b27c03c, 0x8fe6dd8b, 0x82a5fb52, 0x8664e6e5,
0xbe2b5b58, 0xbaea46ef, 0xb7a96036, 0xb3687d81,
0xad2f2d84, 0xa9ee3033, 0xa4ad16ea, 0xa06c0b5d,
0xd4326d90, 0xd0f37027, 0xddb056fe, 0xd9714b49,
0xc7361b4c, 0xc3f706fb, 0xceb42022, 0xca753d95,
0xf23a8028, 0xf6fb9d9f, 0xfbb8bb46, 0xff79a6f1,
0xe13ef6f4, 0xe5ffeb43, 0xe8bccd9a, 0xec7dd02d,
0x34867077, 0x30476dc0, 0x3d044b19, 0x39c556ae,
0x278206ab, 0x23431b1c, 0x2e003dc5, 0x2ac12072,
0x128e9dcf, 0x164f8078, 0x1b0ca6a1, 0x1fcdbb16,
0x018aeb13, 0x054bf6a4, 0x0808d07d, 0x0cc9cdca,
0x7897ab07, 0x7c56b6b0, 0x71159069, 0x75d48dde,
0x6b93dddb, 0x6f52c06c, 0x6211e6b5, 0x66d0fb02,
0x5e9f46bf, 0x5a5e5b08, 0x571d7dd1, 0x53dc6066,
0x4d9b3063, 0x495a2dd4, 0x44190b0d, 0x40d816ba,
0xaca5c697, 0xa864db20, 0xa527fdf9, 0xa1e6e04e,
0xbfa1b04b, 0xbb60adfc, 0xb6238b25, 0xb2e29692,
0x8aad2b2f, 0x8e6c3698, 0x832f1041, 0x87ee0df6,
0x99a95df3, 0x9d684044, 0x902b669d, 0x94ea7b2a,
0xe0b41de7, 0xe4750050, 0xe9362689, 0xedf73b3e,
0xf3b06b3b, 0xf771768c, 0xfa325055, 0xfef34de2,
0xc6bcf05f, 0xc27dede8, 0xcf3ecb31, 0xcbffd686,
0xd5b88683, 0xd1799b34, 0xdc3abded, 0xd8fba05a,
0x690ce0ee, 0x6dcdfd59, 0x608edb80, 0x644fc637,
0x7a089632, 0x7ec98b85, 0x738aad5c, 0x774bb0eb,
0x4f040d56, 0x4bc510e1, 0x46863638, 0x42472b8f,
0x5c007b8a, 0x58c1663d, 0x558240e4, 0x51435d53,
0x251d3b9e, 0x21dc2629, 0x2c9f00f0, 0x285e1d47,
0x36194d42, 0x32d850f5, 0x3f9b762c, 0x3b5a6b9b,
0x0315d626, 0x07d4cb91, 0x0a97ed48, 0x0e56f0ff,
0x1011a0fa, 0x14d0bd4d, 0x19939b94, 0x1d528623,
0xf12f560e, 0xf5ee4bb9, 0xf8ad6d60, 0xfc6c70d7,
0xe22b20d2, 0xe6ea3d65, 0xeba91bbc, 0xef68060b,
0xd727bbb6, 0xd3e6a601, 0xdea580d8, 0xda649d6f,
0xc423cd6a, 0xc0e2d0dd, 0xcda1f604, 0xc960ebb3,
0xbd3e8d7e, 0xb9ff90c9, 0xb4bcb610, 0xb07daba7,
0xae3afba2, 0xaafbe615, 0xa7b8c0cc, 0xa379dd7b,
0x9b3660c6, 0x9ff77d71, 0x92b45ba8, 0x9675461f,
0x8832161a, 0x8cf30bad, 0x81b02d74, 0x857130c3,
0x5d8a9099, 0x594b8d2e, 0x5408abf7, 0x50c9b640,
0x4e8ee645, 0x4a4ffbf2, 0x470cdd2b, 0x43cdc09c,
0x7b827d21, 0x7f436096, 0x7200464f, 0x76c15bf8,
0x68860bfd, 0x6c47164a, 0x61043093, 0x65c52d24,
0x119b4be9, 0x155a565e, 0x18197087, 0x1cd86d30,
0x029f3d35, 0x065e2082, 0x0b1d065b, 0x0fdc1bec,
0x3793a651, 0x3352bbe6, 0x3e119d3f, 0x3ad08088,
0x2497d08d, 0x2056cd3a, 0x2d15ebe3, 0x29d4f654,
0xc5a92679, 0xc1683bce, 0xcc2b1d17, 0xc8ea00a0,
0xd6ad50a5, 0xd26c4d12, 0xdf2f6bcb, 0xdbee767c,
0xe3a1cbc1, 0xe760d676, 0xea23f0af, 0xeee2ed18,
0xf0a5bd1d, 0xf464a0aa, 0xf9278673, 0xfde69bc4,
0x89b8fd09, 0x8d79e0be, 0x803ac667, 0x84fbdbd0,
0x9abc8bd5, 0x9e7d9662, 0x933eb0bb, 0x97ffad0c,
0xafb010b1, 0xab710d06, 0xa6322bdf, 0xa2f33668,
0xbcb4666d, 0xb8757bda, 0xb5365d03, 0xb1f740b4
};
#endregion
#region randtable.c
/// <summary>
/// Table for randomising repetitive blocks
/// </summary>
internal static readonly int[] BZ2_rNums = new int[512]
{
619, 720, 127, 481, 931, 816, 813, 233, 566, 247,
985, 724, 205, 454, 863, 491, 741, 242, 949, 214,
733, 859, 335, 708, 621, 574, 73, 654, 730, 472,
419, 436, 278, 496, 867, 210, 399, 680, 480, 51,
878, 465, 811, 169, 869, 675, 611, 697, 867, 561,
862, 687, 507, 283, 482, 129, 807, 591, 733, 623,
150, 238, 59, 379, 684, 877, 625, 169, 643, 105,
170, 607, 520, 932, 727, 476, 693, 425, 174, 647,
73, 122, 335, 530, 442, 853, 695, 249, 445, 515,
909, 545, 703, 919, 874, 474, 882, 500, 594, 612,
641, 801, 220, 162, 819, 984, 589, 513, 495, 799,
161, 604, 958, 533, 221, 400, 386, 867, 600, 782,
382, 596, 414, 171, 516, 375, 682, 485, 911, 276,
98, 553, 163, 354, 666, 933, 424, 341, 533, 870,
227, 730, 475, 186, 263, 647, 537, 686, 600, 224,
469, 68, 770, 919, 190, 373, 294, 822, 808, 206,
184, 943, 795, 384, 383, 461, 404, 758, 839, 887,
715, 67, 618, 276, 204, 918, 873, 777, 604, 560,
951, 160, 578, 722, 79, 804, 96, 409, 713, 940,
652, 934, 970, 447, 318, 353, 859, 672, 112, 785,
645, 863, 803, 350, 139, 93, 354, 99, 820, 908,
609, 772, 154, 274, 580, 184, 79, 626, 630, 742,
653, 282, 762, 623, 680, 81, 927, 626, 789, 125,
411, 521, 938, 300, 821, 78, 343, 175, 128, 250,
170, 774, 972, 275, 999, 639, 495, 78, 352, 126,
857, 956, 358, 619, 580, 124, 737, 594, 701, 612,
669, 112, 134, 694, 363, 992, 809, 743, 168, 974,
944, 375, 748, 52, 600, 747, 642, 182, 862, 81,
344, 805, 988, 739, 511, 655, 814, 334, 249, 515,
897, 955, 664, 981, 649, 113, 974, 459, 893, 228,
433, 837, 553, 268, 926, 240, 102, 654, 459, 51,
686, 754, 806, 760, 493, 403, 415, 394, 687, 700,
946, 670, 656, 610, 738, 392, 760, 799, 887, 653,
978, 321, 576, 617, 626, 502, 894, 679, 243, 440,
680, 879, 194, 572, 640, 724, 926, 56, 204, 700,
707, 151, 457, 449, 797, 195, 791, 558, 945, 679,
297, 59, 87, 824, 713, 663, 412, 693, 342, 606,
134, 108, 571, 364, 631, 212, 174, 643, 304, 329,
343, 97, 430, 751, 497, 314, 983, 374, 822, 928,
140, 206, 73, 263, 980, 736, 876, 478, 430, 305,
170, 514, 364, 692, 829, 82, 855, 953, 676, 246,
369, 970, 294, 750, 807, 827, 150, 790, 288, 923,
804, 378, 215, 828, 592, 281, 565, 555, 710, 82,
896, 831, 547, 261, 524, 462, 293, 465, 502, 56,
661, 821, 976, 991, 658, 869, 905, 758, 745, 193,
768, 550, 608, 933, 378, 286, 215, 979, 792, 961,
61, 688, 793, 644, 986, 403, 106, 366, 905, 644,
372, 567, 466, 434, 645, 210, 389, 550, 919, 135,
780, 773, 635, 389, 707, 100, 626, 958, 165, 504,
920, 176, 193, 713, 857, 265, 203, 50, 668, 108,
645, 990, 626, 197, 510, 357, 358, 850, 858, 364,
936, 638
};
#endregion
}
}

View File

@@ -0,0 +1,100 @@
using static BurnOutSharp.Compression.bzip2.Constants;
namespace BurnOutSharp.Compression.bzip2
{
/// <summary>
/// Structure holding all the decompression-side stuff.
/// </summary>
/// <see href="https://github.com/ladislav-zezula/StormLib/blob/master/src/bzip2/bzlib_private.h"/>
internal unsafe class DState
{
/* pointer back to the struct bz_stream */
public bz_stream* strm;
/* state indicator for this stream */
public int state;
/* for doing the final run-length decoding */
public byte state_out_ch;
public int state_out_len;
public bool blockRandomised;
public int rNToGo;
public int rTPos;
/* the buffer for bit stream reading */
public uint bsBuff;
public int bsLive;
/* misc administratium */
public int blockSize100k;
public bool smallDecompress;
public int currBlockNo;
public int verbosity;
/* for undoing the Burrows-Wheeler transform */
public int origPtr;
public uint tPos;
public int k0;
public int[] unzftab = new int[256];
public int nblock_used;
public int[] cftab = new int[257];
public int[] cftabCopy = new int[257];
/* for undoing the Burrows-Wheeler transform (FAST) */
public uint* tt;
/* for undoing the Burrows-Wheeler transform (SMALL) */
public ushort* ll16;
public byte* ll4;
/* stored and calculated CRCs */
public uint storedBlockCRC;
public uint storedCombinedCRC;
public uint calculatedBlockCRC;
public uint calculatedCombinedCRC;
/* map of bytes used in block */
public int nInUse;
public bool[] inUse = new bool[256];
public bool[] inUse16 = new bool[16];
public byte[] seqToUnseq = new byte[256];
/* for decoding the MTF values */
public byte[] mtfa = new byte[MTFA_SIZE];
public int[] mtfbase = new int[256 / MTFL_SIZE];
public byte[] selector = new byte[BZ_MAX_SELECTORS];
public byte[] selectorMtf = new byte[BZ_MAX_SELECTORS];
public byte[,] len = new byte[BZ_N_GROUPS, BZ_MAX_ALPHA_SIZE];
public int[,] limit = new int[BZ_N_GROUPS, BZ_MAX_ALPHA_SIZE];
public int[,] @base = new int[BZ_N_GROUPS, BZ_MAX_ALPHA_SIZE];
public int[,] perm = new int[BZ_N_GROUPS, BZ_MAX_ALPHA_SIZE];
public int[] minLens = new int[BZ_N_GROUPS];
/* save area for scalars in the main decompress code */
public int save_i;
public int save_j;
public int save_t;
public int save_alphaSize;
public int save_nGroups;
public int save_nSelectors;
public int save_EOB;
public int save_groupNo;
public int save_groupPos;
public int save_nextSym;
public int save_nblockMAX;
public int save_nblock;
public int save_es;
public int save_N;
public int save_curr;
public int save_zt;
public int save_zn;
public int save_zvec;
public int save_zj;
public int save_gSel;
public int save_gMinlen;
public int* save_gLimit;
public int* save_gBase;
public int* save_gPerm;
}
}

View File

@@ -0,0 +1,80 @@
using static BurnOutSharp.Compression.bzip2.Constants;
namespace BurnOutSharp.Compression.bzip2
{
/// <summary>
/// Structure holding all the compression-side stuff.
/// </summary>
/// <see href="https://github.com/ladislav-zezula/StormLib/blob/master/src/bzip2/bzlib_private.h"/>
internal unsafe class EState
{
/* pointer back to the struct bz_stream */
public bz_stream* strm;
/* mode this stream is in, and whether inputting */
/* or outputting data */
public int mode;
public int state;
/* remembers avail_in when flush/finish requested */
public uint avail_in_expect;
/* for doing the block sorting */
public uint* arr1;
public uint* arr2;
public uint* ftab;
public int origPtr;
/* aliases for arr1 and arr2 */
public uint* ptr;
public byte* block;
public ushort* mtfv;
public byte* zbits;
/* for deciding when to use the fallback sorting algorithm */
public int workFactor;
/* run-length-encoding of the input */
public uint state_in_ch;
public int state_in_len;
public int rNToGo;
public int rTPos;
/* input and output limits and current posns */
public int nblock;
public int nblockMAX;
public int numZ;
public int state_out_pos;
/* map of bytes used in block */
public int nInUse;
public bool[] inUse = new bool[256];
public byte[] unseqToSeq = new byte[256];
/* the buffer for bit stream creation */
public uint bsBuff;
public int bsLive;
/* block and combined CRCs */
public uint blockCRC;
public uint combinedCRC;
/* misc administratium */
public int verbosity;
public int blockNo;
public int blockSize100k;
/* stuff for coding the MTF values */
public int nMTF;
public int[] mtfFreq = new int[BZ_MAX_ALPHA_SIZE];
public byte[] selector = new byte[BZ_MAX_SELECTORS];
public byte[] selectorMtf = new byte[BZ_MAX_SELECTORS];
public byte[,] len = new byte[BZ_N_GROUPS, BZ_MAX_ALPHA_SIZE];
public int[,] code = new int[BZ_N_GROUPS, BZ_MAX_ALPHA_SIZE];
public int[,] rfreq = new int[BZ_N_GROUPS, BZ_MAX_ALPHA_SIZE];
/* second dimension: only 3 needed; 4 makes index calculations faster */
public uint[,] len_pack = new uint[BZ_MAX_ALPHA_SIZE, 4];
}
}

View File

@@ -0,0 +1,217 @@
using static BurnOutSharp.Compression.bzip2.Constants;
namespace BurnOutSharp.Compression.bzip2
{
/// <summary>
/// Huffman coding low-level stuff
/// </summary>
/// <see href="https://github.com/ladislav-zezula/StormLib/blob/master/src/bzip2/huffman.c"/>
internal static unsafe class Huffman
{
public static void BZ2_hbMakeCodeLengths(byte* len, int* freq, int alphaSize, int maxLen)
{
/*--
Nodes and heap entries run from 1. Entry 0
for both the heap and nodes is a sentinel.
--*/
int nNodes, nHeap, n1, n2, i, j, k;
bool tooLong;
int[] heap = new int[BZ_MAX_ALPHA_SIZE + 2];
int[] weight = new int[BZ_MAX_ALPHA_SIZE * 2];
int[] parent = new int[BZ_MAX_ALPHA_SIZE * 2];
for (i = 0; i < alphaSize; i++)
{
weight[i + 1] = (freq[i] == 0 ? 1 : freq[i]) << 8;
}
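// Weights pack (frequency << 8) | depth; ADDWEIGHTS combines both parts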
while (true)
{
nNodes = alphaSize;
nHeap = 0;
heap[0] = 0;
weight[0] = 0;
parent[0] = -2;
for (i = 1; i <= alphaSize; i++)
{
parent[i] = -1;
nHeap++;
heap[nHeap] = i;
UPHEAP(nHeap, heap, weight);
}
//AssertH(nHeap < (BZ_MAX_ALPHA_SIZE + 2), 2001);
while (nHeap > 1)
{
n1 = heap[1]; heap[1] = heap[nHeap]; nHeap--; DOWNHEAP(1, nHeap, heap, weight);
n2 = heap[1]; heap[1] = heap[nHeap]; nHeap--; DOWNHEAP(1, nHeap, heap, weight);
nNodes++;
parent[n1] = parent[n2] = nNodes;
weight[nNodes] = ADDWEIGHTS(weight[n1], weight[n2]);
parent[nNodes] = -1;
nHeap++;
heap[nHeap] = nNodes;
UPHEAP(nHeap, heap, weight);
}
//AssertH(nNodes < (BZ_MAX_ALPHA_SIZE * 2), 2002);
tooLong = false;
for (i = 1; i <= alphaSize; i++)
{
j = 0;
k = i;
while (parent[k] >= 0) { k = parent[k]; j++; }
len[i - 1] = (byte)j;
if (j > maxLen) tooLong = true;
}
if (!tooLong) break;
/* 17 Oct 04: keep-going condition for the following loop used
to be 'i < alphaSize', which missed the last element,
theoretically leading to the possibility of the compressor
looping. However, this count-scaling step is only needed if
one of the generated Huffman code words is longer than
maxLen, which up to and including version 1.0.2 was 20 bits,
which is extremely unlikely. In version 1.0.3 maxLen was
changed to 17 bits, which has minimal effect on compression
ratio, but does mean this scaling step is used from time to
time, enough to verify that it works.
This means that bzip2-1.0.3 and later will only produce
Huffman codes with a maximum length of 17 bits. However, in
order to preserve backwards compatibility with bitstreams
produced by versions pre-1.0.3, the decompressor must still
handle lengths of up to 20. */
for (i = 1; i <= alphaSize; i++)
{
j = weight[i] >> 8;
j = 1 + (j / 2);
weight[i] = j << 8;
}
}
}
public static void BZ2_hbAssignCodes(int* code, byte* length, int minLen, int maxLen, int alphaSize)
{
int n, vec, i;
vec = 0;
for (n = minLen; n <= maxLen; n++)
{
for (i = 0; i < alphaSize; i++)
{
if (length[i] == n)
{
code[i] = vec;
vec++;
}
}
vec <<= 1;
}
}
public static void BZ2_hbCreateDecodeTables(int* limit, int* @base, int* perm, byte* length, int minLen, int maxLen, int alphaSize)
{
int pp, i, j, vec;
pp = 0;
for (i = minLen; i <= maxLen; i++)
{
for (j = 0; j < alphaSize; j++)
{
if (length[j] == i) { perm[pp] = j; pp++; }
}
}
for (i = 0; i < BZ_MAX_CODE_LEN; i++)
{
@base[i] = 0;
}
for (i = 0; i < alphaSize; i++)
{
@base[length[i] + 1]++;
}
for (i = 1; i < BZ_MAX_CODE_LEN; i++)
{
@base[i] += @base[i - 1];
}
for (i = 0; i < BZ_MAX_CODE_LEN; i++)
{
limit[i] = 0;
}
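// limit[i] becomes the largest code value of length i; base[i] (adjusted below) maps codes of each length into perm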
vec = 0;
for (i = minLen; i <= maxLen; i++)
{
vec += (@base[i + 1] - @base[i]);
limit[i] = vec - 1;
vec <<= 1;
}
for (i = minLen + 1; i <= maxLen; i++)
{
@base[i] = ((limit[i - 1] + 1) << 1) - @base[i];
}
}
#region Macros
private static int WEIGHTOF(int zz0) => (int)(zz0 & 0xffffff00);
private static int DEPTHOF(int zz1) => zz1 & 0x000000ff;
private static int MYMAX(int zz2, int zz3) => zz2 > zz3 ? zz2 : zz3;
private static int ADDWEIGHTS(int zw1, int zw2) => (WEIGHTOF(zw1) + WEIGHTOF(zw2)) | (1 + MYMAX(DEPTHOF(zw1), DEPTHOF(zw2)));
private static void UPHEAP(int z, int[] heap, int[] weight)
{
int zz, tmp;
zz = z; tmp = heap[zz];
while (weight[tmp] < weight[heap[zz >> 1]])
{
heap[zz] = heap[zz >> 1];
zz >>= 1;
}
heap[zz] = tmp;
}
private static void DOWNHEAP(int z, int nHeap, int[] heap, int[] weight)
{
int zz, yy, tmp;
zz = z; tmp = heap[zz];
while (true)
{
yy = zz << 1;
if (yy > nHeap)
break;
if (yy < nHeap && weight[heap[yy + 1]] < weight[heap[yy]])
yy++;
if (weight[tmp] < weight[heap[yy]])
break;
heap[zz] = heap[yy];
zz = yy;
}
heap[zz] = tmp;
}
#endregion
}
}

File diff suppressed because it is too large

View File

@@ -0,0 +1,22 @@
namespace BurnOutSharp.Compression.bzip2
{
/// <see href="https://github.com/ladislav-zezula/StormLib/blob/master/src/bzip2/bzlib.h"/>
public unsafe struct bz_stream
{
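// zlib-style streaming interface: 64-bit running totals are split into lo32/hi32 pairs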
public char* next_in;
public uint avail_in;
public uint total_in_lo32;
public uint total_in_hi32;
public char* next_out;
public uint avail_out;
public uint total_out_lo32;
public uint total_out_hi32;
public void* state;
// void *(*bzalloc)(void *,int,int);
// void (*bzfree)(void *,void *);
// void *opaque;
}
}

File diff suppressed because it is too large

View File

@@ -0,0 +1,23 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFrameworks>net48;net6.0</TargetFrameworks>
<RuntimeIdentifiers>win-x86;win-x64;linux-x64;osx-x64</RuntimeIdentifiers>
<Title>BurnOutSharp.Matching</Title>
<AssemblyName>BurnOutSharp.Matching</AssemblyName>
<Authors>Matt Nadareski</Authors>
<Product>BurnOutSharp</Product>
<Copyright>Copyright (c)2018-2022 Matt Nadareski</Copyright>
<RepositoryUrl>https://github.com/mnadareski/BurnOutSharp</RepositoryUrl>
<Version>2.7</Version>
<AssemblyVersion>2.7</AssemblyVersion>
<FileVersion>2.7</FileVersion>
<IncludeSource>true</IncludeSource>
<IncludeSymbols>true</IncludeSymbols>
</PropertyGroup>
<PropertyGroup>
<AllowUnsafeBlocks>true</AllowUnsafeBlocks>
</PropertyGroup>
</Project>

View File

@@ -0,0 +1,200 @@
using System.IO;
namespace BurnOutSharp.Matching
{
/// <summary>
/// Content matching criteria
/// </summary>
public class ContentMatch : IMatch<byte?[]>
{
/// <summary>
/// Content to match
/// </summary>
public byte?[] Needle { get; set; }
/// <summary>
/// Starting index for matching (inclusive)
/// </summary>
public int Start { get; set; }
/// <summary>
/// Ending index for matching (inclusive)
/// </summary>
public int End { get; set; }
/// <summary>
/// Constructor
/// </summary>
/// <param name="needle">Byte array representing the search</param>
/// <param name="start">Optional starting index</param>
/// <param name="end">Optional ending index</param>
public ContentMatch(byte?[] needle, int start = -1, int end = -1)
{
Needle = needle;
Start = start;
End = end;
}
#region Array Matching
/// <summary>
/// Get if this match can be found in a stack
/// </summary>
/// <param name="stack">Array to search for the given content</param>
/// <param name="reverse">True to search from the end of the array, false from the start</param>
/// <returns>Tuple of success and found position</returns>
public (bool success, int position) Match(byte[] stack, bool reverse = false)
{
// If either array is null or empty, we can't do anything
if (stack == null || stack.Length == 0 || Needle == null || Needle.Length == 0)
return (false, -1);
// If the needle array is larger than the stack array, it can't be contained within
if (Needle.Length > stack.Length)
return (false, -1);
// Set the default start and end values
int start = Start;
int end = End;
// If start or end are not set properly, set them to defaults
if (start < 0)
start = 0;
if (end < 0)
end = stack.Length - Needle.Length;
for (int i = reverse ? end : start; reverse ? i >= start : i <= end; i += reverse ? -1 : 1)
{
// If we somehow have an invalid end and we haven't matched, return
if (i > stack.Length)
return (false, -1);
// Check to see if the values are equal
if (EqualAt(stack, i))
return (true, i);
}
return (false, -1);
}
/// <summary>
/// Get if a stack at a certain index is equal to a needle
/// </summary>
/// <param name="stack">Array to search for the given content</param>
/// <param name="index">Starting index to check equality</param>
/// <returns>True if the needle matches the stack at a given index</returns>
private bool EqualAt(byte[] stack, int index)
{
// If the index is invalid, we can't do anything
if (index < 0)
return false;
// If we're too close to the end of the stack, return false
if (Needle.Length > stack.Length - index)
return false;
// Loop through and check the value
for (int i = 0; i < Needle.Length; i++)
{
// A null value is a wildcard
if (Needle[i] == null)
continue;
else if (stack[i + index] != Needle[i])
return false;
}
return true;
}
#endregion
#region Stream Matching
/// <summary>
/// Get if this match can be found in a stack
/// </summary>
/// <param name="stack">Stream to search for the given content</param>
/// <param name="reverse">True to search from the end of the array, false from the start</param>
/// <returns>Tuple of success and found position</returns>
public (bool success, int position) Match(Stream stack, bool reverse = false)
{
// If the stream or the needle is null or empty, we can't do anything
if (stack == null || stack.Length == 0 || Needle == null || Needle.Length == 0)
return (false, -1);
// If the needle is larger than the stream, it can't be contained within
if (Needle.Length > stack.Length)
return (false, -1);
// Set the default start and end values
int start = Start;
int end = End;
// If start or end are not set properly, set them to defaults
if (start < 0)
start = 0;
if (end < 0)
end = (int)(stack.Length - Needle.Length);
for (int i = reverse ? end : start; reverse ? i >= start : i <= end; i += reverse ? -1 : 1)
{
// If we somehow have an invalid end and we haven't matched, return
if (i > stack.Length)
return (false, -1);
// Check to see if the values are equal
if (EqualAt(stack, i))
return (true, i);
}
return (false, -1);
}
/// <summary>
/// Get if a stack at a certain index is equal to a needle
/// </summary>
/// <param name="stack">Stream to search for the given content</param>
/// <param name="index">Starting index to check equality</param>
/// <returns>True if the needle matches the stack at a given index</returns>
private bool EqualAt(Stream stack, int index)
{
// If the index is invalid, we can't do anything
if (index < 0)
return false;
// If we're too close to the end of the stack, return false
if (Needle.Length > stack.Length - index)
return false;
// Save the current position and move to the index
long currentPosition = stack.Position;
stack.Seek(index, SeekOrigin.Begin);
// Set the return value
bool matched = true;
// Loop through and check the value
for (int i = 0; i < Needle.Length; i++)
{
byte stackValue = (byte)stack.ReadByte();
// A null value is a wildcard
if (Needle[i] == null)
{
continue;
}
else if (stackValue != Needle[i])
{
matched = false;
break;
}
}
// Reset the position and return the value
stack.Seek(currentPosition, SeekOrigin.Begin);
return matched;
}
#endregion
}
}

View File

@@ -0,0 +1,199 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
namespace BurnOutSharp.Matching
{
/// <summary>
/// A set of content matches that work together
/// </summary>
public class ContentMatchSet : MatchSet<ContentMatch, byte?[]>
{
/// <summary>
/// Function to get a content version
/// </summary>
/// <remarks>
/// A content version method takes the file path, the file contents,
/// and a list of found positions and returns a single string. That
/// string is either a version string, in which case it will be appended
/// to the protection name, or `null`, in which case it will cause
/// the protection to be omitted.
/// </remarks>
public Func<string, byte[], List<int>, string> GetArrayVersion { get; set; }
/// <summary>
/// Function to get a content version
/// </summary>
/// <remarks>
/// A content version method takes the file path, the file contents,
/// and a list of found positions and returns a single string. That
/// string is either a version string, in which case it will be appended
/// to the protection name, or `null`, in which case it will cause
/// the protection to be omitted.
/// </remarks>
public Func<string, Stream, List<int>, string> GetStreamVersion { get; set; }
#region Generic Constructors
public ContentMatchSet(byte?[] needle, string protectionName)
: this(new List<byte?[]> { needle }, getArrayVersion: null, protectionName) { }
public ContentMatchSet(List<byte?[]> needles, string protectionName)
: this(needles, getArrayVersion: null, protectionName) { }
public ContentMatchSet(ContentMatch needle, string protectionName)
: this(new List<ContentMatch>() { needle }, getArrayVersion: null, protectionName) { }
public ContentMatchSet(List<ContentMatch> needles, string protectionName)
: this(needles, getArrayVersion: null, protectionName) { }
#endregion
#region Array Constructors
public ContentMatchSet(byte?[] needle, Func<string, byte[], List<int>, string> getArrayVersion, string protectionName)
: this(new List<byte?[]> { needle }, getArrayVersion, protectionName) { }
public ContentMatchSet(List<byte?[]> needles, Func<string, byte[], List<int>, string> getArrayVersion, string protectionName)
: this(needles.Select(n => new ContentMatch(n)).ToList(), getArrayVersion, protectionName) { }
public ContentMatchSet(ContentMatch needle, Func<string, byte[], List<int>, string> getArrayVersion, string protectionName)
: this(new List<ContentMatch>() { needle }, getArrayVersion, protectionName) { }
public ContentMatchSet(List<ContentMatch> needles, Func<string, byte[], List<int>, string> getArrayVersion, string protectionName)
{
Matchers = needles;
GetArrayVersion = getArrayVersion;
ProtectionName = protectionName;
}
#endregion
#region Stream Constructors
public ContentMatchSet(byte?[] needle, Func<string, Stream, List<int>, string> getStreamVersion, string protectionName)
: this(new List<byte?[]> { needle }, getStreamVersion, protectionName) { }
public ContentMatchSet(List<byte?[]> needles, Func<string, Stream, List<int>, string> getStreamVersion, string protectionName)
: this(needles.Select(n => new ContentMatch(n)).ToList(), getStreamVersion, protectionName) { }
public ContentMatchSet(ContentMatch needle, Func<string, Stream, List<int>, string> getStreamVersion, string protectionName)
: this(new List<ContentMatch>() { needle }, getStreamVersion, protectionName) { }
public ContentMatchSet(List<ContentMatch> needles, Func<string, Stream, List<int>, string> getStreamVersion, string protectionName)
{
Matchers = needles;
GetStreamVersion = getStreamVersion;
ProtectionName = protectionName;
}
#endregion
#region Array Matching
/// <summary>
/// Determine whether all content matches pass
/// </summary>
/// <param name="stack">Array to search</param>
/// <returns>Tuple of passing status and matching positions</returns>
public (bool, List<int>) MatchesAll(byte[] stack)
{
// If no content matches are defined, we fail out
if (Matchers == null || !Matchers.Any())
return (false, new List<int>());
// Initialize the position list
List<int> positions = new List<int>();
// Loop through all content matches and make sure all pass
foreach (var contentMatch in Matchers)
{
(bool match, int position) = contentMatch.Match(stack);
if (!match)
return (false, new List<int>());
else
positions.Add(position);
}
return (true, positions);
}
/// <summary>
/// Determine whether any content matches pass
/// </summary>
/// <param name="stack">Array to search</param>
/// <returns>Tuple of passing status and first matching position</returns>
public (bool, int) MatchesAny(byte[] stack)
{
// If no content matches are defined, we fail out
if (Matchers == null || !Matchers.Any())
return (false, -1);
// Loop through the content matches and return on the first that passes
foreach (var contentMatch in Matchers)
{
(bool match, int position) = contentMatch.Match(stack);
if (match)
return (true, position);
}
return (false, -1);
}
#endregion
#region Stream Matching
/// <summary>
/// Determine whether all content matches pass
/// </summary>
/// <param name="stack">Stream to search</param>
/// <returns>Tuple of passing status and matching positions</returns>
public (bool, List<int>) MatchesAll(Stream stack)
{
// If no content matches are defined, we fail out
if (Matchers == null || !Matchers.Any())
return (false, new List<int>());
// Initialize the position list
List<int> positions = new List<int>();
// Loop through all content matches and make sure all pass
foreach (var contentMatch in Matchers)
{
(bool match, int position) = contentMatch.Match(stack);
if (!match)
return (false, new List<int>());
else
positions.Add(position);
}
return (true, positions);
}
/// <summary>
/// Determine whether any content matches pass
/// </summary>
/// <param name="stack">Stream to search</param>
/// <returns>Tuple of passing status and first matching position</returns>
public (bool, int) MatchesAny(Stream stack)
{
// If no content matches are defined, we fail out
if (Matchers == null || !Matchers.Any())
return (false, -1);
// Loop through the content matches and return on the first that passes
foreach (var contentMatch in Matchers)
{
(bool match, int position) = contentMatch.Match(stack);
if (match)
return (true, position);
}
return (false, -1);
}
#endregion
}
}

View File

@@ -0,0 +1,96 @@
using System.Collections.Generic;
using System.Linq;
namespace BurnOutSharp.Matching
{
public static class Extensions
{
/// <summary>
/// Find all positions of one array in another, if possible
/// </summary>
public static List<int> FindAllPositions(this byte[] stack, byte?[] needle, int start = 0, int end = -1)
{
// Get the outgoing list
List<int> positions = new List<int>();
// Initialize the loop variables
bool found = true;
int lastPosition = start;
var matcher = new ContentMatch(needle, end: end);
// Loop over and get all positions
while (found)
{
matcher.Start = lastPosition;
(found, lastPosition) = matcher.Match(stack, false);
if (found)
{
positions.Add(lastPosition);
// Advance past this match so the next search cannot return the same position again
lastPosition++;
}
}
return positions;
}
/// <summary>
/// Find the first position of one array in another, if possible
/// </summary>
public static bool FirstPosition(this byte[] stack, byte[] needle, out int position, int start = 0, int end = -1)
{
byte?[] nullableNeedle = needle != null ? needle.Select(b => (byte?)b).ToArray() : null;
return stack.FirstPosition(nullableNeedle, out position, start, end);
}
/// <summary>
/// Find the first position of one array in another, if possible
/// </summary>
public static bool FirstPosition(this byte[] stack, byte?[] needle, out int position, int start = 0, int end = -1)
{
var matcher = new ContentMatch(needle, start, end);
(bool found, int foundPosition) = matcher.Match(stack, false);
position = foundPosition;
return found;
}
/// <summary>
/// Find the last position of one array in another, if possible
/// </summary>
public static bool LastPosition(this byte[] stack, byte?[] needle, out int position, int start = 0, int end = -1)
{
var matcher = new ContentMatch(needle, start, end);
(bool found, int foundPosition) = matcher.Match(stack, true);
position = foundPosition;
return found;
}
/// <summary>
/// See if a byte array starts with another
/// </summary>
public static bool StartsWith(this byte[] stack, byte[] needle)
{
return stack.FirstPosition(needle, out int _, start: 0, end: 0);
}
/// <summary>
/// See if a byte array starts with another
/// </summary>
public static bool StartsWith(this byte[] stack, byte?[] needle)
{
return stack.FirstPosition(needle, out int _, start: 0, end: 0);
}
/// <summary>
/// See if a byte array ends with another
/// </summary>
public static bool EndsWith(this byte[] stack, byte[] needle)
{
return stack.FirstPosition(needle, out int _, start: stack.Length - needle.Length);
}
/// <summary>
/// See if a byte array ends with another
/// </summary>
public static bool EndsWith(this byte[] stack, byte?[] needle)
{
return stack.FirstPosition(needle, out int _, start: stack.Length - needle.Length);
}
}
}

View File

@@ -0,0 +1,7 @@
namespace BurnOutSharp.Matching
{
public interface IMatch<T>
{
T Needle { get; set; }
}
}

View File

@@ -0,0 +1,20 @@
using System.Collections.Generic;
namespace BurnOutSharp.Matching
{
/// <summary>
/// Wrapper for a single set of matching criteria
/// </summary>
public abstract class MatchSet<T, U> where T : IMatch<U>
{
/// <summary>
/// Set of all matchers
/// </summary>
public IEnumerable<T> Matchers { get; set; }
/// <summary>
/// Name of the protection to show
/// </summary>
public string ProtectionName { get; set; }
}
}

View File

@@ -0,0 +1,340 @@
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.IO;
using System.Linq;
namespace BurnOutSharp.Matching
{
/// <summary>
/// Helper class for matching
/// </summary>
public static class MatchUtil
{
#region Array Content Matching
/// <summary>
/// Get all content matches for a given list of matchers
/// </summary>
/// <param name="file">File to check for matches</param>
/// <param name="stack">Array to search</param>
/// <param name="matchers">Enumerable of ContentMatchSets to be run on the file</param>
/// <param name="includeDebug">True to include positional data, false otherwise</param>
/// <returns>List of strings representing the matched protections, null or empty otherwise</returns>
public static ConcurrentQueue<string> GetAllMatches(
string file,
byte[] stack,
IEnumerable<ContentMatchSet> matchers,
bool includeDebug = false)
{
return FindAllMatches(file, stack, matchers, includeDebug, false);
}
/// <summary>
/// Get first content match for a given list of matchers
/// </summary>
/// <param name="file">File to check for matches</param>
/// <param name="stack">Array to search</param>
/// <param name="matchers">Enumerable of ContentMatchSets to be run on the file</param>
/// <param name="includeDebug">True to include positional data, false otherwise</param>
/// <returns>String representing the matched protection, null otherwise</returns>
public static string GetFirstMatch(
string file,
byte[] stack,
IEnumerable<ContentMatchSet> matchers,
bool includeDebug = false)
{
var contentMatches = FindAllMatches(file, stack, matchers, includeDebug, true);
if (contentMatches == null || !contentMatches.Any())
return null;
return contentMatches.First();
}
/// <summary>
/// Get the required set of content matches on a per Matcher basis
/// </summary>
/// <param name="file">File to check for matches</param>
/// <param name="stack">Array to search</param>
/// <param name="matchers">Enumerable of ContentMatchSets to be run on the file</param>
/// <param name="includeDebug">True to include positional data, false otherwise</param>
/// <param name="stopAfterFirst">True to stop after the first match, false otherwise</param>
/// <returns>List of strings representing the matched protections, null or empty otherwise</returns>
private static ConcurrentQueue<string> FindAllMatches(
string file,
byte[] stack,
IEnumerable<ContentMatchSet> matchers,
bool includeDebug,
bool stopAfterFirst)
{
// If there are no matchers, we can't match
if (matchers == null || !matchers.Any())
return null;
// Initialize the queue of matched protections
var matchedProtections = new ConcurrentQueue<string>();
// Loop through and try everything otherwise
foreach (var matcher in matchers)
{
// Determine if the matcher passes
(bool passes, List<int> positions) = matcher.MatchesAll(stack);
if (!passes)
continue;
// Format the list of all positions found
string positionsString = string.Join(", ", positions);
// If there is no version method, just return the protection name
if (matcher.GetArrayVersion == null)
{
matchedProtections.Enqueue((matcher.ProtectionName ?? "Unknown Protection") + (includeDebug ? $" (Index {positionsString})" : string.Empty));
}
// Otherwise, invoke the version method
else
{
// A null version returned means the check didn't pass at the version step
string version = matcher.GetArrayVersion(file, stack, positions);
if (version == null)
continue;
matchedProtections.Enqueue($"{matcher.ProtectionName ?? "Unknown Protection"} {version}".TrimEnd() + (includeDebug ? $" (Index {positionsString})" : string.Empty));
}
// If we're stopping after the first protection, bail out here
if (stopAfterFirst)
return matchedProtections;
}
return matchedProtections;
}
#endregion
#region Stream Content Matching
/// <summary>
/// Get all content matches for a given list of matchers
/// </summary>
/// <param name="file">File to check for matches</param>
/// <param name="stack">Stream to search</param>
/// <param name="matchers">Enumerable of ContentMatchSets to be run on the file</param>
/// <param name="includeDebug">True to include positional data, false otherwise</param>
/// <returns>List of strings representing the matched protections, null or empty otherwise</returns>
public static ConcurrentQueue<string> GetAllMatches(
string file,
Stream stack,
IEnumerable<ContentMatchSet> matchers,
bool includeDebug = false)
{
return FindAllMatches(file, stack, matchers, includeDebug, false);
}
/// <summary>
/// Get first content match for a given list of matchers
/// </summary>
/// <param name="file">File to check for matches</param>
/// <param name="stack">Stream to search</param>
/// <param name="matchers">Enumerable of ContentMatchSets to be run on the file</param>
/// <param name="includeDebug">True to include positional data, false otherwise</param>
/// <returns>String representing the matched protection, null otherwise</returns>
public static string GetFirstMatch(
string file,
Stream stack,
IEnumerable<ContentMatchSet> matchers,
bool includeDebug = false)
{
var contentMatches = FindAllMatches(file, stack, matchers, includeDebug, true);
if (contentMatches == null || !contentMatches.Any())
return null;
return contentMatches.First();
}
/// <summary>
/// Get the required set of content matches on a per Matcher basis
/// </summary>
/// <param name="file">File to check for matches</param>
/// <param name="stack">Stream to search</param>
/// <param name="matchers">Enumerable of ContentMatchSets to be run on the file</param>
/// <param name="includeDebug">True to include positional data, false otherwise</param>
/// <param name="stopAfterFirst">True to stop after the first match, false otherwise</param>
/// <returns>List of strings representing the matched protections, null or empty otherwise</returns>
private static ConcurrentQueue<string> FindAllMatches(
string file,
Stream stack,
IEnumerable<ContentMatchSet> matchers,
bool includeDebug,
bool stopAfterFirst)
{
// If there are no mappings, we can't match
if (matchers == null || !matchers.Any())
return null;
// Initialize the queue of matched protections
var matchedProtections = new ConcurrentQueue<string>();
// Loop through and try everything otherwise
foreach (var matcher in matchers)
{
// Determine if the matcher passes
(bool passes, List<int> positions) = matcher.MatchesAll(stack);
if (!passes)
continue;
// Format the list of all positions found
string positionsString = string.Join(", ", positions);
// If there is no version method, just return the protection name
if (matcher.GetStreamVersion == null)
{
matchedProtections.Enqueue((matcher.ProtectionName ?? "Unknown Protection") + (includeDebug ? $" (Index {positionsString})" : string.Empty));
}
// Otherwise, invoke the version method
else
{
// A null version returned means the check didn't pass at the version step
string version = matcher.GetStreamVersion(file, stack, positions);
if (version == null)
continue;
matchedProtections.Enqueue($"{matcher.ProtectionName ?? "Unknown Protection"} {version}".TrimEnd() + (includeDebug ? $" (Index {positionsString})" : string.Empty));
}
// If we're stopping after the first protection, bail out here
if (stopAfterFirst)
return matchedProtections;
}
return matchedProtections;
}
#endregion
#region Path Matching
/// <summary>
/// Get all path matches for a given list of matchers
/// </summary>
/// <param name="file">File path to check for matches</param>
/// <param name="matchers">Enumerable of PathMatchSets to be run on the file</param>
/// <param name="any">True if any path match is a success, false if all have to match</param>
/// <returns>List of strings representing the matched protections, null or empty otherwise</returns>
public static ConcurrentQueue<string> GetAllMatches(string file, IEnumerable<PathMatchSet> matchers, bool any = false)
{
return FindAllMatches(new List<string> { file }, matchers, any, false);
}
/// <summary>
/// Get all path matches for a given list of matchers
/// </summary>
/// <param name="files">File paths to check for matches</param>
/// <param name="matchers">Enumerable of PathMatchSets to be run on the file</param>
/// <param name="any">True if any path match is a success, false if all have to match</param>
/// <returns>List of strings representing the matched protections, null or empty otherwise</returns>
public static ConcurrentQueue<string> GetAllMatches(IEnumerable<string> files, IEnumerable<PathMatchSet> matchers, bool any = false)
{
return FindAllMatches(files, matchers, any, false);
}
/// <summary>
/// Get first path match for a given list of matchers
/// </summary>
/// <param name="file">File path to check for matches</param>
/// <param name="matchers">Enumerable of PathMatchSets to be run on the file</param>
/// <param name="any">True if any path match is a success, false if all have to match</param>
/// <returns>String representing the matched protection, null otherwise</returns>
public static string GetFirstMatch(string file, IEnumerable<PathMatchSet> matchers, bool any = false)
{
var contentMatches = FindAllMatches(new List<string> { file }, matchers, any, true);
if (contentMatches == null || !contentMatches.Any())
return null;
return contentMatches.First();
}
/// <summary>
/// Get first path match for a given list of matchers
/// </summary>
/// <param name="files">File paths to check for matches</param>
/// <param name="matchers">Enumerable of PathMatchSets to be run on the file</param>
/// <param name="any">True if any path match is a success, false if all have to match</param>
/// <returns>String representing the matched protection, null otherwise</returns>
public static string GetFirstMatch(IEnumerable<string> files, IEnumerable<PathMatchSet> matchers, bool any = false)
{
var contentMatches = FindAllMatches(files, matchers, any, true);
if (contentMatches == null || !contentMatches.Any())
return null;
return contentMatches.First();
}
/// <summary>
/// Get the required set of path matches on a per Matcher basis
/// </summary>
/// <param name="files">File paths to check for matches</param>
/// <param name="matchers">Enumerable of PathMatchSets to be run on the file</param>
/// <param name="any">True if any path match is a success, false if all have to match</param>
/// <param name="stopAfterFirst">True to stop after the first match, false otherwise</param>
/// <returns>List of strings representing the matched protections, null or empty otherwise</returns>
private static ConcurrentQueue<string> FindAllMatches(IEnumerable<string> files, IEnumerable<PathMatchSet> matchers, bool any, bool stopAfterFirst)
{
// If there are no mappings, we can't match
if (matchers == null || !matchers.Any())
return new ConcurrentQueue<string>();
// Initialize the list of matched protections
var matchedProtections = new ConcurrentQueue<string>();
// Loop through and try everything otherwise
foreach (var matcher in matchers)
{
// Determine if the matcher passes
bool passes;
string firstMatchedString;
if (any)
{
(bool anyPasses, string matchedString) = matcher.MatchesAny(files);
passes = anyPasses;
firstMatchedString = matchedString;
}
else
{
(bool allPasses, List<string> matchedStrings) = matcher.MatchesAll(files);
passes = allPasses;
firstMatchedString = matchedStrings.FirstOrDefault();
}
// If we don't have a pass, just continue
if (!passes)
continue;
// If there is no version method, just return the protection name
if (matcher.GetVersion == null)
{
matchedProtections.Enqueue(matcher.ProtectionName ?? "Unknown Protection");
}
// Otherwise, invoke the version method
else
{
// A null version returned means the check didn't pass at the version step
string version = matcher.GetVersion(firstMatchedString, files);
if (version == null)
continue;
matchedProtections.Enqueue($"{matcher.ProtectionName ?? "Unknown Protection"} {version}".TrimEnd());
}
// If we're stopping after the first protection, bail out here
if (stopAfterFirst)
return matchedProtections;
}
return matchedProtections;
}
#endregion
}
}
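
A minimal usage sketch of the path-matching entry points, using the PathMatch and PathMatchSet types shown later in this diff; the file paths and the SafeDisc pairing are illustrative only.

using System.Collections.Generic;
using BurnOutSharp.Matching;

public static class Example
{
    public static void Main()
    {
        // Hypothetical file listing from an extracted install directory
        var files = new List<string> { @"C:\Game\SETUP.EXE", @"C:\Game\drvmgt.dll" };

        // Each set pairs one or more path needles with a protection name
        var matchers = new List<PathMatchSet>
        {
            new PathMatchSet(new PathMatch("drvmgt.dll", useEndsWith: true), "SafeDisc"),
        };

        // "SafeDisc" here; null if nothing matched
        string match = MatchUtil.GetFirstMatch(files, matchers, any: true);
    }
}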


@@ -0,0 +1,71 @@
using System.Collections.Generic;
using System.Linq;
namespace BurnOutSharp.Matching
{
/// <summary>
/// Path matching criteria
/// </summary>
public class PathMatch : IMatch<string>
{
/// <summary>
/// String to match
/// </summary>
public string Needle { get; set; }
/// <summary>
/// Match exact casing instead of invariant
/// </summary>
public bool MatchExact { get; set; }
/// <summary>
/// Match that values end with the needle and not just contains
/// </summary>
public bool UseEndsWith { get; set; }
/// <summary>
/// Constructor
/// </summary>
/// <param name="needle">String representing the search</param>
/// <param name="matchExact">True to match exact casing, false otherwise</param>
/// <param name="useEndsWith">True to match the end only, false for all contents</param>
public PathMatch(string needle, bool matchExact = false, bool useEndsWith = false)
{
Needle = needle;
MatchExact = matchExact;
UseEndsWith = useEndsWith;
}
#region Matching
/// <summary>
/// Get if this match can be found in a stack
/// </summary>
/// <param name="stack">List of strings to search for the given content</param>
/// <returns>Tuple of success and matched item</returns>
public (bool, string) Match(IEnumerable<string> stack)
{
// If either array is null or empty, we can't do anything
if (stack == null || !stack.Any() || Needle == null || Needle.Length == 0)
return (false, null);
// Preprocess the needle, if necessary
string procNeedle = MatchExact ? Needle : Needle.ToLowerInvariant();
foreach (string stackItem in stack)
{
// Preprocess the stack item, if necessary
string procStackItem = MatchExact ? stackItem : stackItem.ToLowerInvariant();
if (UseEndsWith && procStackItem.EndsWith(procNeedle))
return (true, stackItem);
else if (!UseEndsWith && procStackItem.Contains(procNeedle))
return (true, stackItem);
}
return (false, null);
}
#endregion
}
}
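
A short sketch of the two matching modes under the defaults above; the paths are hypothetical.

using System.Collections.Generic;
using BurnOutSharp.Matching;

var paths = new List<string> { @"C:\Game\Data\CLOKSPL.EXE" };

// Default mode: case-insensitive substring match anywhere in the path
var loose = new PathMatch("clokspl");
var (hit, value) = loose.Match(paths);   // (true, the original path)

// MatchExact demands identical casing, so this needle misses
var strict = new PathMatch("clokspl", matchExact: true);
var (miss, _) = strict.Match(paths);     // (false, null)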


@@ -0,0 +1,108 @@
using System;
using System.Collections.Generic;
using System.Linq;
namespace BurnOutSharp.Matching
{
/// <summary>
/// A set of path matches that work together
/// </summary>
public class PathMatchSet : MatchSet<PathMatch, string>
{
/// <summary>
/// Function to get a path version for this Matcher
/// </summary>
/// <remarks>
/// A path version method takes the matched path and an enumerable of files
/// and returns a single string. That string is either a version string,
/// in which case it will be appended to the protection name, or `null`,
/// in which case it will cause the protection to be omitted.
/// </remarks>
public Func<string, IEnumerable<string>, string> GetVersion { get; set; }
#region Constructors
public PathMatchSet(string needle, string protectionName)
: this(new List<string> { needle }, null, protectionName) { }
public PathMatchSet(List<string> needles, string protectionName)
: this(needles, null, protectionName) { }
public PathMatchSet(string needle, Func<string, IEnumerable<string>, string> getVersion, string protectionName)
: this(new List<string> { needle }, getVersion, protectionName) { }
public PathMatchSet(List<string> needles, Func<string, IEnumerable<string>, string> getVersion, string protectionName)
: this(needles.Select(n => new PathMatch(n)).ToList(), getVersion, protectionName) { }
public PathMatchSet(PathMatch needle, string protectionName)
: this(new List<PathMatch>() { needle }, null, protectionName) { }
public PathMatchSet(List<PathMatch> needles, string protectionName)
: this(needles, null, protectionName) { }
public PathMatchSet(PathMatch needle, Func<string, IEnumerable<string>, string> getVersion, string protectionName)
: this(new List<PathMatch>() { needle }, getVersion, protectionName) { }
public PathMatchSet(List<PathMatch> needles, Func<string, IEnumerable<string>, string> getVersion, string protectionName)
{
Matchers = needles;
GetVersion = getVersion;
ProtectionName = protectionName;
}
#endregion
#region Matching
/// <summary>
/// Determine whether all path matches pass
/// </summary>
/// <param name="stack">List of strings to try to match</param>
/// <returns>Tuple of passing status and matching values</returns>
public (bool, List<string>) MatchesAll(IEnumerable<string> stack)
{
// If no path matches are defined, we fail out
if (Matchers == null || !Matchers.Any())
return (false, new List<string>());
// Initialize the value list
List<string> values = new List<string>();
// Loop through all path matches and make sure all pass
foreach (var pathMatch in Matchers)
{
(bool match, string value) = pathMatch.Match(stack);
if (!match)
return (false, new List<string>());
else
values.Add(value);
}
return (true, values);
}
/// <summary>
/// Determine whether any path matches pass
/// </summary>
/// <param name="stack">List of strings to try to match</param>
/// <returns>Tuple of passing status and first matching value</returns>
public (bool, string) MatchesAny(IEnumerable<string> stack)
{
// If no path matches are defined, we fail out
if (Matchers == null || !Matchers.Any())
return (false, null);
// Loop through all path matches and make sure all pass
foreach (var pathMatch in Matchers)
{
(bool match, string value) = pathMatch.Match(stack);
if (match)
return (true, value);
}
return (false, null);
}
#endregion
}
}
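
A sketch of wiring a version method into a set, per the remarks above; the needle, version string, and protection name are all hypothetical.

using System.Collections.Generic;
using BurnOutSharp.Matching;

// A version method gets the first matched path plus the full file list and
// returns a version string, or null to suppress the protection entirely
var set = new PathMatchSet(
    new PathMatch("00000001.TMP", useEndsWith: true),
    (matchedPath, allFiles) => "1.x",   // hypothetical fixed version
    "Example Protection");

var files = new List<string> { @"C:\Install\00000001.TMP" };
var (passes, matched) = set.MatchesAll(files);
// passes == true; MatchUtil would report "Example Protection 1.x"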


@@ -0,0 +1,13 @@
namespace BurnOutSharp.Models.AACS
{
/// <summary>
/// This record type is undocumented but found in real media key blocks
/// </summary>
public sealed class CopyrightRecord : Record
{
/// <summary>
/// Null-terminated ASCII string representing the copyright
/// </summary>
public string Copyright;
}
}


@@ -0,0 +1,21 @@
namespace BurnOutSharp.Models.AACS
{
/// <see href="https://aacsla.com/wp-content/uploads/2019/02/AACS_Spec_Common_Final_0953.pdf"/>
public sealed class DriveRevocationListEntry
{
/// <summary>
/// A 2-byte Range value indicates the range of revoked IDs starting
/// from the ID contained in the record. A value of zero in the Range
/// field indicates that only one ID is being revoked, a value of one
/// in the Range field indicates two IDs are being revoked, and so on.
/// </summary>
public ushort Range;
/// <summary>
/// A 6-byte Drive ID value identifying the Licensed Drive being revoked
/// (or the first in a range of Licensed Drives being revoked, in the
/// case of a non-zero Range value).
/// </summary>
public byte[] DriveID;
}
}


@@ -0,0 +1,26 @@
namespace BurnOutSharp.Models.AACS
{
/// <summary>
/// A properly formatted type 3 or type 4 Media Key Block contains exactly
/// one Drive Revocation List Record. It follows the Host Revocation List
/// Record, although it may not immediately follow it.
///
/// The Drive Revocation List Record is identical to the Host Revocation
/// List Record, except it has type 2016, and it contains Drive Revocation
/// List Entries, not Host Revocation List Entries. The Drive Revocation List
/// Entries refer to Drive IDs in the Drive Certificates.
/// </summary>
/// <see href="https://aacsla.com/wp-content/uploads/2019/02/AACS_Spec_Common_Final_0953.pdf"/>
public sealed class DriveRevocationListRecord : Record
{
/// <summary>
/// The total number of Drive Revocation List Entry fields that follow.
/// </summary>
public uint TotalNumberOfEntries;
/// <summary>
/// Revocation list entries
/// </summary>
public DriveRevocationSignatureBlock[] SignatureBlocks;
}
}


@@ -0,0 +1,17 @@
namespace BurnOutSharp.Models.AACS
{
/// <see href="https://aacsla.com/wp-content/uploads/2019/02/AACS_Spec_Common_Final_0953.pdf"/>
public sealed class DriveRevocationSignatureBlock
{
/// <summary>
/// The number of Drive Revocation List Entry fields in the signature block.
/// </summary>
public uint NumberOfEntries;
/// <summary>
/// A list of 8-byte Drive Revocation List Entry fields, the length of this
/// list being equal to the number in the signature block.
/// </summary>
public DriveRevocationListEntry[] EntryFields;
}
}


@@ -0,0 +1,23 @@
namespace BurnOutSharp.Models.AACS
{
/// <summary>
/// A properly formatted MKB shall contain an End of Media Key Block Record.
/// When a device encounters this Record it stops processing the MKB, using
/// whatever Km value it has calculated up to that point as the final Km for
/// that MKB (pending possible checks for correctness of the key, as
/// described previously).
/// </summary>
/// <see href="https://aacsla.com/wp-content/uploads/2019/02/AACS_Spec_Common_Final_0953.pdf"/>
public sealed class EndOfMediaKeyBlockRecord : Record
{
/// <summary>
/// AACS LA's signature on the data in the Media Key Block up to,
/// but not including, this record. Devices depending on the Version
/// Number in the Type and Version Record must verify the signature.
/// Other devices may ignore the signature data. If any device
/// determines that the signature does not verify or is omitted, it
/// must refuse to use the Media Key.
/// </summary>
public byte[] SignatureData;
}
}


@@ -0,0 +1,46 @@
namespace BurnOutSharp.Models.AACS
{
public enum MediaKeyBlockType : uint
{
/// <summary>
/// (Type 3). This is a normal Media Key Block suitable for being recorded
/// on an AACS Recordable Media. Both Class I and Class II Licensed Products
/// use it to directly calculate the Media Key.
/// </summary>
Type3 = 0x00031003,
/// <summary>
/// (Type 4). This is a Media Key Block that has been designed to use Key
/// Conversion Data (KCD). Thus, it is suitable only for pre-recorded media
/// from which the KCD is derived. Both Class I and Class II Licensed Products
/// use it to directly calculate the Media Key.
/// </summary>
Type4 = 0x00041003,
/// <summary>
/// (Type 10). This is a Class II Media Key Block (one that has the functionality
/// of a Sequence Key Block). This can only be processed by Class II Licensed
/// Products; Class I Licensed Products are revoked in Type 10 Media Key Blocks
/// and cannot process them. This type does not contain the Host Revocation List
/// Record, the Drive Revocation List Record, and the Media Key Data Record, as
/// described in the following sections. It does contain the records shown in
/// Section 3.2.5.2, which are only processed by Class II Licensed Products.
/// </summary>
Type10 = 0x000A1003,
}
public enum RecordType : byte
{
EndOfMediaKeyBlock = 0x02,
ExplicitSubsetDifference = 0x04,
MediaKeyData = 0x05,
SubsetDifferenceIndex = 0x07,
TypeAndVersion = 0x10,
DriveRevocationList = 0x20,
HostRevocationList = 0x21,
VerifyMediaKey = 0x81,
// Not documented
Copyright = 0x7F,
}
}


@@ -0,0 +1,11 @@
namespace BurnOutSharp.Models.AACS
{
/// <see href="https://aacsla.com/wp-content/uploads/2019/02/AACS_Spec_Common_Final_0953.pdf"/>
public sealed class ExplicitSubsetDifferenceRecord : Record
{
/// <summary>
/// In this record, each subset-difference is encoded with 5 bytes.
/// </summary>
public SubsetDifference[] SubsetDifferences;
}
}


@@ -0,0 +1,21 @@
namespace BurnOutSharp.Models.AACS
{
/// <see href="https://aacsla.com/wp-content/uploads/2019/02/AACS_Spec_Common_Final_0953.pdf"/>
public sealed class HostRevocationListEntry
{
/// <summary>
/// A 2-byte Range value indicates the range of revoked IDs starting
/// from the ID contained in the record. A value of zero in the Range
/// field indicates that only one ID is being revoked, a value of one
/// in the Range field indicates two IDs are being revoked, and so on.
/// </summary>
public ushort Range;
/// <summary>
/// A 6-byte Host ID value identifying the host being revoked (or the
/// first in a range of hosts being revoked, in the case of a non-zero
/// Range value).
/// </summary>
public byte[] HostID;
}
}
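
A sketch of the Range semantics described above, assuming the 6-byte IDs follow the spec's most-significant-byte-first convention for multi-byte integers; this is not this project's parsing code.

using BurnOutSharp.Models.AACS;

public static class RevocationCheck
{
    // Interprets the 6-byte IDs as big-endian unsigned integers and tests
    // membership in [first, first + Range], matching the semantics above
    public static bool IsRevoked(HostRevocationListEntry entry, byte[] hostId)
    {
        ulong first = ToUInt48(entry.HostID);
        ulong candidate = ToUInt48(hostId);
        return candidate >= first && candidate <= first + entry.Range;
    }

    private static ulong ToUInt48(byte[] id)
    {
        ulong value = 0;
        for (int i = 0; i < 6; i++)
            value = (value << 8) | id[i];
        return value;
    }
}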


@@ -0,0 +1,29 @@
namespace BurnOutSharp.Models.AACS
{
/// <summary>
/// A properly formatted type 3 or type 4 Media Key Block shall have exactly
/// one Host Revocation List Record as its second record. This record provides
/// a list of hosts that have been revoked by the AACS LA. The AACS specification
/// is applicable to PC-based system where a Licensed Drive and PC Host act
/// together as the Recording Device and/or Playback Device for AACS Content.
/// AACS uses a drive-host authentication protocol for the host to verify the
/// integrity of the data received from the Licensed Drive, and for the Licensed
/// Drive to check the validity of the host application. The Type and Version
/// Record and the Host Revocation List Record are guaranteed to be the first two
/// records of a Media Key Block, to make it easier for Licensed Drives to extract
/// this data from an arbitrary Media Key Block.
/// </summary>
/// <see href="https://aacsla.com/wp-content/uploads/2019/02/AACS_Spec_Common_Final_0953.pdf"/>
public sealed class HostRevocationListRecord : Record
{
/// <summary>
/// The total number of Host Revocation List Entry fields that follow.
/// </summary>
public uint TotalNumberOfEntries;
/// <summary>
/// Revocation list entries
/// </summary>
public HostRevocationSignatureBlock[] SignatureBlocks;
}
}


@@ -0,0 +1,17 @@
namespace BurnOutSharp.Models.AACS
{
/// <see href="https://aacsla.com/wp-content/uploads/2019/02/AACS_Spec_Common_Final_0953.pdf"/>
public sealed class HostRevocationSignatureBlock
{
/// <summary>
/// The number of Host Revocation List Entry fields in the signature block.
/// </summary>
public uint NumberOfEntries;
/// <summary>
/// A list of 8-byte Host Revocation List Entry fields, the length of this
/// list being equal to the number in the signature block.
/// </summary>
public HostRevocationListEntry[] EntryFields;
}
}


@@ -0,0 +1,14 @@
namespace BurnOutSharp.Models.AACS
{
/// <summary>
/// A Media Key Block is formatted as a sequence of contiguous Records.
/// </summary>
/// <see href="https://aacsla.com/wp-content/uploads/2019/02/AACS_Spec_Common_Final_0953.pdf"/>
public sealed class MediaKeyBlock
{
/// <summary>
/// Records
/// </summary>
public Record[] Records { get; set; }
}
}


@@ -0,0 +1,18 @@
namespace BurnOutSharp.Models.AACS
{
/// <summary>
/// This record gives the associated encrypted media key data for the
/// subset-differences identified in the Explicit Subset-Difference Record.
/// </summary>
/// <see href="https://aacsla.com/wp-content/uploads/2019/02/AACS_Spec_Common_Final_0953.pdf"/>
public sealed class MediaKeyDataRecord : Record
{
/// <summary>
/// Each subset difference has its associated 16 bytes in this
/// record, in the same order it is encountered in the subset-difference
/// record. This 16 bytes is the ciphertext value C in the media
/// key calculation.
/// </summary>
public byte[][] MediaKeyData;
}
}


@@ -0,0 +1,28 @@
namespace BurnOutSharp.Models.AACS
{
/// <summary>
/// Each Record begins with a one-byte Record Type field, followed by a
/// three-byte Record Length field.
///
/// The following subsections describe the currently defined Record types,
/// and how a device processes each. All multi-byte integers, including
/// the length field, are “Big Endian”; in other words, the most significant
/// byte comes first in the record.
/// </summary>
/// <see href="https://aacsla.com/wp-content/uploads/2019/02/AACS_Spec_Common_Final_0953.pdf"/>
public abstract class Record
{
/// <summary>
/// The Record Type field value indicates the type of the Record.
/// </summary>
public RecordType RecordType;
/// <summary>
/// The Record Length field value indicates the number of bytes in
/// the Record, including the Record Type and the Record Length
/// fields themselves. Record lengths are always multiples of 4 bytes.
/// </summary>
/// <remarks>UInt24 not UInt32</remarks>
public uint RecordLength;
}
}
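
A minimal sketch of reading the header fields described above from a stream; the stream-based approach is an assumption for illustration, not this project's builder code.

using System.IO;
using BurnOutSharp.Models.AACS;

public static class RecordHeaderReader
{
    // Reads the one-byte type and three-byte big-endian length that
    // begin every record; the length includes these four header bytes
    public static (RecordType Type, uint Length) ReadHeader(Stream stream)
    {
        var type = (RecordType)stream.ReadByte();
        uint length = (uint)((stream.ReadByte() << 16)
                           | (stream.ReadByte() << 8)
                           |  stream.ReadByte());
        return (type, length);
    }
}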


@@ -0,0 +1,20 @@
namespace BurnOutSharp.Models.AACS
{
/// <see href="https://aacsla.com/wp-content/uploads/2019/02/AACS_Spec_Common_Final_0953.pdf"/>
public sealed class SubsetDifference
{
/// <summary>
/// The mask for u is given by the first byte. That byte is
/// treated as a number, the number of low-order 0-bits in
/// the mask. For example, the value 0x01 denotes a mask of
/// 0xFFFFFFFE; value 0x0A denotes a mask of 0xFFFFFC00.
/// </summary>
public byte Mask;
/// <summary>
/// The last 4 bytes are the uv number, most significant
/// byte first.
/// </summary>
public uint Number;
}
}
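
A one-line sketch of expanding the mask byte, matching the two examples in the comment above.

public static class SubsetDifferenceMask
{
    // The mask byte counts the low-order zero bits of the mask for u:
    // Expand(0x01) == 0xFFFFFFFE, Expand(0x0A) == 0xFFFFFC00
    public static uint Expand(byte maskByte) => uint.MaxValue << maskByte;
}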


@@ -0,0 +1,26 @@
namespace BurnOutSharp.Models.AACS
{
/// <summary>
/// This is a speed-up record which can be ignored by devices not wishing to
/// take advantage of it. It is a lookup table which allows devices to quickly
/// find their subset-difference in the Explicit Subset-Difference record,
/// without processing the entire record. This Subset-Difference Index record
/// is always present, and always precedes the Explicit Subset-Difference record
/// in the MKB, although it does not necessarily immediately precede it.
/// </summary>
/// <see href="https://aacsla.com/wp-content/uploads/2019/02/AACS_Spec_Common_Final_0953.pdf"/>
public sealed class SubsetDifferenceIndexRecord : Record
{
/// <summary>
/// The number of devices per index offset.
/// </summary>
public uint Span;
/// <summary>
/// These offsets refer to the offset within the following Explicit
/// Subset-Difference record, with 0 being the start of the record.
/// </summary>
/// <remarks>UInt24 not UInt32</remarks>
public uint[] Offsets;
}
}


@@ -0,0 +1,32 @@
namespace BurnOutSharp.Models.AACS
{
/// <summary>
/// Devices, except for recording devices which are writing Media Key Block
/// Extensions, may ignore this record. Recording devices shall verify the
/// signature (see End of Media Key Block record) and use the Version Number
/// in this record to determine if a new Media Key Block Extension is, in
/// fact, more recent than the Media Key Block Extension that is currently
/// on the media.
/// </summary>
/// <see href="https://aacsla.com/wp-content/uploads/2019/02/AACS_Spec_Common_Final_0953.pdf"/>
public sealed class TypeAndVersionRecord : Record
{
/// <summary>
/// For AACS applications, the MKBType field is one of three values.
/// It is not an error for a Type 3 Media Key Block to be used for
/// controlling access to AACS Content on pre-recorded media. In
/// this case, the device shall not use the KCD.
/// </summary>
public MediaKeyBlockType MediaKeyBlockType;
/// <summary>
/// The Version Number is a 32-bit unsigned integer. Each time the
/// licensing agency changes the revocation, it increments the version
/// number and inserts the new value in subsequent Media Key Blocks.
/// Thus, larger values indicate more recent Media Key Blocks. The
/// Version Numbers begin at 1; 0 is a special value used for test
/// Media Key Blocks.
/// </summary>
public uint VersionNumber;
}
}


@@ -0,0 +1,24 @@
namespace BurnOutSharp.Models.AACS
{
/// <summary>
/// A properly formatted MKB shall have exactly one Verify Media Key Record
/// as its first record. The presence of the Verify Media Key Record in an MKB
/// is mandatory, but the use of the Record by a device is not mandatory. The
/// device may use the Verify Media Key Record to verify the correctness of a
/// given MKB, or of its processing of it. If everything is correct, the device
/// should observe the condition:
/// [AES_128D(Km, Cv)]msb_64 == 0x0123456789ABCDEF
/// where Km is the Media Key value.
/// </summary>
/// <see href="https://aacsla.com/wp-content/uploads/2019/02/AACS_Spec_Common_Final_0953.pdf"/>
public sealed class VerifyMediaKeyRecord : Record
{
/// <summary>
/// Bytes 4 through 19 of the Record contain the ciphertext value
/// Cv = AES-128E (Km, 0x0123456789ABCDEF || 0xXXXXXXXXXXXXXXXX)
/// where 0xXXXXXXXXXXXXXXXX is an arbitrary 8-byte value, and Km is
/// the correct final Media Key value.
/// </summary>
public byte[] CiphertextValue;
}
}
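
A sketch of the verification condition above, on the assumption that AES_128D corresponds to a single-block AES decryption (ECB mode, no padding) in .NET; this is not code from this diff.

using System.Linq;
using System.Security.Cryptography;

public static class MediaKeyVerifier
{
    private static readonly byte[] ExpectedPrefix =
        { 0x01, 0x23, 0x45, 0x67, 0x89, 0xAB, 0xCD, 0xEF };

    // Decrypts the 16-byte Cv with a candidate Km and checks that the
    // most significant 8 bytes equal the known constant
    public static bool Verify(byte[] mediaKey, byte[] ciphertextValue)
    {
        using var aes = Aes.Create();
        aes.Key = mediaKey;           // 16-byte candidate Km
        aes.Mode = CipherMode.ECB;    // single-block decrypt, no chaining
        aes.Padding = PaddingMode.None;

        using var decryptor = aes.CreateDecryptor();
        byte[] plaintext = decryptor.TransformFinalBlock(ciphertextValue, 0, 16);
        return plaintext.Take(8).SequenceEqual(ExpectedPrefix);
    }
}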


@@ -0,0 +1,7 @@
namespace BurnOutSharp.Models.BDPlus
{
public static class Constants
{
public const string SignatureString = "BDSVM_CC";
}
}


@@ -0,0 +1,46 @@
namespace BurnOutSharp.Models.BDPlus
{
/// <see href="https://github.com/mwgoldsmith/bdplus/blob/master/src/libbdplus/bdsvm/loader.c"/>
public sealed class SVM
{
/// <summary>
/// "BDSVM_CC"
/// </summary>
public string Signature;
/// <summary>
/// 5 bytes of unknown data
/// </summary>
public byte[] Unknown1;
/// <summary>
/// Version year
/// </summary>
public ushort Year;
/// <summary>
/// Version month
/// </summary>
public byte Month;
/// <summary>
/// Version day
/// </summary>
public byte Day;
/// <summary>
/// 4 bytes of unknown data
/// </summary>
public byte[] Unknown2;
/// <summary>
/// Length
/// </summary>
public uint Length;
/// <summary>
/// Length bytes of data
/// </summary>
public byte[] Data;
}
}


@@ -0,0 +1,19 @@
namespace BurnOutSharp.Models.BFPK
{
/// <summary>
/// BFPK custom archive format
/// </summary>
/// <see href="https://forum.xentax.com/viewtopic.php?t=5102"/>
public sealed class Archive
{
/// <summary>
/// Header
/// </summary>
public Header Header { get; set; }
/// <summary>
/// Files
/// </summary>
public FileEntry[] Files { get; set; }
}
}


@@ -0,0 +1,11 @@
namespace BurnOutSharp.Models.BFPK
{
public static class Constants
{
public static readonly byte[] SignatureBytes = new byte[] { 0x42, 0x46, 0x50, 0x4b };
public const string SignatureString = "BFPK";
public const uint SignatureUInt32 = 0x4b504642;
}
}
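
A sketch of testing a stream for the signature, assuming a little-endian host so the bytes 42 46 50 4B read back as SignatureUInt32; not this project's detection code.

using System;
using System.IO;
using BurnOutSharp.Models.BFPK;

public static class BfpkDetector
{
    // Reads the first four bytes and compares them to the BFPK magic;
    // on a little-endian host "BFPK" reads back as 0x4B504642
    public static bool HasSignature(Stream stream)
    {
        byte[] buffer = new byte[4];
        if (stream.Read(buffer, 0, 4) != 4)
            return false;
        return BitConverter.ToUInt32(buffer, 0) == Constants.SignatureUInt32;
    }
}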


@@ -0,0 +1,34 @@
namespace BurnOutSharp.Models.BFPK
{
/// <summary>
/// File entry
/// </summary>
/// <see href="https://forum.xentax.com/viewtopic.php?t=5102"/>
public sealed class FileEntry
{
/// <summary>
/// Name size
/// </summary>
public int NameSize;
/// <summary>
/// Name
/// </summary>
public string Name;
/// <summary>
/// Uncompressed size
/// </summary>
public int UncompressedSize;
/// <summary>
/// Offset
/// </summary>
public int Offset;
/// <summary>
/// Compressed size
/// </summary>
public int CompressedSize;
}
}


@@ -0,0 +1,27 @@
using System.Runtime.InteropServices;
namespace BurnOutSharp.Models.BFPK
{
/// <summary>
/// Header
/// </summary>
/// <see href="https://forum.xentax.com/viewtopic.php?t=5102"/>
[StructLayout(LayoutKind.Sequential)]
public sealed class Header
{
/// <summary>
/// "BFPK"
/// </summary>
public string Magic;
/// <summary>
/// Version
/// </summary>
public int Version;
/// <summary>
/// Files
/// </summary>
public int Files;
}
}


@@ -0,0 +1,35 @@
namespace BurnOutSharp.Models.BMP
{
/// <summary>
/// The BITMAPFILEHEADER structure contains information about the type, size,
/// and layout of a file that contains a DIB.
/// </summary>
/// <see href="https://learn.microsoft.com/en-us/windows/win32/api/wingdi/ns-wingdi-bitmapfileheader"/>
public sealed class BITMAPFILEHEADER
{
/// <summary>
/// The file type; must be BM.
/// </summary>
public ushort Type;
/// <summary>
/// The size, in bytes, of the bitmap file.
/// </summary>
public uint Size;
/// <summary>
/// Reserved; must be zero.
/// </summary>
public ushort Reserved1;
/// <summary>
/// Reserved; must be zero.
/// </summary>
public ushort Reserved2;
/// <summary>
/// The offset, in bytes, from the beginning of the BITMAPFILEHEADER structure to the bitmap bits.
/// </summary>
public uint OffBits;
}
}
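
A sketch of populating the structure from a little-endian stream via BinaryReader; field order follows the definition above, and the reader is an assumption for illustration.

using System.IO;
using BurnOutSharp.Models.BMP;

public static class BmpHeaderReader
{
    // Parses the 14-byte file header in declaration order
    public static BITMAPFILEHEADER Read(BinaryReader reader)
    {
        return new BITMAPFILEHEADER
        {
            Type = reader.ReadUInt16(),   // 0x4D42, i.e. "BM"
            Size = reader.ReadUInt32(),
            Reserved1 = reader.ReadUInt16(),
            Reserved2 = reader.ReadUInt16(),
            OffBits = reader.ReadUInt32(),
        };
    }
}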


@@ -0,0 +1,94 @@
namespace BurnOutSharp.Models.BMP
{
/// <summary>
/// The BITMAPINFOHEADER structure contains information about the dimensions and
/// color format of a device-independent bitmap (DIB).
/// </summary>
public sealed class BITMAPINFOHEADER
{
/// <summary>
/// Specifies the number of bytes required by the structure. This value does
/// not include the size of the color table or the size of the color masks,
/// if they are appended to the end of structure.
/// </summary>
public uint Size;
/// <summary>
/// Specifies the width of the bitmap, in pixels.
/// </summary>
public int Width;
/// <summary>
/// Specifies the height of the bitmap, in pixels.
/// - For uncompressed RGB bitmaps, if biHeight is positive, the bitmap is a
/// bottom-up DIB with the origin at the lower left corner. If biHeight is
/// negative, the bitmap is a top-down DIB with the origin at the upper left
/// corner.
/// - For YUV bitmaps, the bitmap is always top-down, regardless of the sign of
/// biHeight. Decoders should offer YUV formats with positive biHeight, but for
/// backward compatibility they should accept YUV formats with either positive
/// or negative biHeight.
/// - For compressed formats, biHeight must be positive, regardless of image orientation.
/// </summary>
public int Height;
/// <summary>
/// Specifies the number of planes for the target device. This value must be set to 1.
/// </summary>
public ushort Planes;
/// <summary>
/// Specifies the number of bits per pixel (bpp). For uncompressed formats, this value
/// is the average number of bits per pixel. For compressed formats, this value is the
/// implied bit depth of the uncompressed image, after the image has been decoded.
/// </summary>
public ushort BitCount;
/// <summary>
/// For compressed video and YUV formats, this member is a FOURCC code, specified as a
/// DWORD in little-endian order. For example, YUYV video has the FOURCC 'VYUY' or
/// 0x56595559. For more information, see FOURCC Codes.
///
/// For uncompressed RGB formats, the following values are possible:
/// - BI_RGB: Uncompressed RGB.
/// - BI_BITFIELDS: Uncompressed RGB with color masks. Valid for 16-bpp and 32-bpp bitmaps.
///
/// Note that BI_JPG and BI_PNG are not valid video formats.
///
/// For 16-bpp bitmaps, if biCompression equals BI_RGB, the format is always RGB 555.
/// If biCompression equals BI_BITFIELDS, the format is either RGB 555 or RGB 565. Use
/// the subtype GUID in the AM_MEDIA_TYPE structure to determine the specific RGB type.
/// </summary>
public uint Compression;
/// <summary>
/// Specifies the size, in bytes, of the image. This can be set to 0 for uncompressed
/// RGB bitmaps.
/// </summary>
public uint SizeImage;
/// <summary>
/// Specifies the horizontal resolution, in pixels per meter, of the target device for
/// the bitmap.
/// </summary>
public int XPelsPerMeter;
/// <summary>
/// Specifies the vertical resolution, in pixels per meter, of the target device for
/// the bitmap.
/// </summary>
public int YPelsPerMeter;
/// <summary>
/// Specifies the number of color indices in the color table that are actually used by
/// the bitmap.
/// </summary>
public uint ClrUsed;
/// <summary>
/// Specifies the number of color indices that are considered important for displaying
/// the bitmap. If this value is zero, all colors are important.
/// </summary>
public uint ClrImportant;
}
}


@@ -0,0 +1,25 @@
namespace BurnOutSharp.Models.BSP
{
public static class Constants
{
/// <summary>
/// Number of lumps in a BSP
/// </summary>
public const int HL_BSP_LUMP_COUNT = 15;
/// <summary>
/// Index for the entities lump
/// </summary>
public const int HL_BSP_LUMP_ENTITIES = 0;
/// <summary>
/// Index for the texture data lump
/// </summary>
public const int HL_BSP_LUMP_TEXTUREDATA = 2;
/// <summary>
/// Number of valid mipmap levels
/// </summary>
public const int HL_BSP_MIPMAP_COUNT = 4;
}
}


@@ -0,0 +1,29 @@
namespace BurnOutSharp.Models.BSP
{
/// <summary>
/// Half-Life Level
/// </summary>
/// <see href="https://github.com/RavuAlHemio/hllib/blob/master/HLLib/BSPFile.h"/>
public sealed class File
{
/// <summary>
/// Header data
/// </summary>
public Header Header { get; set; }
/// <summary>
/// Lumps
/// </summary>
public Lump[] Lumps { get; set; }
/// <summary>
/// Texture header data
/// </summary>
public TextureHeader TextureHeader { get; set; }
/// <summary>
/// Textures
/// </summary>
public Texture[] Textures { get; set; }
}
}


@@ -0,0 +1,11 @@
namespace BurnOutSharp.Models.BSP
{
/// <see href="https://github.com/RavuAlHemio/hllib/blob/master/HLLib/BSPFile.h"/>
public sealed class Header
{
/// <summary>
/// Version
/// </summary>
public uint Version;
}
}


@@ -0,0 +1,16 @@
namespace BurnOutSharp.Models.BSP
{
/// <see href="https://github.com/RavuAlHemio/hllib/blob/master/HLLib/BSPFile.h"/>
public sealed class Lump
{
/// <summary>
/// Offset
/// </summary>
public uint Offset;
/// <summary>
/// Length
/// </summary>
public uint Length;
}
}

Some files were not shown because too many files have changed in this diff.