Compare commits


141 Commits
1.9.0 ... 1.9.6

Author SHA1 Message Date
Matt Nadareski
9574232719 Bump version 2025-09-24 11:06:47 -04:00
Matt Nadareski
676bf584d6 Clean up some console writes 2025-09-24 10:55:39 -04:00
Matt Nadareski
0a4fd01ede Gate MS-CAB warning behind debug to avoid confusion 2025-09-24 10:51:27 -04:00
Matt Nadareski
6cb4023498 Update namespaces in README 2025-09-24 10:50:12 -04:00
Matt Nadareski
8f775dbb3b This should be ObjectIdentifier not strictly ASN.1 2025-09-24 10:47:00 -04:00
Matt Nadareski
91785eab1f Fix tests 2025-09-24 10:40:39 -04:00
Matt Nadareski
4f8751667a Namespace ASN models until Models is updated 2025-09-24 10:40:30 -04:00
Matt Nadareski
ad7508c464 Add some more OIDs 2025-09-24 10:11:21 -04:00
Matt Nadareski
97a9e141ae Add some more OIDs 2025-09-24 09:54:18 -04:00
Matt Nadareski
842a0c3daf Remove useless PathProcessor helper class 2025-09-24 09:43:27 -04:00
Matt Nadareski
b6acde9145 Make consistent with other deserializers 2025-09-24 09:27:24 -04:00
Matt Nadareski
b97dbc2ac5 Simplify extension code slightly 2025-09-24 09:22:23 -04:00
Matt Nadareski
e03852bd7e Simplify ASN.1 integration 2025-09-24 09:19:46 -04:00
Matt Nadareski
0db5de204e Rename TLV deserializer to be more consistent 2025-09-24 09:06:26 -04:00
Matt Nadareski
5fe3f14419 Make TypeLengthValue more model-like 2025-09-24 09:00:48 -04:00
Matt Nadareski
ffe1d9a82d Create and use TypeLengthValue deserializer 2025-09-24 08:54:25 -04:00
Matt Nadareski
11d6560290 Update IO to 1.7.5 2025-09-24 08:30:24 -04:00
Matt Nadareski
e29d8e6728 Integrate ASN.1 code from separate library 2025-09-24 08:25:11 -04:00
Matt Nadareski
918e81a4cb Move extensions to new namespace for cleanliness 2025-09-23 09:58:44 -04:00
Matt Nadareski
ad0ddede38 Fix reference issue 2025-09-22 21:21:43 -04:00
Matt Nadareski
e969b12884 Move wrapper factory up a level 2025-09-22 21:14:33 -04:00
Matt Nadareski
7a5475255f Fix the fake wrapper tests 2025-09-22 20:14:01 -04:00
Matt Nadareski
0ffd436de8 Update packages 2025-09-22 20:07:18 -04:00
Matt Nadareski
4f685187e9 COFF naming doesn't need to continue 2025-09-22 12:56:19 -04:00
Matt Nadareski
2b356b37b1 Sync NE overlay handling with PE 2025-09-22 10:08:46 -04:00
Matt Nadareski
7c56268eb1 Add PFF version 0 detection 2025-09-22 09:35:46 -04:00
Matt Nadareski
970a54e6e3 Add placeholder SFFS wrapper 2025-09-22 09:13:27 -04:00
Matt Nadareski
e35ddf0780 Always overwrite on output, ensure flushed streams 2025-09-21 21:10:40 -04:00
Matt Nadareski
bf35b7c10b Bump version 2025-09-21 12:35:37 -04:00
Matt Nadareski
4026b8ca09 Handle differently-encoded XMLs as text files 2025-09-21 12:10:58 -04:00
Matt Nadareski
6d2e2d8c3b Handle some invalid parsing cases that were missed previously 2025-09-21 11:44:58 -04:00
Matt Nadareski
a2b08157cc Fix issue with split resource tables 2025-09-21 11:23:53 -04:00
Matt Nadareski
0108ecf4c1 Bump version 2025-09-21 09:33:55 -04:00
Matt Nadareski
4921da0bb5 Fix issues from porting MPQ from BOS 2025-09-21 00:04:16 -04:00
Matt Nadareski
0c836bb3b1 Try one more thing? 2025-09-20 23:39:49 -04:00
Matt Nadareski
a8e41c1505 Try this 2025-09-20 23:24:12 -04:00
Matt Nadareski
f67e1c9d2b Bump version 2025-09-20 22:38:08 -04:00
Matt Nadareski
f0ce58a79e Move two things out of the lock 2025-09-20 22:32:54 -04:00
Matt Nadareski
b7f782c1b7 Update packages 2025-09-20 22:31:58 -04:00
Matt Nadareski
5e39e169b2 Clean up PE printing a bit 2025-09-20 20:17:38 -04:00
Matt Nadareski
104c5ccad4 XML resources and overlay 2025-09-20 19:48:45 -04:00
Matt Nadareski
d15b4d7d23 Bump version 2025-09-20 10:47:38 -04:00
Matt Nadareski
60e6a75d5e Make public again 2025-09-20 10:40:19 -04:00
Matt Nadareski
484415d0e5 Ensure the resource table has been parsed for version info 2025-09-20 10:36:09 -04:00
Matt Nadareski
0ef9b447c4 Remove extraneous semicolon 2025-09-20 10:34:08 -04:00
Matt Nadareski
8f64e2defd Minor cleanup to previous commit 2025-09-20 10:32:53 -04:00
HeroponRikiBestest
fbdadce129 Add Matroschka processing. (#23)
* Made changes

* Temporary hack to not rely on models without significantly changing current code. Revert all of this with offset-based reading later.

Also added unnecessary casting in wrapperfactory so serialization will build locally. Revert this, since I assume it somehow builds fine for GA/sabre/etc.

* small fixes

* Store matroschka section as PE extension

* Move extractor out of deserializer, remove weird hack

* Potential GA fix

* More potential GA fixes.

* I have no idea why GA hits that error but not me

* Giving up on GA for now

* fix locking issues

* Fix GA building; thank you sabre

* Minor improvements all around

* Catch some braced single-line if statements

* Use var more

* Separate deserializer into helper methods

* Make file path reading much more sane

* Removed MatroschkaHeaderType enum

* Removed MatroschkaGapType enum, further simplify matgaphelper.

* Remove MatroschkaHasUnknown enum, further simplify Unknown value reading.

* Cache initial offset.

* Remove TryCreate patterns.

* Rename matroschka variable to package

* Newline after object

* Rename to obj

* Remove a few unnecessary TODOs

* Separate hexstring byte read to another line.

* Fix documentation.

* More private static

* Changed data.position setting to seeking. NTS: check if this broke anything later

* rename entries to obj

* MatroschkaEntry to var

* Newline

* Alphabetical

* More alphabetical.

* section to package

* Move private variables.

* Move to extension properties.

* Revert section finding.

* Remove unneeded _dataSource lock and access.

* combine lines and make var

* Combine two null checks.

* Packaged files, some past commits I think I forgot to push.

* Missed two

* newline

* space

* newline

* Combine two lines

* Removed comment

* Return false explicitly

* Change hashing string implementation

* Fix order.

* Use offset reading instead of filedataarray

* Change file reading around a little preemptively for BOS

---------

Co-authored-by: Matt Nadareski <mnadareski@outlook.com>
2025-09-20 10:00:54 -04:00
Matt Nadareski
d3e7abfaa3 Normalize ReadRangeFromSource use 2025-09-20 09:49:42 -04:00
Matt Nadareski
b2279e97b2 CHD is all big-endian 2025-09-18 21:18:43 -04:00
Matt Nadareski
7cf969336f Fix this bumble 2025-09-18 20:54:30 -04:00
Matt Nadareski
f73d48166a Source isn't needed here anymore 2025-09-18 09:46:01 -04:00
Matt Nadareski
53af618fe4 Proof-of-concept Wise section caching 2025-09-18 09:40:15 -04:00
Matt Nadareski
5d2cf58477 Fix this being finicky 2025-09-17 12:56:32 -04:00
Matt Nadareski
664e7dce28 Greater than but not equal 2025-09-17 10:30:35 -04:00
Matt Nadareski
14a8f00864 Clean up nonstandard deserializers 2025-09-16 23:24:49 -04:00
Matt Nadareski
0b889fdc06 Remove weird holdover property 2025-09-16 23:11:14 -04:00
Matt Nadareski
e336efc149 Do the same for serializers 2025-09-16 22:29:52 -04:00
Matt Nadareski
4cd52162eb Static implementations using reflection go away 2025-09-16 22:25:40 -04:00
Matt Nadareski
eab9fff711 One slipped through the cracks 2025-09-16 22:22:32 -04:00
Matt Nadareski
d4f3511060 These don't need to call the reflection one 2025-09-16 22:21:22 -04:00
Matt Nadareski
ed12bbb35c Avoid hidden reflection call for most cases 2025-09-16 22:20:13 -04:00
Matt Nadareski
aa4629fe99 MPQ needs a distinction 2025-09-16 22:12:18 -04:00
Matt Nadareski
1950f23cf4 This should actually be a different exception type 2025-09-16 22:08:50 -04:00
Matt Nadareski
ca7c88cef6 Add better summaries for things 2025-09-16 22:07:05 -04:00
Matt Nadareski
10848e6c51 Fix issue with seeking by introducing more constructors 2025-09-16 21:54:26 -04:00
Matt Nadareski
f5d0f065c1 Handle unknown AACS records a bit better for now 2025-09-16 20:11:41 -04:00
Matt Nadareski
17b0573b0b Handle memory-padded resources tables and non-local resources 2025-09-12 10:50:22 -04:00
Matt Nadareski
7f1d843d96 Minor bugfix in name retrieval 2025-09-12 09:28:27 -04:00
Matt Nadareski
cc4837c1d1 More partial classes for reasonable things 2025-09-12 09:09:40 -04:00
Matt Nadareski
588ee5bfe4 Make partial classes for extraction 2025-09-12 09:02:03 -04:00
Matt Nadareski
e9b1b2750f Fill out the placeholder 2025-09-11 12:32:44 -04:00
Matt Nadareski
1d6fa06e97 Placeholder for section table trailer data 2025-09-11 12:24:56 -04:00
Matt Nadareski
2c22924239 Seek to section table to match docs 2025-09-11 12:17:48 -04:00
Matt Nadareski
eb01dd1e25 Add note for later 2025-09-11 12:14:07 -04:00
Matt Nadareski
0a3cb79b1c Fix issue found in encrypted and obfuscated PE 2025-09-11 12:07:22 -04:00
Matt Nadareski
da9eace8cc Slight tweak to printing again 2025-09-11 11:47:07 -04:00
Matt Nadareski
526a02b8b6 Slight tweak to printing again 2025-09-11 11:44:58 -04:00
Matt Nadareski
658c7a1c3b Add another safeguard? 2025-09-11 11:10:11 -04:00
Matt Nadareski
af84474795 Fix invalid base relocation table parsing 2025-09-11 10:58:15 -04:00
Matt Nadareski
42913c6732 Invalid export should be null, not empty 2025-09-11 10:49:01 -04:00
Matt Nadareski
2cdf544518 Fix an oddly-reading format 2025-09-11 10:45:17 -04:00
Matt Nadareski
652ec58238 Fix certificate table info printing 2025-09-11 10:41:09 -04:00
Matt Nadareski
f8531daa5c Ensure overlay accounts for certificates properly 2025-09-11 10:40:09 -04:00
Matt Nadareski
e9e89b0b43 This has been consistently wrong 2025-09-11 10:33:10 -04:00
Matt Nadareski
55e788a894 Ignore invalid certificate entries 2025-09-11 10:21:51 -04:00
Matt Nadareski
b28bb93ccb Handle non-section data with valid RVA 2025-09-11 10:11:05 -04:00
Matt Nadareski
367aab0f83 Add placeholder for figuring something out later 2025-09-11 09:59:42 -04:00
Matt Nadareski
9dcf3b9e0a The offset needs to be passed all the way 2025-09-11 09:42:48 -04:00
Matt Nadareski
3c514110ce The offset needs to be passed fully 2025-09-11 09:29:46 -04:00
Matt Nadareski
c9b0c2dace Deliberately don't retain position 2025-09-11 09:27:56 -04:00
Matt Nadareski
d575b6977e Correctly parse resource data 2025-09-11 09:21:31 -04:00
Matt Nadareski
a00e6a5e2d Start cleaning up resource parsing more 2025-09-11 09:09:56 -04:00
Matt Nadareski
1b9ae83e8c Don't pad most tables to aligned size 2025-09-11 08:35:49 -04:00
Matt Nadareski
8b91eb1caf Bound the import and export tables 2025-09-11 08:33:59 -04:00
Matt Nadareski
2a6a7b5e9a Pass in the correct data 2025-09-11 08:14:24 -04:00
Matt Nadareski
a85943866e Start using table data only in already-bounded tables 2025-09-11 08:12:27 -04:00
Matt Nadareski
797fb519c1 Pass table data in, mostly unused 2025-09-11 07:49:17 -04:00
Matt Nadareski
3ba9d56363 Read table data directly 2025-09-11 07:44:28 -04:00
Matt Nadareski
04cd4e4056 Start wiring through size bounding on table reads 2025-09-11 07:41:17 -04:00
Matt Nadareski
348e170654 There 2025-09-10 21:54:10 -04:00
Matt Nadareski
f5a4ca6276 Finally figure out what I was doing 2025-09-10 21:37:05 -04:00
Matt Nadareski
672c010aa7 Fix a stupid issue 2025-09-10 21:04:09 -04:00
Matt Nadareski
2459d88951 Found the real issue 2025-09-10 20:46:32 -04:00
Matt Nadareski
350d1c8d31 I guess this can be null? 2025-09-10 20:29:26 -04:00
Matt Nadareski
98a3842a3e Fix off-by-one error 2025-09-10 20:26:17 -04:00
Matt Nadareski
b52a4469ee Remove alignment, add TODO and comments 2025-09-10 11:21:34 -04:00
Matt Nadareski
e3143e21ba Fix comment to be more accurate 2025-09-10 11:18:53 -04:00
Matt Nadareski
1bf2181fd3 Make check a little nicer 2025-09-09 18:56:50 -04:00
Matt Nadareski
1460635aab Move hidden resources parsing to make method nicer 2025-09-09 18:52:35 -04:00
Matt Nadareski
935ec00c86 Notes about hidden resources 2025-09-09 17:15:25 -04:00
Matt Nadareski
473b6de09b Slight cleanup 2025-09-09 16:42:50 -04:00
Matt Nadareski
ba75f2ac2c Try to fix weird resource parsing 2025-09-09 14:54:54 -04:00
Matt Nadareski
a230b39fbc Make relocation block parsing safer 2025-09-09 13:51:40 -04:00
Matt Nadareski
8e963ac62a Fix a couple of potential logic bugs 2025-09-09 13:42:36 -04:00
Matt Nadareski
eaaa89847d Rename to pex and nex for readability 2025-09-09 13:11:27 -04:00
Matt Nadareski
ef76166978 Clean up a few more PE things 2025-09-09 13:11:09 -04:00
Matt Nadareski
72912586a1 Clean up COFF symbol table parsing 2025-09-09 12:18:46 -04:00
Matt Nadareski
fb241a4036 Make things easier to read, add some helpers 2025-09-09 09:57:53 -04:00
Matt Nadareski
368c8b0533 Add section table note 2025-09-09 09:37:03 -04:00
Matt Nadareski
4010325e65 Make note from Models 2025-09-09 09:31:53 -04:00
Matt Nadareski
11dd75ad95 Make import table easier to read 2025-09-08 23:21:45 -04:00
Matt Nadareski
d0480a1311 Make export table easier to read 2025-09-08 22:51:46 -04:00
Matt Nadareski
2be33b845d Be even more careful 2025-09-08 22:09:12 -04:00
Matt Nadareski
2ad42e3a0f Seek and ye shall find 2025-09-08 21:41:46 -04:00
Matt Nadareski
5d1f83800b Add SecuROM AddD deserializer 2025-09-08 21:20:47 -04:00
Matt Nadareski
30e89a7943 Clean this up 2025-09-08 21:13:34 -04:00
Matt Nadareski
61f5dc4cf2 Extract even more types of embedded data 2025-09-08 20:08:43 -04:00
Matt Nadareski
d056c179ed Add embedded UHA support 2025-09-08 08:51:40 -04:00
Matt Nadareski
b9c4bfc67e Expand the search window again 2025-09-08 08:17:01 -04:00
Matt Nadareski
6ab5ee0ae0 Add regions here for maybe future work 2025-09-08 08:03:07 -04:00
Matt Nadareski
94c1a86702 Add AssemblyName extension property to PE 2025-09-08 07:56:40 -04:00
Matt Nadareski
af6dd6a7fc Check for BZip2 and XZ in hidden places too 2025-09-08 07:52:41 -04:00
Matt Nadareski
45d4926d4c Toss the filename at the top of the infoprint output 2025-09-07 20:52:02 -04:00
Matt Nadareski
ce016c5eb0 Bump version 2025-09-06 08:18:43 -04:00
Matt Nadareski
2225c1f2d8 Update Nuget packages 2025-09-05 10:57:14 -04:00
Matt Nadareski
2d0c0d5845 Make a bunch of things cache more safely 2025-09-05 08:32:40 -04:00
Matt Nadareski
60f1756cbb Wrap places where ReadFrom was not being used but still could be parallel 2025-09-05 07:45:55 -04:00
Matt Nadareski
738a1d250a Add inherent locking to the data source in wrappers 2025-09-05 07:36:15 -04:00
Matt Nadareski
c8e65e1e30 Add section string lock 2025-09-03 13:46:12 -04:00
Matt Nadareski
ecb09ce6f2 Make sure source data isn't locked unnecessarily 2025-09-02 23:56:29 -04:00
Matt Nadareski
72a1484a71 More granular locks 2025-09-02 23:51:02 -04:00
164 changed files with 30678 additions and 8335 deletions

View File

@@ -10,7 +10,7 @@
<Nullable>enable</Nullable>
<SuppressTfmSupportBuildWarnings>true</SuppressTfmSupportBuildWarnings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
- <Version>1.9.0</Version>
+ <Version>1.9.6</Version>
</PropertyGroup>
<!-- Support All Frameworks -->
@@ -66,8 +66,8 @@
</ItemGroup>
<ItemGroup>
<PackageReference Include="SabreTools.IO" Version="1.7.1" />
<PackageReference Include="System.Text.Encoding.CodePages" Version="9.0.8" Condition="!$(TargetFramework.StartsWith(`net2`)) AND !$(TargetFramework.StartsWith(`net3`)) AND !$(TargetFramework.StartsWith(`net40`)) AND !$(TargetFramework.StartsWith(`net452`))" />
<PackageReference Include="SabreTools.IO" Version="1.7.5" />
<PackageReference Include="System.Text.Encoding.CodePages" Version="9.0.9" Condition="!$(TargetFramework.StartsWith(`net2`)) AND !$(TargetFramework.StartsWith(`net3`)) AND !$(TargetFramework.StartsWith(`net40`)) AND !$(TargetFramework.StartsWith(`net452`))" />
</ItemGroup>
</Project>

View File

@@ -1,6 +1,7 @@
using System;
using System.IO;
using SabreTools.IO.Extensions;
using SabreTools.Serialization;
using SabreTools.Serialization.Wrappers;
namespace ExtractionTool

View File

@@ -10,7 +10,7 @@
<Nullable>enable</Nullable>
<SuppressTfmSupportBuildWarnings>true</SuppressTfmSupportBuildWarnings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
- <Version>1.9.0</Version>
+ <Version>1.9.6</Version>
</PropertyGroup>
<!-- Support All Frameworks -->
@@ -32,7 +32,7 @@
</ItemGroup>
<ItemGroup>
<PackageReference Include="SabreTools.IO" Version="1.7.1" />
<PackageReference Include="SabreTools.IO" Version="1.7.5" />
<PackageReference Include="SabreTools.Hashing" Version="1.5.0" />
</ItemGroup>

View File

@@ -137,6 +137,8 @@ namespace InfoPrint
Console.WriteLine(builder);
using var sw = new StreamWriter(File.OpenWrite($"{filenameBase}.txt"));
sw.WriteLine(file);
sw.WriteLine();
sw.WriteLine(builder.ToString());
sw.Flush();
}
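
A side note on the InfoPrint write path above: `File.OpenWrite` opens with FileMode.OpenOrCreate and does not truncate, so writing a shorter report than before can leave stale bytes at the end of the file. The "Always overwrite on output, ensure flushed streams" commit in this range targets exactly that class of problem. A minimal sketch of an explicit overwrite-and-flush write (the names are illustrative, not the tool's actual variables):

using System.IO;
using System.Text;

string filenameBase = "example";                 // hypothetical output name
var builder = new StringBuilder("report body");  // stands in for the built text

// File.Create truncates any existing file, so shorter output never leaves
// stale trailing bytes; Flush pushes buffered text out before disposal.
using var sw = new StreamWriter(File.Create($"{filenameBase}.txt"));
sw.WriteLine(builder.ToString());
sw.Flush();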

LICENSE (new file)
View File

@@ -0,0 +1,7 @@
Copyright (c) 2018-2025 Matt Nadareski
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice (including the next paragraph) shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

View File

@@ -110,6 +110,9 @@ Below is a table of all namespaces within the library and what they represent
| --- | --- |
| `SabreTools.Serialization.CrossModel` | Convert between models; mainly used for metadata files converting to and from a common, `Dictionary`-based model |
| `SabreTools.Serialization.Deserializers` | Convert from external sources to models |
| `SabreTools.Serialization.Extensions` | Extension methods for both models and wrappers |
| `SabreTools.Serialization.Interfaces` | Interfaces used commonly throughout the library |
| `SabreTools.Serialization.ObjectIdentifier` | Object Identifier (OID) parsing |
| `SabreTools.Serialization.Printers` | Export model information in a formatted manner |
| `SabreTools.Serialization.Serializers` | Convert from models to external sources |
| `SabreTools.Serialization.Wrappers` | Classes that wrap serialization and models to allow for including extension properties |

View File

@@ -97,7 +97,11 @@ namespace SabreTools.Serialization.Test.CrossModel
Name = "XXXXXX",
Size = "XXXXXX",
CRC = "XXXXXX",
MD2 = "XXXXXX",
MD4 = "XXXXXX",
MD5 = "XXXXXX",
RIPEMD128 = "XXXXXX",
RIPEMD160 = "XXXXXX",
SHA1 = "XXXXXX",
Merge = "XXXXXX",
Status = "XXXXXX",
@@ -346,7 +350,11 @@ namespace SabreTools.Serialization.Test.CrossModel
Assert.Equal("XXXXXX", rom.Name);
Assert.Equal("XXXXXX", rom.Size);
Assert.Equal("XXXXXX", rom.CRC);
Assert.Equal("XXXXXX", rom.MD2);
Assert.Equal("XXXXXX", rom.MD4);
Assert.Equal("XXXXXX", rom.MD5);
Assert.Equal("XXXXXX", rom.RIPEMD128);
Assert.Equal("XXXXXX", rom.RIPEMD160);
Assert.Equal("XXXXXX", rom.SHA1);
Assert.Equal("XXXXXX", rom.Merge);
Assert.Equal("XXXXXX", rom.Status);

View File

@@ -150,7 +150,11 @@ namespace SabreTools.Serialization.Test.CrossModel
Name = "XXXXXX",
Size = "XXXXXX",
CRC = "XXXXXX",
MD2 = "XXXXXX",
MD4 = "XXXXXX",
MD5 = "XXXXXX",
RIPEMD128 = "XXXXXX",
RIPEMD160 = "XXXXXX",
SHA1 = "XXXXXX",
SHA256 = "XXXXXX",
SHA384 = "XXXXXX",
@@ -460,7 +464,11 @@ namespace SabreTools.Serialization.Test.CrossModel
Assert.Equal("XXXXXX", rom.Name);
Assert.Equal("XXXXXX", rom.Size);
Assert.Equal("XXXXXX", rom.CRC);
Assert.Equal("XXXXXX", rom.MD2);
Assert.Equal("XXXXXX", rom.MD4);
Assert.Equal("XXXXXX", rom.MD5);
Assert.Equal("XXXXXX", rom.RIPEMD128);
Assert.Equal("XXXXXX", rom.RIPEMD160);
Assert.Equal("XXXXXX", rom.SHA1);
Assert.Equal("XXXXXX", rom.SHA256);
Assert.Equal("XXXXXX", rom.SHA384);

View File

@@ -0,0 +1,152 @@
using System.IO;
using System.Linq;
using SabreTools.Models.ASN1;
using SabreTools.Serialization.Deserializers;
using Xunit;
namespace SabreTools.Serialization.Test.Deserializers
{
public class AbstractSyntaxNotationOneTests
{
[Fact]
public void NullArray_Null()
{
byte[]? data = null;
int offset = 0;
var deserializer = new AbstractSyntaxNotationOne();
var actual = deserializer.Deserialize(data, offset);
Assert.Null(actual);
}
[Fact]
public void EmptyArray_Null()
{
byte[]? data = [];
int offset = 0;
var deserializer = new AbstractSyntaxNotationOne();
var actual = deserializer.Deserialize(data, offset);
Assert.Null(actual);
}
[Fact]
public void InvalidArray_Null()
{
byte[]? data = [.. Enumerable.Repeat<byte>(0xFF, 1024)];
int offset = 0;
var deserializer = new AbstractSyntaxNotationOne();
var actual = deserializer.Deserialize(data, offset);
Assert.Null(actual);
}
[Fact]
public void NullStream_Null()
{
Stream? data = null;
var deserializer = new AbstractSyntaxNotationOne();
var actual = deserializer.Deserialize(data);
Assert.Null(actual);
}
[Fact]
public void EmptyStream_Null()
{
Stream? data = new MemoryStream([]);
var deserializer = new AbstractSyntaxNotationOne();
var actual = deserializer.Deserialize(data);
Assert.Null(actual);
}
[Fact]
public void InvalidStream_Null()
{
Stream? data = new MemoryStream([.. Enumerable.Repeat<byte>(0xFF, 1024)]);
var deserializer = new AbstractSyntaxNotationOne();
var actual = deserializer.Deserialize(data);
Assert.Null(actual);
}
[Fact]
public void ValidMinimalStream_NotNull()
{
Stream data = new MemoryStream([0x00]);
var deserializer = new AbstractSyntaxNotationOne();
var actual = deserializer.Deserialize(data);
Assert.NotNull(actual);
var actualSingle = Assert.Single(actual);
Assert.Equal(ASN1Type.V_ASN1_EOC, actualSingle.Type);
Assert.Equal(default, actualSingle.Length);
Assert.Null(actualSingle.Value);
}
[Fact]
public void ValidBoolean_NotNull()
{
Stream data = new MemoryStream([0x01, 0x01, 0x01]);
var deserializer = new AbstractSyntaxNotationOne();
var actual = deserializer.Deserialize(data);
Assert.NotNull(actual);
var actualSingle = Assert.Single(actual);
Assert.Equal(ASN1Type.V_ASN1_BOOLEAN, actualSingle.Type);
Assert.Equal(1UL, actualSingle.Length);
Assert.NotNull(actualSingle.Value);
byte[]? valueAsArray = actualSingle.Value as byte[];
Assert.NotNull(valueAsArray);
byte actualValue = Assert.Single(valueAsArray);
Assert.Equal(0x01, actualValue);
}
[Theory]
[InlineData(new byte[] { 0x26, 0x81, 0x03, 0x01, 0x01, 0x01 })]
[InlineData(new byte[] { 0x26, 0x82, 0x00, 0x03, 0x01, 0x01, 0x01 })]
[InlineData(new byte[] { 0x26, 0x83, 0x00, 0x00, 0x03, 0x01, 0x01, 0x01 })]
[InlineData(new byte[] { 0x26, 0x84, 0x00, 0x00, 0x00, 0x03, 0x01, 0x01, 0x01 })]
[InlineData(new byte[] { 0x26, 0x85, 0x00, 0x00, 0x00, 0x00, 0x03, 0x01, 0x01, 0x01 })]
[InlineData(new byte[] { 0x26, 0x86, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03, 0x01, 0x01, 0x01 })]
[InlineData(new byte[] { 0x26, 0x87, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03, 0x01, 0x01, 0x01 })]
[InlineData(new byte[] { 0x26, 0x88, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03, 0x01, 0x01, 0x01 })]
public void ComplexValue_NotNull(byte[] arr)
{
Stream data = new MemoryStream(arr);
var deserializer = new AbstractSyntaxNotationOne();
var actual = deserializer.Deserialize(data);
Assert.NotNull(actual);
var actualSingle = Assert.Single(actual);
Assert.Equal(ASN1Type.V_ASN1_CONSTRUCTED | ASN1Type.V_ASN1_OBJECT, actualSingle.Type);
Assert.Equal(3UL, actualSingle.Length);
Assert.NotNull(actualSingle.Value);
TypeLengthValue[]? valueAsArray = actualSingle.Value as TypeLengthValue[];
Assert.NotNull(valueAsArray);
TypeLengthValue actualSub = Assert.Single(valueAsArray);
Assert.Equal(ASN1Type.V_ASN1_BOOLEAN, actualSub.Type);
Assert.Equal(1UL, actualSub.Length);
Assert.NotNull(actualSub.Value);
}
[Theory]
[InlineData(new byte[] { 0x26, 0x80 })]
[InlineData(new byte[] { 0x26, 0x89 })]
public void ComplexValueInvalidLength_Null(byte[] arr)
{
Stream data = new MemoryStream(arr);
var deserializer = new AbstractSyntaxNotationOne();
var actual = deserializer.Deserialize(data);
Assert.Null(actual);
}
}
}
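
For context on the ComplexValue rows above: in BER/DER, a length byte with the high bit set (0x81 through 0x88 here) gives the count of big-endian length bytes that follow, while 0x80 (the indefinite form) and counts above 8 are rejected. A minimal sketch of that decoding, mirroring the ReadLength logic in the deserializer shown later in this diff (the method name is illustrative):

// Decode a BER/DER length; returns null on the malformed forms tested above.
static ulong? ReadBerLength(byte[] data, ref int pos)
{
    byte first = data[pos++];
    if ((first & 0x80) == 0)
        return first;             // short form: the byte itself is the length

    int count = first & 0x7F;     // long form: number of length bytes
    if (count == 0 || count > 8)
        return null;              // rejects 0x80 and 0x89, as in the tests

    ulong length = 0;
    for (int i = 0; i < count; i++)
        length = (length << 8) | data[pos++];
    return length;
}

So `0x26 0x82 0x00 0x03` reads as type 0x26 (V_ASN1_CONSTRUCTED | V_ASN1_OBJECT) with a two-byte length of 3, matching the expected values in ComplexValue_NotNull.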

View File

@@ -215,7 +215,11 @@ namespace SabreTools.Serialization.Test.Deserializers
Name = "XXXXXX",
Size = "XXXXXX",
CRC = "XXXXXX",
MD2 = "XXXXXX",
MD4 = "XXXXXX",
MD5 = "XXXXXX",
RIPEMD128 = "XXXXXX",
RIPEMD160 = "XXXXXX",
SHA1 = "XXXXXX",
Merge = "XXXXXX",
Status = "XXXXXX",
@@ -464,7 +468,11 @@ namespace SabreTools.Serialization.Test.Deserializers
Assert.Equal("XXXXXX", rom.Name);
Assert.Equal("XXXXXX", rom.Size);
Assert.Equal("XXXXXX", rom.CRC);
Assert.Equal("XXXXXX", rom.MD2);
Assert.Equal("XXXXXX", rom.MD4);
Assert.Equal("XXXXXX", rom.MD5);
Assert.Equal("XXXXXX", rom.RIPEMD128);
Assert.Equal("XXXXXX", rom.RIPEMD160);
Assert.Equal("XXXXXX", rom.SHA1);
Assert.Equal("XXXXXX", rom.Merge);
Assert.Equal("XXXXXX", rom.Status);

View File

@@ -217,7 +217,11 @@ namespace SabreTools.Serialization.Test.Deserializers
Name = "XXXXXX",
Size = "XXXXXX",
CRC = "XXXXXX",
MD2 = "XXXXXX",
MD4 = "XXXXXX",
MD5 = "XXXXXX",
RIPEMD128 = "XXXXXX",
RIPEMD160 = "XXXXXX",
SHA1 = "XXXXXX",
SHA256 = "XXXXXX",
SHA384 = "XXXXXX",
@@ -512,7 +516,11 @@ namespace SabreTools.Serialization.Test.Deserializers
Assert.Equal("XXXXXX", rom.Name);
Assert.Equal("XXXXXX", rom.Size);
Assert.Equal("XXXXXX", rom.CRC);
Assert.Equal("XXXXXX", rom.MD2);
Assert.Equal("XXXXXX", rom.MD4);
Assert.Equal("XXXXXX", rom.MD5);
Assert.Equal("XXXXXX", rom.RIPEMD128);
Assert.Equal("XXXXXX", rom.RIPEMD160);
Assert.Equal("XXXXXX", rom.SHA1);
Assert.Equal("XXXXXX", rom.SHA256);
Assert.Equal("XXXXXX", rom.SHA384);

View File

@@ -0,0 +1,213 @@
using System;
using SabreTools.Models.ASN1;
using SabreTools.Serialization.Extensions;
using Xunit;
namespace SabreTools.Serialization.Test.Extensions
{
public class TypeLengthValueTests
{
#region Formatting
[Fact]
public void Format_EOC()
{
string expected = "Type: V_ASN1_EOC";
var tlv = new Models.ASN1.TypeLengthValue { Type = ASN1Type.V_ASN1_EOC, Length = 0, Value = null };
string actual = tlv.Format();
Assert.Equal(expected, actual);
}
[Fact]
public void Format_ZeroLength()
{
string expected = "Type: V_ASN1_NULL, Length: 0";
var tlv = new Models.ASN1.TypeLengthValue { Type = ASN1Type.V_ASN1_NULL, Length = 0, Value = null };
string actual = tlv.Format();
Assert.Equal(expected, actual);
}
[Fact]
public void Format_InvalidConstructed()
{
string expected = "Type: V_ASN1_OBJECT, V_ASN1_CONSTRUCTED, Length: 1, Value: [INVALID DATA TYPE]";
var tlv = new Models.ASN1.TypeLengthValue { Type = ASN1Type.V_ASN1_OBJECT | ASN1Type.V_ASN1_CONSTRUCTED, Length = 1, Value = (object?)false };
string actual = tlv.Format();
Assert.Equal(expected, actual);
}
[Fact]
public void Format_ValidConstructed()
{
string expected = "Type: V_ASN1_OBJECT, V_ASN1_CONSTRUCTED, Length: 3, Value:\n Type: V_ASN1_BOOLEAN, Length: 1, Value: True";
var boolTlv = new Models.ASN1.TypeLengthValue { Type = ASN1Type.V_ASN1_BOOLEAN, Length = 1, Value = new byte[] { 0x01 } };
var tlv = new Models.ASN1.TypeLengthValue { Type = ASN1Type.V_ASN1_OBJECT | ASN1Type.V_ASN1_CONSTRUCTED, Length = 3, Value = new Models.ASN1.TypeLengthValue[] { boolTlv } };
string actual = tlv.Format();
Assert.Equal(expected, actual);
}
[Fact]
public void Format_InvalidDataType()
{
string expected = "Type: V_ASN1_OBJECT, Length: 1, Value: [INVALID DATA TYPE]";
var tlv = new Models.ASN1.TypeLengthValue { Type = ASN1Type.V_ASN1_OBJECT, Length = 1, Value = (object?)false };
string actual = tlv.Format();
Assert.Equal(expected, actual);
}
[Fact]
public void Format_InvalidLength()
{
string expected = "Type: V_ASN1_NULL, Length: 1, Value: [NO DATA]";
var tlv = new Models.ASN1.TypeLengthValue { Type = ASN1Type.V_ASN1_NULL, Length = 1, Value = Array.Empty<byte>() };
string actual = tlv.Format();
Assert.Equal(expected, actual);
}
[Fact]
public void Format_InvalidBooleanLength()
{
string expected = "Type: V_ASN1_BOOLEAN, Length: 2 [Expected length of 1], Value: True";
var tlv = new Models.ASN1.TypeLengthValue { Type = ASN1Type.V_ASN1_BOOLEAN, Length = 2, Value = new byte[] { 0x01 } };
string actual = tlv.Format();
Assert.Equal(expected, actual);
}
[Fact]
public void Format_InvalidBooleanArrayLength()
{
string expected = "Type: V_ASN1_BOOLEAN, Length: 1 [Expected value length of 1], Value: True";
var tlv = new Models.ASN1.TypeLengthValue { Type = ASN1Type.V_ASN1_BOOLEAN, Length = 1, Value = new byte[] { 0x01, 0x00 } };
string actual = tlv.Format();
Assert.Equal(expected, actual);
}
[Fact]
public void Format_ValidBoolean()
{
string expected = "Type: V_ASN1_BOOLEAN, Length: 1, Value: True";
var tlv = new Models.ASN1.TypeLengthValue { Type = ASN1Type.V_ASN1_BOOLEAN, Length = 1, Value = new byte[] { 0x01 } };
string actual = tlv.Format();
Assert.Equal(expected, actual);
}
[Fact]
public void Format_ValidInteger()
{
string expected = "Type: V_ASN1_INTEGER, Length: 1, Value: 1";
var tlv = new Models.ASN1.TypeLengthValue { Type = ASN1Type.V_ASN1_INTEGER, Length = 1, Value = new byte[] { 0x01 } };
string actual = tlv.Format();
Assert.Equal(expected, actual);
}
[Fact]
public void Format_ValidBitString_NoBits()
{
string expected = "Type: V_ASN1_BIT_STRING, Length: 1, Value with 0 unused bits";
var tlv = new Models.ASN1.TypeLengthValue { Type = ASN1Type.V_ASN1_BIT_STRING, Length = 1, Value = new byte[] { 0x00 } };
string actual = tlv.Format();
Assert.Equal(expected, actual);
}
[Fact]
public void Format_ValidBitString_Bits()
{
string expected = "Type: V_ASN1_BIT_STRING, Length: 1, Value with 1 unused bits: 01";
var tlv = new Models.ASN1.TypeLengthValue { Type = ASN1Type.V_ASN1_BIT_STRING, Length = 1, Value = new byte[] { 0x01, 0x01 } };
string actual = tlv.Format();
Assert.Equal(expected, actual);
}
[Fact]
public void Format_ValidOctetString()
{
string expected = "Type: V_ASN1_OCTET_STRING, Length: 1, Value: 01";
var tlv = new Models.ASN1.TypeLengthValue { Type = ASN1Type.V_ASN1_OCTET_STRING, Length = 1, Value = new byte[] { 0x01 } };
string actual = tlv.Format();
Assert.Equal(expected, actual);
}
[Fact]
public void Format_ValidObject()
{
string expected = "Type: V_ASN1_OBJECT, Length: 3, Value: 0.1.2.3 (/ITU-T/1/2/3)";
var tlv = new Models.ASN1.TypeLengthValue { Type = ASN1Type.V_ASN1_OBJECT, Length = 3, Value = new byte[] { 0x01, 0x02, 0x03 } };
string actual = tlv.Format();
Assert.Equal(expected, actual);
}
[Fact]
public void Format_ValidUTF8String()
{
string expected = "Type: V_ASN1_UTF8STRING, Length: 3, Value: ABC";
var tlv = new Models.ASN1.TypeLengthValue { Type = ASN1Type.V_ASN1_UTF8STRING, Length = 3, Value = new byte[] { 0x41, 0x42, 0x43 } };
string actual = tlv.Format();
Assert.Equal(expected, actual);
}
[Fact]
public void Format_ValidPrintableString()
{
string expected = "Type: V_ASN1_PRINTABLESTRING, Length: 3, Value: ABC";
var tlv = new Models.ASN1.TypeLengthValue { Type = ASN1Type.V_ASN1_PRINTABLESTRING, Length = 3, Value = new byte[] { 0x41, 0x42, 0x43 } };
string actual = tlv.Format();
Assert.Equal(expected, actual);
}
[Fact]
public void Format_ValidTeletexString()
{
string expected = "Type: V_ASN1_TELETEXSTRING, Length: 3, Value: ABC";
var tlv = new Models.ASN1.TypeLengthValue { Type = ASN1Type.V_ASN1_TELETEXSTRING, Length = 3, Value = new byte[] { 0x41, 0x42, 0x43 } };
string actual = tlv.Format();
Assert.Equal(expected, actual);
}
[Fact]
public void Format_ValidIA5String()
{
string expected = "Type: V_ASN1_IA5STRING, Length: 3, Value: ABC";
var tlv = new Models.ASN1.TypeLengthValue { Type = ASN1Type.V_ASN1_IA5STRING, Length = 3, Value = new byte[] { 0x41, 0x42, 0x43 } };
string actual = tlv.Format();
Assert.Equal(expected, actual);
}
[Fact]
public void Format_InvalidUTCTime()
{
string expected = "Type: V_ASN1_UTCTIME, Length: 3, Value: ABC";
var tlv = new Models.ASN1.TypeLengthValue { Type = ASN1Type.V_ASN1_UTCTIME, Length = 3, Value = new byte[] { 0x41, 0x42, 0x43 } };
string actual = tlv.Format();
Assert.Equal(expected, actual);
}
[Fact]
public void Format_ValidUTCTime()
{
string expected = "Type: V_ASN1_UTCTIME, Length: 3, Value: 1980-01-01 00:00:00";
var tlv = new Models.ASN1.TypeLengthValue { Type = ASN1Type.V_ASN1_UTCTIME, Length = 3, Value = new byte[] { 0x31, 0x39, 0x38, 0x30, 0x2D, 0x30, 0x31, 0x2D, 0x30, 0x31, 0x20, 0x30, 0x30, 0x3A, 0x30, 0x30, 0x3A, 0x30, 0x30 } };
string actual = tlv.Format();
Assert.Equal(expected, actual);
}
[Fact]
public void Format_ValidBmpString()
{
string expected = "Type: V_ASN1_BMPSTRING, Length: 6, Value: ABC";
var tlv = new Models.ASN1.TypeLengthValue { Type = ASN1Type.V_ASN1_BMPSTRING, Length = 6, Value = new byte[] { 0x41, 0x00, 0x42, 0x00, 0x43, 0x00 } };
string actual = tlv.Format();
Assert.Equal(expected, actual);
}
[Fact]
public void Format_ValidUnformatted()
{
string expected = "Type: V_ASN1_OBJECT_DESCRIPTOR, Length: 1, Value: 01";
var tlv = new Models.ASN1.TypeLengthValue { Type = ASN1Type.V_ASN1_OBJECT_DESCRIPTOR, Length = 1, Value = new byte[] { 0x01 } };
string actual = tlv.Format();
Assert.Equal(expected, actual);
}
#endregion
}
}
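
Putting the deserializer and extension tests together, a minimal end-to-end sketch using only the `Deserialize` and `Format` APIs exercised in these diffs:

using System.IO;
using SabreTools.Serialization.Deserializers;
using SabreTools.Serialization.Extensions;

// A single boolean TLV: type 0x01, length 1, value 0x01
var data = new MemoryStream([0x01, 0x01, 0x01]);
var values = new AbstractSyntaxNotationOne().Deserialize(data);

// Prints "Type: V_ASN1_BOOLEAN, Length: 1, Value: True" per the tests above
if (values != null)
    System.Console.WriteLine(values[0].Format());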

View File

@@ -0,0 +1,111 @@
using SabreTools.Serialization.ObjectIdentifier;
using Xunit;
namespace SabreTools.Serialization.Test.ObjectIdentifier
{
// These tests are known to be incomplete due to the sheer number
// of possible OIDs that exist. The tests below are a minimal
// representation of functionality to guarantee proper behavior
// not necessarily absolute outputs
public class ParserTests
{
#region ASN.1
[Fact]
public void ASN1Notation_AlwaysNull()
{
ulong[]? values = null;
string? actual = Parser.ParseOIDToASN1Notation(values);
Assert.Null(actual);
}
#endregion
#region Dot Notation
[Fact]
public void DotNotation_NullValues_Null()
{
ulong[]? values = null;
string? actual = Parser.ParseOIDToDotNotation(values);
Assert.Null(actual);
}
[Fact]
public void DotNotation_EmptyValues_Null()
{
ulong[]? values = [];
string? actual = Parser.ParseOIDToDotNotation(values);
Assert.Null(actual);
}
[Fact]
public void DotNotation_Values_Formatted()
{
string expected = "0.1.2.3";
ulong[]? values = [0, 1, 2, 3];
string? actual = Parser.ParseOIDToDotNotation(values);
Assert.Equal(expected, actual);
}
#endregion
#region Modified OID-IRI
[Fact]
public void ModifiedOIDIRI_NullValues_Null()
{
ulong[]? values = null;
string? actual = Parser.ParseOIDToModifiedOIDIRI(values);
Assert.Null(actual);
}
[Fact]
public void ModifiedOIDIRI_EmptyValues_Null()
{
ulong[]? values = [];
string? actual = Parser.ParseOIDToModifiedOIDIRI(values);
Assert.Null(actual);
}
[Fact]
public void ModifiedOIDIRI_Values_Formatted()
{
string expected = "/ITU-T/[question]/2/3";
ulong[]? values = [0, 1, 2, 3];
string? actual = Parser.ParseOIDToModifiedOIDIRI(values);
Assert.Equal(expected, actual);
}
#endregion
#region OID-IRI
[Fact]
public void OIDIRI_NullValues_Null()
{
ulong[]? values = null;
string? actual = Parser.ParseOIDToOIDIRINotation(values);
Assert.Null(actual);
}
[Fact]
public void OIDIRI_EmptyValues_Null()
{
ulong[]? values = [];
string? actual = Parser.ParseOIDToOIDIRINotation(values);
Assert.Null(actual);
}
[Fact]
public void OIDIRI_Values_Formatted()
{
string expected = "/ITU-T/1/2/3";
ulong[]? values = [0, 1, 2, 3];
string? actual = Parser.ParseOIDToOIDIRINotation(values);
Assert.Equal(expected, actual);
}
#endregion
}
}
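
For reference, the three formatted notations exercised above all render the same value array (a sketch using the `Parser` methods from these tests):

using SabreTools.Serialization.ObjectIdentifier;

ulong[] values = [0, 1, 2, 3];

System.Console.WriteLine(Parser.ParseOIDToDotNotation(values));     // 0.1.2.3
System.Console.WriteLine(Parser.ParseOIDToOIDIRINotation(values));  // /ITU-T/1/2/3
System.Console.WriteLine(Parser.ParseOIDToModifiedOIDIRI(values));  // /ITU-T/[question]/2/3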

View File

@@ -28,7 +28,7 @@
</PackageReference>
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.1" />
<PackageReference Include="SabreTools.Hashing" Version="1.5.0" />
<PackageReference Include="SabreTools.Models" Version="1.7.0" />
<PackageReference Include="SabreTools.Models" Version="1.7.2" />
<PackageReference Include="xunit" Version="2.9.3" />
<PackageReference Include="xunit.runner.visualstudio" Version="3.1.4">
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>

View File

@@ -0,0 +1,61 @@
using System.IO;
using System.Linq;
using SabreTools.Serialization.Wrappers;
using Xunit;
namespace SabreTools.Serialization.Test.Wrappers
{
public class LDSCRYPTTests
{
[Fact]
public void NullArray_Null()
{
byte[]? data = null;
int offset = 0;
var actual = LDSCRYPT.Create(data, offset);
Assert.Null(actual);
}
[Fact]
public void EmptyArray_Null()
{
byte[]? data = [];
int offset = 0;
var actual = LDSCRYPT.Create(data, offset);
Assert.Null(actual);
}
[Fact(Skip = "This will never pass with the current code")]
public void InvalidArray_Null()
{
byte[]? data = [.. Enumerable.Repeat<byte>(0xFF, 1024)];
int offset = 0;
var actual = LDSCRYPT.Create(data, offset);
Assert.Null(actual);
}
[Fact]
public void NullStream_Null()
{
Stream? data = null;
var actual = LDSCRYPT.Create(data);
Assert.Null(actual);
}
[Fact(Skip = "This will never pass with the current code")]
public void EmptyStream_Null()
{
Stream? data = new MemoryStream([]);
var actual = LDSCRYPT.Create(data);
Assert.Null(actual);
}
[Fact(Skip = "This will never pass with the current code")]
public void InvalidStream_Null()
{
Stream? data = new MemoryStream([.. Enumerable.Repeat<byte>(0xFF, 1024)]);
var actual = LDSCRYPT.Create(data);
Assert.Null(actual);
}
}
}

View File

@@ -0,0 +1,61 @@
using System.IO;
using System.Linq;
using SabreTools.Serialization.Wrappers;
using Xunit;
namespace SabreTools.Serialization.Test.Wrappers
{
public class RealArcadeInstallerTests
{
[Fact]
public void NullArray_Null()
{
byte[]? data = null;
int offset = 0;
var actual = RealArcadeInstaller.Create(data, offset);
Assert.Null(actual);
}
[Fact]
public void EmptyArray_Null()
{
byte[]? data = [];
int offset = 0;
var actual = RealArcadeInstaller.Create(data, offset);
Assert.Null(actual);
}
[Fact(Skip = "This will never pass with the current code")]
public void InvalidArray_Null()
{
byte[]? data = [.. Enumerable.Repeat<byte>(0xFF, 1024)];
int offset = 0;
var actual = RealArcadeInstaller.Create(data, offset);
Assert.Null(actual);
}
[Fact]
public void NullStream_Null()
{
Stream? data = null;
var actual = RealArcadeInstaller.Create(data);
Assert.Null(actual);
}
[Fact(Skip = "This will never pass with the current code")]
public void EmptyStream_Null()
{
Stream? data = new MemoryStream([]);
var actual = RealArcadeInstaller.Create(data);
Assert.Null(actual);
}
[Fact(Skip = "This will never pass with the current code")]
public void InvalidStream_Null()
{
Stream? data = new MemoryStream([.. Enumerable.Repeat<byte>(0xFF, 1024)]);
var actual = RealArcadeInstaller.Create(data);
Assert.Null(actual);
}
}
}

View File

@@ -0,0 +1,61 @@
using System.IO;
using System.Linq;
using SabreTools.Serialization.Wrappers;
using Xunit;
namespace SabreTools.Serialization.Test.Wrappers
{
public class RealArcadeMezzanineTests
{
[Fact]
public void NullArray_Null()
{
byte[]? data = null;
int offset = 0;
var actual = RealArcadeMezzanine.Create(data, offset);
Assert.Null(actual);
}
[Fact]
public void EmptyArray_Null()
{
byte[]? data = [];
int offset = 0;
var actual = RealArcadeMezzanine.Create(data, offset);
Assert.Null(actual);
}
[Fact(Skip = "This will never pass with the current code")]
public void InvalidArray_Null()
{
byte[]? data = [.. Enumerable.Repeat<byte>(0xFF, 1024)];
int offset = 0;
var actual = RealArcadeMezzanine.Create(data, offset);
Assert.Null(actual);
}
[Fact]
public void NullStream_Null()
{
Stream? data = null;
var actual = RealArcadeMezzanine.Create(data);
Assert.Null(actual);
}
[Fact(Skip = "This will never pass with the current code")]
public void EmptyStream_Null()
{
Stream? data = new MemoryStream([]);
var actual = RealArcadeMezzanine.Create(data);
Assert.Null(actual);
}
[Fact(Skip = "This will never pass with the current code")]
public void InvalidStream_Null()
{
Stream? data = new MemoryStream([.. Enumerable.Repeat<byte>(0xFF, 1024)]);
var actual = RealArcadeMezzanine.Create(data);
Assert.Null(actual);
}
}
}

View File

@@ -275,7 +275,11 @@ namespace SabreTools.Serialization.CrossModel
Name = item.ReadString(Models.Metadata.Rom.NameKey),
Size = item.ReadString(Models.Metadata.Rom.SizeKey),
CRC = item.ReadString(Models.Metadata.Rom.CRCKey),
MD2 = item.ReadString(Models.Metadata.Rom.MD2Key),
MD4 = item.ReadString(Models.Metadata.Rom.MD4Key),
MD5 = item.ReadString(Models.Metadata.Rom.MD5Key),
RIPEMD128 = item.ReadString(Models.Metadata.Rom.RIPEMD128Key),
RIPEMD160 = item.ReadString(Models.Metadata.Rom.RIPEMD160Key),
SHA1 = item.ReadString(Models.Metadata.Rom.SHA1Key),
SHA256 = item.ReadString(Models.Metadata.Rom.SHA256Key),
SHA384 = item.ReadString(Models.Metadata.Rom.SHA384Key),

View File

@@ -291,7 +291,11 @@ namespace SabreTools.Serialization.CrossModel
[Models.Metadata.Rom.NameKey] = item.Name,
[Models.Metadata.Rom.SizeKey] = item.Size,
[Models.Metadata.Rom.CRCKey] = item.CRC,
[Models.Metadata.Rom.MD2Key] = item.MD2,
[Models.Metadata.Rom.MD4Key] = item.MD4,
[Models.Metadata.Rom.MD5Key] = item.MD5,
[Models.Metadata.Rom.RIPEMD128Key] = item.RIPEMD128,
[Models.Metadata.Rom.RIPEMD160Key] = item.RIPEMD160,
[Models.Metadata.Rom.SHA1Key] = item.SHA1,
[Models.Metadata.Rom.SHA256Key] = item.SHA256,
[Models.Metadata.Rom.SHA384Key] = item.SHA384,

View File

@@ -28,6 +28,8 @@ namespace SabreTools.Serialization.CrossModel
var md2s = new List<MD2>();
var md4s = new List<MD4>();
var md5s = new List<MD5>();
var ripemd128s = new List<RIPEMD128>();
var ripemd160s = new List<RIPEMD160>();
var sha1s = new List<SHA1>();
var sha256s = new List<SHA256>();
var sha384s = new List<SHA384>();
@@ -44,6 +46,10 @@ namespace SabreTools.Serialization.CrossModel
md4s.AddRange(hashfile.MD4);
if (hashfile.MD5 != null && hashfile.MD5.Length > 0)
md5s.AddRange(hashfile.MD5);
if (hashfile.RIPEMD128 != null && hashfile.RIPEMD128.Length > 0)
ripemd128s.AddRange(hashfile.RIPEMD128);
if (hashfile.RIPEMD160 != null && hashfile.RIPEMD160.Length > 0)
ripemd160s.AddRange(hashfile.RIPEMD160);
if (hashfile.SHA1 != null && hashfile.SHA1.Length > 0)
sha1s.AddRange(hashfile.SHA1);
if (hashfile.SHA256 != null && hashfile.SHA256.Length > 0)
@@ -66,6 +72,10 @@ namespace SabreTools.Serialization.CrossModel
hashfileItem.MD4 = [.. md4s];
if (md5s.Count > 0)
hashfileItem.MD5 = [.. md5s];
if (ripemd128s.Count > 0)
hashfileItem.RIPEMD128 = [.. ripemd128s];
if (ripemd160s.Count > 0)
hashfileItem.RIPEMD160 = [.. ripemd160s];
if (sha1s.Count > 0)
hashfileItem.SHA1 = [.. sha1s];
if (sha256s.Count > 0)
@@ -103,6 +113,12 @@ namespace SabreTools.Serialization.CrossModel
MD5 = hash == HashType.MD5
? Array.ConvertAll(roms, ConvertToMD5)
: null,
RIPEMD128 = hash == HashType.RIPEMD128
? Array.ConvertAll(roms, ConvertToRIPEMD128)
: null,
RIPEMD160 = hash == HashType.RIPEMD160
? Array.ConvertAll(roms, ConvertToRIPEMD160)
: null,
SHA1 = hash == HashType.SHA1
? Array.ConvertAll(roms, ConvertToSHA1)
: null,
@@ -160,6 +176,32 @@ namespace SabreTools.Serialization.CrossModel
return md5;
}
/// <summary>
/// Convert from <see cref="Models.Metadata.Rom"/> to <see cref="Models.Hashfile.RIPEMD128"/>
/// </summary>
private static RIPEMD128 ConvertToRIPEMD128(Models.Metadata.Rom item)
{
var ripemd128 = new RIPEMD128
{
Hash = item.ReadString(Models.Metadata.Rom.RIPEMD128Key),
File = item.ReadString(Models.Metadata.Rom.NameKey),
};
return ripemd128;
}
/// <summary>
/// Convert from <see cref="Models.Metadata.Rom"/> to <see cref="Models.Hashfile.RIPEMD160"/>
/// </summary>
private static RIPEMD160 ConvertToRIPEMD160(Models.Metadata.Rom item)
{
var ripemd160 = new RIPEMD160
{
Hash = item.ReadString(Models.Metadata.Rom.RIPEMD160Key),
File = item.ReadString(Models.Metadata.Rom.NameKey),
};
return ripemd160;
}
/// <summary>
/// Convert from <see cref="Models.Metadata.Rom"/> to <see cref="Models.Hashfile.SFV"/>
/// </summary>

View File

@@ -50,6 +50,10 @@ namespace SabreTools.Serialization.CrossModel
machine[Models.Metadata.Machine.RomKey] = Array.ConvertAll(item.MD4, ConvertToInternalModel);
else if (item.MD5 != null && item.MD5.Length > 0)
machine[Models.Metadata.Machine.RomKey] = Array.ConvertAll(item.MD5, ConvertToInternalModel);
else if (item.RIPEMD128 != null && item.RIPEMD128.Length > 0)
machine[Models.Metadata.Machine.RomKey] = Array.ConvertAll(item.RIPEMD128, ConvertToInternalModel);
else if (item.RIPEMD160 != null && item.RIPEMD160.Length > 0)
machine[Models.Metadata.Machine.RomKey] = Array.ConvertAll(item.RIPEMD160, ConvertToInternalModel);
else if (item.SHA1 != null && item.SHA1.Length > 0)
machine[Models.Metadata.Machine.RomKey] = Array.ConvertAll(item.SHA1, ConvertToInternalModel);
else if (item.SHA256 != null && item.SHA256.Length > 0)
@@ -103,6 +107,32 @@ namespace SabreTools.Serialization.CrossModel
return rom;
}
/// <summary>
/// Convert from <see cref="Models.Hashfile.RIPEMD128"/> to <see cref="Models.Metadata.Rom"/>
/// </summary>
private static Models.Metadata.Rom ConvertToInternalModel(RIPEMD128 item)
{
var rom = new Models.Metadata.Rom
{
[Models.Metadata.Rom.RIPEMD128Key] = item.Hash,
[Models.Metadata.Rom.NameKey] = item.File,
};
return rom;
}
/// <summary>
/// Convert from <see cref="Models.Hashfile.RIPEMD160"/> to <see cref="Models.Metadata.Rom"/>
/// </summary>
private static Models.Metadata.Rom ConvertToInternalModel(RIPEMD160 item)
{
var rom = new Models.Metadata.Rom
{
[Models.Metadata.Rom.RIPEMD160Key] = item.Hash,
[Models.Metadata.Rom.NameKey] = item.File,
};
return rom;
}
/// <summary>
/// Convert from <see cref="Models.Hashfile.SFV"/> to <see cref="Models.Metadata.Rom"/>
/// </summary>

View File

@@ -302,7 +302,11 @@ namespace SabreTools.Serialization.CrossModel
Name = item.ReadString(Models.Metadata.Rom.NameKey),
Size = item.ReadString(Models.Metadata.Rom.SizeKey),
CRC = item.ReadString(Models.Metadata.Rom.CRCKey),
MD2 = item.ReadString(Models.Metadata.Rom.MD2Key),
MD4 = item.ReadString(Models.Metadata.Rom.MD4Key),
MD5 = item.ReadString(Models.Metadata.Rom.MD5Key),
RIPEMD128 = item.ReadString(Models.Metadata.Rom.RIPEMD128Key),
RIPEMD160 = item.ReadString(Models.Metadata.Rom.RIPEMD160Key),
SHA1 = item.ReadString(Models.Metadata.Rom.SHA1Key),
SHA256 = item.ReadString(Models.Metadata.Rom.SHA256Key),
SHA384 = item.ReadString(Models.Metadata.Rom.SHA384Key),

View File

@@ -298,7 +298,11 @@ namespace SabreTools.Serialization.CrossModel
[Models.Metadata.Rom.NameKey] = item.Name,
[Models.Metadata.Rom.SizeKey] = item.Size,
[Models.Metadata.Rom.CRCKey] = item.CRC,
[Models.Metadata.Rom.MD2Key] = item.MD2,
[Models.Metadata.Rom.MD4Key] = item.MD4,
[Models.Metadata.Rom.MD5Key] = item.MD5,
[Models.Metadata.Rom.RIPEMD128Key] = item.RIPEMD128,
[Models.Metadata.Rom.RIPEMD160Key] = item.RIPEMD160,
[Models.Metadata.Rom.SHA1Key] = item.SHA1,
[Models.Metadata.Rom.SHA256Key] = item.SHA256,
[Models.Metadata.Rom.SHA384Key] = item.SHA384,

View File

@@ -28,7 +28,7 @@ namespace SabreTools.Serialization.Deserializers
// Try to parse the record
var record = ParseRecord(data);
if (record == null)
- return null;
+ continue;
// Add the record
records.Add(record);
@@ -64,9 +64,10 @@ namespace SabreTools.Serialization.Deserializers
/// <returns>Filled Record on success, null on error</returns>
private static Record? ParseRecord(Stream data)
{
- // The first byte is the type
+ // The first 4 bytes are the type and length
RecordType type = (RecordType)data.ReadByteValue();
- data.Seek(-1, SeekOrigin.Current);
+ uint recordLength = data.ReadUInt24LittleEndian();
+ data.Seek(-4, SeekOrigin.Current);
// Create a record based on the type
return type switch
@@ -83,7 +84,7 @@ namespace SabreTools.Serialization.Deserializers
RecordType.Copyright => ParseCopyrightRecord(data),
// Unknown record type
- _ => null,
+ _ => ParseGenericRecord(data),
};
}
@@ -231,6 +232,22 @@ namespace SabreTools.Serialization.Deserializers
return obj;
}
/// <summary>
/// Parse a Stream into a GenericRecord
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled GenericRecord on success, null on error</returns>
public static GenericRecord ParseGenericRecord(Stream data)
{
var obj = new GenericRecord();
obj.RecordType = (RecordType)data.ReadByteValue();
obj.RecordLength = data.ReadUInt24LittleEndian();
obj.Data = data.ReadBytes(0x10);
return obj;
}
/// <summary>
/// Parse a Stream into a HostRevocationListEntry
/// </summary>

View File

@@ -0,0 +1,158 @@
using System.Collections.Generic;
using System.IO;
using SabreTools.IO.Extensions;
using SabreTools.Models.ASN1;
namespace SabreTools.Serialization.Deserializers
{
public class AbstractSyntaxNotationOne : BaseBinaryDeserializer<TypeLengthValue[]>
{
/// <inheritdoc/>
public override TypeLengthValue[]? Deserialize(Stream? data)
{
// If the data is invalid
if (data == null || !data.CanRead)
return null;
try
{
// Cache the current offset
long initialOffset = data.Position;
// Loop through the data and return all top-level values
var topLevelValues = new List<TypeLengthValue>();
while (data.Position < data.Length)
{
var topLevelValue = ParseTypeLengthValue(data);
if (topLevelValue == null)
break;
topLevelValues.Add(topLevelValue);
}
// Return null instead of empty
if (topLevelValues.Count == 0)
return null;
// Return the top-level values
return [.. topLevelValues];
}
catch
{
// Ignore the actual error
return null;
}
}
/// <summary>
/// Parse a Stream into a TypeLengthValue
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled TypeLengthValue on success, null on error</returns>
public TypeLengthValue? ParseTypeLengthValue(Stream data)
{
var obj = new TypeLengthValue();
// Get the type and modifiers
obj.Type = (ASN1Type)data.ReadByteValue();
// If we have an end indicator, we just return
if (obj.Type == ASN1Type.V_ASN1_EOC)
return obj;
// Get the length of the value
ulong? length = ReadLength(data);
if (length == null)
return null;
// Set the length
obj.Length = length.Value;
// Read the value
#if NET20 || NET35
if ((obj.Type & ASN1Type.V_ASN1_CONSTRUCTED) != 0)
#else
if (obj.Type.HasFlag(ASN1Type.V_ASN1_CONSTRUCTED))
#endif
{
var valueList = new List<TypeLengthValue>();
long currentIndex = data.Position;
while (data.Position < currentIndex + (long)obj.Length)
{
var value = ParseTypeLengthValue(data);
valueList.Add(value);
}
obj.Value = valueList.ToArray();
}
else
{
// TODO: Get more granular based on type
obj.Value = data.ReadBytes((int)obj.Length);
}
return obj;
}
/// <summary>
/// Reads the length field for a type
/// </summary>
/// <param name="data">Stream representing data to read</param>
/// <returns>The length value read from the array</returns>
private static ulong? ReadLength(Stream data)
{
// Read the first byte, assuming it's the length
byte length = data.ReadByteValue();
// If the bit 7 is not set, then use the value as it is
if ((length & 0x80) == 0)
return length;
// Otherwise, use the value as the number of remaining bytes to read
int bytesToRead = length & ~0x80;
// Assemble the length based on byte count
ulong fullLength = 0;
switch (bytesToRead)
{
case 8:
fullLength |= data.ReadByteValue();
fullLength <<= 8;
goto case 7;
case 7:
fullLength |= data.ReadByteValue();
fullLength <<= 8;
goto case 6;
case 6:
fullLength |= data.ReadByteValue();
fullLength <<= 8;
goto case 5;
case 5:
fullLength |= data.ReadByteValue();
fullLength <<= 8;
goto case 4;
case 4:
fullLength |= data.ReadByteValue();
fullLength <<= 8;
goto case 3;
case 3:
fullLength |= data.ReadByteValue();
fullLength <<= 8;
goto case 2;
case 2:
fullLength |= data.ReadByteValue();
fullLength <<= 8;
goto case 1;
case 1:
fullLength |= data.ReadByteValue();
break;
default:
return null;
}
return fullLength;
}
}
}
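
A minimal usage sketch of the deserializer above, walking the same nested input as the ComplexValue test: a constructed type triggers the recursive branch, so `Value` holds child TLVs rather than raw bytes:

using System.IO;
using SabreTools.Models.ASN1;
using SabreTools.Serialization.Deserializers;

// 0x26 = V_ASN1_CONSTRUCTED | V_ASN1_OBJECT; 0x81 0x03 is a long-form
// length of 3; the payload is one nested boolean TLV (0x01 0x01 0x01).
var data = new MemoryStream([0x26, 0x81, 0x03, 0x01, 0x01, 0x01]);
var parsed = new AbstractSyntaxNotationOne().Deserialize(data);

if (parsed?[0].Value is TypeLengthValue[] children)
    System.Console.WriteLine(children[0].Type); // V_ASN1_BOOLEAN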

View File

@@ -21,7 +21,7 @@ namespace SabreTools.Serialization.Deserializers
/// <inheritdoc/>
public override MetadataFile? Deserialize(Stream? data)
{
- // If tthe data is invalid
+ // If the data is invalid
if (data == null || !data.CanRead)
return default;

View File

@@ -1,6 +1,4 @@
- using System;
using System.IO;
- using System.Reflection;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Deserializers
@@ -9,17 +7,15 @@ namespace SabreTools.Serialization.Deserializers
/// Base class for all binary deserializers
/// </summary>
/// <typeparam name="TModel">Type of the model to deserialize</typeparam>
- /// <remarks>These methods assume there is a concrete implementation of the deserialzier for the model available</remarks>
+ /// <remarks>
+ /// This class allows all inheriting types to only implement <see cref="IStreamDeserializer<>"/>
+ /// and still implicitly implement <see cref="IByteDeserializer<>"/> and <see cref="IFileDeserializer<>"/>
+ /// </remarks>
public abstract class BaseBinaryDeserializer<TModel> :
IByteDeserializer<TModel>,
IFileDeserializer<TModel>,
IStreamDeserializer<TModel>
{
- /// <summary>
- /// Indicates if compressed files should be decompressed before processing
- /// </summary>
- protected virtual bool SkipCompression => false;
#region IByteDeserializer
/// <inheritdoc/>
@@ -35,7 +31,7 @@ namespace SabreTools.Serialization.Deserializers
// Create a memory stream and parse that
var dataStream = new MemoryStream(data, offset, data.Length - offset);
- return DeserializeStream(dataStream);
+ return Deserialize(dataStream);
}
#endregion
@@ -45,8 +41,21 @@ namespace SabreTools.Serialization.Deserializers
/// <inheritdoc/>
public virtual TModel? Deserialize(string? path)
{
- using var stream = PathProcessor.OpenStream(path, SkipCompression);
- return DeserializeStream(stream);
+ try
+ {
+ // If we don't have a file
+ if (string.IsNullOrEmpty(path) || !File.Exists(path))
+ return default;
+ // Open the file for deserialization
+ using var stream = File.Open(path, FileMode.Open, FileAccess.Read, FileShare.ReadWrite);
+ return Deserialize(stream);
+ }
+ catch
+ {
+ // TODO: Handle logging the exception
+ return default;
+ }
}
#endregion
@@ -57,110 +66,5 @@ namespace SabreTools.Serialization.Deserializers
public abstract TModel? Deserialize(Stream? data);
#endregion
- #region Static Implementations
- /// <inheritdoc cref="IByteDeserializer.Deserialize(byte[]?, int)"/>
- public static TModel? DeserializeBytes(byte[]? data, int offset)
- {
- var deserializer = GetType<IByteDeserializer<TModel>>();
- if (deserializer == null)
- return default;
- return deserializer.Deserialize(data, offset);
- }
- /// <inheritdoc cref="IFileDeserializer.Deserialize(string?)"/>
- public static TModel? DeserializeFile(string? path)
- {
- var deserializer = GetType<IFileDeserializer<TModel>>();
- if (deserializer == null)
- return default;
- return deserializer.Deserialize(path);
- }
- /// <inheritdoc cref="IStreamDeserializer.Deserialize(Stream?)"/>
- public static TModel? DeserializeStream(Stream? data)
- {
- var deserializer = GetType<IStreamDeserializer<TModel>>();
- if (deserializer == null)
- return default;
- return deserializer.Deserialize(data);
- }
- #endregion
- #region Helpers
- /// <summary>
- /// Get a constructed instance of a type, if possible
- /// </summary>
- /// <typeparam name="TDeserializer">Deserializer type to construct</typeparam>
- /// <returns>Deserializer of the requested type, null on error</returns>
- private static TDeserializer? GetType<TDeserializer>()
- {
- // If the deserializer type is invalid
- string? deserializerName = typeof(TDeserializer)?.Name;
- if (deserializerName == null)
- return default;
- // If the deserializer has no generic arguments
- var genericArgs = typeof(TDeserializer).GetGenericArguments();
- if (genericArgs.Length == 0)
- return default;
- // Loop through all loaded assemblies
- Type modelType = genericArgs[0];
- foreach (var assembly in AppDomain.CurrentDomain.GetAssemblies())
- {
- // If the assembly is invalid
- if (assembly == null)
- return default;
- // If not all types can be loaded, use the ones that could be
- Type?[] assemblyTypes = [];
- try
- {
- assemblyTypes = assembly.GetTypes();
- }
- catch (ReflectionTypeLoadException rtle)
- {
- assemblyTypes = rtle.Types ?? [];
- }
- // Loop through all types
- foreach (Type? type in assemblyTypes)
- {
- // If the type is invalid
- if (type == null)
- continue;
- // If the type isn't a class
- if (!type.IsClass)
- continue;
- // If the type doesn't implement the interface
- var interfaceType = type.GetInterface(deserializerName);
- if (interfaceType == null)
- continue;
- // If the interface doesn't use the correct type parameter
- var genericTypes = interfaceType.GetGenericArguments();
- if (genericTypes.Length != 1 || genericTypes[0] != modelType)
- continue;
- // Try to create a concrete instance of the type
- var instance = (TDeserializer?)Activator.CreateInstance(type);
- if (instance != null)
- return instance;
- }
- }
- return default;
- }
- #endregion
}
}
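
The net effect of this refactor, per the "Static implementations using reflection go away" and "Avoid hidden reflection call for most cases" commits: callers construct a concrete deserializer directly instead of routing through an assembly-wide type scan. A sketch of the resulting call pattern (the input path is hypothetical):

using System.IO;
using SabreTools.Serialization.Deserializers;

// Before: var model = BaseBinaryDeserializer<TModel>.DeserializeStream(stream);
// After: construct the concrete type and call Deserialize directly.
using Stream stream = File.OpenRead("signature.bin"); // hypothetical input
var model = new AbstractSyntaxNotationOne().Deserialize(stream);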

View File

@@ -4,6 +4,7 @@ using System.IO;
using System.Text;
using SabreTools.IO.Extensions;
using SabreTools.Models.CFB;
using SabreTools.Serialization.Extensions;
using static SabreTools.Models.CFB.Constants;
namespace SabreTools.Serialization.Deserializers

View File

@@ -64,7 +64,7 @@ namespace SabreTools.Serialization.Deserializers
return headerV3;
case 4:
- var headerV4 = ParseHeaderV1(data);
+ var headerV4 = ParseHeaderV4(data);
if (headerV4.Tag != Constants.SignatureString)
return null;
@@ -76,7 +76,7 @@ namespace SabreTools.Serialization.Deserializers
return headerV4;
case 5:
- var headerV5 = ParseHeaderV1(data);
+ var headerV5 = ParseHeaderV5(data);
if (headerV5.Tag != Constants.SignatureString)
return null;
@@ -150,15 +150,15 @@ namespace SabreTools.Serialization.Deserializers
byte[] tag = data.ReadBytes(8);
obj.Tag = Encoding.ASCII.GetString(tag);
- obj.Length = data.ReadUInt32LittleEndian();
- obj.Version = data.ReadUInt32LittleEndian();
- obj.Flags = (Flags)data.ReadUInt32LittleEndian();
- obj.Compression = (CompressionType)data.ReadUInt32LittleEndian();
- obj.HunkSize = data.ReadUInt32LittleEndian();
- obj.TotalHunks = data.ReadUInt32LittleEndian();
- obj.Cylinders = data.ReadUInt32LittleEndian();
- obj.Heads = data.ReadUInt32LittleEndian();
- obj.Sectors = data.ReadUInt32LittleEndian();
+ obj.Length = data.ReadUInt32BigEndian();
+ obj.Version = data.ReadUInt32BigEndian();
+ obj.Flags = (Flags)data.ReadUInt32BigEndian();
+ obj.Compression = (CompressionType)data.ReadUInt32BigEndian();
+ obj.HunkSize = data.ReadUInt32BigEndian();
+ obj.TotalHunks = data.ReadUInt32BigEndian();
+ obj.Cylinders = data.ReadUInt32BigEndian();
+ obj.Heads = data.ReadUInt32BigEndian();
+ obj.Sectors = data.ReadUInt32BigEndian();
obj.MD5 = data.ReadBytes(16);
obj.ParentMD5 = data.ReadBytes(16);
@@ -174,18 +174,18 @@ namespace SabreTools.Serialization.Deserializers
byte[] tag = data.ReadBytes(8);
obj.Tag = Encoding.ASCII.GetString(tag);
obj.Length = data.ReadUInt32LittleEndian();
obj.Version = data.ReadUInt32LittleEndian();
obj.Flags = (Flags)data.ReadUInt32LittleEndian();
obj.Compression = (CompressionType)data.ReadUInt32LittleEndian();
obj.HunkSize = data.ReadUInt32LittleEndian();
obj.TotalHunks = data.ReadUInt32LittleEndian();
obj.Cylinders = data.ReadUInt32LittleEndian();
obj.Heads = data.ReadUInt32LittleEndian();
obj.Sectors = data.ReadUInt32LittleEndian();
obj.Length = data.ReadUInt32BigEndian();
obj.Version = data.ReadUInt32BigEndian();
obj.Flags = (Flags)data.ReadUInt32BigEndian();
obj.Compression = (CompressionType)data.ReadUInt32BigEndian();
obj.HunkSize = data.ReadUInt32BigEndian();
obj.TotalHunks = data.ReadUInt32BigEndian();
obj.Cylinders = data.ReadUInt32BigEndian();
obj.Heads = data.ReadUInt32BigEndian();
obj.Sectors = data.ReadUInt32BigEndian();
obj.MD5 = data.ReadBytes(16);
obj.ParentMD5 = data.ReadBytes(16);
obj.BytesPerSector = data.ReadUInt32LittleEndian();
obj.BytesPerSector = data.ReadUInt32BigEndian();
return obj;
}
@@ -199,16 +199,16 @@ namespace SabreTools.Serialization.Deserializers
byte[] tag = data.ReadBytes(8);
obj.Tag = Encoding.ASCII.GetString(tag);
obj.Length = data.ReadUInt32LittleEndian();
obj.Version = data.ReadUInt32LittleEndian();
obj.Flags = (Flags)data.ReadUInt32LittleEndian();
obj.Compression = (CompressionType)data.ReadUInt32LittleEndian();
obj.TotalHunks = data.ReadUInt32LittleEndian();
obj.LogicalBytes = data.ReadUInt64LittleEndian();
obj.MetaOffset = data.ReadUInt64LittleEndian();
obj.Length = data.ReadUInt32BigEndian();
obj.Version = data.ReadUInt32BigEndian();
obj.Flags = (Flags)data.ReadUInt32BigEndian();
obj.Compression = (CompressionType)data.ReadUInt32BigEndian();
obj.TotalHunks = data.ReadUInt32BigEndian();
obj.LogicalBytes = data.ReadUInt64BigEndian();
obj.MetaOffset = data.ReadUInt64BigEndian();
obj.MD5 = data.ReadBytes(16);
obj.ParentMD5 = data.ReadBytes(16);
obj.HunkBytes = data.ReadUInt32LittleEndian();
obj.HunkBytes = data.ReadUInt32BigEndian();
obj.SHA1 = data.ReadBytes(20);
obj.ParentSHA1 = data.ReadBytes(20);
@@ -218,20 +218,20 @@ namespace SabreTools.Serialization.Deserializers
/// <summary>
/// Parse a Stream into a V4 header
/// </summary>
public static HeaderV4? ParseHeaderV4(Stream data)
public static HeaderV4 ParseHeaderV4(Stream data)
{
var obj = new HeaderV4();
byte[] tag = data.ReadBytes(8);
obj.Tag = Encoding.ASCII.GetString(tag);
obj.Length = data.ReadUInt32LittleEndian();
obj.Version = data.ReadUInt32LittleEndian();
obj.Flags = (Flags)data.ReadUInt32LittleEndian();
obj.Compression = (CompressionType)data.ReadUInt32LittleEndian();
obj.TotalHunks = data.ReadUInt32LittleEndian();
obj.LogicalBytes = data.ReadUInt64LittleEndian();
obj.MetaOffset = data.ReadUInt64LittleEndian();
obj.HunkBytes = data.ReadUInt32LittleEndian();
obj.Length = data.ReadUInt32BigEndian();
obj.Version = data.ReadUInt32BigEndian();
obj.Flags = (Flags)data.ReadUInt32BigEndian();
obj.Compression = (CompressionType)data.ReadUInt32BigEndian();
obj.TotalHunks = data.ReadUInt32BigEndian();
obj.LogicalBytes = data.ReadUInt64BigEndian();
obj.MetaOffset = data.ReadUInt64BigEndian();
obj.HunkBytes = data.ReadUInt32BigEndian();
obj.SHA1 = data.ReadBytes(20);
obj.ParentSHA1 = data.ReadBytes(20);
obj.RawSHA1 = data.ReadBytes(20);
@@ -248,18 +248,18 @@ namespace SabreTools.Serialization.Deserializers
byte[] tag = data.ReadBytes(8);
obj.Tag = Encoding.ASCII.GetString(tag);
obj.Length = data.ReadUInt32LittleEndian();
obj.Version = data.ReadUInt32LittleEndian();
obj.Length = data.ReadUInt32BigEndian();
obj.Version = data.ReadUInt32BigEndian();
obj.Compressors = new CodecType[4];
for (int i = 0; i < 4; i++)
{
obj.Compressors[i] = (CodecType)data.ReadUInt32LittleEndian();
obj.Compressors[i] = (CodecType)data.ReadUInt32BigEndian();
}
obj.LogicalBytes = data.ReadUInt64LittleEndian();
obj.MapOffset = data.ReadUInt64LittleEndian();
obj.MetaOffset = data.ReadUInt64LittleEndian();
obj.HunkBytes = data.ReadUInt32LittleEndian();
obj.UnitBytes = data.ReadUInt32LittleEndian();
obj.LogicalBytes = data.ReadUInt64BigEndian();
obj.MapOffset = data.ReadUInt64BigEndian();
obj.MetaOffset = data.ReadUInt64BigEndian();
obj.HunkBytes = data.ReadUInt32BigEndian();
obj.UnitBytes = data.ReadUInt32BigEndian();
obj.RawSHA1 = data.ReadBytes(20);
obj.SHA1 = data.ReadBytes(20);
obj.ParentSHA1 = data.ReadBytes(20);
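Every multi-byte field in these CHD header parsers flips from little- to big-endian reads: CHD stores its header integers big-endian, so the little-endian reads were producing byte-swapped values. A minimal sketch of what a big-endian 32-bit read does over raw stream bytes (the ReadUInt32BigEndian extension from SabreTools.IO is assumed to behave equivalently):

using System.IO;

// Read four bytes and assemble them most-significant first.
static uint ReadUInt32BE(Stream data)
{
    byte[] buffer = new byte[4];
    if (data.Read(buffer, 0, 4) < 4)
        throw new EndOfStreamException();

    return (uint)(buffer[0] << 24 | buffer[1] << 16 | buffer[2] << 8 | buffer[3]);
}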

View File

@@ -11,18 +11,11 @@ namespace SabreTools.Serialization.Deserializers
{
#region IByteDeserializer
/// <inheritdoc cref="IByteDeserializer.Deserialize(byte[]?, int)"/>
public static MetadataFile? DeserializeBytes(byte[]? data, int offset, bool quotes = true)
{
var deserializer = new ClrMamePro();
return deserializer.Deserialize(data, offset, quotes);
}
/// <inheritdoc/>
public override MetadataFile? Deserialize(byte[]? data, int offset)
=> Deserialize(data, offset, true);
/// <inheritdoc/>
/// <inheritdoc cref="Deserialize(byte[], int)"/>
public MetadataFile? Deserialize(byte[]? data, int offset, bool quotes)
{
// If the data is invalid
@@ -35,42 +28,41 @@ namespace SabreTools.Serialization.Deserializers
// Create a memory stream and parse that
var dataStream = new MemoryStream(data, offset, data.Length - offset);
return DeserializeStream(dataStream, quotes);
return Deserialize(dataStream, quotes);
}
#endregion
#region IFileDeserializer
/// <inheritdoc cref="IFileDeserializer.Deserialize(string?)"/>
public static MetadataFile? DeserializeFile(string? path, bool quotes = true)
{
var deserializer = new ClrMamePro();
return deserializer.Deserialize(path, quotes);
}
/// <inheritdoc/>
public override MetadataFile? Deserialize(string? path)
=> Deserialize(path, true);
/// <inheritdoc/>
/// <inheritdoc cref="Deserialize(string?)"/>
public MetadataFile? Deserialize(string? path, bool quotes)
{
using var stream = PathProcessor.OpenStream(path);
return DeserializeStream(stream, quotes);
try
{
// If we don't have a file
if (string.IsNullOrEmpty(path) || !File.Exists(path))
return default;
// Open the file for deserialization
using var stream = File.Open(path, FileMode.Open, FileAccess.Read, FileShare.ReadWrite);
return Deserialize(stream, quotes);
}
catch
{
// TODO: Handle logging the exception
return default;
}
}
#endregion
#region IStreamDeserializer
/// <inheritdoc cref="IStreamDeserializer.Deserialize(Stream?)"/>
public static MetadataFile? DeserializeStream(Stream? data, bool quotes = true)
{
var deserializer = new ClrMamePro();
return deserializer.Deserialize(data, quotes);
}
/// <inheritdoc/>
public override MetadataFile? Deserialize(Stream? data)
=> Deserialize(data, true);
@@ -78,7 +70,7 @@ namespace SabreTools.Serialization.Deserializers
/// <inheritdoc cref="Deserialize(Stream)"/>
public MetadataFile? Deserialize(Stream? data, bool quotes)
{
// If tthe data is invalid
// If the data is invalid
if (data == null || !data.CanRead)
return null;
@@ -481,9 +473,21 @@ namespace SabreTools.Serialization.Deserializers
case "crc":
rom.CRC = kvp.Value;
break;
case "md2":
rom.MD2 = kvp.Value;
break;
case "md4":
rom.MD4 = kvp.Value;
break;
case "md5":
rom.MD5 = kvp.Value;
break;
case "ripemd128":
rom.RIPEMD128 = kvp.Value;
break;
case "ripemd160":
rom.RIPEMD160 = kvp.Value;
break;
case "sha1":
rom.SHA1 = kvp.Value;
break;
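The new cases extend the rom key/value mapping with MD2, MD4, RIPEMD128, and RIPEMD160 digests. For reference, a clrmamepro rom entry carries these as space-separated key/value pairs; a hypothetical entry using the new keys might look like the following (digest values shortened for illustration):

rom ( name game.bin size 524288 crc deadbeef md4 a0b1c2... ripemd128 9c3d4e... ripemd160 31f0a2... )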

View File

@@ -11,7 +11,7 @@ namespace SabreTools.Serialization.Deserializers
/// <inheritdoc/>
public override MetadataFile? Deserialize(Stream? data)
{
// If tthe data is invalid
// If the data is invalid
if (data == null || !data.CanRead)
return null;

View File

@@ -8,9 +8,6 @@ namespace SabreTools.Serialization.Deserializers
{
public class GZip : BaseBinaryDeserializer<Archive>
{
/// <inheritdoc/>
protected override bool SkipCompression => true;
/// <inheritdoc/>
public override Archive? Deserialize(Stream? data)
{

View File

@@ -10,18 +10,11 @@ namespace SabreTools.Serialization.Deserializers
{
#region IByteDeserializer
/// <inheritdoc cref="IByteDeserializer.Deserialize(byte[]?, int)"/>
public static Models.Hashfile.Hashfile? DeserializeBytes(byte[]? data, int offset, HashType hash = HashType.CRC32)
{
var deserializer = new Hashfile();
return deserializer.Deserialize(data, offset, hash);
}
/// <inheritdoc/>
public override Models.Hashfile.Hashfile? Deserialize(byte[]? data, int offset)
=> Deserialize(data, offset, HashType.CRC32);
/// <inheritdoc/>
/// <inheritdoc cref="Deserialize(byte[], int)"/>
public Models.Hashfile.Hashfile? Deserialize(byte[]? data, int offset, HashType hash)
{
// If the data is invalid
@@ -34,43 +27,41 @@ namespace SabreTools.Serialization.Deserializers
// Create a memory stream and parse that
var dataStream = new MemoryStream(data, offset, data.Length - offset);
return DeserializeStream(dataStream, hash);
return Deserialize(dataStream, hash);
}
#endregion
#region IFileDeserializer
/// <inheritdoc cref="IFileDeserializer.Deserialize(string?)"/>
public static Models.Hashfile.Hashfile? DeserializeFile(string? path, HashType hash = HashType.CRC32)
{
var deserializer = new Hashfile();
return deserializer.Deserialize(path, hash);
}
/// <inheritdoc/>
public override Models.Hashfile.Hashfile? Deserialize(string? path)
=> Deserialize(path, HashType.CRC32);
/// <inheritdoc/>
/// <inheritdoc cref="Deserialize(string?)"/>
public Models.Hashfile.Hashfile? Deserialize(string? path, HashType hash)
{
using var stream = PathProcessor.OpenStream(path);
return DeserializeStream(stream, hash);
try
{
// If we don't have a file
if (string.IsNullOrEmpty(path) || !File.Exists(path))
return default;
// Open the file for deserialization
using var stream = File.Open(path, FileMode.Open, FileAccess.Read, FileShare.ReadWrite);
return Deserialize(stream, hash);
}
catch
{
// TODO: Handle logging the exception
return default;
}
}
#endregion
#region IStreamDeserializer
/// <inheritdoc cref="IStreamDeserializer.Deserialize(Stream?)"/>
public static Models.Hashfile.Hashfile? DeserializeStream(Stream? data, HashType hash = HashType.CRC32)
{
var deserializer = new Hashfile();
return deserializer.Deserialize(data, hash);
}
/// <inheritdoc/>
public override Models.Hashfile.Hashfile? Deserialize(Stream? data)
=> Deserialize(data, HashType.CRC32);
@@ -83,6 +74,8 @@ namespace SabreTools.Serialization.Deserializers
HashType.MD2 => DeserializeMD2(data),
HashType.MD4 => DeserializeMD4(data),
HashType.MD5 => DeserializeMD5(data),
HashType.RIPEMD128 => DeserializeRIPEMD128(data),
HashType.RIPEMD160 => DeserializeRIPEMD160(data),
HashType.SHA1 => DeserializeSHA1(data),
HashType.SHA256 => DeserializeSHA256(data),
HashType.SHA384 => DeserializeSHA384(data),
@@ -96,7 +89,7 @@ namespace SabreTools.Serialization.Deserializers
/// <inheritdoc cref="Deserialize(Stream)"/>
public Models.Hashfile.Hashfile? DeserializeSFV(Stream? data)
{
// If tthe data is invalid
// If the data is invalid
if (data == null || !data.CanRead)
return null;
@@ -139,7 +132,7 @@ namespace SabreTools.Serialization.Deserializers
/// <inheritdoc cref="Deserialize(Stream)"/>
public Models.Hashfile.Hashfile? DeserializeMD2(Stream? data)
{
// If tthe data is invalid
// If the data is invalid
if (data == null || !data.CanRead)
return null;
@@ -183,7 +176,7 @@ namespace SabreTools.Serialization.Deserializers
/// <inheritdoc cref="Deserialize(Stream)"/>
public Models.Hashfile.Hashfile? DeserializeMD4(Stream? data)
{
// If tthe data is invalid
// If the data is invalid
if (data == null || !data.CanRead)
return null;
@@ -227,7 +220,7 @@ namespace SabreTools.Serialization.Deserializers
/// <inheritdoc cref="Deserialize(Stream)"/>
public Models.Hashfile.Hashfile? DeserializeMD5(Stream? data)
{
// If tthe data is invalid
// If the data is invalid
if (data == null || !data.CanRead)
return null;
@@ -260,10 +253,82 @@ namespace SabreTools.Serialization.Deserializers
return null;
}
/// <inheritdoc cref="Deserialize(Stream)"/>
public Models.Hashfile.Hashfile? DeserializeRIPEMD128(Stream? data)
{
// If the data is invalid
if (data == null || !data.CanRead)
return null;
// Setup the reader and output
var reader = new StreamReader(data);
var ripemd128List = new List<RIPEMD128>();
// Loop through the rows and parse out values
while (!reader.EndOfStream)
{
// Read and split the line
string? line = reader.ReadLine();
string[]? lineParts = line?.Split([' '], StringSplitOptions.RemoveEmptyEntries);
if (lineParts == null || lineParts.Length < 2)
continue;
// Parse the line into a hash
var ripemd128 = new RIPEMD128
{
Hash = lineParts[0],
File = string.Join(" ", lineParts, 1, lineParts.Length - 1),
};
ripemd128List.Add(ripemd128);
}
// Assign the hashes to the hashfile and return
if (ripemd128List.Count > 0)
return new Models.Hashfile.Hashfile { RIPEMD128 = [.. ripemd128List] };
return null;
}
/// <inheritdoc cref="Deserialize(Stream)"/>
public Models.Hashfile.Hashfile? DeserializeRIPEMD160(Stream? data)
{
// If the data is invalid
if (data == null || !data.CanRead)
return null;
// Setup the reader and output
var reader = new StreamReader(data);
var ripemd160List = new List<RIPEMD160>();
// Loop through the rows and parse out values
while (!reader.EndOfStream)
{
// Read and split the line
string? line = reader.ReadLine();
string[]? lineParts = line?.Split([' '], StringSplitOptions.RemoveEmptyEntries);
if (lineParts == null || lineParts.Length < 2)
continue;
// Parse the line into a hash
var ripemd160 = new RIPEMD160
{
Hash = lineParts[0],
File = string.Join(" ", lineParts, 1, lineParts.Length - 1),
};
ripemd160List.Add(ripemd160);
}
// Assign the hashes to the hashfile and return
if (ripemd160List.Count > 0)
return new Models.Hashfile.Hashfile { RIPEMD160 = [.. ripemd160List] };
return null;
}
/// <inheritdoc cref="Deserialize(Stream)"/>
public Models.Hashfile.Hashfile? DeserializeSHA1(Stream? data)
{
// If tthe data is invalid
// If the data is invalid
if (data == null || !data.CanRead)
return null;
@@ -307,7 +372,7 @@ namespace SabreTools.Serialization.Deserializers
/// <inheritdoc cref="Deserialize(Stream)"/>
public Models.Hashfile.Hashfile? DeserializeSHA256(Stream? data)
{
// If tthe data is invalid
// If the data is invalid
if (data == null || !data.CanRead)
return null;
@@ -351,7 +416,7 @@ namespace SabreTools.Serialization.Deserializers
/// <inheritdoc cref="Deserialize(Stream)"/>
public Models.Hashfile.Hashfile? DeserializeSHA384(Stream? data)
{
// If tthe data is invalid
// If the data is invalid
if (data == null || !data.CanRead)
return null;
@@ -395,7 +460,7 @@ namespace SabreTools.Serialization.Deserializers
/// <inheritdoc cref="Deserialize(Stream)"/>
public Models.Hashfile.Hashfile? DeserializeSHA512(Stream? data)
{
// If tthe data is invalid
// If the data is invalid
if (data == null || !data.CanRead)
return null;
@@ -439,7 +504,7 @@ namespace SabreTools.Serialization.Deserializers
/// <inheritdoc cref="Deserialize(Stream)"/>
public Models.Hashfile.Hashfile? DeserializeSpamSum(Stream? data)
{
// If tthe data is invalid
// If the data is invalid
if (data == null || !data.CanRead)
return default;
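The two RIPEMD parsers follow the same shape as the other digest listings: one digest per line, whitespace, then the file name, with everything after the first token rejoined so file names containing spaces survive. A minimal sketch of driving them through the hash-aware entry point:

// A hypothetical .ripemd160 listing, one "<digest> <file>" pair per line:
//   9c1185a5c5e9fc54612808977ee8f548b2258d31 game (disc 1).bin
using var fs = File.OpenRead("hashes.ripemd160");
var hashfile = new Hashfile().Deserialize(fs, HashType.RIPEMD160);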

View File

@@ -4,6 +4,7 @@ using System.IO;
using System.Text;
using SabreTools.IO.Extensions;
using SabreTools.Models.InstallShieldCabinet;
using SabreTools.Serialization.Extensions;
using static SabreTools.Models.InstallShieldCabinet.Constants;
namespace SabreTools.Serialization.Deserializers

View File

@@ -56,8 +56,21 @@ namespace SabreTools.Serialization.Deserializers
/// <returns>Filled object on success, null on error</returns>
public T? Deserialize(string? path, Encoding encoding)
{
using var data = PathProcessor.OpenStream(path);
return Deserialize(data, encoding);
try
{
// If we don't have a file
if (string.IsNullOrEmpty(path) || !File.Exists(path))
return default;
// Open the file for deserialization
using var stream = File.Open(path, FileMode.Open, FileAccess.Read, FileShare.ReadWrite);
return Deserialize(stream, encoding);
}
catch
{
// TODO: Handle logging the exception
return default;
}
}
#endregion
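This inlines the file-open behavior that the deleted PathProcessor helper used to centralize: open read-only with FileShare.ReadWrite so other handles on the file are not blocked, and collapse any I/O failure into a default return. A one-line sketch of a call, where SomeTextDeserializer stands in for any concrete subclass:

var model = new SomeTextDeserializer().Deserialize("file.txt", Encoding.UTF8);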

View File

@@ -3,6 +3,7 @@ using System.IO;
using System.Text;
using SabreTools.IO.Extensions;
using SabreTools.Models.NewExecutable;
using SabreTools.Serialization.Extensions;
using static SabreTools.Models.NewExecutable.Constants;
namespace SabreTools.Serialization.Deserializers
@@ -22,7 +23,7 @@ namespace SabreTools.Serialization.Deserializers
long initialOffset = data.Position;
// Create a new executable to fill
var executable = new Executable();
var nex = new Executable();
#region MS-DOS Stub
@@ -32,20 +33,25 @@ namespace SabreTools.Serialization.Deserializers
return null;
// Set the MS-DOS stub
executable.Stub = stub;
nex.Stub = stub;
#endregion
#region Executable Header
// Get the new executable offset
long newExeOffset = initialOffset + stub.Header.NewExeHeaderAddr;
if (newExeOffset < initialOffset || newExeOffset > data.Length)
return null;
// Try to parse the executable header
data.Seek(initialOffset + stub.Header.NewExeHeaderAddr, SeekOrigin.Begin);
data.Seek(newExeOffset, SeekOrigin.Begin);
var header = ParseExecutableHeader(data);
if (header.Magic != SignatureString)
return null;
// Set the executable header
executable.Header = header;
nex.Header = header;
#endregion
@@ -54,16 +60,16 @@ namespace SabreTools.Serialization.Deserializers
// If the offset for the segment table doesn't exist
long tableAddress = initialOffset + stub.Header.NewExeHeaderAddr + header.SegmentTableOffset;
if (tableAddress >= data.Length)
return executable;
return nex;
// Seek to the segment table
data.Seek(tableAddress, SeekOrigin.Begin);
// Set the segment table
executable.SegmentTable = new SegmentTableEntry[header.FileSegmentCount];
nex.SegmentTable = new SegmentTableEntry[header.FileSegmentCount];
for (int i = 0; i < header.FileSegmentCount; i++)
{
executable.SegmentTable[i] = ParseSegmentTableEntry(data, initialOffset);
nex.SegmentTable[i] = ParseSegmentTableEntry(data, initialOffset);
}
#endregion
@@ -73,13 +79,13 @@ namespace SabreTools.Serialization.Deserializers
// If the offset for the segment table doesn't exist
tableAddress = initialOffset + stub.Header.NewExeHeaderAddr + header.ResourceTableOffset;
if (tableAddress >= data.Length)
return executable;
return nex;
// Seek to the resource table
data.Seek(tableAddress, SeekOrigin.Begin);
// Set the resource table
executable.ResourceTable = ParseResourceTable(data, header.ResourceEntriesCount);
nex.ResourceTable = ParseResourceTable(data, header.ResourceEntriesCount);
#endregion
@@ -89,13 +95,13 @@ namespace SabreTools.Serialization.Deserializers
tableAddress = initialOffset + stub.Header.NewExeHeaderAddr + header.ResidentNameTableOffset;
long endOffset = initialOffset + stub.Header.NewExeHeaderAddr + header.ModuleReferenceTableOffset;
if (tableAddress >= data.Length)
return executable;
return nex;
// Seek to the resident-name table
data.Seek(tableAddress, SeekOrigin.Begin);
// Set the resident-name table
executable.ResidentNameTable = ParseResidentNameTable(data, endOffset);
nex.ResidentNameTable = ParseResidentNameTable(data, endOffset);
#endregion
@@ -104,16 +110,16 @@ namespace SabreTools.Serialization.Deserializers
// If the offset for the module-reference table doesn't exist
tableAddress = initialOffset + stub.Header.NewExeHeaderAddr + header.ModuleReferenceTableOffset;
if (tableAddress >= data.Length)
return executable;
return nex;
// Seek to the module-reference table
data.Seek(tableAddress, SeekOrigin.Begin);
// Set the module-reference table
executable.ModuleReferenceTable = new ModuleReferenceTableEntry[header.ModuleReferenceTableSize];
nex.ModuleReferenceTable = new ModuleReferenceTableEntry[header.ModuleReferenceTableSize];
for (int i = 0; i < header.ModuleReferenceTableSize; i++)
{
executable.ModuleReferenceTable[i] = ParseModuleReferenceTableEntry(data);
nex.ModuleReferenceTable[i] = ParseModuleReferenceTableEntry(data);
}
#endregion
@@ -124,13 +130,13 @@ namespace SabreTools.Serialization.Deserializers
tableAddress = initialOffset + stub.Header.NewExeHeaderAddr + header.ImportedNamesTableOffset;
endOffset = initialOffset + stub.Header.NewExeHeaderAddr + header.EntryTableOffset;
if (tableAddress >= data.Length)
return executable;
return nex;
// Seek to the imported-name table
data.Seek(tableAddress, SeekOrigin.Begin);
// Set the imported-name table
executable.ImportedNameTable = ParseImportedNameTable(data, endOffset);
nex.ImportedNameTable = ParseImportedNameTable(data, endOffset);
#endregion
@@ -140,13 +146,13 @@ namespace SabreTools.Serialization.Deserializers
tableAddress = initialOffset + stub.Header.NewExeHeaderAddr + header.EntryTableOffset;
endOffset = initialOffset + stub.Header.NewExeHeaderAddr + header.EntryTableOffset + header.EntryTableSize;
if (tableAddress >= data.Length)
return executable;
return nex;
// Seek to the imported-name table
data.Seek(tableAddress, SeekOrigin.Begin);
// Set the entry table
executable.EntryTable = ParseEntryTable(data, endOffset);
nex.EntryTable = ParseEntryTable(data, endOffset);
#endregion
@@ -156,17 +162,17 @@ namespace SabreTools.Serialization.Deserializers
tableAddress = initialOffset + header.NonResidentNamesTableOffset;
endOffset = initialOffset + header.NonResidentNamesTableOffset + header.NonResidentNameTableSize;
if (tableAddress >= data.Length)
return executable;
return nex;
// Seek to the nonresident-name table
data.Seek(tableAddress, SeekOrigin.Begin);
// Set the nonresident-name table
executable.NonResidentNameTable = ParseNonResidentNameTable(data, endOffset);
nex.NonResidentNameTable = ParseNonResidentNameTable(data, endOffset);
#endregion
return executable;
return nex;
}
catch
{
@@ -415,7 +421,14 @@ namespace SabreTools.Serialization.Deserializers
obj.RelocationRecords = new RelocationRecord[obj.RelocationRecordCount];
for (int i = 0; i < obj.RelocationRecords.Length; i++)
{
obj.RelocationRecords[i] = ParseRelocationRecord(data);
if (data.Position >= data.Length)
break;
var record = ParseRelocationRecord(data);
if (record == null)
break;
obj.RelocationRecords[i] = record;
}
return obj;
@@ -428,12 +441,20 @@ namespace SabreTools.Serialization.Deserializers
/// <returns>Filled RelocationRecord on success, null on error</returns>
public static RelocationRecord? ParseRelocationRecord(Stream data)
{
// Handle partial relocation sections
if (data.Position > data.Length - 4)
return null;
var obj = new RelocationRecord();
obj.SourceType = (RelocationRecordSourceType)data.ReadByteValue();
obj.Flags = (RelocationRecordFlag)data.ReadByteValue();
obj.Offset = data.ReadUInt16LittleEndian();
// Handle incomplete entries
if (data.Position > data.Length - 4)
return obj;
switch (obj.Flags & RelocationRecordFlag.TARGET_MASK)
{
case RelocationRecordFlag.INTERNALREF:
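The added guards stop relocation parsing cleanly when a segment's relocation area is truncated: each record needs at least four bytes for its fixed fields, so the remaining length is checked both before the fixed block and before the target-specific reads. The same bounds test in isolation:

using System.IO;

// True when at least `needed` bytes remain in the stream.
static bool HasBytes(Stream data, int needed)
    => data.Position <= data.Length - needed;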

View File

@@ -2,7 +2,6 @@ using System.Collections.Generic;
using System.IO;
using System.Text;
using SabreTools.IO.Extensions;
using SabreTools.Matching;
using SabreTools.Models.PKZIP;
using static SabreTools.Models.PKZIP.Constants;
@@ -10,9 +9,6 @@ namespace SabreTools.Serialization.Deserializers
{
public class PKZIP : BaseBinaryDeserializer<Archive>
{
/// <inheritdoc/>
protected override bool SkipCompression => true;
/// <inheritdoc/>
public override Archive? Deserialize(Stream? data)
{
@@ -45,7 +41,7 @@ namespace SabreTools.Serialization.Deserializers
{
// Central Directory File Header
case CentralDirectoryFileHeaderSignature:
var cdr = ParseCentralDirectoryFileHeader(data, out _);
var cdr = ParseCentralDirectoryFileHeader(data);
if (cdr == null)
return null;
@@ -170,10 +166,9 @@ namespace SabreTools.Serialization.Deserializers
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled central directory file header on success, null on error</returns>
public static CentralDirectoryFileHeader? ParseCentralDirectoryFileHeader(Stream data, out ExtensibleDataField[]? extraFields)
public static CentralDirectoryFileHeader? ParseCentralDirectoryFileHeader(Stream data)
{
var obj = new CentralDirectoryFileHeader();
extraFields = null;
obj.Signature = data.ReadUInt32LittleEndian();
if (obj.Signature != CentralDirectoryFileHeaderSignature)
@@ -220,8 +215,7 @@ namespace SabreTools.Serialization.Deserializers
if (extraBytes.Length != obj.ExtraFieldLength)
return null;
// TODO: This should live on the model instead of the byte representation
extraFields = ParseExtraFields(obj, extraBytes);
obj.ExtraFields = ParseExtraFields(obj, extraBytes);
}
if (obj.FileCommentLength > 0 && data.Position + obj.FileCommentLength <= data.Length)
{
@@ -416,7 +410,7 @@ namespace SabreTools.Serialization.Deserializers
#region Local File Header
// Try to read the header
var localFileHeader = ParseLocalFileHeader(data, out var extraFields);
var localFileHeader = ParseLocalFileHeader(data);
if (localFileHeader == null)
return null;
@@ -424,9 +418,9 @@ namespace SabreTools.Serialization.Deserializers
obj.LocalFileHeader = localFileHeader;
ulong compressedSize = localFileHeader.CompressedSize;
if (extraFields != null)
if (localFileHeader.ExtraFields != null)
{
foreach (var field in extraFields)
foreach (var field in localFileHeader.ExtraFields)
{
if (field is not Zip64ExtendedInformationExtraField infoField)
continue;
@@ -532,10 +526,9 @@ namespace SabreTools.Serialization.Deserializers
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled local file header on success, null on error</returns>
public static LocalFileHeader? ParseLocalFileHeader(Stream data, out ExtensibleDataField[]? extraFields)
public static LocalFileHeader? ParseLocalFileHeader(Stream data)
{
var obj = new LocalFileHeader();
extraFields = null;
obj.Signature = data.ReadUInt32LittleEndian();
if (obj.Signature != LocalFileHeaderSignature)
@@ -575,8 +568,7 @@ namespace SabreTools.Serialization.Deserializers
if (extraBytes.Length != obj.ExtraFieldLength)
return null;
// TODO: This should live on the model instead of the byte representation
extraFields = ParseExtraFields(obj, extraBytes);
obj.ExtraFields = ParseExtraFields(obj, extraBytes);
}
return obj;
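With the extra fields stored on the header models, the out-parameters disappear and callers walk the model directly, as the ZIP64 size fix-up above already does. A minimal consumer sketch:

// Extra fields now live on the parsed model itself.
var header = PKZIP.ParseLocalFileHeader(stream);
int extraFieldCount = header?.ExtraFields?.Length ?? 0;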

View File

@@ -1,50 +0,0 @@
using System;
using System.IO;
using System.IO.Compression;
namespace SabreTools.Serialization.Deserializers
{
internal class PathProcessor
{
/// <summary>
/// Opens a path as a stream in a safe manner, decompressing if needed
/// </summary>
/// <param name="path">Path to open as a stream</param>
/// <returns>Stream representing the file, null on error</returns>
public static Stream? OpenStream(string? path, bool skipCompression = false)
{
try
{
// If we don't have a file
if (string.IsNullOrEmpty(path) || !File.Exists(path))
return null;
// Open the file for deserialization
var stream = File.Open(path, FileMode.Open, FileAccess.Read, FileShare.ReadWrite);
// Get the extension to determine if additional handling is needed
string ext = Path.GetExtension(path).TrimStart('.');
// Determine what we do based on the extension
if (!skipCompression && string.Equals(ext, "gz", StringComparison.OrdinalIgnoreCase))
{
return new GZipStream(stream, CompressionMode.Decompress);
}
else if (!skipCompression && string.Equals(ext, "zip", StringComparison.OrdinalIgnoreCase))
{
// TODO: Support zip-compressed files
return null;
}
else
{
return stream;
}
}
catch
{
// TODO: Handle logging the exception
return null;
}
}
}
}

View File

@@ -32,13 +32,16 @@ namespace SabreTools.Serialization.Deserializers
#region Audio Files
// Create the audio file deserializer
var audioDeserializer = new PlayJAudio();
// Create the audio files array
playlist.AudioFiles = new AudioFile[playlistHeader.TrackCount];
// Try to parse the audio files
for (int i = 0; i < playlist.AudioFiles.Length; i++)
{
var entryHeader = PlayJAudio.DeserializeStream(data);
var entryHeader = audioDeserializer.Deserialize(data);
if (entryHeader == null)
continue;

File diff suppressed because it is too large

View File

@@ -11,7 +11,7 @@ namespace SabreTools.Serialization.Deserializers
/// <inheritdoc/>
public override MetadataFile? Deserialize(Stream? data)
{
// If tthe data is invalid
// If the data is invalid
if (data == null || !data.CanRead)
return null;

View File

@@ -0,0 +1,85 @@
using System.IO;
using System.Text;
using SabreTools.IO.Extensions;
using SabreTools.Models.SecuROM;
namespace SabreTools.Serialization.Deserializers
{
public class SecuROMAddD : BaseBinaryDeserializer<AddD>
{
/// <inheritdoc/>
public override AddD? Deserialize(Stream? data)
{
// If the data is invalid
if (data == null || !data.CanRead)
return null;
try
{
// Cache the current offset
long initialOffset = data.Position;
var addD = ParseSecuROMAddD(data);
if (addD.Signature != 0x44646441)
return null;
return addD;
}
catch
{
// Ignore the actual error
return null;
}
}
/// <summary>
/// Parse a Stream into a SecuROMAddD
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled SecuROMAddD on success, null on error</returns>
private static AddD ParseSecuROMAddD(Stream data)
{
var obj = new AddD();
obj.Signature = data.ReadUInt32LittleEndian();
obj.EntryCount = data.ReadUInt32LittleEndian();
obj.Version = data.ReadNullTerminatedAnsiString();
byte[] buildBytes = data.ReadBytes(4);
string buildStr = Encoding.ASCII.GetString(buildBytes);
obj.Build = buildStr.ToCharArray();
obj.Unknown14h = data.ReadBytes(1); // TODO: Figure out how to determine how many bytes are here consistently
obj.Entries = new AddDEntry[obj.EntryCount];
for (int i = 0; i < obj.Entries.Length; i++)
{
var entry = ParseSecuROMAddDEntry(data);
obj.Entries[i] = entry;
}
return obj;
}
/// <summary>
/// Parse a Stream into a SecuROMAddDEntry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled SecuROMAddDEntry on success, null on error</returns>
private static AddDEntry ParseSecuROMAddDEntry(Stream data)
{
var obj = new AddDEntry();
obj.PhysicalOffset = data.ReadUInt32LittleEndian();
obj.Length = data.ReadUInt32LittleEndian();
obj.Unknown08h = data.ReadUInt32LittleEndian();
obj.Unknown0Ch = data.ReadUInt32LittleEndian();
obj.Unknown10h = data.ReadUInt32LittleEndian();
obj.Unknown14h = data.ReadUInt32LittleEndian();
obj.Unknown18h = data.ReadUInt32LittleEndian();
obj.Unknown1Ch = data.ReadUInt32LittleEndian();
obj.FileName = data.ReadNullTerminatedAnsiString();
obj.Unknown2Ch = data.ReadUInt32LittleEndian();
return obj;
}
}
}
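A minimal sketch of probing for an AddD block with the new deserializer, assuming the stream is already positioned at the candidate offset (0x44646441 is "AddD" read little-endian):

var addD = new SecuROMAddD().Deserialize(stream);
if (addD != null)
    Console.WriteLine($"AddD version {addD.Version}, {addD.EntryCount} entries");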

View File

@@ -2,7 +2,6 @@ using System.Collections.Generic;
using System.IO;
using System.Text;
using SabreTools.IO.Extensions;
using SabreTools.Matching;
using SabreTools.Models.SecuROM;
using static SabreTools.Models.SecuROM.Constants;

View File

@@ -0,0 +1,139 @@
using System.IO;
using System.Text;
using SabreTools.IO.Extensions;
using SabreTools.Models.SecuROM;
using static SabreTools.Models.SecuROM.Constants;
namespace SabreTools.Serialization.Deserializers
{
// TODO: Cache data blocks during parse
public class SecuROMMatroschkaPackage : BaseBinaryDeserializer<MatroshkaPackage>
{
/// <inheritdoc/>
/// TODO: Unify matroschka spelling to "Matroschka"
public override MatroshkaPackage? Deserialize(Stream? data)
{
// If the data is invalid
if (data == null || !data.CanRead)
return null;
try
{
// Cache the initial offset
long initialOffset = data.Position;
// Try to parse the header
var package = ParseMatroshkaPackage(data);
if (package == null)
return null;
// Try to parse the entries
package.Entries = ParseEntries(data, package.EntryCount);
return package;
}
catch
{
// Ignore the actual error
return null;
}
}
/// <summary>
/// Parse a Stream into a MatroshkaPackage
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled MatroshkaPackage on success, null on error</returns>
public static MatroshkaPackage? ParseMatroshkaPackage(Stream data)
{
var obj = new MatroshkaPackage();
byte[] magic = data.ReadBytes(4);
obj.Signature = Encoding.ASCII.GetString(magic);
if (obj.Signature != MatroshkaMagicString)
return null;
obj.EntryCount = data.ReadUInt32LittleEndian();
if (obj.EntryCount == 0)
return null;
// Check if "matrosch" section is a longer header one or not based on whether the next uint is 0 or 1. Anything
// else will just already be starting the filename string, which is never going to start with this.
// Previously thought that the longer header was correlated with RC, but at least one executable
// (NecroVisioN.exe from the GamersGate patch NecroVisioN_Patch1.2_GG.exe) isn't RC and still has it.
long tempPosition = data.Position;
uint tempValue = data.ReadUInt32LittleEndian();
data.Seek(tempPosition, SeekOrigin.Begin);
// Only 0 or 1 have been observed for long sections
if (tempValue < 2)
{
obj.UnknownRCValue1 = data.ReadUInt32LittleEndian();
obj.UnknownRCValue2 = data.ReadUInt32LittleEndian();
obj.UnknownRCValue3 = data.ReadUInt32LittleEndian();
var keyHexBytes = data.ReadBytes(32);
obj.KeyHexString = Encoding.ASCII.GetString(keyHexBytes);
if (!data.ReadBytes(4).EqualsExactly([0x00, 0x00, 0x00, 0x00]))
return null;
}
return obj;
}
/// <summary>
/// Parse a Stream into a MatroshkaEntry array
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="entryCount">Number of entries in the array</param>
/// <returns>Filled MatroshkaEntry array on success, null on error</returns>
private static MatroshkaEntry[] ParseEntries(Stream data, uint entryCount)
{
var obj = new MatroshkaEntry[entryCount];
// Determine if file path size is 256 or 512 bytes
long tempPosition = data.Position;
data.Seek(data.Position + 256, SeekOrigin.Begin);
var tempValue = data.ReadUInt32LittleEndian();
data.Seek(tempPosition, SeekOrigin.Begin);
int pathSize = tempValue == 0 ? 512 : 256;
// Set default value for unknown value checking
bool? hasUnknown = null;
// Read entries
for (int i = 0; i < obj.Length; i++)
{
var entry = new MatroshkaEntry();
byte[] pathBytes = data.ReadBytes(pathSize);
entry.Path = Encoding.ASCII.GetString(pathBytes);
entry.EntryType = (MatroshkaEntryType)data.ReadUInt32LittleEndian();
entry.Size = data.ReadUInt32LittleEndian();
entry.Offset = data.ReadUInt32LittleEndian();
// On the first entry, determine if the unknown value exists
if (hasUnknown == null)
{
tempPosition = data.Position;
tempValue = data.ReadUInt32LittleEndian();
data.Seek(tempPosition, SeekOrigin.Begin);
hasUnknown = tempValue == 0;
}
// TODO: Validate it's zero?
if (hasUnknown == true)
entry.Unknown = data.ReadUInt32LittleEndian();
entry.ModifiedTime = data.ReadUInt64LittleEndian();
entry.CreatedTime = data.ReadUInt64LittleEndian();
entry.AccessedTime = data.ReadUInt64LittleEndian();
entry.MD5 = data.ReadBytes(16);
obj[i] = entry;
}
return obj;
}
}
}
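Both format probes above use the same peek-and-rewind trick: read a value, decide on the layout, then restore the stream position so the real parse is unaffected. The pattern in isolation:

using System.IO;

// Read a little-endian uint without advancing the stream.
// Assumes at least four bytes are available at the current position.
static uint PeekUInt32LE(Stream data)
{
    long saved = data.Position;
    byte[] buffer = new byte[4];
    data.Read(buffer, 0, 4);
    data.Seek(saved, SeekOrigin.Begin);
    return (uint)(buffer[0] | buffer[1] << 8 | buffer[2] << 16 | buffer[3] << 24);
}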

View File

@@ -18,18 +18,11 @@ namespace SabreTools.Serialization.Deserializers
#region IByteDeserializer
/// <inheritdoc cref="IByteDeserializer.Deserialize(byte[]?, int)"/>
public static MetadataFile? DeserializeBytes(byte[]? data, int offset, char delim)
{
var deserializer = new SeparatedValue();
return deserializer.Deserialize(data, offset, delim);
}
/// <inheritdoc/>
public override MetadataFile? Deserialize(byte[]? data, int offset)
=> Deserialize(data, offset, ',');
/// <inheritdoc/>
/// <inheritdoc cref="Deserialize(byte[], int)"/>
public MetadataFile? Deserialize(byte[]? data, int offset, char delim)
{
// If the data is invalid
@@ -42,42 +35,41 @@ namespace SabreTools.Serialization.Deserializers
// Create a memory stream and parse that
var dataStream = new MemoryStream(data, offset, data.Length - offset);
return DeserializeStream(dataStream, delim);
return Deserialize(dataStream, delim);
}
#endregion
#region IFileDeserializer
/// <inheritdoc cref="IFileDeserializer.Deserialize(string?)"/>
public static MetadataFile? DeserializeFile(string? path, char delim = ',')
{
var deserializer = new SeparatedValue();
return deserializer.Deserialize(path, delim);
}
/// <inheritdoc/>
public override MetadataFile? Deserialize(string? path)
=> Deserialize(path, ',');
/// <inheritdoc/>
/// <inheritdoc cref="Deserialize(string?)"/>
public MetadataFile? Deserialize(string? path, char delim)
{
using var stream = PathProcessor.OpenStream(path);
return DeserializeStream(stream, delim);
try
{
// If we don't have a file
if (string.IsNullOrEmpty(path) || !File.Exists(path))
return default;
// Open the file for deserialization
using var stream = File.Open(path, FileMode.Open, FileAccess.Read, FileShare.ReadWrite);
return Deserialize(stream, delim);
}
catch
{
// TODO: Handle logging the exception
return default;
}
}
#endregion
#region IStreamDeserializer
/// <inheritdoc cref="IStreamDeserializer.Deserialize(Stream?)"/>
public static MetadataFile? DeserializeStream(Stream? data, char delim = ',')
{
var deserializer = new SeparatedValue();
return deserializer.Deserialize(data, delim);
}
/// <inheritdoc/>
public override MetadataFile? Deserialize(Stream? data)
=> Deserialize(data, ',');
@@ -85,7 +77,7 @@ namespace SabreTools.Serialization.Deserializers
/// <inheritdoc cref="Deserialize(Stream)"/>
public MetadataFile? Deserialize(Stream? data, char delim)
{
// If tthe data is invalid
// If the data is invalid
if (data == null || !data.CanRead)
return null;
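As with the other text deserializers in this diff, the static convenience wrappers are gone; callers construct the deserializer and pass the delimiter directly. A one-line sketch, assuming a semicolon-separated listing on disk:

var metadata = new SeparatedValue().Deserialize("roms.ssv", ';');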

View File

@@ -9,9 +9,6 @@ namespace SabreTools.Serialization.Deserializers
{
public class TapeArchive : BaseBinaryDeserializer<Archive>
{
/// <inheritdoc/>
protected override bool SkipCompression => true;
/// <inheritdoc/>
public override Archive? Deserialize(Stream? data)
{

View File

@@ -74,54 +74,54 @@ namespace SabreTools.Serialization.Deserializers
/// <returns>Filled OverlayHeader on success, null on error</returns>
private static OverlayHeader ParseOverlayHeader(Stream data)
{
var header = new OverlayHeader();
var obj = new OverlayHeader();
header.DllNameLen = data.ReadByteValue();
if (header.DllNameLen > 0)
obj.DllNameLen = data.ReadByteValue();
if (obj.DllNameLen > 0)
{
byte[] dllName = data.ReadBytes(header.DllNameLen);
header.DllName = Encoding.ASCII.GetString(dllName);
header.DllSize = data.ReadUInt32LittleEndian();
byte[] dllName = data.ReadBytes(obj.DllNameLen);
obj.DllName = Encoding.ASCII.GetString(dllName);
obj.DllSize = data.ReadUInt32LittleEndian();
}
// Read as a single block
header.Flags = (OverlayHeaderFlags)data.ReadUInt32LittleEndian();
obj.Flags = (OverlayHeaderFlags)data.ReadUInt32LittleEndian();
// Read as a single block
header.GraphicsData = data.ReadBytes(12);
obj.GraphicsData = data.ReadBytes(12);
// Read as a single block
header.WiseScriptExitEventOffset = data.ReadUInt32LittleEndian();
header.WiseScriptCancelEventOffset = data.ReadUInt32LittleEndian();
obj.WiseScriptExitEventOffset = data.ReadUInt32LittleEndian();
obj.WiseScriptCancelEventOffset = data.ReadUInt32LittleEndian();
// Read as a single block
header.WiseScriptInflatedSize = data.ReadUInt32LittleEndian();
header.WiseScriptDeflatedSize = data.ReadUInt32LittleEndian();
header.WiseDllDeflatedSize = data.ReadUInt32LittleEndian();
header.Ctl3d32DeflatedSize = data.ReadUInt32LittleEndian();
header.SomeData4DeflatedSize = data.ReadUInt32LittleEndian();
header.RegToolDeflatedSize = data.ReadUInt32LittleEndian();
header.ProgressDllDeflatedSize = data.ReadUInt32LittleEndian();
header.SomeData7DeflatedSize = data.ReadUInt32LittleEndian();
header.SomeData8DeflatedSize = data.ReadUInt32LittleEndian();
header.SomeData9DeflatedSize = data.ReadUInt32LittleEndian();
header.SomeData10DeflatedSize = data.ReadUInt32LittleEndian();
header.FinalFileDeflatedSize = data.ReadUInt32LittleEndian();
header.FinalFileInflatedSize = data.ReadUInt32LittleEndian();
header.EOF = data.ReadUInt32LittleEndian();
obj.WiseScriptInflatedSize = data.ReadUInt32LittleEndian();
obj.WiseScriptDeflatedSize = data.ReadUInt32LittleEndian();
obj.WiseDllDeflatedSize = data.ReadUInt32LittleEndian();
obj.Ctl3d32DeflatedSize = data.ReadUInt32LittleEndian();
obj.SomeData4DeflatedSize = data.ReadUInt32LittleEndian();
obj.RegToolDeflatedSize = data.ReadUInt32LittleEndian();
obj.ProgressDllDeflatedSize = data.ReadUInt32LittleEndian();
obj.SomeData7DeflatedSize = data.ReadUInt32LittleEndian();
obj.SomeData8DeflatedSize = data.ReadUInt32LittleEndian();
obj.SomeData9DeflatedSize = data.ReadUInt32LittleEndian();
obj.SomeData10DeflatedSize = data.ReadUInt32LittleEndian();
obj.FinalFileDeflatedSize = data.ReadUInt32LittleEndian();
obj.FinalFileInflatedSize = data.ReadUInt32LittleEndian();
obj.EOF = data.ReadUInt32LittleEndian();
// Newer installers read this and DibInflatedSize in the above block
header.DibDeflatedSize = data.ReadUInt32LittleEndian();
obj.DibDeflatedSize = data.ReadUInt32LittleEndian();
// Handle older overlay data
if (header.DibDeflatedSize > data.Length)
if (obj.DibDeflatedSize > data.Length)
{
header.DibDeflatedSize = 0;
obj.DibDeflatedSize = 0;
data.Seek(-4, SeekOrigin.Current);
return header;
return obj;
}
header.DibInflatedSize = data.ReadUInt32LittleEndian();
obj.DibInflatedSize = data.ReadUInt32LittleEndian();
// Peek at the next 2 bytes
ushort peek = data.ReadUInt16LittleEndian();
@@ -130,25 +130,25 @@ namespace SabreTools.Serialization.Deserializers
// If the next value is a known Endianness
if (Enum.IsDefined(typeof(Endianness), peek))
{
header.Endianness = (Endianness)data.ReadUInt16LittleEndian();
obj.Endianness = (Endianness)data.ReadUInt16LittleEndian();
}
else
{
// The first two values are part of the sizes block above
header.InstallScriptDeflatedSize = data.ReadUInt32LittleEndian();
header.CharacterSet = (CharacterSet)data.ReadUInt32LittleEndian();
header.Endianness = (Endianness)data.ReadUInt16LittleEndian();
obj.InstallScriptDeflatedSize = data.ReadUInt32LittleEndian();
obj.CharacterSet = (CharacterSet)data.ReadUInt32LittleEndian();
obj.Endianness = (Endianness)data.ReadUInt16LittleEndian();
}
// Endianness and init text len are read in a single block
header.InitTextLen = data.ReadByteValue();
if (header.InitTextLen > 0)
obj.InitTextLen = data.ReadByteValue();
if (obj.InitTextLen > 0)
{
byte[] initText = data.ReadBytes(header.InitTextLen);
header.InitText = Encoding.ASCII.GetString(initText);
byte[] initText = data.ReadBytes(obj.InitTextLen);
obj.InitText = Encoding.ASCII.GetString(initText);
}
return header;
return obj;
}
}
}
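The DibDeflatedSize handling is a speculative read: if the value is implausibly large for the stream, the parser assumes an older overlay layout that ends before this field, zeroes it, and rewinds the four bytes it consumed. The same back-off in isolation, with ReadUInt32LE standing in for the assumed little-endian reader:

uint dibDeflatedSize = ReadUInt32LE(data);
if (dibDeflatedSize > data.Length)
{
    // Older overlays end before this field: zero it and put the bytes back
    dibDeflatedSize = 0;
    data.Seek(-4, SeekOrigin.Current);
}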

View File

@@ -2,7 +2,6 @@ using System.Collections.Generic;
using System.IO;
using System.Text;
using SabreTools.IO.Extensions;
using SabreTools.Matching;
using SabreTools.Models.WiseInstaller;
using static SabreTools.Models.WiseInstaller.Constants;

View File

@@ -1,8 +1,8 @@
using System.Text;
namespace SabreTools.Serialization
namespace SabreTools.Serialization.Extensions
{
public static partial class Extensions
public static class CFB
{
/// <summary>
/// Decode a MIME-encoded stream name stored as a byte array

View File

@@ -1,8 +1,8 @@
using SabreTools.Models.InstallShieldCabinet;
namespace SabreTools.Serialization
namespace SabreTools.Serialization.Extensions
{
public static partial class Extensions
public static class InstallShieldCabinet
{
#region File Descriptors

View File

@@ -1,8 +1,8 @@
using SabreTools.Models.NewExecutable;
namespace SabreTools.Serialization
namespace SabreTools.Serialization.Extensions
{
public static partial class Extensions
public static class NewExecutable
{
/// <summary>
/// Determine if a resource type information entry is an integer or offset

View File

@@ -4,12 +4,14 @@ using System.IO;
using System.Text;
using System.Xml.Serialization;
using SabreTools.IO.Extensions;
using SabreTools.Models.COFF;
using SabreTools.Models.PortableExecutable;
using SabreTools.Models.PortableExecutable.ResourceEntries;
using SabreTools.Models.PortableExecutable.Resource.Entries;
using SabreTools.Models.SecuROM;
namespace SabreTools.Serialization
namespace SabreTools.Serialization.Extensions
{
public static partial class Extensions
public static class PortableExecutable
{
/// <summary>
/// Convert a relative virtual address to a physical one
@@ -33,6 +35,7 @@ namespace SabreTools.Serialization
return rva - matchingSection.VirtualAddress + matchingSection.PointerToRawData;
// Loop through all of the sections
uint maxVirtualAddress = 0, maxRawPointer = 0;
for (int i = 0; i < sections.Length; i++)
{
// If the section "starts" at 0, just skip it
@@ -44,6 +47,13 @@ namespace SabreTools.Serialization
if (rva < section.VirtualAddress)
continue;
// Cache the maximum matching section data, in case of a miss
if (rva >= section.VirtualAddress)
{
maxVirtualAddress = section.VirtualAddress;
maxRawPointer = section.PointerToRawData;
}
// Attempt to derive the physical address from the current section
if (section.VirtualSize != 0 && rva <= section.VirtualAddress + section.VirtualSize)
return rva - section.VirtualAddress + section.PointerToRawData;
@@ -51,7 +61,7 @@ namespace SabreTools.Serialization
return rva - section.VirtualAddress + section.PointerToRawData;
}
return 0;
return maxRawPointer != 0 ? rva - maxVirtualAddress + maxRawPointer : 0;
}
/// <summary>
@@ -100,9 +110,9 @@ namespace SabreTools.Serialization
/// <param name="data">Data to parse</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>A filled NB10ProgramDatabase on success, null on error</returns>
public static NB10ProgramDatabase? ParseNB10ProgramDatabase(this byte[] data, ref int offset)
public static Models.PortableExecutable.DebugData.NB10ProgramDatabase? ParseNB10ProgramDatabase(this byte[] data, ref int offset)
{
var obj = new NB10ProgramDatabase();
var obj = new Models.PortableExecutable.DebugData.NB10ProgramDatabase();
obj.Signature = data.ReadUInt32LittleEndian(ref offset);
if (obj.Signature != 0x3031424E)
@@ -122,9 +132,9 @@ namespace SabreTools.Serialization
/// <param name="data">Data to parse</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>A filled RSDSProgramDatabase on success, null on error</returns>
public static RSDSProgramDatabase? ParseRSDSProgramDatabase(this byte[] data, ref int offset)
public static Models.PortableExecutable.DebugData.RSDSProgramDatabase? ParseRSDSProgramDatabase(this byte[] data, ref int offset)
{
var obj = new RSDSProgramDatabase();
var obj = new Models.PortableExecutable.DebugData.RSDSProgramDatabase();
obj.Signature = data.ReadUInt32LittleEndian(ref offset);
if (obj.Signature != 0x53445352)
@@ -147,10 +157,10 @@ namespace SabreTools.Serialization
/// <param name="data">Data to parse into overlay data</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>A filled SecuROMAddD on success, null on error</returns>
public static SecuROMAddD? ParseSecuROMAddD(this byte[] data, ref int offset)
public static AddD? ParseSecuROMAddD(this byte[] data, ref int offset)
{
// Read in the table
var obj = new SecuROMAddD();
var obj = new AddD();
obj.Signature = data.ReadUInt32LittleEndian(ref offset);
if (obj.Signature != 0x44646441)
@@ -178,7 +188,7 @@ namespace SabreTools.Serialization
obj.Unknown14h = data.ReadBytes(ref offset, bytesToRead);
obj.Entries = new SecuROMAddDEntry[obj.EntryCount];
obj.Entries = new AddDEntry[obj.EntryCount];
for (int i = 0; i < obj.EntryCount; i++)
{
obj.Entries[i] = ParseSecuROMAddDEntry(data, ref offset);
@@ -193,9 +203,9 @@ namespace SabreTools.Serialization
/// <param name="data">Data to parse</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>Filled SecuROMAddDEntry on success, null on error</returns>
public static SecuROMAddDEntry ParseSecuROMAddDEntry(this byte[] data, ref int offset)
public static AddDEntry ParseSecuROMAddDEntry(this byte[] data, ref int offset)
{
var obj = new SecuROMAddDEntry();
var obj = new AddDEntry();
obj.PhysicalOffset = data.ReadUInt32LittleEndian(ref offset);
obj.Length = data.ReadUInt32LittleEndian(ref offset);
@@ -221,7 +231,7 @@ namespace SabreTools.Serialization
/// </summary>
/// <param name="entry">Resource data entry to parse into an accelerator table resource</param>
/// <returns>A filled accelerator table resource on success, null on error</returns>
public static AcceleratorTableEntry[]? AsAcceleratorTableResource(this ResourceDataEntry? entry)
public static AcceleratorTableEntry[]? AsAcceleratorTableResource(this Models.PortableExecutable.Resource.DataEntry? entry)
{
// If we have data that's invalid for this resource type, we can't do anything
if (entry?.Data == null || entry.Data.Length % 8 != 0)
@@ -250,7 +260,7 @@ namespace SabreTools.Serialization
/// </summary>
/// <param name="entry">Resource data entry to parse into a side-by-side assembly manifest</param>
/// <returns>A filled side-by-side assembly manifest on success, null on error</returns>
public static AssemblyManifest? AsAssemblyManifest(this ResourceDataEntry? entry)
public static AssemblyManifest? AsAssemblyManifest(this Models.PortableExecutable.Resource.DataEntry? entry)
{
// If we have an invalid entry, just skip
if (entry?.Data == null)
@@ -272,7 +282,7 @@ namespace SabreTools.Serialization
/// </summary>
/// <param name="entry">Resource data entry to parse into a dialog box</param>
/// <returns>A filled dialog box on success, null on error</returns>
public static DialogBoxResource? AsDialogBox(this ResourceDataEntry? entry)
public static DialogBoxResource? AsDialogBox(this Models.PortableExecutable.Resource.DataEntry? entry)
{
// If we have an invalid entry, just skip
if (entry?.Data == null)
@@ -721,7 +731,7 @@ namespace SabreTools.Serialization
#region Creation data
dialogItemTemplate.CreationDataSize = entry.Data.ReadUInt16LittleEndian(ref offset);
if (dialogItemTemplate.CreationDataSize != 0)
if (dialogItemTemplate.CreationDataSize != 0 && dialogItemTemplate.CreationDataSize + offset < entry.Data.Length)
dialogItemTemplate.CreationData = entry.Data.ReadBytes(ref offset, dialogItemTemplate.CreationDataSize);
#endregion
@@ -749,7 +759,7 @@ namespace SabreTools.Serialization
/// </summary>
/// <param name="entry">Resource data entry to parse into a font group</param>
/// <returns>A filled font group on success, null on error</returns>
public static FontGroupHeader? AsFontGroup(this ResourceDataEntry? entry)
public static FontGroupHeader? AsFontGroup(this Models.PortableExecutable.Resource.DataEntry? entry)
{
// If we have an invalid entry, just skip
if (entry?.Data == null)
@@ -818,7 +828,7 @@ namespace SabreTools.Serialization
/// </summary>
/// <param name="entry">Resource data entry to parse into a menu</param>
/// <returns>A filled menu on success, null on error</returns>
public static MenuResource? AsMenu(this ResourceDataEntry? entry)
public static MenuResource? AsMenu(this Models.PortableExecutable.Resource.DataEntry? entry)
{
// If we have an invalid entry, just skip
if (entry?.Data == null)
@@ -914,7 +924,7 @@ namespace SabreTools.Serialization
/// </summary>
/// <param name="entry">Resource data entry to parse into a message table resource</param>
/// <returns>A filled message table resource on success, null on error</returns>
public static MessageResourceData? AsMessageResourceData(this ResourceDataEntry? entry)
public static MessageResourceData? AsMessageResourceData(this Models.PortableExecutable.Resource.DataEntry? entry)
{
// If we have an invalid entry, just skip
if (entry?.Data == null)
@@ -1002,10 +1012,10 @@ namespace SabreTools.Serialization
// Align to the DWORD boundary if we're not at the end
data.AlignToBoundary(ref offset, 4);
var stringFileInfoChildren = new List<StringTable>();
var stringFileInfoChildren = new List<Models.PortableExecutable.Resource.Entries.StringTable>();
while ((offset - currentOffset) < stringFileInfo.Length)
{
var stringTable = new StringTable();
var stringTable = new Models.PortableExecutable.Resource.Entries.StringTable();
stringTable.Length = data.ReadUInt16LittleEndian(ref offset);
stringTable.ValueLength = data.ReadUInt16LittleEndian(ref offset);
@@ -1059,7 +1069,7 @@ namespace SabreTools.Serialization
/// </summary>
/// <param name="entry">Resource data entry to parse into a string table resource</param>
/// <returns>A filled string table resource on success, null on error</returns>
public static Dictionary<int, string?>? AsStringTable(this ResourceDataEntry? entry)
public static Dictionary<int, string?>? AsStringTable(this Models.PortableExecutable.Resource.DataEntry? entry)
{
// If we have an invalid entry, just skip
if (entry?.Data == null)
@@ -1151,7 +1161,7 @@ namespace SabreTools.Serialization
/// </summary>
/// <param name="entry">Resource data entry to parse into a version info resource</param>
/// <returns>A filled version info resource on success, null on error</returns>
public static VersionInfo? AsVersionInfo(this ResourceDataEntry? entry)
public static VersionInfo? AsVersionInfo(this Models.PortableExecutable.Resource.DataEntry? entry)
{
// If we have an invalid entry, just skip
if (entry?.Data == null)
@@ -1348,10 +1358,10 @@ namespace SabreTools.Serialization
/// <param name="data">Data to parse</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>A filled ResourceHeader on success, null on error</returns>
public static ResourceHeader ParseResourceHeader(this byte[] data, ref int offset)
public static Models.PortableExecutable.Resource.ResourceHeader ParseResourceHeader(this byte[] data, ref int offset)
{
// Read in the table
var obj = new ResourceHeader();
var obj = new Models.PortableExecutable.Resource.ResourceHeader();
obj.DataSize = data.ReadUInt32LittleEndian(ref offset);
obj.HeaderSize = data.ReadUInt32LittleEndian(ref offset);

View File

@@ -0,0 +1,160 @@
using System;
using System.Numerics;
using System.Text;
using SabreTools.Models.ASN1;
using SabreTools.Serialization.ObjectIdentifier;
namespace SabreTools.Serialization.Extensions
{
public static class TypeLengthValue
{
/// <summary>
/// Format a TypeLengthValue as a string
/// </summary>
/// <param name="paddingLevel">Padding level of the item when formatting</param>
/// <returns>String representing the TypeLengthValue, if possible</returns>
public static string Format(this Models.ASN1.TypeLengthValue tlv, int paddingLevel = 0)
{
// Create the left-padding string
string padding = new(' ', paddingLevel);
// Create the string builder
var formatBuilder = new StringBuilder();
// Append the type
formatBuilder.Append($"{padding}Type: {tlv.Type}");
if (tlv.Type == ASN1Type.V_ASN1_EOC)
return formatBuilder.ToString();
// Append the length
formatBuilder.Append($", Length: {tlv.Length}");
if (tlv.Length == 0)
return formatBuilder.ToString();
// If we have a constructed type
#if NET20 || NET35
if ((tlv.Type & ASN1Type.V_ASN1_CONSTRUCTED) != 0)
#else
if (tlv.Type.HasFlag(ASN1Type.V_ASN1_CONSTRUCTED))
#endif
{
if (tlv.Value is not Models.ASN1.TypeLengthValue[] valueAsObjectArray)
{
formatBuilder.Append(", Value: [INVALID DATA TYPE]");
return formatBuilder.ToString();
}
formatBuilder.Append(", Value:\n");
for (int i = 0; i < valueAsObjectArray.Length; i++)
{
var child = valueAsObjectArray[i];
string childString = child.Format(paddingLevel + 1);
formatBuilder.Append($"{childString}\n");
}
return formatBuilder.ToString().TrimEnd('\n');
}
// Get the value as a byte array
if (tlv.Value is not byte[] valueAsByteArray)
{
formatBuilder.Append(", Value: [INVALID DATA TYPE]");
return formatBuilder.ToString();
}
else if (valueAsByteArray.Length == 0)
{
formatBuilder.Append(", Value: [NO DATA]");
return formatBuilder.ToString();
}
// If we have a primitive type
switch (tlv.Type)
{
/// <see href="https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-boolean"/>
case ASN1Type.V_ASN1_BOOLEAN:
if (tlv.Length > 1)
formatBuilder.Append($" [Expected length of 1]");
else if (valueAsByteArray.Length > 1)
formatBuilder.Append($" [Expected value length of 1]");
bool booleanValue = valueAsByteArray[0] != 0x00;
formatBuilder.Append($", Value: {booleanValue}");
break;
/// <see href="https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-integer"/>
case ASN1Type.V_ASN1_INTEGER:
Array.Reverse(valueAsByteArray);
var integerValue = new BigInteger(valueAsByteArray);
formatBuilder.Append($", Value: {integerValue}");
break;
/// <see href="https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-bit-string"/>
case ASN1Type.V_ASN1_BIT_STRING:
// TODO: Read into a BitArray and print that out instead?
int unusedBits = valueAsByteArray[0];
if (unusedBits == 0)
formatBuilder.Append($", Value with {unusedBits} unused bits");
else
formatBuilder.Append($", Value with {unusedBits} unused bits: {BitConverter.ToString(valueAsByteArray, 1).Replace('-', ' ')}");
break;
/// <see href="https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-octet-string"/>
case ASN1Type.V_ASN1_OCTET_STRING:
formatBuilder.Append($", Value: {BitConverter.ToString(valueAsByteArray).Replace('-', ' ')}");
break;
/// <see href="https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-object-identifier"/>
/// <see href="http://snmpsharpnet.com/index.php/2009/03/02/ber-encoding-and-decoding-oid-values/"/>
case ASN1Type.V_ASN1_OBJECT:
// Derive array of values
ulong[] objectNodes = Parser.ParseDERIntoArray(valueAsByteArray, tlv.Length);
// Append the dot and modified OID-IRI notations
string? dotNotationString = Parser.ParseOIDToDotNotation(objectNodes);
string? oidIriString = Parser.ParseOIDToOIDIRINotation(objectNodes);
formatBuilder.Append($", Value: {dotNotationString} ({oidIriString})");
break;
/// <see href="https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-utf8string"/>
case ASN1Type.V_ASN1_UTF8STRING:
formatBuilder.Append($", Value: {Encoding.UTF8.GetString(valueAsByteArray)}");
break;
/// <see href="https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-printablestring"/>
case ASN1Type.V_ASN1_PRINTABLESTRING:
formatBuilder.Append($", Value: {Encoding.ASCII.GetString(valueAsByteArray)}");
break;
//case ASN1Type.V_ASN1_T61STRING:
case ASN1Type.V_ASN1_TELETEXSTRING:
formatBuilder.Append($", Value: {Encoding.ASCII.GetString(valueAsByteArray)}");
break;
/// <see href="https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-ia5string"/>
case ASN1Type.V_ASN1_IA5STRING:
formatBuilder.Append($", Value: {Encoding.ASCII.GetString(valueAsByteArray)}");
break;
case ASN1Type.V_ASN1_UTCTIME:
string utctimeString = Encoding.ASCII.GetString(valueAsByteArray);
if (DateTime.TryParse(utctimeString, out DateTime utctimeDateTime))
formatBuilder.Append($", Value: {utctimeDateTime:yyyy-MM-dd HH:mm:ss}");
else
formatBuilder.Append($", Value: {utctimeString}");
break;
/// <see href="https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-bmpstring"/>
case ASN1Type.V_ASN1_BMPSTRING:
formatBuilder.Append($", Value: {Encoding.Unicode.GetString(valueAsByteArray)}");
break;
default:
formatBuilder.Append($", Value: {BitConverter.ToString(valueAsByteArray).Replace('-', ' ')}");
break;
}
// Return the formatted string
return formatBuilder.ToString();
}
}
}
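A usage sketch for the Format extension above, assuming the Models.ASN1.TypeLengthValue properties are settable (the actual model definition may differ):

using System;
using System.Text;
using SabreTools.Serialization.Extensions;

// Fully qualified to avoid colliding with the extension class of the same name
var tlv = new SabreTools.Models.ASN1.TypeLengthValue
{
    Type = SabreTools.Models.ASN1.ASN1Type.V_ASN1_PRINTABLESTRING,
    Length = 5,
    Value = Encoding.ASCII.GetBytes("Hello"),
};

// Prints something like: "Type: V_ASN1_PRINTABLESTRING, Length: 5, Value: Hello"
Console.WriteLine(tlv.Format(paddingLevel: 0));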

View File

@@ -1,8 +1,8 @@
using System.Text.RegularExpressions;
namespace SabreTools.Serialization
namespace SabreTools.Serialization.Extensions
{
public static partial class Extensions
public static class WiseScript
{
/// <summary>
/// Convert a Wise function ID to the formal action name

View File

@@ -5,39 +5,39 @@
namespace SabreTools.Serialization
{
public delegate TResult Func<out TResult>();
internal delegate TResult Func<out TResult>();
public delegate TResult Func<in T, out TResult>(T arg);
internal delegate TResult Func<in T, out TResult>(T arg);
public delegate TResult Func<in T1, in T2, out TResult>(T1 arg1, T2 arg2);
internal delegate TResult Func<in T1, in T2, out TResult>(T1 arg1, T2 arg2);
public delegate TResult Func<in T1, in T2, in T3, out TResult>(T1 arg1, T2 arg2, T3 arg3);
internal delegate TResult Func<in T1, in T2, in T3, out TResult>(T1 arg1, T2 arg2, T3 arg3);
public delegate TResult Func<in T1, in T2, in T3, in T4, out TResult>(T1 arg1, T2 arg2, T3 arg3, T4 arg4);
internal delegate TResult Func<in T1, in T2, in T3, in T4, out TResult>(T1 arg1, T2 arg2, T3 arg3, T4 arg4);
public delegate TResult Func<in T1, in T2, in T3, in T4, in T5, out TResult>(T1 arg1, T2 arg2, T3 arg3, T4 arg4, T5 arg5);
internal delegate TResult Func<in T1, in T2, in T3, in T4, in T5, out TResult>(T1 arg1, T2 arg2, T3 arg3, T4 arg4, T5 arg5);
public delegate TResult Func<in T1, in T2, in T3, in T4, in T5, in T6, out TResult>(T1 arg1, T2 arg2, T3 arg3, T4 arg4, T5 arg5, T6 arg6);
internal delegate TResult Func<in T1, in T2, in T3, in T4, in T5, in T6, out TResult>(T1 arg1, T2 arg2, T3 arg3, T4 arg4, T5 arg5, T6 arg6);
public delegate TResult Func<in T1, in T2, in T3, in T4, in T5, in T6, in T7, out TResult>(T1 arg1, T2 arg2, T3 arg3, T4 arg4, T5 arg5, T6 arg6, T7 arg7);
internal delegate TResult Func<in T1, in T2, in T3, in T4, in T5, in T6, in T7, out TResult>(T1 arg1, T2 arg2, T3 arg3, T4 arg4, T5 arg5, T6 arg6, T7 arg7);
public delegate TResult Func<in T1, in T2, in T3, in T4, in T5, in T6, in T7, in T8, out TResult>(T1 arg1, T2 arg2, T3 arg3, T4 arg4, T5 arg5, T6 arg6, T7 arg7, T8 arg8);
internal delegate TResult Func<in T1, in T2, in T3, in T4, in T5, in T6, in T7, in T8, out TResult>(T1 arg1, T2 arg2, T3 arg3, T4 arg4, T5 arg5, T6 arg6, T7 arg7, T8 arg8);
public delegate TResult Func<in T1, in T2, in T3, in T4, in T5, in T6, in T7, in T8, in T9, out TResult>(T1 arg1, T2 arg2, T3 arg3, T4 arg4, T5 arg5, T6 arg6, T7 arg7, T8 arg8, T9 arg9);
internal delegate TResult Func<in T1, in T2, in T3, in T4, in T5, in T6, in T7, in T8, in T9, out TResult>(T1 arg1, T2 arg2, T3 arg3, T4 arg4, T5 arg5, T6 arg6, T7 arg7, T8 arg8, T9 arg9);
public delegate TResult Func<in T1, in T2, in T3, in T4, in T5, in T6, in T7, in T8, in T9, in T10, out TResult>(T1 arg1, T2 arg2, T3 arg3, T4 arg4, T5 arg5, T6 arg6, T7 arg7, T8 arg8, T9 arg9, T10 arg10);
internal delegate TResult Func<in T1, in T2, in T3, in T4, in T5, in T6, in T7, in T8, in T9, in T10, out TResult>(T1 arg1, T2 arg2, T3 arg3, T4 arg4, T5 arg5, T6 arg6, T7 arg7, T8 arg8, T9 arg9, T10 arg10);
public delegate TResult Func<in T1, in T2, in T3, in T4, in T5, in T6, in T7, in T8, in T9, in T10, in T11, out TResult>(T1 arg1, T2 arg2, T3 arg3, T4 arg4, T5 arg5, T6 arg6, T7 arg7, T8 arg8, T9 arg9, T10 arg10, T11 arg11);
internal delegate TResult Func<in T1, in T2, in T3, in T4, in T5, in T6, in T7, in T8, in T9, in T10, in T11, out TResult>(T1 arg1, T2 arg2, T3 arg3, T4 arg4, T5 arg5, T6 arg6, T7 arg7, T8 arg8, T9 arg9, T10 arg10, T11 arg11);
public delegate TResult Func<in T1, in T2, in T3, in T4, in T5, in T6, in T7, in T8, in T9, in T10, in T11, in T12, out TResult>(T1 arg1, T2 arg2, T3 arg3, T4 arg4, T5 arg5, T6 arg6, T7 arg7, T8 arg8, T9 arg9, T10 arg10, T11 arg11, T12 arg12);
internal delegate TResult Func<in T1, in T2, in T3, in T4, in T5, in T6, in T7, in T8, in T9, in T10, in T11, in T12, out TResult>(T1 arg1, T2 arg2, T3 arg3, T4 arg4, T5 arg5, T6 arg6, T7 arg7, T8 arg8, T9 arg9, T10 arg10, T11 arg11, T12 arg12);
public delegate TResult Func<in T1, in T2, in T3, in T4, in T5, in T6, in T7, in T8, in T9, in T10, in T11, in T12, in T13, out TResult>(T1 arg1, T2 arg2, T3 arg3, T4 arg4, T5 arg5, T6 arg6, T7 arg7, T8 arg8, T9 arg9, T10 arg10, T11 arg11, T12 arg12, T13 arg13);
internal delegate TResult Func<in T1, in T2, in T3, in T4, in T5, in T6, in T7, in T8, in T9, in T10, in T11, in T12, in T13, out TResult>(T1 arg1, T2 arg2, T3 arg3, T4 arg4, T5 arg5, T6 arg6, T7 arg7, T8 arg8, T9 arg9, T10 arg10, T11 arg11, T12 arg12, T13 arg13);
public delegate TResult Func<in T1, in T2, in T3, in T4, in T5, in T6, in T7, in T8, in T9, in T10, in T11, in T12, in T13, in T14, out TResult>(T1 arg1, T2 arg2, T3 arg3, T4 arg4, T5 arg5, T6 arg6, T7 arg7, T8 arg8, T9 arg9, T10 arg10, T11 arg11, T12 arg12, T13 arg13, T14 arg14);
internal delegate TResult Func<in T1, in T2, in T3, in T4, in T5, in T6, in T7, in T8, in T9, in T10, in T11, in T12, in T13, in T14, out TResult>(T1 arg1, T2 arg2, T3 arg3, T4 arg4, T5 arg5, T6 arg6, T7 arg7, T8 arg8, T9 arg9, T10 arg10, T11 arg11, T12 arg12, T13 arg13, T14 arg14);
public delegate TResult Func<in T1, in T2, in T3, in T4, in T5, in T6, in T7, in T8, in T9, in T10, in T11, in T12, in T13, in T14, in T15, out TResult>(T1 arg1, T2 arg2, T3 arg3, T4 arg4, T5 arg5, T6 arg6, T7 arg7, T8 arg8, T9 arg9, T10 arg10, T11 arg11, T12 arg12, T13 arg13, T14 arg14, T15 arg15);
internal delegate TResult Func<in T1, in T2, in T3, in T4, in T5, in T6, in T7, in T8, in T9, in T10, in T11, in T12, in T13, in T14, in T15, out TResult>(T1 arg1, T2 arg2, T3 arg3, T4 arg4, T5 arg5, T6 arg6, T7 arg7, T8 arg8, T9 arg9, T10 arg10, T11 arg11, T12 arg12, T13 arg13, T14 arg14, T15 arg15);
public delegate TResult Func<in T1, in T2, in T3, in T4, in T5, in T6, in T7, in T8, in T9, in T10, in T11, in T12, in T13, in T14, in T15, in T16, out TResult>(T1 arg1, T2 arg2, T3 arg3, T4 arg4, T5 arg5, T6 arg6, T7 arg7, T8 arg8, T9 arg9, T10 arg10, T11 arg11, T12 arg12, T13 arg13, T14 arg14, T15 arg15, T16 arg16);
internal delegate TResult Func<in T1, in T2, in T3, in T4, in T5, in T6, in T7, in T8, in T9, in T10, in T11, in T12, in T13, in T14, in T15, in T16, out TResult>(T1 arg1, T2 arg2, T3 arg3, T4 arg4, T5 arg5, T6 arg6, T7 arg7, T8 arg8, T9 arg9, T10 arg10, T11 arg11, T12 arg12, T13 arg13, T14 arg14, T15 arg15, T16 arg16);
}
#endif
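One plausible reason for taking these polyfill delegates internal: if they stayed public, a consumer importing both System and SabreTools.Serialization could hit an ambiguous-reference error between the polyfills and System.Func. A hypothetical consumer snippet showing the hazard:

using System;
using SabreTools.Serialization;

// With public polyfills in scope, this line would fail to compile as ambiguous;
// with the delegates internal, only System.Func<int> is visible here.
Func<int> f = () => 42;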

View File

@@ -0,0 +1,20 @@
namespace SabreTools.Serialization.ObjectIdentifier
{
/// <summary>
/// Methods related to Object Identifiers (OID) and ASN.1 notation
/// </summary>
public static partial class Parser
{
/// <summary>
/// Parse an OID in separated-value notation into ASN.1 notation
/// </summary>
/// <param name="values">List of values to check against</param>
/// <returns>ASN.1 formatted string, if possible</returns>
/// <remarks>Currently unimplemented; always returns null</remarks>
public static string? ParseOIDToASN1Notation(ulong[]? values)
{
// TODO: Once the modified OID-IRI formatting is done, make an ASN.1 notation version
return null;
}
}
}
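For reference while the TODO above is open: ASN.1 value notation for an OID interleaves identifiers with numbers, e.g. {iso(1) member-body(2) us(840) rsadsi(113549)} for 1.2.840.113549. A minimal numeric-only sketch of what this method could emit, not part of the library:

using System;

public static string? ParseOIDToASN1NotationSketch(ulong[]? values)
{
    if (values == null || values.Length == 0)
        return null;

    // Numeric-only form; a full implementation would add the arc identifiers
    return "{" + string.Join(" ", Array.ConvertAll(values, v => v.ToString())) + "}";
}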

File diff suppressed because it is too large

View File

@@ -0,0 +1,907 @@
using System;
using System.Text;
namespace SabreTools.Serialization.ObjectIdentifier
{
/// <summary>
/// Methods related to Object Identifiers (OID) and OID-IRI formatting
/// </summary>
public static partial class Parser
{
/// <summary>
/// Parse an OID in separated-value notation into OID-IRI notation
/// </summary>
/// <param name="values">List of values to check against</param>
/// <param name="index">Current index into the list</param>
/// <returns>OID-IRI formatted string, if possible</returns>
/// <see href="https://oid-base.com/"/>
public static string? ParseOIDToOIDIRINotation(ulong[]? values)
{
// If we have an invalid set of values, we can't do anything
if (values == null || values.Length == 0)
return null;
// Set the initial index
int index = 0;
// Get a string builder for the path
var nameBuilder = new StringBuilder();
// Try to parse the standard value
string? standard = ParseOIDToOIDIRINotation(values, ref index);
if (standard == null)
return null;
// Add the standard value to the output
nameBuilder.Append(standard);
// If we have no more items
if (index == values.Length)
return nameBuilder.ToString();
// Add trailing items as just values
nameBuilder.Append("/");
// Get the remaining values in a new array
var remainingValues = new ulong[values.Length - index];
Array.Copy(values, index, remainingValues, 0, remainingValues.Length);
// Convert the values and append to the builder
var stringValues = Array.ConvertAll(remainingValues, v => v.ToString());
nameBuilder.Append(string.Join("/", stringValues));
// Create and return the string
return nameBuilder.ToString();
}
/// <summary>
/// Parse an OID in separated-value notation into OID-IRI notation
/// </summary>
/// <param name="values">List of values to check against</param>
/// <param name="index">Current index into the list</param>
/// <returns>OID-IRI formatted string, if possible</returns>
/// <see href="https://oid-base.com/"/>
private static string? ParseOIDToOIDIRINotation(ulong[]? values, ref int index)
{
// If we have an invalid set of values, we can't do anything
if (values == null || values.Length == 0)
return null;
// If we have an invalid index, we can't do anything
if (index < 0 || index >= values.Length)
return null;
#region Start
var oidPath = string.Empty;
switch (values[index++])
{
case 0: goto oid_0;
case 1: goto oid_1;
case 2: goto oid_2;
default: return oidPath;
}
#endregion
// itu-t, ccitt, itu-r
#region 0.*
oid_0:
oidPath += "/ITU-T";
if (index == values.Length) return oidPath;
switch (values[index++])
{
case 0: goto oid_0_0;
case 2: return $"{oidPath}/Administration";
case 3: return $"{oidPath}/Network-Operator";
case 4: return $"{oidPath}/Identified-Organization";
case 5: return "/ITU-R/R-Recommendation";
case 9: return $"{oidPath}/Data";
default: return $"{oidPath}/{values[index - 1]}";
}
;
// recommendation
#region 0.0.*
oid_0_0:
oidPath += "/Recommendation";
if (index == values.Length) return oidPath;
return values[index++] switch
{
1 => $"{oidPath}/A",
2 => $"{oidPath}/B",
3 => $"{oidPath}/C",
4 => $"{oidPath}/D",
5 => $"{oidPath}/E",
6 => $"{oidPath}/F",
7 => $"{oidPath}/G",
8 => $"{oidPath}/H",
9 => $"{oidPath}/I",
10 => $"{oidPath}/J",
11 => $"{oidPath}/K",
12 => $"{oidPath}/L",
13 => $"{oidPath}/M",
14 => $"{oidPath}/N",
15 => $"{oidPath}/O",
16 => $"{oidPath}/P",
17 => $"{oidPath}/Q",
18 => $"{oidPath}/R",
19 => $"{oidPath}/S",
20 => $"{oidPath}/T",
21 => $"{oidPath}/U",
22 => $"{oidPath}/V",
24 => $"{oidPath}/X",
25 => $"{oidPath}/Y",
26 => $"{oidPath}/Z",
_ => $"{oidPath}/{values[index - 1]}",
};
#endregion
#endregion
// iso
#region 1.*
oid_1:
oidPath += "/ISO";
if (index == values.Length) return oidPath;
switch (values[index++])
{
case 0: return $"{oidPath}/Standard";
case 1: return $"{oidPath}/Registration-Authority";
case 2: goto oid_1_2;
case 3: return $"{oidPath}/Identified-Organization";
default: return $"{oidPath}/{values[index - 1]}";
}
// member-body
#region 1.2.*
oid_1_2:
oidPath += "/Member-Body";
if (index == values.Length) return oidPath;
return values[index++] switch
{
36 => $"{oidPath}/AU",
40 => $"{oidPath}/AT",
56 => $"{oidPath}/BE",
124 => $"{oidPath}/CA",
156 => $"{oidPath}/CN",
203 => $"{oidPath}/CZ",
208 => $"{oidPath}/DK",
246 => $"{oidPath}/FI",
250 => $"{oidPath}/FR",
276 => $"{oidPath}/DE",
300 => $"{oidPath}/GR",
344 => $"{oidPath}/HK",
372 => $"{oidPath}/IE",
392 => $"{oidPath}/JP",
398 => $"{oidPath}/KZ",
410 => $"{oidPath}/KR",
498 => $"{oidPath}/MD",
528 => $"{oidPath}/NL",
566 => $"{oidPath}/NG",
578 => $"{oidPath}/NO",
616 => $"{oidPath}/PL",
643 => $"{oidPath}/RU",
702 => $"{oidPath}/SG",
752 => $"{oidPath}/SE",
804 => $"{oidPath}/UA",
826 => $"{oidPath}/GB",
840 => $"{oidPath}/US",
_ => $"{oidPath}/{values[index - 1]}",
};
#endregion
#endregion
// joint-iso-itu-t, joint-iso-ccitt
#region 2.*
oid_2:
oidPath += "/Joint-ISO-ITU-T";
if (index == values.Length) return oidPath;
switch (values[index++])
{
case 1: return "/ASN.1";
case 16: oidPath = string.Empty; goto oid_2_16;
case 17: return $"{oidPath}/Registration-Procedures";
case 23: return $"{oidPath}/International-Organizations";
case 25: goto oid_2_25;
case 27: return "/Tag-Based";
case 28: return $"{oidPath}/ITS";
case 41: return "/BIP";
case 42: oidPath = string.Empty; goto oid_2_42;
case 48: oidPath = string.Empty; goto oid_2_48;
case 49: oidPath = string.Empty; goto oid_2_49;
case 50: return "/OIDResolutionSystem";
case 51: return "/GS1";
case 52: return $"{oidPath}/UAV";
case 999: return $"{oidPath}/Example";
default: return $"{oidPath}/{values[index - 1]}";
}
// country
#region 2.16.*
oid_2_16:
oidPath += "/Country";
if (index == values.Length) return oidPath;
return values[index++] switch
{
4 => $"{oidPath}AF",
8 => $"{oidPath}AL",
12 => $"{oidPath}DZ",
20 => $"{oidPath}AD",
24 => $"{oidPath}AO",
28 => $"{oidPath}AG",
31 => $"{oidPath}AZ",
32 => $"{oidPath}AR",
36 => $"{oidPath}AU",
40 => $"{oidPath}AT",
44 => $"{oidPath}BS",
48 => $"{oidPath}BH",
50 => $"{oidPath}BD",
51 => $"{oidPath}AM",
52 => $"{oidPath}BB",
56 => $"{oidPath}BE",
60 => $"{oidPath}BM",
64 => $"{oidPath}BT",
68 => $"{oidPath}BO",
70 => $"{oidPath}BA",
72 => $"{oidPath}BW",
76 => $"{oidPath}BR",
84 => $"{oidPath}BZ",
90 => $"{oidPath}SB",
96 => $"{oidPath}BN",
100 => $"{oidPath}BG",
104 => $"{oidPath}MM",
108 => $"{oidPath}BI",
112 => $"{oidPath}BY",
116 => $"{oidPath}KH",
120 => $"{oidPath}CM",
124 => $"{oidPath}CA",
132 => $"{oidPath}CV",
140 => $"{oidPath}CF",
144 => $"{oidPath}LK",
148 => $"{oidPath}TD",
152 => $"{oidPath}CL",
156 => $"{oidPath}CN",
158 => $"{oidPath}TW",
170 => $"{oidPath}CO",
174 => $"{oidPath}KM",
178 => $"{oidPath}CG",
180 => $"{oidPath}CD",
188 => $"{oidPath}CR",
191 => $"{oidPath}HR",
192 => $"{oidPath}CU",
196 => $"{oidPath}CY",
203 => $"{oidPath}CZ",
204 => $"{oidPath}BJ",
208 => $"{oidPath}DK",
212 => $"{oidPath}DM",
214 => $"{oidPath}DO",
218 => $"{oidPath}EC",
222 => $"{oidPath}SV",
226 => $"{oidPath}GQ",
231 => $"{oidPath}ET",
232 => $"{oidPath}ER",
233 => $"{oidPath}EE",
242 => $"{oidPath}FJ",
246 => $"{oidPath}FI",
250 => $"{oidPath}FR",
262 => $"{oidPath}DJ",
266 => $"{oidPath}GA",
268 => $"{oidPath}GE",
270 => $"{oidPath}GM",
275 => $"{oidPath}PS",
276 => $"{oidPath}DE",
288 => $"{oidPath}GH",
296 => $"{oidPath}KI",
300 => $"{oidPath}GR",
308 => $"{oidPath}GD",
320 => $"{oidPath}GT",
324 => $"{oidPath}GN",
328 => $"{oidPath}GY",
332 => $"{oidPath}HT",
336 => $"{oidPath}VA",
340 => $"{oidPath}HN",
344 => $"{oidPath}HK",
348 => $"{oidPath}HU",
352 => $"{oidPath}IS",
356 => $"{oidPath}IN",
360 => $"{oidPath}ID",
364 => $"{oidPath}IR",
368 => $"{oidPath}IQ",
372 => $"{oidPath}IE",
376 => $"{oidPath}IL",
380 => $"{oidPath}IT",
384 => $"{oidPath}CI",
388 => $"{oidPath}JM",
392 => $"{oidPath}JP",
398 => $"{oidPath}KZ",
400 => $"{oidPath}JO",
404 => $"{oidPath}KE",
408 => $"{oidPath}KP",
410 => $"{oidPath}KR",
414 => $"{oidPath}KW",
417 => $"{oidPath}KG",
418 => $"{oidPath}LA",
422 => $"{oidPath}LB",
426 => $"{oidPath}LS",
428 => $"{oidPath}LV",
430 => $"{oidPath}LR",
434 => $"{oidPath}LY",
438 => $"{oidPath}LI",
440 => $"{oidPath}LT",
442 => $"{oidPath}LU",
450 => $"{oidPath}MG",
454 => $"{oidPath}MW",
458 => $"{oidPath}MY",
462 => $"{oidPath}MV",
466 => $"{oidPath}ML",
470 => $"{oidPath}MT",
478 => $"{oidPath}MR",
480 => $"{oidPath}MU",
484 => $"{oidPath}MX",
492 => $"{oidPath}MC",
496 => $"{oidPath}MN",
498 => $"{oidPath}MD",
499 => $"{oidPath}ME",
504 => $"{oidPath}MA",
508 => $"{oidPath}MZ",
512 => $"{oidPath}OM",
516 => $"{oidPath}NA",
520 => $"{oidPath}NR",
524 => $"{oidPath}NP",
528 => $"{oidPath}NL",
530 => $"{oidPath}AN",
548 => $"{oidPath}VU",
554 => $"{oidPath}NZ",
558 => $"{oidPath}NI",
562 => $"{oidPath}NE",
566 => $"{oidPath}NG",
578 => $"{oidPath}NO",
583 => $"{oidPath}FM",
584 => $"{oidPath}MH",
585 => $"{oidPath}PW",
586 => $"{oidPath}PK",
591 => $"{oidPath}PA",
598 => $"{oidPath}PG",
600 => $"{oidPath}PY",
604 => $"{oidPath}PE",
608 => $"{oidPath}PH",
616 => $"{oidPath}PL",
620 => $"{oidPath}PT",
624 => $"{oidPath}GW",
626 => $"{oidPath}TL",
634 => $"{oidPath}QA",
642 => $"{oidPath}RO",
643 => $"{oidPath}RU",
646 => $"{oidPath}RW",
659 => $"{oidPath}KN",
662 => $"{oidPath}LC",
670 => $"{oidPath}VC",
674 => $"{oidPath}SM",
678 => $"{oidPath}ST",
682 => $"{oidPath}SA",
686 => $"{oidPath}SN",
688 => $"{oidPath}RS",
690 => $"{oidPath}SC",
694 => $"{oidPath}SL",
702 => $"{oidPath}SG",
703 => $"{oidPath}SK",
704 => $"{oidPath}VN",
705 => $"{oidPath}SI",
706 => $"{oidPath}SO",
710 => $"{oidPath}ZA",
716 => $"{oidPath}ZW",
724 => $"{oidPath}ES",
728 => $"{oidPath}SS",
729 => $"{oidPath}SD",
740 => $"{oidPath}SR",
748 => $"{oidPath}SZ",
752 => $"{oidPath}SE",
756 => $"{oidPath}CH",
760 => $"{oidPath}SY",
762 => $"{oidPath}TJ",
764 => $"{oidPath}TH",
768 => $"{oidPath}TG",
776 => $"{oidPath}TO",
780 => $"{oidPath}TT",
784 => $"{oidPath}AE",
788 => $"{oidPath}TN",
792 => $"{oidPath}TR",
795 => $"{oidPath}TM",
798 => $"{oidPath}TV",
800 => $"{oidPath}UG",
804 => $"{oidPath}UA",
807 => $"{oidPath}MK",
818 => $"{oidPath}EG",
826 => $"{oidPath}GB",
834 => $"{oidPath}TZ",
840 => $"{oidPath}US",
854 => $"{oidPath}BF",
858 => $"{oidPath}UY",
860 => $"{oidPath}UZ",
862 => $"{oidPath}VE",
882 => $"{oidPath}WS",
887 => $"{oidPath}YE",
894 => $"{oidPath}ZM",
_ => $"{oidPath}{values[index - 1]}",
};
#endregion
// uuid [TODO: Requires 128-bit values]
#region 2.25.*
oid_2_25:
oidPath += "/UUID";
if (index == values.Length) return oidPath;
return values[index++] switch
{
0 => $"{oidPath}/00000000-0000-0000-0000-000000000000",
//case 288786655511405443130567505384701230: return $"{oidPath}/00379e48-0a2b-1085-b288-0002a5d5fd2e";
//case 987895962269883002155146617097157934: return $"{oidPath}/00be4308-0c89-1085-8ea0-0002a5d5fd2e";
//case 1858228783942312576083372383319475483: return $"{oidPath}/0165e1c0-a655-11e0-95b8-0002a5d5c51b";
//case 2474299330026746002885628159579243803: return $"{oidPath}/01dc8860-25fb-11da-82b2-0002a5d5c51b";
//case 3263645701162998421821186056373271854: return $"{oidPath}/02748e28-08c4-1085-b21d-0002a5d5fd2e";
//case 3325839809379844461264382260940242222: return $"{oidPath}/02808890-0ad8-1085-9bdf-0002a5d5fd2e";
// TODO: Left off at http://www.oid-info.com/get/2.25.3664154270495270126161055518190585115
_ => $"{oidPath}/{values[index - 1]}",
};
#endregion
// telebiometrics
#region 2.42.*
oid_2_42:
oidPath += "/Telebiometrics";
if (index == values.Length) return oidPath;
switch (values[index++])
{
case 0: goto oid_2_42_0;
case 1: goto oid_2_42_1;
case 2: goto oid_2_42_2;
case 3: goto oid_2_42_3;
default: return $"{oidPath}/{values[index - 1]}";
}
// modules
#region 2.42.0.*
oid_2_42_0:
oidPath += "/Modules";
if (index == values.Length) return oidPath;
switch (values[index++])
{
case 0: goto oid_2_42_0_0;
default: return $"{oidPath}/{values[index - 1]}";
}
// main
#region 2.42.0.0.*
oid_2_42_0_0:
oidPath += "/Main_Module";
if (index == values.Length) return oidPath;
return values[index++] switch
{
1 => $"{oidPath}/Version1",
_ => $"{oidPath}/{values[index - 1]}",
};
#endregion
#endregion
// tmm
#region 2.42.1.*
oid_2_42_1:
oidPath += "/TMM";
if (index == values.Length) return oidPath;
switch (values[index++])
{
case 0: goto oid_2_42_1_0;
case 1: goto oid_2_42_1_1;
case 2: goto oid_2_42_1_2;
case 3: goto oid_2_42_1_3;
case 4: return $"{oidPath}/Practitioners";
default: return $"{oidPath}/{values[index - 1]}";
}
// modules
#region 2.42.1.0.*
oid_2_42_1_0:
oidPath += "/Modules";
if (index == values.Length) return oidPath;
switch (values[index++])
{
case 0: goto oid_2_42_1_0_0;
default: return $"{oidPath}/{values[index - 1]}";
}
// main
#region 2.42.1.0.0.*
oid_2_42_1_0_0:
oidPath += "/Main";
if (index == values.Length) return oidPath;
return values[index++] switch
{
0 => $"{oidPath}/First_Version",
_ => $"{oidPath}/{values[index - 1]}",
};
#endregion
#endregion
// measures, metric
#region 2.42.1.1.*
oid_2_42_1_1:
oidPath += "/Measures";
if (index == values.Length) return oidPath;
switch (values[index++])
{
case 1: goto oid_2_42_1_1_1;
case 2: return $"{oidPath}/Units";
case 3: return $"{oidPath}";
case 4: return $"{oidPath}/Conditions";
case 5: goto oid_2_42_1_1_5;
default: return $"{oidPath}/{values[index - 1]}";
}
// quantities
#region 2.42.1.1.1.*
oid_2_42_1_1_1:
oidPath += "/Quantities";
if (index == values.Length) return oidPath;
return values[index++] switch
{
1 => $"{oidPath}/Physics",
2 => $"{oidPath}/Chemistry",
3 => $"{oidPath}/Biology",
4 => $"{oidPath}/Culturology",
5 => $"{oidPath}/Psychology",
_ => $"{oidPath}/{values[index - 1]}",
};
#endregion
// methods
#region 2.42.1.1.5.*
oid_2_42_1_1_5:
oidPath += "/Methods";
if (index == values.Length) return oidPath;
return values[index++] switch
{
1 => $"{oidPath}/Physics",
2 => $"{oidPath}/Chemistry",
3 => $"{oidPath}/Biology",
4 => $"{oidPath}/Culturology",
5 => $"{oidPath}/Psychology",
_ => $"{oidPath}/{values[index - 1]}",
};
#endregion
#endregion
// fields-of-study, scientific
#region 2.42.1.2.*
oid_2_42_1_2:
oidPath += "/Fields_of_Study";
if (index == values.Length) return oidPath;
return values[index++] switch
{
1 => $"{oidPath}/Physics",
2 => $"{oidPath}/Chemistry",
3 => $"{oidPath}/Biology",
4 => $"{oidPath}/Culturology",
5 => $"{oidPath}/Psychology",
_ => $"{oidPath}/{values[index - 1]}",
};
#endregion
// modalities, sensory
#region 2.42.1.3.*
oid_2_42_1_3:
oidPath += "/Modalities";
if (index == values.Length) return oidPath;
return values[index++] switch
{
1 => $"{oidPath}/Tango",
2 => $"{oidPath}/Video",
3 => $"{oidPath}/Audio",
4 => $"{oidPath}/Chemo",
5 => $"{oidPath}/Radio",
6 => $"{oidPath}/Calor",
7 => $"{oidPath}/Electro",
_ => $"{oidPath}/{values[index - 1]}",
};
#endregion
#endregion
// human-physiology
#region 2.42.2.*
oid_2_42_2:
oidPath += "/Human_Physiology";
if (index == values.Length) return oidPath;
switch (values[index++])
{
case 0: goto oid_2_42_2_0;
case 1: goto oid_2_42_2_1;
case 2: return $"{oidPath}/Symbol_Combinations";
default: return $"{oidPath}/{values[index - 1]}";
}
// modules
#region 2.42.2.0.*
oid_2_42_2_0:
oidPath += "/Modules";
if (index == values.Length) return oidPath;
switch (values[index++])
{
case 0: goto oid_2_42_2_0_0;
default: return $"{oidPath}/{values[index - 1]}";
}
// main
#region 2.42.2.0.0.*
oid_2_42_2_0_0:
oidPath += "/Main_Module";
if (index == values.Length) return oidPath;
return values[index++] switch
{
0 => $"{oidPath}/First_Version",
_ => $"{oidPath}/{values[index - 1]}",
};
#endregion
#endregion
// symbols
#region 2.42.2.1.*
oid_2_42_2_1:
oidPath += "/Symbols";
if (index == values.Length) return oidPath;
return values[index++] switch
{
1 => $"{oidPath}/Tango_in",
2 => $"{oidPath}/Video_in",
3 => $"{oidPath}/Audio_in",
4 => $"{oidPath}/Chemo_in",
5 => $"{oidPath}/Radio_in",
6 => $"{oidPath}/Calor_in",
7 => $"{oidPath}/Tango_out",
8 => $"{oidPath}/Video_out",
9 => $"{oidPath}/Audio_out",
10 => $"{oidPath}/Chemo_out",
11 => $"{oidPath}/Radio_out",
12 => $"{oidPath}/Calor_out",
13 => $"{oidPath}/Safe",
14 => $"{oidPath}/Threshold",
_ => $"{oidPath}/{values[index - 1]}",
};
#endregion
#endregion
// obj-cat, telehealth, e-health-protocol, th
#region 2.42.3.*
oid_2_42_3:
oidPath += "/E_Health_Protocol";
if (index == values.Length) return oidPath;
switch (values[index++])
{
case 0: goto oid_2_42_3_0;
case 1: return $"{oidPath}/[Patient schemes]";
case 2: return $"{oidPath}/[Medical staff schemes]";
case 3: return $"{oidPath}/[Observer schemes]";
case 4: return $"{oidPath}/[Pharmaceutical schemes]";
case 5: return $"{oidPath}/[Laboratory schemes]";
case 6: return $"{oidPath}/[Drug manufacturer schemes]";
case 7: return $"{oidPath}/[Medical device schemes]";
case 8: return $"{oidPath}/[Medical software schemes]";
case 9: return $"{oidPath}/[Medical insurance schemes]";
case 10: return $"{oidPath}/[Medical record schemes]";
default: return $"{oidPath}/{values[index - 1]}";
}
// obj-cat, telehealth, e-health-protocol, th
#region 2.42.3.0.*
oid_2_42_3_0:
oidPath += "/Modules";
if (index == values.Length) return oidPath;
switch (values[index++])
{
case 0: goto oid_2_42_3_0_0;
case 1: goto oid_2_42_3_0_1;
case 2: goto oid_2_42_3_0_2;
case 3: goto oid_2_42_3_0_3;
case 4: goto oid_2_42_3_0_4;
case 5: goto oid_2_42_3_0_5;
default: return $"{oidPath}/{values[index - 1]}";
}
// identification
#region 2.42.3.0.0.*
oid_2_42_3_0_0:
oidPath += "/Identification";
if (index == values.Length) return oidPath;
return values[index++] switch
{
1 => $"{oidPath}/Version1",
_ => $"{oidPath}/{values[index - 1]}",
};
#endregion
// set-up
#region 2.42.3.0.1.*
oid_2_42_3_0_1:
oidPath += "/Setup";
if (index == values.Length) return oidPath;
return values[index++] switch
{
1 => $"{oidPath}/Version1",
_ => $"{oidPath}/{values[index - 1]}",
};
#endregion
// send-and-ack
#region 2.42.3.0.2.*
oid_2_42_3_0_2:
oidPath += "/Send-and-ack";
if (index == values.Length) return oidPath;
return values[index++] switch
{
1 => $"{oidPath}/Version1",
_ => $"{oidPath}/{values[index - 1]}",
};
#endregion
// command-response
#region 2.42.3.0.3.*
oid_2_42_3_0_3:
oidPath += "/Command-response";
if (index == values.Length) return oidPath;
return values[index++] switch
{
1 => $"{oidPath}/Version1",
_ => $"{oidPath}/{values[index - 1]}",
};
#endregion
// quantity-and-units
#region 2.42.3.0.4.*
oid_2_42_3_0_4:
oidPath += "/Quantities_And_Units";
if (index == values.Length) return oidPath;
return values[index++] switch
{
1 => $"{oidPath}/Version1",
_ => $"{oidPath}/{values[index - 1]}",
};
#endregion
// examples
#region 2.42.3.0.5.*
oid_2_42_3_0_5:
oidPath += "/Examples";
if (index == values.Length) return oidPath;
return values[index++] switch
{
0 => $"{oidPath}/Command_Response",
1 => $"{oidPath}/Data_Message",
_ => $"{oidPath}/{values[index - 1]}",
};
#endregion
#endregion
#endregion
#endregion
// cybersecurity
#region 2.48.*
oid_2_48:
oidPath += "/Cybersecurity";
if (index == values.Length) return oidPath;
return values[index++] switch
{
1 => $"{oidPath}/Country",
2 => $"{oidPath}/International-Org",
_ => $"{oidPath}/{values[index - 1]}",
};
#endregion
// alerting
#region 2.49.*
oid_2_49:
oidPath += "/Alerting";
if (index == values.Length) return oidPath;
return values[index++] switch
{
0 => $"{oidPath}/WMO",
_ => $"{oidPath}/{values[index - 1]}",
};
#endregion
#endregion
}
}
}
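A usage sketch for the OID-IRI helper above; the expected output is read straight from the arc tables in this file:

using SabreTools.Serialization.ObjectIdentifier;

ulong[] oid = [1, 2, 840];
string? iri = Parser.ParseOIDToOIDIRINotation(oid);
// 1 -> "/ISO", 2 -> "/Member-Body", 840 -> "/US", so iri is "/ISO/Member-Body/US"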

View File

@@ -0,0 +1,71 @@
using System;
using System.Collections.Generic;
namespace SabreTools.Serialization.ObjectIdentifier
{
/// <summary>
/// Methods related to Object Identifiers (OID)
/// </summary>
public static partial class Parser
{
// TODO: ulong[] isn't going to work. If we can use .NET 7, we can use UInt128
// We might want to look into storing all values as GUID? I don't remember if
// you can do value comparisons between an integral value and a GUID, though.
/// <summary>
/// Parse an OID in DER-encoded byte notation into a list of values
/// </summary>
/// <param name="data">Byte array representing the data to read</param>
/// <param name="length">Total length of the data according to the DER TLV</param>
/// <returns>Array of values representing the OID</returns>
public static ulong[] ParseDERIntoArray(byte[] data, ulong length)
{
// The first byte packs nodes 1 and 2; per X.690, the first node is capped at 2,
// so encoded values of 80 and above all fall under the joint-iso-itu-t (2) arc
int firstNode = Math.Min(data[0] / 40, 2);
int secondNode = data[0] - (firstNode * 40);
// Create a list for all nodes
List<ulong> nodes = [(ulong)firstNode, (ulong)secondNode];
// All other nodes are encoded uniquely
int offset = 1;
while (offset < (long)length)
{
// If bit 7 is not set
if ((data[offset] & 0x80) == 0)
{
nodes.Add(data[offset]);
offset++;
continue;
}
// Otherwise, read the encoded value in a loop
ulong dotValue = 0;
bool doneProcessing = false;
do
{
// Shift the current encoded value
dotValue <<= 7;
// If the continuation bit (bit 7) is clear, this is the final byte of the value
if ((data[offset] & 0x80) == 0)
doneProcessing = true;
// Clear the continuation bit before merging in the low seven bits
unchecked { data[offset] &= (byte)~0x80; }
// Add the new value to the result
dotValue |= data[offset];
// Increment the offset
offset++;
} while (offset < data.Length && !doneProcessing);
// Add the parsed value to the output
nodes.Add(dotValue);
}
return [.. nodes];
}
}
}
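A worked example for ParseDERIntoArray, using the DER body of the well-known OID 1.2.840.113549 (rsadsi):

using SabreTools.Serialization.ObjectIdentifier;

byte[] encoded = [0x2A, 0x86, 0x48, 0x86, 0xF7, 0x0D];
ulong[] nodes = Parser.ParseDERIntoArray(encoded, (ulong)encoded.Length);

// 0x2A = 42 = 1 * 40 + 2                -> nodes 1 and 2
// 0x86 0x48 = (6 << 7) | 0x48           -> 840
// 0x86 0xF7 0x0D = three base-128 bytes -> 113549
// nodes is now [1, 2, 840, 113549]

Note that the decoder clears continuation bits in the input array as it goes, so pass a copy if the original buffer is still needed.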

View File

@@ -0,0 +1,25 @@
using System;
namespace SabreTools.Serialization.ObjectIdentifier
{
/// <summary>
/// Methods related to Object Identifiers (OID) and dot notation
/// </summary>
public static partial class Parser
{
/// <summary>
/// Parse an OID in separated-value notation into dot notation
/// </summary>
/// <param name="values">List of values to check against</param>
/// <returns>List of values representing the dot notation</returns>
public static string? ParseOIDToDotNotation(ulong[]? values)
{
// If we have an invalid set of values, we can't do anything
if (values == null || values.Length == 0)
return null;
var stringValues = Array.ConvertAll(values, v => v.ToString());
return string.Join(".", stringValues);
}
}
}
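And the matching usage sketch for the dot-notation helper:

using SabreTools.Serialization.ObjectIdentifier;

ulong[] oid = [1, 2, 840, 113549];
string? dotted = Parser.ParseOIDToDotNotation(oid); // "1.2.840.113549"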

View File

@@ -1,6 +1,5 @@
using System.Text;
using SabreTools.Models.BSP;
using SabreTools.Models.TAR;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Printers

View File

@@ -1,6 +1,7 @@
using System.Collections.Generic;
using System.Text;
using SabreTools.Models.NewExecutable;
using SabreTools.Serialization.Extensions;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Printers

View File

@@ -69,10 +69,7 @@ namespace SabreTools.Serialization.Printers
builder.AppendLine(localFileHeader.FileNameLength, " [Local File Header] File name length");
builder.AppendLine(localFileHeader.ExtraFieldLength, " [Local File Header] Extra field length");
builder.AppendLine(localFileHeader.FileName, " [Local File Header] File name");
// TODO: Reenable this when models are fixed
// var extraFields = Deserializers.PKZIP.ParseExtraFields(localFileHeader, localFileHeader.ExtraField);
// Print(builder, " [Local File Header] Extra Fields", extraFields);
Print(builder, " [Local File Header] Extra Fields", localFileHeader.ExtraFields);
}
#endregion
@@ -241,10 +238,7 @@ namespace SabreTools.Serialization.Printers
builder.AppendLine(entry.RelativeOffsetOfLocalHeader, " Relative offset of local header");
builder.AppendLine(entry.FileName, " File name");
builder.AppendLine(entry.FileComment, " File comment");
// TODO: Reenable this when models are fixed
// var extraFields = Deserializers.PKZIP.ParseExtraFields(entry, entry.ExtraField);
// Print(builder, " Extra Fields", extraFields);
Print(builder, " Extra Fields", entry.ExtraFields);
}
builder.AppendLine();

View File

@@ -2,12 +2,12 @@ using System;
using System.Collections.Generic;
using System.Text;
using System.Xml;
using SabreTools.ASN1;
using SabreTools.IO.Extensions;
using SabreTools.Matching;
using SabreTools.Models.COFF;
using SabreTools.Models.COFF.SymbolTableEntries;
using SabreTools.Models.PortableExecutable;
using SabreTools.Models.PortableExecutable.COFFSymbolTableEntries;
using SabreTools.Models.PortableExecutable.ResourceEntries;
using SabreTools.Models.PortableExecutable.Resource.Entries;
using SabreTools.Serialization.Extensions;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Printers
@@ -28,22 +28,36 @@ namespace SabreTools.Serialization.Printers
Print(builder, executable.Stub?.Header);
// Header
Print(builder, executable.Signature, executable.COFFFileHeader);
Print(builder, executable.Signature, executable.FileHeader);
Print(builder, executable.OptionalHeader, executable.SectionTable);
// Tables
// COFF Tables
Print(builder, executable.SectionTable);
Print(builder, executable.COFFSymbolTable);
Print(builder, executable.COFFStringTable);
Print(builder, executable.SymbolTable);
Print(builder, executable.StringTable);
// Export Table
Print(builder, executable.ExportDirectoryTable, executable.SectionTable);
Print(builder, executable.ExportAddressTable, executable.SectionTable);
Print(builder, executable.NamePointerTable);
Print(builder, executable.OrdinalTable);
Print(builder, executable.ExportNameTable);
// Import Table
Print(builder, executable.ImportDirectoryTable, executable.SectionTable);
Print(builder, executable.ImportLookupTables, executable.SectionTable);
Print(builder, executable.ImportAddressTables, executable.SectionTable);
Print(builder, executable.HintNameTable);
// Resource Table
Print(builder, executable.ResourceDirectoryTable, executable.SectionTable);
Print(builder, executable.AttributeCertificateTable);
Print(builder, executable.DelayLoadDirectoryTable);
Print(builder, executable.DelayLoadDirectoryTable, executable.SectionTable);
// Named Sections
Print(builder, executable.BaseRelocationTable, executable.SectionTable);
Print(builder, executable.DebugTable);
Print(builder, executable.ExportTable);
Print(builder, executable.ImportTable, executable.SectionTable);
Print(builder, executable.ResourceDirectoryTable);
}
private static void Print(StringBuilder builder, Models.MSDOS.ExecutableHeader? header)
@@ -83,13 +97,13 @@ namespace SabreTools.Serialization.Printers
builder.AppendLine();
}
private static void Print(StringBuilder builder, string? signature, COFFFileHeader? header)
private static void Print(StringBuilder builder, string? signature, FileHeader? header)
{
builder.AppendLine(" COFF File Header Information:");
builder.AppendLine(" File Header Information:");
builder.AppendLine(" -------------------------");
if (header == null)
{
builder.AppendLine(" No COFF file header");
builder.AppendLine(" No file header");
builder.AppendLine();
return;
}
@@ -105,7 +119,7 @@ namespace SabreTools.Serialization.Printers
builder.AppendLine();
}
private static void Print(StringBuilder builder, OptionalHeader? header, SectionHeader[]? table)
private static void Print(StringBuilder builder, Models.PortableExecutable.OptionalHeader? header, SectionHeader[]? table)
{
builder.AppendLine(" Optional Header Information:");
builder.AppendLine(" -------------------------");
@@ -127,10 +141,7 @@ namespace SabreTools.Serialization.Printers
if (header.Magic == OptionalHeaderMagicNumber.PE32)
builder.AppendLine(header.BaseOfData, " Base of data");
if (header.Magic == OptionalHeaderMagicNumber.PE32)
builder.AppendLine(header.ImageBase_PE32, " Image base");
else
builder.AppendLine(header.ImageBase_PE32Plus, " Image base");
builder.AppendLine(header.ImageBase, " Image base");
builder.AppendLine(header.SectionAlignment, " Section alignment");
builder.AppendLine(header.FileAlignment, " File alignment");
builder.AppendLine(header.MajorOperatingSystemVersion, " Major operating system version");
@@ -145,20 +156,10 @@ namespace SabreTools.Serialization.Printers
builder.AppendLine(header.CheckSum, " Checksum");
builder.AppendLine($" Subsystem: {header.Subsystem} (0x{header.Subsystem:X})");
builder.AppendLine($" DLL characteristics: {header.DllCharacteristics} (0x{header.DllCharacteristics:X})");
if (header.Magic == OptionalHeaderMagicNumber.PE32)
{
builder.AppendLine(header.SizeOfStackReserve_PE32, " Size of stack reserve");
builder.AppendLine(header.SizeOfStackCommit_PE32, " Size of stack commit");
builder.AppendLine(header.SizeOfHeapReserve_PE32, " Size of heap reserve");
builder.AppendLine(header.SizeOfHeapCommit_PE32, " Size of heap commit");
}
else
{
builder.AppendLine(header.SizeOfStackReserve_PE32Plus, " Size of stack reserve");
builder.AppendLine(header.SizeOfStackCommit_PE32Plus, " Size of stack commit");
builder.AppendLine(header.SizeOfHeapReserve_PE32Plus, " Size of heap reserve");
builder.AppendLine(header.SizeOfHeapCommit_PE32Plus, " Size of heap commit");
}
builder.AppendLine(header.SizeOfStackReserve, " Size of stack reserve");
builder.AppendLine(header.SizeOfStackCommit, " Size of stack commit");
builder.AppendLine(header.SizeOfHeapReserve, " Size of heap reserve");
builder.AppendLine(header.SizeOfHeapCommit, " Size of heap commit");
builder.AppendLine(header.LoaderFlags, " Loader flags");
builder.AppendLine(header.NumberOfRvaAndSizes, " Number of data-directory entries");
@@ -193,8 +194,8 @@ namespace SabreTools.Serialization.Printers
if (header.CertificateTable != null)
{
builder.AppendLine(" Certificate Table (5)");
builder.AppendLine(header.CertificateTable.VirtualAddress, " Virtual address");
builder.AppendLine(header.CertificateTable.VirtualAddress.ConvertVirtualAddress(table), " Physical address");
builder.AppendLine(" Virtual address: N/A");
builder.AppendLine(header.CertificateTable.VirtualAddress, " Physical address");
builder.AppendLine(header.CertificateTable.Size, " Size");
}
if (header.BaseRelocationTable != null)
@@ -314,11 +315,11 @@ namespace SabreTools.Serialization.Printers
private static void Print(StringBuilder builder, BaseEntry[]? entries)
{
builder.AppendLine(" COFF Symbol Table Information:");
builder.AppendLine(" Symbol Table Information:");
builder.AppendLine(" -------------------------");
if (entries == null || entries.Length == 0)
{
builder.AppendLine(" No COFF symbol table items");
builder.AppendLine(" No symbol table items");
builder.AppendLine();
return;
}
@@ -344,7 +345,7 @@ namespace SabreTools.Serialization.Printers
private static void Print(StringBuilder builder, StandardRecord entry, int i)
{
builder.AppendLine($" COFF Symbol Table Entry {i} (Standard Record)");
builder.AppendLine($" Symbol Table Entry {i} (Standard Record)");
if (entry.ShortName != null)
{
builder.AppendLine(entry.ShortName, " Short name", Encoding.ASCII);
@@ -355,7 +356,7 @@ namespace SabreTools.Serialization.Printers
builder.AppendLine(entry.Offset, " Offset");
}
builder.AppendLine(entry.Value, " Value");
builder.AppendLine(entry.SectionNumber, " Section number");
builder.AppendLine($" Section number: {entry.SectionNumber} (0x{entry.SectionNumber:X})");
builder.AppendLine($" Symbol type: {entry.SymbolType} (0x{entry.SymbolType:X})");
builder.AppendLine($" Storage class: {entry.StorageClass} (0x{entry.StorageClass:X})");
builder.AppendLine(entry.NumberOfAuxSymbols, " Number of aux symbols");
@@ -363,7 +364,7 @@ namespace SabreTools.Serialization.Printers
private static void Print(StringBuilder builder, FunctionDefinition entry, int i)
{
builder.AppendLine($" COFF Symbol Table Entry {i} (Function Definition)");
builder.AppendLine($" Symbol Table Entry {i} (Function Definition)");
builder.AppendLine(entry.TagIndex, " Tag index");
builder.AppendLine(entry.TotalSize, " Total size");
builder.AppendLine(entry.PointerToLinenumber, " Pointer to linenumber");
@@ -373,7 +374,7 @@ namespace SabreTools.Serialization.Printers
private static void Print(StringBuilder builder, Descriptor entry, int i)
{
builder.AppendLine($" COFF Symbol Table Entry {i} (.bf and .ef Symbol)");
builder.AppendLine($" Symbol Table Entry {i} (.bf and .ef Symbol)");
builder.AppendLine(entry.Unused1, " Unused");
builder.AppendLine(entry.Linenumber, " Linenumber");
builder.AppendLine(entry.Unused2, " Unused");
@@ -383,7 +384,7 @@ namespace SabreTools.Serialization.Printers
private static void Print(StringBuilder builder, WeakExternal entry, int i)
{
builder.AppendLine($" COFF Symbol Table Entry {i} (Weak External)");
builder.AppendLine($" Symbol Table Entry {i} (Weak External)");
builder.AppendLine(entry.TagIndex, " Tag index");
builder.AppendLine(entry.Characteristics, " Characteristics");
builder.AppendLine(entry.Unused, " Unused");
@@ -391,13 +392,13 @@ namespace SabreTools.Serialization.Printers
private static void Print(StringBuilder builder, FileRecord entry, int i)
{
builder.AppendLine($" COFF Symbol Table Entry {i} (File)");
builder.AppendLine($" Symbol Table Entry {i} (File)");
builder.AppendLine(entry.FileName, " File name", Encoding.ASCII);
}
private static void Print(StringBuilder builder, SectionDefinition entry, int i)
{
builder.AppendLine($" COFF Symbol Table Entry {i} (Section Defintion)");
builder.AppendLine($" Symbol Table Entry {i} (Section Defintion)");
builder.AppendLine(entry.Length, " Length");
builder.AppendLine(entry.NumberOfRelocations, " Number of relocations");
builder.AppendLine(entry.NumberOfLinenumbers, " Number of linenumbers");
@@ -409,20 +410,20 @@ namespace SabreTools.Serialization.Printers
private static void Print(StringBuilder builder, CLRTokenDefinition entry, int i)
{
builder.AppendLine($" COFF Symbol Table Entry {i} (CLR Token Defintion)");
builder.AppendLine(entry.AuxFormat6AuxType, " Aux type");
builder.AppendLine(entry.AuxFormat6Reserved1, " Reserved");
builder.AppendLine(entry.AuxFormat6SymbolTableIndex, " Symbol table index");
builder.AppendLine(entry.AuxFormat6Reserved2, " Reserved");
builder.AppendLine($" Symbol Table Entry {i} (CLR Token Defintion)");
builder.AppendLine(entry.AuxType, " Aux type");
builder.AppendLine(entry.Reserved1, " Reserved");
builder.AppendLine(entry.SymbolTableIndex, " Symbol table index");
builder.AppendLine(entry.Reserved2, " Reserved");
}
private static void Print(StringBuilder builder, COFFStringTable? stringTable)
private static void Print(StringBuilder builder, Models.COFF.StringTable? stringTable)
{
builder.AppendLine(" COFF String Table Information:");
builder.AppendLine(" String Table Information:");
builder.AppendLine(" -------------------------");
if (stringTable?.Strings == null || stringTable.Strings.Length == 0)
{
builder.AppendLine(" No COFF string table items");
builder.AppendLine(" No string table items");
builder.AppendLine();
return;
}
@@ -431,14 +432,14 @@ namespace SabreTools.Serialization.Printers
for (int i = 0; i < stringTable.Strings.Length; i++)
{
string? entry = stringTable.Strings[i];
builder.AppendLine($" COFF String Table Entry {i})");
builder.AppendLine($" String Table Entry {i})");
builder.AppendLine(entry, " Value");
}
builder.AppendLine();
}
private static void Print(StringBuilder builder, AttributeCertificateTableEntry[]? entries)
private static void Print(StringBuilder builder, Models.PortableExecutable.AttributeCertificate.Entry[]? entries)
{
builder.AppendLine(" Attribute Certificate Table Information:");
builder.AppendLine(" -------------------------");
@@ -449,6 +450,9 @@ namespace SabreTools.Serialization.Printers
return;
}
// Create the deserializer
var deserializer = new Deserializers.AbstractSyntaxNotationOne();
for (int i = 0; i < entries.Length; i++)
{
var entry = entries[i];
@@ -468,7 +472,7 @@ namespace SabreTools.Serialization.Printers
}
else
{
var topLevelValues = AbstractSyntaxNotationOne.Parse(entry.Certificate, 0);
var topLevelValues = deserializer.Deserialize(entry.Certificate, 0);
if (topLevelValues == null)
{
builder.AppendLine(" INVALID DATA FOUND");
@@ -476,7 +480,7 @@ namespace SabreTools.Serialization.Printers
}
else
{
foreach (TypeLengthValue tlv in topLevelValues)
foreach (Models.ASN1.TypeLengthValue tlv in topLevelValues)
{
string tlvString = tlv.Format(paddingLevel: 4);
builder.AppendLine(tlvString);
@@ -502,7 +506,7 @@ namespace SabreTools.Serialization.Printers
}
}
private static void Print(StringBuilder builder, DelayLoadDirectoryTable? table)
private static void Print(StringBuilder builder, Models.PortableExecutable.DelayLoad.DirectoryTable? table, SectionHeader[]? sections)
{
builder.AppendLine(" Delay-Load Directory Table Information:");
builder.AppendLine(" -------------------------");
@@ -514,17 +518,22 @@ namespace SabreTools.Serialization.Printers
}
builder.AppendLine(table.Attributes, " Attributes");
builder.AppendLine(table.Name, " Name RVA");
builder.AppendLine(table.NameRVA, " Name RVA");
builder.AppendLine(table.NameRVA.ConvertVirtualAddress(sections), " Name physical address");
builder.AppendLine(table.ModuleHandle, " Module handle");
builder.AppendLine(table.DelayImportAddressTable, " Delay import address table RVA");
builder.AppendLine(table.DelayImportAddressTable.ConvertVirtualAddress(sections), " Delay import address table physical address");
builder.AppendLine(table.DelayImportNameTable, " Delay import name table RVA");
builder.AppendLine(table.DelayImportNameTable.ConvertVirtualAddress(sections), " Delay import name table physical address");
builder.AppendLine(table.BoundDelayImportTable, " Bound delay import table RVA");
builder.AppendLine(table.BoundDelayImportTable.ConvertVirtualAddress(sections), " Bound delay import table physical address");
builder.AppendLine(table.UnloadDelayImportTable, " Unload delay import table RVA");
builder.AppendLine(table.UnloadDelayImportTable.ConvertVirtualAddress(sections), " Unload delay import table physical address");
builder.AppendLine(table.TimeStamp, " Timestamp");
builder.AppendLine();
}
private static void Print(StringBuilder builder, BaseRelocationBlock[]? entries, SectionHeader[]? table)
private static void Print(StringBuilder builder, Models.PortableExecutable.BaseRelocation.Block[]? entries, SectionHeader[]? sections)
{
builder.AppendLine(" Base Relocation Table Information:");
builder.AppendLine(" -------------------------");
@@ -541,14 +550,16 @@ namespace SabreTools.Serialization.Printers
builder.AppendLine($" Base Relocation Table Entry {i}");
builder.AppendLine(baseRelocationTableEntry.PageRVA, " Page RVA");
builder.AppendLine(baseRelocationTableEntry.PageRVA.ConvertVirtualAddress(table), " Page physical address");
builder.AppendLine(baseRelocationTableEntry.PageRVA.ConvertVirtualAddress(sections), " Page physical address");
builder.AppendLine(baseRelocationTableEntry.BlockSize, " Block size");
builder.AppendLine();
builder.AppendLine($" Base Relocation Table {i} Type and Offset Information:");
builder.AppendLine(" -------------------------");
if (baseRelocationTableEntry.TypeOffsetFieldEntries == null || baseRelocationTableEntry.TypeOffsetFieldEntries.Length == 0)
{
builder.AppendLine(" No base relocation table type and offset entries");
builder.AppendLine();
continue;
}
@@ -565,7 +576,7 @@ namespace SabreTools.Serialization.Printers
builder.AppendLine();
}
private static void Print(StringBuilder builder, DebugTable? table)
private static void Print(StringBuilder builder, Models.PortableExecutable.DebugData.Table? table)
{
builder.AppendLine(" Debug Table Information:");
builder.AppendLine(" -------------------------");
@@ -576,7 +587,6 @@ namespace SabreTools.Serialization.Printers
return;
}
// TODO: If more sections added, model this after the Export Table
for (int i = 0; i < table.DebugDirectoryTable.Length; i++)
{
var entry = table.DebugDirectoryTable[i];
@@ -595,174 +605,176 @@ namespace SabreTools.Serialization.Printers
builder.AppendLine();
}
private static void Print(StringBuilder builder, ExportTable? table)
private static void Print(StringBuilder builder, Models.PortableExecutable.Export.DirectoryTable? table, SectionHeader[]? sections)
{
builder.AppendLine(" Export Table Information:");
builder.AppendLine(" Export Directory Table Information:");
builder.AppendLine(" -------------------------");
if (table == null)
{
builder.AppendLine(" No export table");
builder.AppendLine();
return;
}
builder.AppendLine(" Export Directory Table Information:");
builder.AppendLine(" -------------------------");
if (table.ExportDirectoryTable == null)
{
builder.AppendLine(" No export directory table");
}
else
{
builder.AppendLine(table.ExportDirectoryTable.ExportFlags, " Export flags");
builder.AppendLine(table.ExportDirectoryTable.TimeDateStamp, " Time/Date stamp");
builder.AppendLine(table.ExportDirectoryTable.MajorVersion, " Major version");
builder.AppendLine(table.ExportDirectoryTable.MinorVersion, " Minor version");
builder.AppendLine(table.ExportDirectoryTable.NameRVA, " Name RVA");
builder.AppendLine(table.ExportDirectoryTable.Name, " Name");
builder.AppendLine(table.ExportDirectoryTable.OrdinalBase, " Ordinal base");
builder.AppendLine(table.ExportDirectoryTable.AddressTableEntries, " Address table entries");
builder.AppendLine(table.ExportDirectoryTable.NumberOfNamePointers, " Number of name pointers");
builder.AppendLine(table.ExportDirectoryTable.ExportAddressTableRVA, " Export address table RVA");
builder.AppendLine(table.ExportDirectoryTable.NamePointerRVA, " Name pointer table RVA");
builder.AppendLine(table.ExportDirectoryTable.OrdinalTableRVA, " Ordinal table RVA");
builder.AppendLine(table.ExportFlags, " Export flags");
builder.AppendLine(table.TimeDateStamp, " Time/Date stamp");
builder.AppendLine(table.MajorVersion, " Major version");
builder.AppendLine(table.MinorVersion, " Minor version");
builder.AppendLine(table.NameRVA, " Name RVA");
builder.AppendLine(table.NameRVA.ConvertVirtualAddress(sections), " Name physical address");
builder.AppendLine(table.Name, " Name");
builder.AppendLine(table.OrdinalBase, " Ordinal base");
builder.AppendLine(table.AddressTableEntries, " Address table entries");
builder.AppendLine(table.NumberOfNamePointers, " Number of name pointers");
builder.AppendLine(table.ExportAddressTableRVA, " Export address table RVA");
builder.AppendLine(table.ExportAddressTableRVA.ConvertVirtualAddress(sections), " Export address table physical address");
builder.AppendLine(table.NamePointerRVA, " Name pointer table RVA");
builder.AppendLine(table.NamePointerRVA.ConvertVirtualAddress(sections), " Name pointer table physical address");
builder.AppendLine(table.OrdinalTableRVA, " Ordinal table RVA");
builder.AppendLine(table.OrdinalTableRVA.ConvertVirtualAddress(sections), " Ordinal table physical address");
}
builder.AppendLine();
}
builder.AppendLine(" Export Address Table Information:");
builder.AppendLine(" -------------------------");
if (table.ExportAddressTable == null || table.ExportAddressTable.Length == 0)
private static void Print(StringBuilder builder, Models.PortableExecutable.Export.AddressTableEntry[]? table, SectionHeader[]? sections)
{
builder.AppendLine(" Export Address Table Information:");
builder.AppendLine(" -------------------------");
if (table == null || table.Length == 0)
{
builder.AppendLine(" No export address table items");
builder.AppendLine(" No export address table items");
}
else
{
for (int i = 0; i < table.ExportAddressTable.Length; i++)
for (int i = 0; i < table.Length; i++)
{
var entry = table.ExportAddressTable[i];
var entry = table[i];
builder.AppendLine($" Export Address Table Entry {i}");
builder.AppendLine(entry.ExportRVA, " Export RVA / Forwarder RVA");
}
}
builder.AppendLine();
builder.AppendLine(" Name Pointer Table Information:");
builder.AppendLine(" -------------------------");
if (table.NamePointerTable?.Pointers == null || table.NamePointerTable.Pointers.Length == 0)
{
builder.AppendLine(" No name pointer table items");
}
else
{
for (int i = 0; i < table.NamePointerTable.Pointers.Length; i++)
{
var entry = table.NamePointerTable.Pointers[i];
builder.AppendLine($" Name Pointer Table Entry {i}");
builder.AppendLine(entry, " Pointer");
}
}
builder.AppendLine();
builder.AppendLine(" Ordinal Table Information:");
builder.AppendLine(" -------------------------");
if (table.OrdinalTable?.Indexes == null || table.OrdinalTable.Indexes.Length == 0)
{
builder.AppendLine(" No ordinal table items");
}
else
{
for (int i = 0; i < table.OrdinalTable.Indexes.Length; i++)
{
var entry = table.OrdinalTable.Indexes[i];
builder.AppendLine($" Ordinal Table Entry {i}");
builder.AppendLine(entry, " Index");
}
}
builder.AppendLine();
builder.AppendLine(" Export Name Table Information:");
builder.AppendLine(" -------------------------");
if (table.ExportNameTable?.Strings == null || table.ExportNameTable.Strings.Length == 0)
{
builder.AppendLine(" No export name table items");
}
else
{
for (int i = 0; i < table.ExportNameTable.Strings.Length; i++)
{
var entry = table.ExportNameTable.Strings[i];
builder.AppendLine($" Export Name Table Entry {i}");
builder.AppendLine(entry, " String");
builder.AppendLine($" Export Address Table Entry {i}");
builder.AppendLine(entry.ExportRVA, " Export / Forwarder RVA");
builder.AppendLine(entry.ExportRVA.ConvertVirtualAddress(sections), " Export / Forwarder physical address");
}
}
builder.AppendLine();
}
private static void Print(StringBuilder builder, ImportTable? table, SectionHeader[]? sectionTable)
private static void Print(StringBuilder builder, Models.PortableExecutable.Export.NamePointerTable? table)
{
builder.AppendLine(" Import Table Information:");
builder.AppendLine(" Export Name Pointer Table Information:");
builder.AppendLine(" -------------------------");
if (table == null)
if (table?.Pointers == null || table.Pointers.Length == 0)
{
builder.AppendLine(" No import table");
builder.AppendLine();
return;
}
builder.AppendLine();
builder.AppendLine(" Import Directory Table Information:");
builder.AppendLine(" -------------------------");
if (table.ImportDirectoryTable == null || table.ImportDirectoryTable.Length == 0)
{
builder.AppendLine(" No import directory table items");
builder.AppendLine(" No export name pointer table items");
}
else
{
for (int i = 0; i < table.ImportDirectoryTable.Length; i++)
for (int i = 0; i < table.Pointers.Length; i++)
{
var entry = table.ImportDirectoryTable[i];
var entry = table.Pointers[i];
builder.AppendLine($" Import Directory Table Entry {i}");
builder.AppendLine(entry.ImportLookupTableRVA, " Import lookup table RVA");
builder.AppendLine(entry.ImportLookupTableRVA.ConvertVirtualAddress(sectionTable), " Import lookup table Physical Address");
builder.AppendLine(entry.TimeDateStamp, " Time/Date stamp");
builder.AppendLine(entry.ForwarderChain, " Forwarder chain");
builder.AppendLine(entry.NameRVA, " Name RVA");
builder.AppendLine(entry.Name, " Name");
builder.AppendLine(entry.ImportAddressTableRVA, " Import address table RVA");
builder.AppendLine(entry.ImportAddressTableRVA.ConvertVirtualAddress(sectionTable), " Import address table Physical Address");
builder.AppendLine($" Export Name Pointer Table Entry {i}");
builder.AppendLine(entry, " Pointer");
}
}
builder.AppendLine();
}
builder.AppendLine(" Import Lookup Tables Information:");
builder.AppendLine(" -------------------------");
if (table.ImportLookupTables == null || table.ImportLookupTables.Count == 0)
private static void Print(StringBuilder builder, Models.PortableExecutable.Export.OrdinalTable? table)
{
builder.AppendLine(" Export Ordinal Table Information:");
builder.AppendLine(" -------------------------");
if (table?.Indexes == null || table.Indexes.Length == 0)
{
builder.AppendLine(" No import lookup tables");
builder.AppendLine(" No export ordinal table items");
}
else
{
foreach (var kvp in table.ImportLookupTables)
for (int i = 0; i < table.Indexes.Length; i++)
{
var entry = table.Indexes[i];
builder.AppendLine($" Export Ordinal Table Entry {i}");
builder.AppendLine(entry, " Index");
}
}
builder.AppendLine();
}
private static void Print(StringBuilder builder, Models.PortableExecutable.Export.NameTable? table)
{
builder.AppendLine(" Export Name Table Information:");
builder.AppendLine(" -------------------------");
if (table?.Strings == null || table.Strings.Length == 0)
{
builder.AppendLine(" No export name table items");
}
else
{
for (int i = 0; i < table.Strings.Length; i++)
{
var entry = table.Strings[i];
builder.AppendLine($" Export Name Table Entry {i}");
builder.AppendLine(entry, " String");
}
}
builder.AppendLine();
}
private static void Print(StringBuilder builder, Models.PortableExecutable.Import.DirectoryTableEntry[]? table, SectionHeader[]? sections)
{
builder.AppendLine(" Import Directory Table Information:");
builder.AppendLine(" -------------------------");
if (table == null || table.Length == 0)
{
builder.AppendLine(" No import directory table items");
}
else
{
for (int i = 0; i < table.Length; i++)
{
var entry = table[i];
builder.AppendLine($" Import Directory Table Entry {i}");
builder.AppendLine(entry.ImportLookupTableRVA, " Import lookup table RVA");
builder.AppendLine(entry.ImportLookupTableRVA.ConvertVirtualAddress(sections), " Import lookup table physical address");
builder.AppendLine(entry.TimeDateStamp, " Time/Date stamp");
builder.AppendLine(entry.ForwarderChain, " Forwarder chain");
builder.AppendLine(entry.NameRVA, " Name RVA");
builder.AppendLine(entry.NameRVA.ConvertVirtualAddress(sections), " Name physical address");
builder.AppendLine(entry.Name, " Name");
builder.AppendLine(entry.ImportAddressTableRVA, " Import address table RVA");
builder.AppendLine(entry.ImportAddressTableRVA.ConvertVirtualAddress(sections), " Import address table physical address");
}
}
builder.AppendLine();
}
private static void Print(StringBuilder builder, Dictionary<int, Models.PortableExecutable.Import.LookupTableEntry[]?>? tables, SectionHeader[]? sections)
{
builder.AppendLine(" Import Lookup Tables Information:");
builder.AppendLine(" -------------------------");
if (tables == null || tables.Count == 0)
{
builder.AppendLine(" No import lookup tables");
}
else
{
foreach (var kvp in tables)
{
int index = kvp.Key;
var importLookupTable = kvp.Value;
builder.AppendLine();
builder.AppendLine($" Import Lookup Table {index} Information:");
builder.AppendLine(" -------------------------");
builder.AppendLine($" Import Lookup Table {index} Information:");
builder.AppendLine(" -------------------------");
if (importLookupTable == null || importLookupTable.Length == 0)
{
builder.AppendLine(" No import lookup table items");
builder.AppendLine(" No import lookup table items");
continue;
}
@@ -770,42 +782,45 @@ namespace SabreTools.Serialization.Printers
{
var entry = importLookupTable[i];
builder.AppendLine($" Import Lookup Table {index} Entry {i}");
builder.AppendLine(entry.OrdinalNameFlag, " Ordinal/Name flag");
builder.AppendLine($" Import Lookup Table {index} Entry {i}");
builder.AppendLine(entry.OrdinalNameFlag, " Ordinal/Name flag");
if (entry.OrdinalNameFlag)
{
builder.AppendLine(entry.OrdinalNumber, " Ordinal number");
}
else
{
builder.AppendLine(entry.HintNameTableRVA, " Hint/Name table RVA");
builder.AppendLine(entry.HintNameTableRVA.ConvertVirtualAddress(sections), " Hint/Name table physical address");
}
}
}
}
builder.AppendLine();
}
builder.AppendLine(" Import Address Tables Information:");
builder.AppendLine(" -------------------------");
if (table.ImportAddressTables == null || table.ImportAddressTables.Count == 0)
private static void Print(StringBuilder builder, Dictionary<int, Models.PortableExecutable.Import.AddressTableEntry[]?>? tables, SectionHeader[]? sections)
{
builder.AppendLine(" Import Address Tables Information:");
builder.AppendLine(" -------------------------");
if (tables == null || tables.Count == 0)
{
builder.AppendLine(" No import address tables");
builder.AppendLine(" No import address tables");
}
else
{
foreach (var kvp in tables)
{
int index = kvp.Key;
var importAddressTable = kvp.Value;
builder.AppendLine();
builder.AppendLine($" Import Address Table {index} Information:");
builder.AppendLine(" -------------------------");
builder.AppendLine($" Import Address Table {index} Information:");
builder.AppendLine(" -------------------------");
if (importAddressTable == null || importAddressTable.Length == 0)
{
builder.AppendLine(" No import address table items");
builder.AppendLine(" No import address table items");
continue;
}
@@ -813,45 +828,48 @@ namespace SabreTools.Serialization.Printers
{
var entry = importAddressTable[i];
builder.AppendLine($" Import Address Table {index} Entry {i}");
builder.AppendLine(entry.OrdinalNameFlag, " Ordinal/Name flag");
builder.AppendLine($" Import Address Table {index} Entry {i}");
builder.AppendLine(entry.OrdinalNameFlag, " Ordinal/Name flag");
if (entry.OrdinalNameFlag)
{
builder.AppendLine(entry.OrdinalNumber, " Ordinal number");
}
else
{
builder.AppendLine(entry.HintNameTableRVA, " Hint/Name table RVA");
builder.AppendLine(entry.HintNameTableRVA.ConvertVirtualAddress(sections), " Hint/Name table physical address");
}
}
}
}
builder.AppendLine();
}
builder.AppendLine(" Hint/Name Table Information:");
builder.AppendLine(" -------------------------");
if (table.HintNameTable == null || table.HintNameTable.Length == 0)
private static void Print(StringBuilder builder, Models.PortableExecutable.Import.HintNameTableEntry[]? table)
{
builder.AppendLine(" Import Hint/Name Table Information:");
builder.AppendLine(" -------------------------");
if (table == null || table.Length == 0)
{
builder.AppendLine(" No hint/name table items");
builder.AppendLine(" No import hint/name table items");
}
else
{
for (int i = 0; i < table.Length; i++)
{
var entry = table[i];
builder.AppendLine($" Hint/Name Table Entry {i}");
builder.AppendLine(entry.Hint, " Hint");
builder.AppendLine(entry.Name, " Name");
builder.AppendLine($" Hint/Name Table Entry {i}");
builder.AppendLine(entry.Hint, " Hint");
builder.AppendLine(entry.Name, " Name");
}
}
builder.AppendLine();
}
private static void Print(StringBuilder builder, Models.PortableExecutable.Resource.DirectoryTable? table, SectionHeader[]? sections)
{
builder.AppendLine(" Resource Directory Table Information:");
builder.AppendLine(" -------------------------");
@@ -862,11 +880,11 @@ namespace SabreTools.Serialization.Printers
return;
}
Print(builder, table, level: 0, types: [], sections);
builder.AppendLine();
}
private static void Print(StringBuilder builder, Models.PortableExecutable.Resource.DirectoryTable table, int level, List<object> types, SectionHeader[]? sections)
{
string padding = new(' ', (level + 1) * 2);
@@ -901,12 +919,12 @@ namespace SabreTools.Serialization.Printers
else
newTypes.Add(entry.IntegerID);
Print(builder, entry, level + 1, newTypes, sections);
}
}
}
private static void Print(StringBuilder builder, Models.PortableExecutable.Resource.DirectoryEntry entry, int level, List<object> types, SectionHeader[]? sections)
{
string padding = new(' ', (level + 1) * 2);
@@ -922,12 +940,12 @@ namespace SabreTools.Serialization.Printers
}
if (entry.DataEntry != null)
Print(builder, entry.DataEntry, level: level + 1, types, sections);
else if (entry.Subdirectory != null)
Print(builder, entry.Subdirectory, level: level + 1, types, sections);
}
private static void Print(StringBuilder builder, Models.PortableExecutable.Resource.DataEntry entry, int level, List<object> types, SectionHeader[]? sections)
{
string padding = new(' ', (level + 1) * 2);
@@ -937,6 +955,7 @@ namespace SabreTools.Serialization.Printers
builder.AppendLine(level, $"{padding}Entry level");
builder.AppendLine(entry.DataRVA, $"{padding}Data RVA");
builder.AppendLine(entry.DataRVA.ConvertVirtualAddress(sections), $"{padding}Data physical address");
builder.AppendLine(entry.Size, $"{padding}Size");
builder.AppendLine(entry.Codepage, $"{padding}Codepage");
builder.AppendLine(entry.Reserved, $"{padding}Reserved");
@@ -1022,25 +1041,25 @@ namespace SabreTools.Serialization.Printers
builder.AppendLine();
}
private static void PrintResourceRT_CURSOR(Models.PortableExecutable.Resource.DataEntry entry, int level, StringBuilder builder)
{
string padding = new(' ', (level + 1) * 2);
builder.AppendLine($"{padding}Hardware-dependent cursor resource found, not parsed yet");
}
private static void PrintResourceRT_BITMAP(Models.PortableExecutable.Resource.DataEntry entry, int level, StringBuilder builder)
{
string padding = new(' ', (level + 1) * 2);
builder.AppendLine($"{padding}Bitmap resource found, not parsed yet");
}
private static void PrintResourceRT_ICON(Models.PortableExecutable.Resource.DataEntry entry, int level, StringBuilder builder)
{
string padding = new(' ', (level + 1) * 2);
builder.AppendLine($"{padding}Hardware-dependent icon resource found, not parsed yet");
}
private static void PrintResourceRT_MENU(Models.PortableExecutable.Resource.DataEntry entry, int level, StringBuilder builder)
{
string padding = new(' ', (level + 1) * 2);
@@ -1118,7 +1137,7 @@ namespace SabreTools.Serialization.Printers
}
}
private static void PrintResourceRT_DIALOG(Models.PortableExecutable.Resource.DataEntry entry, int level, StringBuilder builder)
{
string padding = new(' ', (level + 1) * 2);
@@ -1253,7 +1272,7 @@ namespace SabreTools.Serialization.Printers
}
}
private static void PrintResourceRT_STRING(Models.PortableExecutable.Resource.DataEntry entry, int level, StringBuilder builder)
{
string padding = new(' ', (level + 1) * 2);
@@ -1273,19 +1292,19 @@ namespace SabreTools.Serialization.Printers
}
}
private static void PrintResourceRT_FONTDIR(Models.PortableExecutable.Resource.DataEntry entry, int level, StringBuilder builder)
{
string padding = new(' ', (level + 1) * 2);
builder.AppendLine($"{padding}Font directory resource found, not parsed yet");
}
private static void PrintResourceRT_FONT(Models.PortableExecutable.Resource.DataEntry entry, int level, StringBuilder builder)
{
string padding = new(' ', (level + 1) * 2);
builder.AppendLine($"{padding}Font resource found, not parsed yet");
}
private static void PrintResourceRT_ACCELERATOR(Models.PortableExecutable.Resource.DataEntry entry, int level, StringBuilder builder)
{
string padding = new(' ', (level + 1) * 2);
@@ -1308,7 +1327,7 @@ namespace SabreTools.Serialization.Printers
}
}
private static void PrintResourceRT_RCDATA(Models.PortableExecutable.Resource.DataEntry entry, int level, StringBuilder builder)
{
string padding = new(' ', (level + 1) * 2);
builder.AppendLine($"{padding}Application-defined resource found, not parsed yet");
@@ -1367,7 +1386,7 @@ namespace SabreTools.Serialization.Printers
}
}
private static void PrintResourceRT_MESSAGETABLE(Models.PortableExecutable.Resource.DataEntry entry, int level, StringBuilder builder)
{
string padding = new(' ', (level + 1) * 2);
@@ -1434,19 +1453,19 @@ namespace SabreTools.Serialization.Printers
}
}
private static void PrintResourceRT_GROUP_CURSOR(Models.PortableExecutable.Resource.DataEntry entry, int level, StringBuilder builder)
{
string padding = new(' ', (level + 1) * 2);
builder.AppendLine($"{padding}Hardware-independent cursor resource found, not parsed yet");
}
private static void PrintResourceRT_GROUP_ICON(Models.PortableExecutable.Resource.DataEntry entry, int level, StringBuilder builder)
{
string padding = new(' ', (level + 1) * 2);
builder.AppendLine($"{padding}Hardware-independent icon resource found, not parsed yet");
}
private static void PrintResourceRT_VERSION(Models.PortableExecutable.Resource.DataEntry entry, int level, StringBuilder builder)
{
string padding = new(' ', (level + 1) * 2);
@@ -1567,37 +1586,37 @@ namespace SabreTools.Serialization.Printers
}
}
private static void PrintResourceRT_DLGINCLUDE(Models.PortableExecutable.Resource.DataEntry entry, int level, StringBuilder builder)
{
string padding = new(' ', (level + 1) * 2);
builder.AppendLine($"{padding}External header resource found, not parsed yet");
}
private static void PrintResourceRT_PLUGPLAY(Models.PortableExecutable.Resource.DataEntry entry, int level, StringBuilder builder)
{
string padding = new(' ', (level + 1) * 2);
builder.AppendLine($"{padding}Plug and Play resource found, not parsed yet");
}
private static void PrintResourceRT_VXD(Models.PortableExecutable.Resource.DataEntry entry, int level, StringBuilder builder)
{
string padding = new(' ', (level + 1) * 2);
builder.AppendLine($"{padding}VXD found, not parsed yet");
}
private static void PrintResourceRT_ANICURSOR(Models.PortableExecutable.Resource.DataEntry entry, int level, StringBuilder builder)
{
string padding = new(' ', (level + 1) * 2);
builder.AppendLine($"{padding}Animated cursor found, not parsed yet");
}
private static void PrintResourceRT_ANIICON(Models.PortableExecutable.Resource.DataEntry entry, int level, StringBuilder builder)
{
string padding = new(' ', (level + 1) * 2);
builder.AppendLine($"{padding}Animated icon found, not parsed yet");
}
private static void PrintResourceRT_HTML(Models.PortableExecutable.Resource.DataEntry entry, int level, StringBuilder builder)
{
string padding = new(' ', (level + 1) * 2);
builder.AppendLine($"{padding}HTML resource found, not parsed yet");
@@ -1610,7 +1629,7 @@ namespace SabreTools.Serialization.Printers
// builder.AppendLine(Encoding.Unicode.GetString(entry.Data), $"{padding}Value (Unicode)");
}
private static void PrintResourceRT_MANIFEST(Models.PortableExecutable.Resource.DataEntry entry, int level, StringBuilder builder)
{
string padding = new(' ', (level + 1) * 2);
@@ -1787,7 +1806,7 @@ namespace SabreTools.Serialization.Printers
}
}
private static void PrintResourceUNKNOWN(Models.PortableExecutable.Resource.DataEntry entry, int level, object resourceType, StringBuilder builder)
{
string padding = new(' ', (level + 1) * 2);

View File

@@ -1,6 +1,7 @@
using System.Text;
using SabreTools.Models.WiseInstaller;
using SabreTools.Models.WiseInstaller.Actions;
using SabreTools.Serialization.Extensions;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Printers

View File

@@ -0,0 +1,58 @@
using System;
namespace SabreTools.Models.ASN1
{
/// <summary>
/// ASN.1 type indicators
/// </summary>
[Flags]
public enum ASN1Type : byte
{
#region Types
V_ASN1_EOC = 0x00,
V_ASN1_BOOLEAN = 0x01,
V_ASN1_INTEGER = 0x02,
V_ASN1_BIT_STRING = 0x03,
V_ASN1_OCTET_STRING = 0x04,
V_ASN1_NULL = 0x05,
V_ASN1_OBJECT = 0x06,
V_ASN1_OBJECT_DESCRIPTOR = 0x07,
V_ASN1_EXTERNAL = 0x08,
V_ASN1_REAL = 0x09,
V_ASN1_ENUMERATED = 0x0A,
V_ASN1_UTF8STRING = 0x0C,
V_ASN1_SEQUENCE = 0x10,
V_ASN1_SET = 0x11,
V_ASN1_NUMERICSTRING = 0x12,
V_ASN1_PRINTABLESTRING = 0x13,
V_ASN1_T61STRING = 0x14,
V_ASN1_TELETEXSTRING = 0x14,
V_ASN1_VIDEOTEXSTRING = 0x15,
V_ASN1_IA5STRING = 0x16,
V_ASN1_UTCTIME = 0x17,
V_ASN1_GENERALIZEDTIME = 0x18,
V_ASN1_GRAPHICSTRING = 0x19,
V_ASN1_ISO64STRING = 0x1A,
V_ASN1_VISIBLESTRING = 0x1A,
V_ASN1_GENERALSTRING = 0x1B,
V_ASN1_UNIVERSALSTRING = 0x1C,
V_ASN1_BMPSTRING = 0x1E,
#endregion
#region Modifiers
// Commented out because it is the default
// and can interfere with V_ASN1_EOC
// V_ASN1_UNIVERSAL = 0x00,
V_ASN1_PRIMITIVE_TAG = 0x1F,
V_ASN1_CONSTRUCTED = 0x20,
V_ASN1_APPLICATION = 0x40,
V_ASN1_CONTEXT_SPECIFIC = 0x80,
V_ASN1_PRIVATE = 0xC0,
#endregion
}
}
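A minimal sketch of how these values might be combined when decoding a raw identifier byte; `DescribeTag` is a hypothetical helper, not part of the library:

    using System;

    static void DescribeTag(byte identifier)
    {
        var type = (ASN1Type)identifier;

        // The high two bits select the class; V_ASN1_PRIVATE (0xC0) doubles as the mask
        byte tagClass = (byte)(identifier & (byte)ASN1Type.V_ASN1_PRIVATE);

        // Bit 0x20 marks a constructed encoding
        bool constructed = (type & ASN1Type.V_ASN1_CONSTRUCTED) != 0;

        // The low five bits carry the tag number for low-tag-number forms
        int tagNumber = identifier & (byte)ASN1Type.V_ASN1_PRIMITIVE_TAG;

        Console.WriteLine($"class=0x{tagClass:X2} constructed={constructed} tag={tagNumber}");
    }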

View File

@@ -0,0 +1,23 @@
namespace SabreTools.Models.ASN1
{
/// <summary>
/// ASN.1 type/length/value class that all types are based on
/// </summary>
public class TypeLengthValue
{
/// <summary>
/// The ASN.1 type
/// </summary>
public ASN1Type Type { get; set; }
/// <summary>
/// Length of the value
/// </summary>
public ulong Length { get; set; }
/// <summary>
/// Generic value associated with <see cref="Type"/>
/// </summary>
public object? Value { get; set; }
}
}
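For illustration only: a DER BOOLEAN `TRUE` (raw bytes `01 01 FF`) would map onto this model roughly as follows; this is an assumed usage sketch, not library code:

    var tlv = new TypeLengthValue
    {
        Type = ASN1Type.V_ASN1_BOOLEAN, // identifier byte 0x01
        Length = 1,                     // one content byte follows
        Value = (byte)0xFF,             // DER encoding of TRUE
    };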

View File

@@ -8,11 +8,13 @@
<IncludeSourceRevisionInInformationalVersion>false</IncludeSourceRevisionInInformationalVersion>
<IncludeSymbols>true</IncludeSymbols>
<LangVersion>latest</LangVersion>
<!-- Added due to StormLibSharp -->
<NoWarn>CS8600;CS8601;CS8603;CS8604;CS8618;CS8625;CS8634;IL3000</NoWarn>
<Nullable>enable</Nullable>
<SuppressTfmSupportBuildWarnings>true</SuppressTfmSupportBuildWarnings>
<SymbolPackageFormat>snupkg</SymbolPackageFormat>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<Version>1.9.6</Version>
<!-- Package Properties -->
<Authors>Matt Nadareski</Authors>
@@ -26,6 +28,28 @@
<PackageLicenseExpression>MIT</PackageLicenseExpression>
</PropertyGroup>
<!-- Support All Frameworks -->
<PropertyGroup Condition="$(TargetFramework.StartsWith(`net2`)) OR $(TargetFramework.StartsWith(`net3`)) OR $(TargetFramework.StartsWith(`net4`))">
<RuntimeIdentifiers>win-x86;win-x64</RuntimeIdentifiers>
</PropertyGroup>
<PropertyGroup Condition="$(TargetFramework.StartsWith(`netcoreapp`)) OR $(TargetFramework.StartsWith(`net5`))">
<RuntimeIdentifiers>win-x86;win-x64;win-arm64;linux-x64;linux-arm64;osx-x64</RuntimeIdentifiers>
</PropertyGroup>
<PropertyGroup Condition="$(TargetFramework.StartsWith(`net6`)) OR $(TargetFramework.StartsWith(`net7`)) OR $(TargetFramework.StartsWith(`net8`)) OR $(TargetFramework.StartsWith(`net9`))">
<RuntimeIdentifiers>win-x86;win-x64;win-arm64;linux-x64;linux-arm64;osx-x64;osx-arm64</RuntimeIdentifiers>
</PropertyGroup>
<PropertyGroup Condition="$(RuntimeIdentifier.StartsWith(`osx-arm`))">
<TargetFrameworks>net6.0;net7.0;net8.0;net9.0</TargetFrameworks>
</PropertyGroup>
<!-- Set a build flag for Windows specifically -->
<PropertyGroup Condition="'$(RuntimeIdentifier)'=='win-x86'">
<DefineConstants>$(DefineConstants);WINX86</DefineConstants>
</PropertyGroup>
<PropertyGroup Condition="'$(RuntimeIdentifier)'=='win-x64'">
<DefineConstants>$(DefineConstants);WINX64</DefineConstants>
</PropertyGroup>
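These constants can then gate platform-specific code paths in the library; an illustrative sketch, not taken from the repository:

    // Illustrative use of the WINX86/WINX64 build flags defined above
    #if WINX86
        Console.WriteLine("Built for 32-bit Windows");
    #elif WINX64
        Console.WriteLine("Built for 64-bit Windows");
    #endif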
<!-- Exclude certain parts of external modules by default -->
<PropertyGroup>
<DefaultItemExcludes>
@@ -36,9 +60,8 @@
</DefaultItemExcludes>
</PropertyGroup>
<!-- Exclude all external modules for .NET Framework 2.0, .NET Framework 3.5, or non-Windows builds -->
<PropertyGroup Condition="$(TargetFramework.StartsWith(`net2`)) OR $(TargetFramework.StartsWith(`net3`)) OR !($(RuntimeIdentifier.StartsWith(`win-x86`)) OR $(RuntimeIdentifier.StartsWith(`win-x64`)))">
<DefaultItemExcludes>
$(DefaultItemExcludes);
_EXTERNAL\**
@@ -62,12 +85,11 @@
</ItemGroup>
<ItemGroup>
<PackageReference Include="Newtonsoft.Json" Version="13.0.3" />
<PackageReference Include="SabreTools.ASN1" Version="1.6.0" />
<PackageReference Include="NetLegacySupport.Numerics" Version="1.0.1" Condition="$(TargetFramework.StartsWith(`net2`)) OR $(TargetFramework.StartsWith(`net3`))" />
<PackageReference Include="Newtonsoft.Json" Version="13.0.4" />
<PackageReference Include="SabreTools.Hashing" Version="1.5.0" />
<PackageReference Include="SabreTools.IO" Version="1.7.1" />
<PackageReference Include="SabreTools.Models" Version="1.7.0" />
<PackageReference Include="SabreTools.Matching" Version="1.6.0" />
<PackageReference Include="SabreTools.IO" Version="1.7.5" />
<PackageReference Include="SabreTools.Models" Version="1.7.2" />
<PackageReference Include="SharpCompress" Version="0.40.0" Condition="!$(TargetFramework.StartsWith(`net2`)) AND !$(TargetFramework.StartsWith(`net3`)) AND !$(TargetFramework.StartsWith(`net40`)) AND !$(TargetFramework.StartsWith(`net452`))" />
</ItemGroup>

View File

@@ -112,8 +112,10 @@ namespace SabreTools.Serialization.Serializers
if (stream == null)
return false;
using var fs = File.Open(path, FileMode.Create, FileAccess.Write, FileShare.None);
stream.CopyTo(fs);
fs.Flush();
return true;
}
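The switch away from `File.OpenWrite` matters because `File.OpenWrite` opens with `FileMode.OpenOrCreate`, which does not truncate an existing file: overwriting a longer file leaves stale trailing bytes. A small illustration of the failure mode:

    // Illustrative only: why FileMode.Create is needed for overwrites
    File.WriteAllText("out.bin", "0123456789");
    using (var fs = File.OpenWrite("out.bin")) // FileMode.OpenOrCreate, no truncation
    {
        fs.WriteByte(0x41);
    }
    // "out.bin" now holds "A123456789"; FileMode.Create would leave just "A"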

View File

@@ -1,6 +1,4 @@
using System.IO;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Serializers
@@ -9,7 +7,10 @@ namespace SabreTools.Serialization.Serializers
/// Base class for all binary serializers
/// </summary>
/// <typeparam name="TModel">Type of the model to serialize</typeparam>
/// <remarks>
/// This class allows all inheriting types to only implement <see cref="IStreamSerializer{TModel}"/>
/// and still implicitly implement <see cref="IByteSerializer{TModel}"/> and <see cref="IFileSerializer{TModel}"/>.
/// </remarks>
public abstract class BaseBinarySerializer<TModel> :
IByteSerializer<TModel>,
IFileSerializer<TModel>,
@@ -20,7 +21,7 @@ namespace SabreTools.Serialization.Serializers
/// <inheritdoc/>
public virtual byte[]? SerializeArray(TModel? obj)
{
using var stream = Serialize(obj);
if (stream == null)
return null;
@@ -39,12 +40,14 @@ namespace SabreTools.Serialization.Serializers
if (string.IsNullOrEmpty(path))
return false;
using var stream = Serialize(obj);
if (stream == null)
return false;
using var fs = File.Open(path, FileMode.Create, FileAccess.Write, FileShare.None);
stream.CopyTo(fs);
fs.Flush();
return true;
}
@@ -56,110 +59,5 @@ namespace SabreTools.Serialization.Serializers
public abstract Stream? Serialize(TModel? obj);
#endregion
#region Static Implementations
/// <inheritdoc cref="IByteSerializer.Deserialize(T?)"/>
public static byte[]? SerializeBytes(TModel? obj)
{
var serializer = GetType<IByteSerializer<TModel>>();
if (serializer == null)
return default;
return serializer.SerializeArray(obj);
}
/// <inheritdoc cref="IFileSerializer.Serialize(T?, string?)"/>
public static bool SerializeFile(TModel? obj, string? path)
{
var serializer = GetType<IFileSerializer<TModel>>();
if (serializer == null)
return default;
return serializer.Serialize(obj, path);
}
/// <inheritdoc cref="IStreamSerializer.Serialize(T?)"/>
public static Stream? SerializeStream(TModel? obj)
{
var serializer = GetType<IStreamSerializer<TModel>>();
if (serializer == null)
return default;
return serializer.Serialize(obj);
}
#endregion
#region Helpers
/// <summary>
/// Get a constructed instance of a type, if possible
/// </summary>
/// <typeparam name="TSerializer">Serializer type to construct</typeparam>
/// <returns>Serializer of the requested type, null on error</returns>
private static TSerializer? GetType<TSerializer>()
{
// If the serializer type is invalid
string? serializerName = typeof(TSerializer)?.Name;
if (serializerName == null)
return default;
// If the serializer has no generic arguments
var genericArgs = typeof(TSerializer).GetGenericArguments();
if (genericArgs == null || genericArgs.Length == 0)
return default;
// Loop through all loaded assemblies
Type modelType = genericArgs[0];
foreach (var assembly in AppDomain.CurrentDomain.GetAssemblies())
{
// If the assembly is invalid
if (assembly == null)
return default;
// If not all types can be loaded, use the ones that could be
Type?[] assemblyTypes = [];
try
{
assemblyTypes = assembly.GetTypes();
}
catch (ReflectionTypeLoadException rtle)
{
assemblyTypes = rtle.Types ?? [];
}
// Loop through all types
foreach (Type? type in assemblyTypes)
{
// If the type is invalid
if (type == null)
continue;
// If the type isn't a class
if (!type.IsClass)
continue;
// If the type doesn't implement the interface
var interfaceType = type.GetInterface(serializerName);
if (interfaceType == null)
continue;
// If the interface doesn't use the correct type parameter
var genericTypes = interfaceType.GetGenericArguments();
if (genericTypes.Length != 1 || genericTypes[0] != modelType)
continue;
// Try to create a concrete instance of the type
var instance = (TSerializer?)Activator.CreateInstance(type);
if (instance != null)
return instance;
}
}
return default;
}
#endregion
}
}
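Given the remarks above, an inheriting type presumably only needs to supply the stream form; a hypothetical minimal subclass (not from the repository) to show the shape:

    // EchoSerializer is illustrative: it just wraps the input bytes in a stream.
    public class EchoSerializer : BaseBinarySerializer<byte[]>
    {
        /// <inheritdoc/>
        public override Stream? Serialize(byte[]? obj)
            => obj == null ? null : new MemoryStream(obj);
    }

    // SerializeArray(bytes) and Serialize(bytes, path) are then inherited from the base class.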

View File

@@ -57,8 +57,10 @@ namespace SabreTools.Serialization.Serializers
if (stream == null)
return false;
using var fs = File.Open(path, FileMode.Create, FileAccess.Write, FileShare.None);
stream.CopyTo(fs);
fs.Flush();
return true;
}
@@ -270,7 +272,11 @@ namespace SabreTools.Serialization.Serializers
writer.WriteRequiredAttributeString("name", rom.Name, throwOnError: true);
writer.WriteRequiredAttributeString("size", rom.Size, throwOnError: true);
writer.WriteOptionalAttributeString("crc", rom.CRC);
writer.WriteOptionalAttributeString("md2", rom.MD2);
writer.WriteOptionalAttributeString("md4", rom.MD4);
writer.WriteOptionalAttributeString("md5", rom.MD5);
writer.WriteOptionalAttributeString("ripemd128", rom.RIPEMD128);
writer.WriteOptionalAttributeString("ripemd160", rom.RIPEMD160);
writer.WriteOptionalAttributeString("sha1", rom.SHA1);
writer.WriteOptionalAttributeString("sha256", rom.SHA256);
writer.WriteOptionalAttributeString("sha384", rom.SHA384);

View File

@@ -59,8 +59,10 @@ namespace SabreTools.Serialization.Serializers
if (stream == null)
return false;
using var fs = File.Open(path, FileMode.Create, FileAccess.Write, FileShare.None);
stream.CopyTo(fs);
fs.Flush();
return true;
}
@@ -110,6 +112,12 @@ namespace SabreTools.Serialization.Serializers
case HashType.MD5:
WriteMD5(obj.MD5, writer);
break;
case HashType.RIPEMD128:
WriteRIPEMD128(obj.RIPEMD128, writer);
break;
case HashType.RIPEMD160:
WriteRIPEMD160(obj.RIPEMD160, writer);
break;
case HashType.SHA1:
WriteSHA1(obj.SHA1, writer);
break;
@@ -222,6 +230,50 @@ namespace SabreTools.Serialization.Serializers
}
}
/// <summary>
/// Write RIPEMD128 information to the current writer
/// </summary>
/// <param name="ripemd128s">Array of RIPEMD128 objects representing the files</param>
/// <param name="writer">SeparatedValueWriter representing the output</param>
private static void WriteRIPEMD128(RIPEMD128[]? ripemd128s, SeparatedValueWriter writer)
{
// If the item information is missing, we can't do anything
if (ripemd128s == null || ripemd128s.Length == 0)
return;
// Loop through and write out the items
foreach (var ripemd128 in ripemd128s)
{
if (string.IsNullOrEmpty(ripemd128.Hash) || string.IsNullOrEmpty(ripemd128.File))
continue;
writer.WriteValues([ripemd128.Hash!, ripemd128.File!]);
writer.Flush();
}
}
/// <summary>
/// Write RIPEMD160 information to the current writer
/// </summary>
/// <param name="ripemd160s">Array of RIPEMD160 objects representing the files</param>
/// <param name="writer">SeparatedValueWriter representing the output</param>
private static void WriteRIPEMD160(RIPEMD160[]? ripemd160s, SeparatedValueWriter writer)
{
// If the item information is missing, we can't do anything
if (ripemd160s == null || ripemd160s.Length == 0)
return;
// Loop through and write out the items
foreach (var ripemd160 in ripemd160s)
{
if (string.IsNullOrEmpty(ripemd160.Hash) || string.IsNullOrEmpty(ripemd160.File))
continue;
writer.WriteValues([ripemd160.Hash!, ripemd160.File!]);
writer.Flush();
}
}
/// <summary>
/// Write SHA1 information to the current writer
/// </summary>

View File

@@ -59,8 +59,9 @@ namespace SabreTools.Serialization.Serializers
if (stream == null)
return false;
using var fs = File.Open(path, FileMode.Create, FileAccess.Write, FileShare.None);
stream.CopyTo(fs);
fs.Flush();
return true;
}

View File

@@ -100,8 +100,10 @@ namespace SabreTools.Serialization.Serializers
if (stream == null)
return false;
using var fs = File.Open(path, FileMode.Create, FileAccess.Write, FileShare.None);
stream.CopyTo(fs);
fs.Flush();
return true;
}

View File

@@ -64,8 +64,9 @@ namespace SabreTools.Serialization.Serializers
if (stream == null)
return false;
using var fs = File.Open(path, FileMode.Create, FileAccess.Write, FileShare.None);
stream.CopyTo(fs);
fs.Flush();
return true;
}

View File

@@ -1,11 +1,10 @@
using System;
using System.IO;
using SabreTools.IO.Extensions;
using SabreTools.Matching;
using SabreTools.Serialization.Interfaces;
using static SabreTools.Matching.Extensions;
using SabreTools.Serialization.Wrappers;
namespace SabreTools.Serialization
{
public static class WrapperFactory
{
@@ -30,7 +29,7 @@ namespace SabreTools.Serialization.Wrappers
WrapperType.IniFile => null,// TODO: Implement wrapper
WrapperType.InstallShieldArchiveV3 => InstallShieldArchiveV3.Create(data),
WrapperType.InstallShieldCAB => InstallShieldCabinet.Create(data),
WrapperType.LDSCRYPT => LDSCRYPT.Create(data),
WrapperType.LZKWAJ => LZKWAJ.Create(data),
WrapperType.LZQBasic => LZQBasic.Create(data),
WrapperType.LZSZDD => LZSZDD.Create(data),
@@ -47,11 +46,11 @@ namespace SabreTools.Serialization.Wrappers
WrapperType.PlayJPlaylist => PlayJPlaylist.Create(data),
WrapperType.Quantum => Quantum.Create(data),
WrapperType.RAR => RAR.Create(data),
WrapperType.RealArcadeInstaller => RealArcadeInstaller.Create(data),
WrapperType.RealArcadeMezzanine => RealArcadeMezzanine.Create(data),
WrapperType.SecuROMDFA => SecuROMDFA.Create(data),
WrapperType.SevenZip => SevenZip.Create(data),
WrapperType.SFFS => SFFS.Create(data),
WrapperType.SGA => SGA.Create(data),
WrapperType.TapeArchive => TapeArchive.Create(data),
WrapperType.Textfile => null,// TODO: Implement wrapper
@@ -158,10 +157,9 @@ namespace SabreTools.Serialization.Wrappers
#endregion
#region BDPlusSVM
if (magic.StartsWith(Models.BDPlus.Constants.SignatureBytes))
return WrapperType.BDPlusSVM;
if (extension.Equals("svm", StringComparison.OrdinalIgnoreCase))
@@ -191,10 +189,9 @@ namespace SabreTools.Serialization.Wrappers
#endregion
#region BZip2
if (magic.StartsWith(Models.BZip2.Constants.SignatureBytes))
return WrapperType.BZip2;
if (extension.Equals("bz2", StringComparison.OrdinalIgnoreCase))
@@ -229,10 +226,9 @@ namespace SabreTools.Serialization.Wrappers
#endregion
#region CHD
if (magic.StartsWith(Models.CHD.Constants.SignatureBytes))
return WrapperType.CHD;
#endregion
@@ -298,10 +294,9 @@ namespace SabreTools.Serialization.Wrappers
#endregion
#region GZip
if (magic.StartsWith(new byte[] { Models.GZIP.Constants.ID1, Models.GZIP.Constants.ID2 }))
return WrapperType.GZip;
if (extension.Equals("gz", StringComparison.OrdinalIgnoreCase))
@@ -316,10 +311,9 @@ namespace SabreTools.Serialization.Wrappers
#endregion
#region InstallShieldArchiveV3
if (magic.StartsWith(Models.InstallShieldArchiveV3.Constants.HeaderSignatureBytes))
return WrapperType.InstallShieldArchiveV3;
if (extension.Equals("z", StringComparison.OrdinalIgnoreCase))
@@ -336,10 +330,9 @@ namespace SabreTools.Serialization.Wrappers
#endregion
#region LDSCRYPT
if (magic.StartsWith(Models.LDSCRYPT.Constants.SignatureBytes))
return WrapperType.LDSCRYPT;
#endregion
@@ -439,6 +432,10 @@ namespace SabreTools.Serialization.Wrappers
// TODO: Use constants from Models here
#region PFF
// Version 0
if (magic.StartsWith([0x14, 0x00, 0x00, 0x00, 0x50, 0x46, 0x46, 0x30]))
return WrapperType.PFF;
// Version 2
if (magic.StartsWith([0x14, 0x00, 0x00, 0x00, 0x50, 0x46, 0x46, 0x32]))
return WrapperType.PFF;
@@ -589,15 +586,14 @@ namespace SabreTools.Serialization.Wrappers
#endregion
#region RAR
// RAR archive version 1.50 onwards
if (magic.StartsWith(Models.RAR.Constants.OldSignatureBytes))
return WrapperType.RAR;
// RAR archive version 5.0 onwards
if (magic.StartsWith(Models.RAR.Constants.NewSignatureBytes))
return WrapperType.RAR;
if (extension.Equals("rar", StringComparison.OrdinalIgnoreCase))
@@ -605,17 +601,16 @@ namespace SabreTools.Serialization.Wrappers
#endregion
#region RealArcade
// RASGI2.0
// Found in the ".rgs" files in IA item "Nova_RealArcadeCD_USA".
if (magic.StartsWith(Models.RealArcades.Constants.RgsSignatureBytes))
return WrapperType.RealArcadeInstaller;
// XZip2.0
// Found in the ".mez" files in IA item "Nova_RealArcadeCD_USA".
if (magic.StartsWith(Models.RealArcades.Constants.MezzanineSignatureBytes))
return WrapperType.RealArcadeMezzanine;
#endregion
@@ -627,10 +622,9 @@ namespace SabreTools.Serialization.Wrappers
#endregion
#region SevenZip
if (magic.StartsWith(Models.SevenZip.Constants.SignatureBytes))
return WrapperType.SevenZip;
if (extension.Equals("7z", StringComparison.OrdinalIgnoreCase))
@@ -641,7 +635,7 @@ namespace SabreTools.Serialization.Wrappers
#region SFFS
// Found in Redump entry 81756, confirmed to be "StarForce Filesystem" by PiD.
if (magic.StartsWith(Models.StarForce.Constants.SignatureBytes))
return WrapperType.SFFS;
#endregion
@@ -704,6 +698,14 @@ namespace SabreTools.Serialization.Wrappers
if (magic.StartsWith([0x3C, 0x3F, 0x78, 0x6D, 0x6C]))
return WrapperType.Textfile;
// "<?xml" in UTF-16 encoding
if (magic.StartsWith([0x3C, 0x00, 0x3F, 0x00, 0x78, 0x00, 0x6D, 0x00, 0x6C, 0x00]))
return WrapperType.Textfile;
// "<?xml" in UTF-16 encoding with byte order marks
if (magic.StartsWith([0xFF, 0xFE, 0x3C, 0x00, 0x3F, 0x00, 0x78, 0x00, 0x6D, 0x00, 0x6C, 0x00]))
return WrapperType.Textfile;
// "Description in Zip"
if (extension.Equals("diz", StringComparison.OrdinalIgnoreCase))
return WrapperType.Textfile;
@@ -785,10 +787,9 @@ namespace SabreTools.Serialization.Wrappers
#endregion
#region XZ
if (magic.StartsWith(Models.XZ.Constants.SignatureBytes))
return WrapperType.XZ;
if (extension.Equals("xz", StringComparison.OrdinalIgnoreCase))

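The detection logic above leans on a `StartsWith` byte-comparison extension (from SabreTools.Matching). An illustrative stand-alone equivalent, under the assumption that it performs a simple prefix match:

    // Illustrative equivalent of magic.StartsWith(signature)
    static bool MatchesSignature(byte[] magic, byte[] signature)
    {
        // A buffer shorter than the signature cannot match
        if (magic.Length < signature.Length)
            return false;

        // Compare the leading bytes one by one
        for (int i = 0; i < signature.Length; i++)
        {
            if (magic[i] != signature[i])
                return false;
        }

        return true;
    }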
View File

@@ -24,18 +24,26 @@ namespace SabreTools.Serialization.Wrappers
#region Constructors
/// <inheritdoc/>
public AACSMediaKeyBlock(MediaKeyBlock model, byte[] data) : base(model, data) { }
/// <inheritdoc/>
public AACSMediaKeyBlock(MediaKeyBlock model, byte[] data, int offset) : base(model, data, offset) { }
/// <inheritdoc/>
public AACSMediaKeyBlock(MediaKeyBlock model, byte[] data, int offset, int length) : base(model, data, offset, length) { }
/// <inheritdoc/>
public AACSMediaKeyBlock(MediaKeyBlock model, Stream data) : base(model, data) { }
/// <inheritdoc/>
public AACSMediaKeyBlock(MediaKeyBlock model, Stream data, long offset) : base(model, data, offset) { }
/// <inheritdoc/>
public AACSMediaKeyBlock(MediaKeyBlock model, Stream data, long offset, long length) : base(model, data, offset, length) { }
#endregion
#region Static Constructors
/// <summary>
/// Create an AACS media key block from a byte array and offset
@@ -74,12 +82,11 @@ namespace SabreTools.Serialization.Wrappers
// Cache the current offset
long currentOffset = data.Position;
var model = new Deserializers.AACS().Deserialize(data);
if (model == null)
return null;
data.Seek(currentOffset, SeekOrigin.Begin);
return new AACSMediaKeyBlock(model, data, currentOffset);
}
catch
{

View File

@@ -24,18 +24,26 @@ namespace SabreTools.Serialization.Wrappers
#region Constructors
/// <inheritdoc/>
public BDPlusSVM(SVM model, byte[] data) : base(model, data) { }
/// <inheritdoc/>
public BDPlusSVM(SVM model, byte[] data, int offset) : base(model, data, offset) { }
/// <inheritdoc/>
public BDPlusSVM(SVM model, byte[] data, int offset, int length) : base(model, data, offset, length) { }
/// <inheritdoc/>
public BDPlusSVM(SVM model, Stream data) : base(model, data) { }
/// <inheritdoc/>
public BDPlusSVM(SVM model, Stream data, long offset) : base(model, data, offset) { }
/// <inheritdoc/>
public BDPlusSVM(SVM model, Stream data, long offset, long length) : base(model, data, offset, length) { }
#endregion
#region Static Constructors
/// <summary>
/// Create a BD+ SVM from a byte array and offset
@@ -74,12 +82,11 @@ namespace SabreTools.Serialization.Wrappers
// Cache the current offset
long currentOffset = data.Position;
var model = new Deserializers.BDPlus().Deserialize(data);
if (model == null)
return null;
data.Seek(currentOffset, SeekOrigin.Begin);
return new BDPlusSVM(model, data, currentOffset);
}
catch
{

View File

@@ -0,0 +1,111 @@
using System;
using System.IO;
using SabreTools.IO.Compression.Deflate;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Wrappers
{
public partial class BFPK : IExtractable
{
/// <inheritdoc/>
public bool Extract(string outputDirectory, bool includeDebug)
{
// If we have no files
if (Files == null || Files.Length == 0)
return false;
// Loop through and extract all files to the output
bool allExtracted = true;
for (int i = 0; i < Files.Length; i++)
{
allExtracted &= ExtractFile(i, outputDirectory, includeDebug);
}
return allExtracted;
}
/// <summary>
/// Extract a file from the BFPK to an output directory by index
/// </summary>
/// <param name="index">File index to extract</param>
/// <param name="outputDirectory">Output directory to write to</param>
/// <param name="includeDebug">True to include debug data, false otherwise</param>
/// <returns>True if the file extracted, false otherwise</returns>
public bool ExtractFile(int index, string outputDirectory, bool includeDebug)
{
// If we have no files
if (Files == null || Files.Length == 0)
return false;
// If we have an invalid index
if (index < 0 || index >= Files.Length)
return false;
// Get the file information
var file = Files[index];
if (file == null)
return false;
// Get the read index and length
int offset = file.Offset + 4;
int compressedSize = file.CompressedSize;
// Some files can lack the length prefix
if (compressedSize > Length)
{
offset -= 4;
compressedSize = file.UncompressedSize;
}
// If we have an invalid output directory
if (string.IsNullOrEmpty(outputDirectory))
return false;
// Ensure directory separators are consistent
string filename = file.Name ?? $"file{index}";
if (Path.DirectorySeparatorChar == '\\')
filename = filename.Replace('/', '\\');
else if (Path.DirectorySeparatorChar == '/')
filename = filename.Replace('\\', '/');
// Ensure the full output directory exists
filename = Path.Combine(outputDirectory, filename);
var directoryName = Path.GetDirectoryName(filename);
if (directoryName != null && !Directory.Exists(directoryName))
Directory.CreateDirectory(directoryName);
// Try to write the data
try
{
// Open the output file for writing
using var fs = File.Open(filename, FileMode.Create, FileAccess.Write, FileShare.None);
// Read the data block
var data = ReadRangeFromSource(offset, compressedSize);
if (data.Length == 0)
return false;
// If we have uncompressed data
if (compressedSize == file.UncompressedSize)
{
fs.Write(data, 0, compressedSize);
fs.Flush();
}
else
{
using MemoryStream ms = new MemoryStream(data);
using ZlibStream zs = new ZlibStream(ms, CompressionMode.Decompress);
zs.CopyTo(fs);
fs.Flush();
}
}
catch (Exception ex)
{
if (includeDebug) Console.Error.WriteLine(ex);
return false;
}
return true;
}
}
}
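A hypothetical usage of the extraction API above; the archive path and output directory are placeholders, not files from the repository:

    // Open a BFPK archive and extract its contents (illustrative)
    using var input = File.OpenRead("archive.bfpk");
    var bfpk = BFPK.Create(input);
    if (bfpk != null)
    {
        // Extract returns false if any single file fails to extract
        bool ok = bfpk.Extract("extracted", includeDebug: false);
        Console.WriteLine(ok ? "All files extracted" : "Some files failed");
    }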

View File

@@ -1,13 +1,9 @@
using System.IO;
using SabreTools.Models.BFPK;
namespace SabreTools.Serialization.Wrappers
{
public partial class BFPK : WrapperBase<Archive>
{
#region Descriptive Properties
@@ -26,18 +22,26 @@ namespace SabreTools.Serialization.Wrappers
#region Constructors
/// <inheritdoc/>
public BFPK(Archive model, byte[] data) : base(model, data) { }
/// <inheritdoc/>
public BFPK(Archive model, byte[] data, int offset) : base(model, data, offset) { }
/// <inheritdoc/>
public BFPK(Archive model, byte[] data, int offset, int length) : base(model, data, offset, length) { }
/// <inheritdoc/>
public BFPK(Archive model, Stream data) : base(model, data) { }
/// <inheritdoc/>
public BFPK(Archive model, Stream data, long offset) : base(model, data, offset) { }
/// <inheritdoc/>
public BFPK(Archive model, Stream data, long offset, long length) : base(model, data, offset, length) { }
#endregion
#region Static Constructors
/// <summary>
/// Create a BFPK archive from a byte array and offset
@@ -76,12 +80,11 @@ namespace SabreTools.Serialization.Wrappers
// Cache the current offset
long currentOffset = data.Position;
var model = new Deserializers.BFPK().Deserialize(data);
if (model == null)
return null;
data.Seek(currentOffset, SeekOrigin.Begin);
return new BFPK(model, data, currentOffset);
}
catch
{
@@ -90,110 +93,5 @@ namespace SabreTools.Serialization.Wrappers
}
#endregion
#region Extraction
/// <inheritdoc/>
public bool Extract(string outputDirectory, bool includeDebug)
{
// If we have no files
if (Files == null || Files.Length == 0)
return false;
// Loop through and extract all files to the output
bool allExtracted = true;
for (int i = 0; i < Files.Length; i++)
{
allExtracted &= ExtractFile(i, outputDirectory, includeDebug);
}
return allExtracted;
}
/// <summary>
/// Extract a file from the BFPK to an output directory by index
/// </summary>
/// <param name="index">File index to extract</param>
/// <param name="outputDirectory">Output directory to write to</param>
/// <param name="includeDebug">True to include debug data, false otherwise</param>
/// <returns>True if the file extracted, false otherwise</returns>
public bool ExtractFile(int index, string outputDirectory, bool includeDebug)
{
// If we have no files
if (Files == null || Files.Length == 0)
return false;
// If we have an invalid index
if (index < 0 || index >= Files.Length)
return false;
// Get the file information
var file = Files[index];
if (file == null)
return false;
// Get the read index and length
int offset = file.Offset + 4;
int compressedSize = file.CompressedSize;
// Some files can lack the length prefix
if (compressedSize > Length)
{
offset -= 4;
compressedSize = file.UncompressedSize;
}
// If we have an invalid output directory
if (string.IsNullOrEmpty(outputDirectory))
return false;
// Ensure directory separators are consistent
string filename = file.Name ?? $"file{index}";
if (Path.DirectorySeparatorChar == '\\')
filename = filename.Replace('/', '\\');
else if (Path.DirectorySeparatorChar == '/')
filename = filename.Replace('\\', '/');
// Ensure the full output directory exists
filename = Path.Combine(outputDirectory, filename);
var directoryName = Path.GetDirectoryName(filename);
if (directoryName != null && !Directory.Exists(directoryName))
Directory.CreateDirectory(directoryName);
// Try to write the data
try
{
// Open the output file for writing
using FileStream fs = File.OpenWrite(filename);
// Read the data block
var data = _dataSource.ReadFrom(offset, compressedSize, retainPosition: true);
if (data == null)
return false;
// If we have uncompressed data
if (compressedSize == file.UncompressedSize)
{
fs.Write(data, 0, compressedSize);
fs.Flush();
}
else
{
using MemoryStream ms = new MemoryStream(data);
using ZlibStream zs = new ZlibStream(ms, CompressionMode.Decompress);
zs.CopyTo(fs);
fs.Flush();
}
}
catch (Exception ex)
{
if (includeDebug) Console.Error.WriteLine(ex);
return false;
}
return true;
}
#endregion
}
}

View File

@@ -0,0 +1,95 @@
using System;
using System.IO;
using SabreTools.Models.BSP;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Wrappers
{
public partial class BSP : IExtractable
{
/// <inheritdoc/>
public bool Extract(string outputDirectory, bool includeDebug)
{
// If we have no lumps
if (Lumps == null || Lumps.Length == 0)
return false;
// Loop through and extract all lumps to the output
bool allExtracted = true;
for (int i = 0; i < Lumps.Length; i++)
{
allExtracted &= ExtractLump(i, outputDirectory, includeDebug);
}
return allExtracted;
}
/// <summary>
/// Extract a lump from the BSP to an output directory by index
/// </summary>
/// <param name="index">Lump index to extract</param>
/// <param name="outputDirectory">Output directory to write to</param>
/// <param name="includeDebug">True to include debug data, false otherwise</param>
/// <returns>True if the lump extracted, false otherwise</returns>
public bool ExtractLump(int index, string outputDirectory, bool includeDebug)
{
// If we have no lumps
if (Lumps == null || Lumps.Length == 0)
return false;
// If the lumps index is invalid
if (index < 0 || index >= Lumps.Length)
return false;
// Read the data
var lump = Lumps[index];
var data = ReadRangeFromSource(lump.Offset, lump.Length);
if (data.Length == 0)
return false;
// Create the filename
string filename = $"lump_{index}.bin";
switch ((LumpType)index)
{
case LumpType.LUMP_ENTITIES:
filename = "entities.ent";
break;
case LumpType.LUMP_TEXTURES:
filename = "texture_data.bin";
break;
}
// If we have an invalid output directory
if (string.IsNullOrEmpty(outputDirectory))
return false;
// Ensure directory separators are consistent
if (Path.DirectorySeparatorChar == '\\')
filename = filename.Replace('/', '\\');
else if (Path.DirectorySeparatorChar == '/')
filename = filename.Replace('\\', '/');
// Ensure the full output directory exists
filename = Path.Combine(outputDirectory, filename);
var directoryName = Path.GetDirectoryName(filename);
if (directoryName != null && !Directory.Exists(directoryName))
Directory.CreateDirectory(directoryName);
// Try to write the data
try
{
// Open the output file for writing
using var fs = File.Open(filename, FileMode.Create, FileAccess.Write, FileShare.None);
fs.Write(data, 0, data.Length);
fs.Flush();
}
catch (Exception ex)
{
if (includeDebug) Console.Error.WriteLine(ex);
return false;
}
return true;
}
}
}

View File

@@ -1,12 +1,9 @@
using System.IO;
using SabreTools.Models.BSP;
namespace SabreTools.Serialization.Wrappers
{
public partial class BSP : WrapperBase<BspFile>
{
#region Descriptive Properties
@@ -25,18 +22,26 @@ namespace SabreTools.Serialization.Wrappers
#region Constructors
/// <inheritdoc/>
public BSP(BspFile model, byte[] data) : base(model, data) { }
/// <inheritdoc/>
public BSP(BspFile model, byte[] data, int offset) : base(model, data, offset) { }
/// <inheritdoc/>
public BSP(BspFile model, byte[] data, int offset, int length) : base(model, data, offset, length) { }
/// <inheritdoc/>
public BSP(BspFile model, Stream data) : base(model, data) { }
/// <inheritdoc/>
public BSP(BspFile model, Stream data, long offset) : base(model, data, offset) { }
/// <inheritdoc/>
public BSP(BspFile model, Stream data, long offset, long length) : base(model, data, offset, length) { }
#endregion
#region Static Constructors
/// <summary>
/// Create a BSP from a byte array and offset
@@ -75,12 +80,11 @@ namespace SabreTools.Serialization.Wrappers
// Cache the current offset
long currentOffset = data.Position;
var model = new Deserializers.BSP().Deserialize(data);
if (model == null)
return null;
data.Seek(currentOffset, SeekOrigin.Begin);
return new BSP(model, data, currentOffset);
}
catch
{
@@ -89,94 +93,5 @@ namespace SabreTools.Serialization.Wrappers
}
#endregion
#region Extraction
/// <inheritdoc/>
public bool Extract(string outputDirectory, bool includeDebug)
{
// If we have no lumps
if (Lumps == null || Lumps.Length == 0)
return false;
// Loop through and extract all lumps to the output
bool allExtracted = true;
for (int i = 0; i < Lumps.Length; i++)
{
allExtracted &= ExtractLump(i, outputDirectory, includeDebug);
}
return allExtracted;
}
/// <summary>
/// Extract a lump from the BSP to an output directory by index
/// </summary>
/// <param name="index">Lump index to extract</param>
/// <param name="outputDirectory">Output directory to write to</param>
/// <param name="includeDebug">True to include debug data, false otherwise</param>
/// <returns>True if the lump extracted, false otherwise</returns>
public bool ExtractLump(int index, string outputDirectory, bool includeDebug)
{
// If we have no lumps
if (Lumps == null || Lumps.Length == 0)
return false;
// If the lumps index is invalid
if (index < 0 || index >= Lumps.Length)
return false;
// Read the data
var lump = Lumps[index];
var data = _dataSource.ReadFrom(lump.Offset, lump.Length, retainPosition: true);
if (data == null)
return false;
// Create the filename
string filename = $"lump_{index}.bin";
switch ((LumpType)index)
{
case LumpType.LUMP_ENTITIES:
filename = "entities.ent";
break;
case LumpType.LUMP_TEXTURES:
filename = "texture_data.bin";
break;
}
// If we have an invalid output directory
if (string.IsNullOrEmpty(outputDirectory))
return false;
// Ensure directory separators are consistent
if (Path.DirectorySeparatorChar == '\\')
filename = filename.Replace('/', '\\');
else if (Path.DirectorySeparatorChar == '/')
filename = filename.Replace('\\', '/');
// Ensure the full output directory exists
filename = Path.Combine(outputDirectory, filename);
var directoryName = Path.GetDirectoryName(filename);
if (directoryName != null && !Directory.Exists(directoryName))
Directory.CreateDirectory(directoryName);
// Try to write the data
try
{
// Open the output file for writing
using Stream fs = File.OpenWrite(filename);
fs.Write(data, 0, data.Length);
fs.Flush();
}
catch (Exception ex)
{
if (includeDebug) Console.Error.WriteLine(ex);
return false;
}
return true;
}
#endregion
}
}

View File

@@ -0,0 +1,53 @@
using System;
using System.IO;
using SabreTools.IO.Compression.BZip2;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Wrappers
{
/// <summary>
/// This is a shell wrapper; one that does not contain
/// any actual parsing. It is used as a placeholder for
/// types that typically do not have models.
/// </summary>
public partial class BZip2 : IExtractable
{
/// <inheritdoc/>
public bool Extract(string outputDirectory, bool includeDebug)
{
if (_dataSource == null || !_dataSource.CanRead)
return false;
try
{
// Try opening the stream
using var bz2File = new BZip2InputStream(_dataSource, true);
// Ensure directory separators are consistent
string filename = Guid.NewGuid().ToString();
if (Path.DirectorySeparatorChar == '\\')
filename = filename.Replace('/', '\\');
else if (Path.DirectorySeparatorChar == '/')
filename = filename.Replace('\\', '/');
// Ensure the full output directory exists
filename = Path.Combine(outputDirectory, filename);
var directoryName = Path.GetDirectoryName(filename);
if (directoryName != null && !Directory.Exists(directoryName))
Directory.CreateDirectory(directoryName);
// Extract the file
using var fs = File.Open(filename, FileMode.Create, FileAccess.Write, FileShare.None);
bz2File.CopyTo(fs);
fs.Flush();
return true;
}
catch (Exception ex)
{
if (includeDebug) Console.Error.WriteLine(ex);
return false;
}
}
}
}

View File

@@ -1,7 +1,5 @@
using SabreTools.Models.BZip2;
namespace SabreTools.Serialization.Wrappers
{
@@ -10,7 +8,7 @@ namespace SabreTools.Serialization.Wrappers
/// any actual parsing. It is used as a placeholder for
/// types that typically do not have models.
/// </summary>
public partial class BZip2 : WrapperBase<Archive>
{
#region Descriptive Properties
@@ -22,18 +20,26 @@ namespace SabreTools.Serialization.Wrappers
#region Constructors
/// <inheritdoc/>
public BZip2(byte[]? data, int offset)
: base(data, offset)
{
// All logic is handled by the base class
}
public BZip2(Archive model, byte[] data) : base(model, data) { }
/// <inheritdoc/>
public BZip2(Stream? data)
: base(data)
{
// All logic is handled by the base class
}
public BZip2(Archive model, byte[] data, int offset) : base(model, data, offset) { }
/// <inheritdoc/>
public BZip2(Archive model, byte[] data, int offset, int length) : base(model, data, offset, length) { }
/// <inheritdoc/>
public BZip2(Archive model, Stream data) : base(model, data) { }
/// <inheritdoc/>
public BZip2(Archive model, Stream data, long offset) : base(model, data, offset) { }
/// <inheritdoc/>
public BZip2(Archive model, Stream data, long offset, long length) : base(model, data, offset, length) { }
#endregion
#region Static Constructors
/// <summary>
/// Create a BZip2 archive from a byte array and offset
@@ -67,7 +73,7 @@ namespace SabreTools.Serialization.Wrappers
if (data == null || !data.CanRead)
return null;
return new BZip2(data);
return new BZip2(new Archive(), data);
}
#endregion
@@ -76,52 +82,10 @@ namespace SabreTools.Serialization.Wrappers
#if NETCOREAPP
/// <inheritdoc/>
public override string ExportJSON() => throw new NotImplementedException();
public override string ExportJSON() => throw new System.NotImplementedException();
#endif
#endregion
#region Extraction
/// <inheritdoc/>
public bool Extract(string outputDirectory, bool includeDebug)
{
if (_dataSource == null || !_dataSource.CanRead)
return false;
try
{
// Try opening the stream
using var bz2File = new BZip2InputStream(_dataSource, true);
// Ensure directory separators are consistent
string filename = Guid.NewGuid().ToString();
if (Path.DirectorySeparatorChar == '\\')
filename = filename.Replace('/', '\\');
else if (Path.DirectorySeparatorChar == '/')
filename = filename.Replace('\\', '/');
// Ensure the full output directory exists
filename = Path.Combine(outputDirectory, filename);
var directoryName = Path.GetDirectoryName(filename);
if (directoryName != null && !Directory.Exists(directoryName))
Directory.CreateDirectory(directoryName);
// Extract the file
using FileStream fs = File.OpenWrite(filename);
bz2File.CopyTo(fs);
fs.Flush();
return true;
}
catch (Exception ex)
{
if (includeDebug) Console.Error.WriteLine(ex);
return false;
}
}
#endregion
}
}

View File

@@ -0,0 +1,135 @@
using System;
using System.IO;
using System.Text;
using SabreTools.IO.Extensions;
using SabreTools.Models.CFB;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Wrappers
{
public partial class CFB : IExtractable
{
/// <inheritdoc/>
public bool Extract(string outputDirectory, bool includeDebug)
{
// If we have no files
if (DirectoryEntries == null || DirectoryEntries.Length == 0)
return false;
// Loop through and extract all directory entries to the output
bool allExtracted = true;
for (int i = 0; i < DirectoryEntries.Length; i++)
{
allExtracted &= ExtractEntry(i, outputDirectory, includeDebug);
}
return allExtracted;
}
/// <summary>
/// Extract a file from the CFB to an output directory by index
/// </summary>
/// <param name="index">Entry index to extract</param>
/// <param name="outputDirectory">Output directory to write to</param>
/// <param name="includeDebug">True to include debug data, false otherwise</param>
/// <returns>True if the file extracted, false otherwise</returns>
public bool ExtractEntry(int index, string outputDirectory, bool includeDebug)
{
// If we have no entries
if (DirectoryEntries == null || DirectoryEntries.Length == 0)
return false;
// If we have an invalid index
if (index < 0 || index >= DirectoryEntries.Length)
return false;
// Get the entry information
var entry = DirectoryEntries[index];
if (entry == null)
return false;
// Only try to extract stream objects
if (entry.ObjectType != ObjectType.StreamObject)
return true;
// Get the entry data
byte[]? data = GetDirectoryEntryData(entry);
if (data == null)
return false;
// If we have an invalid output directory
if (string.IsNullOrEmpty(outputDirectory))
return false;
// Ensure the output filename is trimmed
string filename = entry.Name ?? $"entry{index}";
byte[] nameBytes = Encoding.UTF8.GetBytes(filename);
if (nameBytes.Length >= 3 && nameBytes[0] == 0xe4 && nameBytes[1] == 0xa1 && nameBytes[2] == 0x80)
filename = Encoding.UTF8.GetString(nameBytes, 3, nameBytes.Length - 3);
foreach (char c in Path.GetInvalidFileNameChars())
{
filename = filename.Replace(c, '_');
}
// Ensure directory separators are consistent
if (Path.DirectorySeparatorChar == '\\')
filename = filename.Replace('/', '\\');
else if (Path.DirectorySeparatorChar == '/')
filename = filename.Replace('\\', '/');
// Ensure the full output directory exists
filename = Path.Combine(outputDirectory, filename);
var directoryName = Path.GetDirectoryName(filename);
if (directoryName != null && !Directory.Exists(directoryName))
Directory.CreateDirectory(directoryName);
// Try to write the data
try
{
// Open the output file for writing
using var fs = File.Open(filename, FileMode.Create, FileAccess.Write, FileShare.None);
fs.Write(data);
fs.Flush();
}
catch (Exception ex)
{
if (includeDebug) Console.Error.WriteLine(ex);
return false;
}
return true;
}
/// <summary>
/// Read the entry data for a single directory entry, if possible
/// </summary>
/// <param name="entry">Entry to try to retrieve data for</param>
/// <returns>Byte array representing the entry data on success, null otherwise</returns>
private byte[]? GetDirectoryEntryData(DirectoryEntry entry)
{
// If the CFB is invalid
if (Header == null)
return null;
// Only try to extract stream objects
if (entry.ObjectType != ObjectType.StreamObject)
return null;
// Determine which FAT is being used
bool miniFat = entry.StreamSize < Header.MiniStreamCutoffSize;
// Get the chain data
var chain = miniFat
? GetMiniFATSectorChainData((SectorNumber)entry.StartingSectorLocation)
: GetFATSectorChainData((SectorNumber)entry.StartingSectorLocation);
if (chain == null)
return null;
// Return only the proper amount of data
byte[] data = new byte[entry.StreamSize];
Array.Copy(chain, 0, data, 0, (int)Math.Min(chain.Length, (long)entry.StreamSize));
return data;
}
}
}
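A usage sketch for the relocated CFB extractor, assuming a compound file on disk (paths are placeholders):

using System;
using System.IO;
using SabreTools.Serialization.Wrappers;

// Walk the directory entries; stream objects are written out, and
// non-stream entries report success without writing any data.
using Stream input = File.OpenRead("installer.msi");
var cfb = CFB.Create(input);
if (cfb?.DirectoryEntries != null)
{
    for (int i = 0; i < cfb.DirectoryEntries.Length; i++)
    {
        if (!cfb.ExtractEntry(i, "out", includeDebug: false))
            Console.Error.WriteLine($"Entry {i} failed to extract");
    }
}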

View File

@@ -1,14 +1,12 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
using SabreTools.IO.Extensions;
using SabreTools.Models.CFB;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Wrappers
{
public class CFB : WrapperBase<Binary>, IExtractable
public partial class CFB : WrapperBase<Binary>
{
#region Descriptive Properties
@@ -71,18 +69,26 @@ namespace SabreTools.Serialization.Wrappers
#region Constructors
/// <inheritdoc/>
public CFB(Binary? model, byte[]? data, int offset)
: base(model, data, offset)
{
// All logic is handled by the base class
}
public CFB(Binary model, byte[] data) : base(model, data) { }
/// <inheritdoc/>
public CFB(Binary? model, Stream? data)
: base(model, data)
{
// All logic is handled by the base class
}
public CFB(Binary model, byte[] data, int offset) : base(model, data, offset) { }
/// <inheritdoc/>
public CFB(Binary model, byte[] data, int offset, int length) : base(model, data, offset, length) { }
/// <inheritdoc/>
public CFB(Binary model, Stream data) : base(model, data) { }
/// <inheritdoc/>
public CFB(Binary model, Stream data, long offset) : base(model, data, offset) { }
/// <inheritdoc/>
public CFB(Binary model, Stream data, long offset, long length) : base(model, data, offset, length) { }
#endregion
#region Static Constructors
/// <summary>
/// Create a Compound File Binary from a byte array and offset
@@ -121,12 +127,11 @@ namespace SabreTools.Serialization.Wrappers
// Cache the current offset
long currentOffset = data.Position;
var model = Deserializers.CFB.DeserializeStream(data);
var model = new Deserializers.CFB().Deserialize(data);
if (model == null)
return null;
data.Seek(currentOffset, SeekOrigin.Begin);
return new CFB(model, data);
return new CFB(model, data, currentOffset);
}
catch
{
@@ -136,133 +141,6 @@ namespace SabreTools.Serialization.Wrappers
#endregion
#region Extraction
/// <inheritdoc/>
public bool Extract(string outputDirectory, bool includeDebug)
{
// If we have no files
if (DirectoryEntries == null || DirectoryEntries.Length == 0)
return false;
// Loop through and extract all directory entries to the output
bool allExtracted = true;
for (int i = 0; i < DirectoryEntries.Length; i++)
{
allExtracted &= ExtractEntry(i, outputDirectory, includeDebug);
}
return allExtracted;
}
/// <summary>
/// Extract a file from the CFB to an output directory by index
/// </summary>
/// <param name="index">Entry index to extract</param>
/// <param name="outputDirectory">Output directory to write to</param>
/// <param name="includeDebug">True to include debug data, false otherwise</param>
/// <returns>True if the file extracted, false otherwise</returns>
public bool ExtractEntry(int index, string outputDirectory, bool includeDebug)
{
// If we have no entries
if (DirectoryEntries == null || DirectoryEntries.Length == 0)
return false;
// If we have an invalid index
if (index < 0 || index >= DirectoryEntries.Length)
return false;
// Get the entry information
var entry = DirectoryEntries[index];
if (entry == null)
return false;
// Only try to extract stream objects
if (entry.ObjectType != ObjectType.StreamObject)
return true;
// Get the entry data
byte[]? data = GetDirectoryEntryData(entry);
if (data == null)
return false;
// If we have an invalid output directory
if (string.IsNullOrEmpty(outputDirectory))
return false;
// Ensure the output filename is trimmed
string filename = entry.Name ?? $"entry{index}";
byte[] nameBytes = Encoding.UTF8.GetBytes(filename);
if (nameBytes[0] == 0xe4 && nameBytes[1] == 0xa1 && nameBytes[2] == 0x80)
filename = Encoding.UTF8.GetString(nameBytes, 3, nameBytes.Length - 3);
foreach (char c in Path.GetInvalidFileNameChars())
{
filename = filename.Replace(c, '_');
}
// Ensure directory separators are consistent
if (Path.DirectorySeparatorChar == '\\')
filename = filename.Replace('/', '\\');
else if (Path.DirectorySeparatorChar == '/')
filename = filename.Replace('\\', '/');
// Ensure the full output directory exists
filename = Path.Combine(outputDirectory, filename);
var directoryName = Path.GetDirectoryName(filename);
if (directoryName != null && !Directory.Exists(directoryName))
Directory.CreateDirectory(directoryName);
// Try to write the data
try
{
// Open the output file for writing
using FileStream fs = File.OpenWrite(filename);
fs.Write(data);
fs.Flush();
}
catch (Exception ex)
{
if (includeDebug) Console.Error.WriteLine(ex);
return false;
}
return true;
}
/// <summary>
/// Read the entry data for a single directory entry, if possible
/// </summary>
/// <param name="entry">Entry to try to retrieve data for</param>
/// <returns>Byte array representing the entry data on success, null otherwise</returns>
private byte[]? GetDirectoryEntryData(DirectoryEntry entry)
{
// If the CFB is invalid
if (Header == null)
return null;
// Only try to extract stream objects
if (entry.ObjectType != ObjectType.StreamObject)
return null;
// Determine which FAT is being used
bool miniFat = entry.StreamSize < Header.MiniStreamCutoffSize;
// Get the chain data
var chain = miniFat
? GetMiniFATSectorChainData((SectorNumber)entry.StartingSectorLocation)
: GetFATSectorChainData((SectorNumber)entry.StartingSectorLocation);
if (chain == null)
return null;
// Return only the proper amount of data
byte[] data = new byte[entry.StreamSize];
Array.Copy(chain, 0, data, 0, (int)Math.Min(chain.Length, (long)entry.StreamSize));
return data;
}
#endregion
#region FAT Sector Data
/// <summary>
@@ -322,8 +200,8 @@ namespace SabreTools.Serialization.Wrappers
return null;
// Try to read the sector data
var sectorData = _dataSource.ReadFrom(sectorDataOffset, (int)SectorSize, retainPosition: true);
if (sectorData == null)
var sectorData = ReadRangeFromSource(sectorDataOffset, (int)SectorSize);
if (sectorData.Length == 0)
return null;
// Add the sector data to the output

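The swap from `_dataSource.ReadFrom(..., retainPosition: true)` to `ReadRangeFromSource` here (and in the GCF and InstallShield V3 hunks below) implies a contract of never-null reads: an empty array signals failure, and the caller's position is preserved. An illustrative sketch of that assumed contract, not the library's implementation; the class and method names are hypothetical:

using System;
using System.IO;

internal static class SourceReading
{
    // Assumed contract: never null, empty on any failure, position preserved.
    public static byte[] ReadRange(Stream source, long offset, int length)
    {
        if (!source.CanSeek || offset < 0 || length <= 0 || offset + length > source.Length)
            return Array.Empty<byte>();

        long saved = source.Position; // retain the caller's position
        try
        {
            source.Seek(offset, SeekOrigin.Begin);
            byte[] data = new byte[length];
            int read = 0;
            while (read < length)
            {
                int n = source.Read(data, read, length - read);
                if (n == 0)
                    return Array.Empty<byte>(); // short read counts as failure
                read += n;
            }
            return data;
        }
        finally
        {
            source.Position = saved;
        }
    }
}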
View File

@@ -60,18 +60,26 @@ namespace SabreTools.Serialization.Wrappers
#region Constructors
/// <inheritdoc/>
public CHD(Header? model, byte[]? data, int offset)
: base(model, data, offset)
{
// All logic is handled by the base class
}
public CHD(Header model, byte[] data) : base(model, data) { }
/// <inheritdoc/>
public CHD(Header? model, Stream? data)
: base(model, data)
{
// All logic is handled by the base class
}
public CHD(Header model, byte[] data, int offset) : base(model, data, offset) { }
/// <inheritdoc/>
public CHD(Header model, byte[] data, int offset, int length) : base(model, data, offset, length) { }
/// <inheritdoc/>
public CHD(Header model, Stream data) : base(model, data) { }
/// <inheritdoc/>
public CHD(Header model, Stream data, long offset) : base(model, data, offset) { }
/// <inheritdoc/>
public CHD(Header model, Stream data, long offset, long length) : base(model, data, offset, length) { }
#endregion
#region Static Constructors
/// <summary>
/// Create a CHD header from a byte array and offset
@@ -110,12 +118,11 @@ namespace SabreTools.Serialization.Wrappers
// Cache the current offset
long currentOffset = data.Position;
var model = Deserializers.CHD.DeserializeStream(data);
var model = new Deserializers.CHD().Deserialize(data);
if (model == null)
return null;
data.Seek(currentOffset, SeekOrigin.Begin);
return new CHD(model, data);
return new CHD(model, data, currentOffset);
}
catch
{

View File

@@ -15,18 +15,26 @@ namespace SabreTools.Serialization.Wrappers
#region Constructors
/// <inheritdoc/>
public CIA(Models.N3DS.CIA? model, byte[]? data, int offset)
: base(model, data, offset)
{
// All logic is handled by the base class
}
public CIA(Models.N3DS.CIA model, byte[] data) : base(model, data) { }
/// <inheritdoc/>
public CIA(Models.N3DS.CIA? model, Stream? data)
: base(model, data)
{
// All logic is handled by the base class
}
public CIA(Models.N3DS.CIA model, byte[] data, int offset) : base(model, data, offset) { }
/// <inheritdoc/>
public CIA(Models.N3DS.CIA model, byte[] data, int offset, int length) : base(model, data, offset, length) { }
/// <inheritdoc/>
public CIA(Models.N3DS.CIA model, Stream data) : base(model, data) { }
/// <inheritdoc/>
public CIA(Models.N3DS.CIA model, Stream data, long offset) : base(model, data, offset) { }
/// <inheritdoc/>
public CIA(Models.N3DS.CIA model, Stream data, long offset, long length) : base(model, data, offset, length) { }
#endregion
#region Static Constructors
/// <summary>
/// Create a CIA archive from a byte array and offset
@@ -65,12 +73,11 @@ namespace SabreTools.Serialization.Wrappers
// Cache the current offset
long currentOffset = data.Position;
var model = Deserializers.CIA.DeserializeStream(data);
var model = new Deserializers.CIA().Deserialize(data);
if (model == null)
return null;
data.Seek(currentOffset, SeekOrigin.Begin);
return new CIA(model, data);
return new CIA(model, data, currentOffset);
}
catch
{

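The constructor refactor in these hunks threads `currentOffset` through so a wrapper can sit at any position inside a larger stream. A hypothetical usage sketch; the path and offset are placeholders:

using System.IO;
using SabreTools.Serialization.Wrappers;

// A CIA archive embedded at a non-zero offset in a container file.
// Because Create passes the cached offset to the constructor, wrapper
// reads resolve relative to where the archive starts, not to position 0.
using Stream container = File.OpenRead("bundle.bin");
container.Seek(0x4000, SeekOrigin.Begin); // assumed embedded-archive offset
var cia = CIA.Create(container);
// cia is null if parsing failed; otherwise it addresses data at
// 0x4000 plus each model-relative offset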
View File

@@ -0,0 +1,114 @@
using System;
using System.Collections.Generic;
using System.IO;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Wrappers
{
public partial class GCF : IExtractable
{
/// <inheritdoc/>
public bool Extract(string outputDirectory, bool includeDebug)
{
// If we have no files
if (Files == null || Files.Length == 0)
return false;
// Loop through and extract all files to the output
bool allExtracted = true;
for (int i = 0; i < Files.Length; i++)
{
allExtracted &= ExtractFile(i, outputDirectory, includeDebug);
}
return allExtracted;
}
/// <summary>
/// Extract a file from the GCF to an output directory by index
/// </summary>
/// <param name="index">File index to extract</param>
/// <param name="outputDirectory">Output directory to write to</param>
/// <param name="includeDebug">True to include debug data, false otherwise</param>
/// <returns>True if the file extracted, false otherwise</returns>
public bool ExtractFile(int index, string outputDirectory, bool includeDebug)
{
// If we have no files
if (Files == null || Files.Length == 0 || DataBlockOffsets == null)
return false;
// If the file index is invalid
if (index < 0 || index >= Files.Length)
return false;
// Get the file
var file = Files[index];
if (file?.BlockEntries == null || file.Size == 0)
return false;
// If the file is encrypted -- TODO: Revisit later
if (file.Encrypted)
return false;
// Get all data block offsets needed for extraction
var dataBlockOffsets = new List<long>();
for (int i = 0; i < file.BlockEntries.Length; i++)
{
var blockEntry = file.BlockEntries[i];
uint dataBlockIndex = blockEntry.FirstDataBlockIndex;
long blockEntrySize = blockEntry.FileDataSize;
while (blockEntrySize > 0)
{
long dataBlockOffset = DataBlockOffsets[dataBlockIndex++];
dataBlockOffsets.Add(dataBlockOffset);
blockEntrySize -= BlockSize;
}
}
// If we have an invalid output directory
if (string.IsNullOrEmpty(outputDirectory))
return false;
// Ensure directory separators are consistent
string filename = file.Path ?? $"file{index}";
if (Path.DirectorySeparatorChar == '\\')
filename = filename.Replace('/', '\\');
else if (Path.DirectorySeparatorChar == '/')
filename = filename.Replace('\\', '/');
// Ensure the full output directory exists
filename = Path.Combine(outputDirectory, filename);
var directoryName = Path.GetDirectoryName(filename);
if (directoryName != null && !Directory.Exists(directoryName))
Directory.CreateDirectory(directoryName);
// Try to write the data
try
{
// Open the output file for writing
using var fs = File.Open(filename, FileMode.Create, FileAccess.Write, FileShare.None);
// Now read the data sequentially and write out while we have data left
long fileSize = file.Size;
for (int i = 0; i < dataBlockOffsets.Count; i++)
{
int readSize = (int)Math.Min(BlockSize, fileSize);
var data = ReadRangeFromSource((int)dataBlockOffsets[i], readSize);
if (data.Length == 0)
return false;
fs.Write(data, 0, data.Length);
fs.Flush();
// Track the remaining size so the final block is not over-read
fileSize -= readSize;
}
}
catch (Exception ex)
{
if (includeDebug) Console.Error.WriteLine(ex);
return false;
}
return true;
}
}
}
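The offset-gathering loop above turns each block entry into a run of absolute data-block offsets, one per BlockSize chunk of the entry's data. A standalone sketch of that arithmetic with hypothetical values:

using System;
using System.Collections.Generic;

// ceil(FileDataSize / BlockSize) offsets are collected per block entry.
const long BlockSize = 0x2000;                                // assumed block size
long[] dataBlockOffsets = { 0x0000, 0x2000, 0x4000, 0x6000 }; // assumed table

uint dataBlockIndex = 1;      // hypothetical FirstDataBlockIndex
long blockEntrySize = 0x5000; // hypothetical FileDataSize

var offsets = new List<long>();
while (blockEntrySize > 0)
{
    offsets.Add(dataBlockOffsets[dataBlockIndex++]);
    blockEntrySize -= BlockSize;
}

// 0x5000 bytes over 0x2000-byte blocks yields three offsets:
// 0x2000, 0x4000, 0x6000
Console.WriteLine(string.Join(", ", offsets.ConvertAll(o => $"0x{o:X}")));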

View File

@@ -1,12 +1,9 @@
using System;
using System.Collections.Generic;
using System.IO;
using SabreTools.IO.Extensions;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Wrappers
{
public class GCF : WrapperBase<Models.GCF.File>, IExtractable
public partial class GCF : WrapperBase<Models.GCF.File>
{
#region Descriptive Properties
@@ -167,18 +164,26 @@ namespace SabreTools.Serialization.Wrappers
#region Constructors
/// <inheritdoc/>
public GCF(Models.GCF.File? model, byte[]? data, int offset)
: base(model, data, offset)
{
// All logic is handled by the base class
}
public GCF(Models.GCF.File model, byte[] data) : base(model, data) { }
/// <inheritdoc/>
public GCF(Models.GCF.File? model, Stream? data)
: base(model, data)
{
// All logic is handled by the base class
}
public GCF(Models.GCF.File model, byte[] data, int offset) : base(model, data, offset) { }
/// <inheritdoc/>
public GCF(Models.GCF.File model, byte[] data, int offset, int length) : base(model, data, offset, length) { }
/// <inheritdoc/>
public GCF(Models.GCF.File model, Stream data) : base(model, data) { }
/// <inheritdoc/>
public GCF(Models.GCF.File model, Stream data, long offset) : base(model, data, offset) { }
/// <inheritdoc/>
public GCF(Models.GCF.File model, Stream data, long offset, long length) : base(model, data, offset, length) { }
#endregion
#region Static Constructors
/// <summary>
/// Create a GCF from a byte array and offset
@@ -217,12 +222,11 @@ namespace SabreTools.Serialization.Wrappers
// Cache the current offset
long currentOffset = data.Position;
var model = Deserializers.GCF.DeserializeStream(data);
var model = new Deserializers.GCF().Deserialize(data);
if (model == null)
return null;
data.Seek(currentOffset, SeekOrigin.Begin);
return new GCF(model, data);
return new GCF(model, data, currentOffset);
}
catch
{
@@ -232,114 +236,6 @@ namespace SabreTools.Serialization.Wrappers
#endregion
#region Extraction
/// <inheritdoc/>
public bool Extract(string outputDirectory, bool includeDebug)
{
// If we have no files
if (Files == null || Files.Length == 0)
return false;
// Loop through and extract all files to the output
bool allExtracted = true;
for (int i = 0; i < Files.Length; i++)
{
allExtracted &= ExtractFile(i, outputDirectory, includeDebug);
}
return allExtracted;
}
/// <summary>
/// Extract a file from the GCF to an output directory by index
/// </summary>
/// <param name="index">File index to extract</param>
/// <param name="outputDirectory">Output directory to write to</param>
/// <param name="includeDebug">True to include debug data, false otherwise</param>
/// <returns>True if the file extracted, false otherwise</returns>
public bool ExtractFile(int index, string outputDirectory, bool includeDebug)
{
// If we have no files
if (Files == null || Files.Length == 0 || DataBlockOffsets == null)
return false;
// If the files index is invalid
if (index < 0 || index >= Files.Length)
return false;
// Get the file
var file = Files[index];
if (file?.BlockEntries == null || file.Size == 0)
return false;
// If the file is encrypted -- TODO: Revisit later
if (file.Encrypted)
return false;
// Get all data block offsets needed for extraction
var dataBlockOffsets = new List<long>();
for (int i = 0; i < file.BlockEntries.Length; i++)
{
var blockEntry = file.BlockEntries[i];
uint dataBlockIndex = blockEntry.FirstDataBlockIndex;
long blockEntrySize = blockEntry.FileDataSize;
while (blockEntrySize > 0)
{
long dataBlockOffset = DataBlockOffsets[dataBlockIndex++];
dataBlockOffsets.Add(dataBlockOffset);
blockEntrySize -= BlockSize;
}
}
// If we have an invalid output directory
if (string.IsNullOrEmpty(outputDirectory))
return false;
// Ensure directory separators are consistent
string filename = file.Path ?? $"file{index}";
if (Path.DirectorySeparatorChar == '\\')
filename = filename.Replace('/', '\\');
else if (Path.DirectorySeparatorChar == '/')
filename = filename.Replace('\\', '/');
// Ensure the full output directory exists
filename = Path.Combine(outputDirectory, filename);
var directoryName = Path.GetDirectoryName(filename);
if (directoryName != null && !Directory.Exists(directoryName))
Directory.CreateDirectory(directoryName);
// Try to write the data
try
{
// Open the output file for writing
using Stream fs = File.OpenWrite(filename);
// Now read the data sequentially and write out while we have data left
long fileSize = file.Size;
for (int i = 0; i < dataBlockOffsets.Count; i++)
{
int readSize = (int)Math.Min(BlockSize, fileSize);
var data = _dataSource.ReadFrom((int)dataBlockOffsets[i], readSize, retainPosition: true);
if (data == null)
return false;
fs.Write(data, 0, data.Length);
fs.Flush();
}
}
catch (Exception ex)
{
if (includeDebug) Console.Error.WriteLine(ex);
return false;
}
return true;
}
#endregion
#region Helper Classes
/// <summary>

View File

@@ -0,0 +1,71 @@
using System;
using System.IO;
using SabreTools.IO.Compression.Deflate;
using SabreTools.Models.GZIP;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Wrappers
{
public partial class GZip : IExtractable
{
/// <inheritdoc/>
public bool Extract(string outputDirectory, bool includeDebug)
{
// Ensure there is data to extract
if (Header == null || DataOffset < 0)
{
if (includeDebug) Console.Error.WriteLine("Invalid archive detected, skipping...");
return false;
}
// Ensure that DEFLATE is being used
if (Header.CompressionMethod != CompressionMethod.Deflate)
{
if (includeDebug) Console.Error.WriteLine($"Invalid compression method {Header.CompressionMethod} detected, only DEFLATE is supported. Skipping...");
return false;
}
try
{
// Seek to the start of the compressed data
long offset = _dataSource.Seek(DataOffset, SeekOrigin.Begin);
if (offset != DataOffset)
{
if (includeDebug) Console.Error.WriteLine($"Could not seek to compressed data at {DataOffset}");
return false;
}
// Ensure directory separators are consistent
string filename = Header.OriginalFileName
?? (Filename != null ? Path.GetFileName(Filename).Replace(".gz", string.Empty) : null)
?? "extracted_file";
if (Path.DirectorySeparatorChar == '\\')
filename = filename.Replace('/', '\\');
else if (Path.DirectorySeparatorChar == '/')
filename = filename.Replace('\\', '/');
// Ensure the full output directory exists
filename = Path.Combine(outputDirectory, filename);
var directoryName = Path.GetDirectoryName(filename);
if (directoryName != null && !Directory.Exists(directoryName))
Directory.CreateDirectory(directoryName);
// Open the source as a DEFLATE stream
using var deflateStream = new DeflateStream(_dataSource, CompressionMode.Decompress, leaveOpen: true);
// Write the file
using var fs = File.Open(filename, FileMode.Create, FileAccess.Write, FileShare.None);
deflateStream.CopyTo(fs);
fs.Flush();
return true;
}
catch (Exception ex)
{
if (includeDebug) Console.Error.WriteLine(ex);
return false;
}
}
}
}
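A usage sketch for the relocated GZip extractor; per the guards above it handles single-member, DEFLATE-compressed archives only (the path is a placeholder):

using System.IO;
using SabreTools.Serialization.Wrappers;

// The output name comes from the gzip header's OriginalFileName when
// present, else the source filename minus ".gz", else "extracted_file".
using Stream input = File.OpenRead("data.bin.gz");
var gz = GZip.Create(input);
bool ok = gz?.Extract("out", includeDebug: true) ?? false;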

View File

@@ -1,12 +1,9 @@
using System;
using System.IO;
using SabreTools.IO.Compression.Deflate;
using SabreTools.Models.GZIP;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Wrappers
{
public class GZip : WrapperBase<Archive>, IExtractable
public partial class GZip : WrapperBase<Archive>
{
#region Descriptive Properties
@@ -74,18 +71,26 @@ namespace SabreTools.Serialization.Wrappers
#region Constructors
/// <inheritdoc/>
public GZip(Archive? model, byte[]? data, int offset)
: base(model, data, offset)
{
// All logic is handled by the base class
}
public GZip(Archive model, byte[] data) : base(model, data) { }
/// <inheritdoc/>
public GZip(Archive? model, Stream? data)
: base(model, data)
{
// All logic is handled by the base class
}
public GZip(Archive model, byte[] data, int offset) : base(model, data, offset) { }
/// <inheritdoc/>
public GZip(Archive model, byte[] data, int offset, int length) : base(model, data, offset, length) { }
/// <inheritdoc/>
public GZip(Archive model, Stream data) : base(model, data) { }
/// <inheritdoc/>
public GZip(Archive model, Stream data, long offset) : base(model, data, offset) { }
/// <inheritdoc/>
public GZip(Archive model, Stream data, long offset, long length) : base(model, data, offset, length) { }
#endregion
#region Static Constructors
/// <summary>
/// Create a GZip archive from a byte array and offset
@@ -124,12 +129,11 @@ namespace SabreTools.Serialization.Wrappers
// Cache the current offset
long currentOffset = data.Position;
var model = Deserializers.GZip.DeserializeStream(data);
var model = new Deserializers.GZip().Deserialize(data);
if (model == null)
return null;
data.Seek(currentOffset, SeekOrigin.Begin);
return new GZip(model, data);
return new GZip(model, data, currentOffset);
}
catch
{
@@ -138,69 +142,5 @@ namespace SabreTools.Serialization.Wrappers
}
#endregion
#region Extraction
/// <inheritdoc/>
public bool Extract(string outputDirectory, bool includeDebug)
{
// Ensure there is data to extract
if (Header == null || DataOffset < 0)
{
if (includeDebug) Console.Error.WriteLine("Invalid archive detected, skipping...");
return false;
}
// Ensure that DEFLATE is being used
if (Header.CompressionMethod != CompressionMethod.Deflate)
{
if (includeDebug) Console.Error.WriteLine($"Invalid compression method {Header.CompressionMethod} detected, only DEFLATE is supported. Skipping...");
return false;
}
try
{
// Seek to the start of the compressed data
long offset = _dataSource.Seek(DataOffset, SeekOrigin.Begin);
if (offset != DataOffset)
{
if (includeDebug) Console.Error.WriteLine($"Could not seek to compressed data at {DataOffset}");
return false;
}
// Ensure directory separators are consistent
string filename = Header.OriginalFileName
?? (Filename != null ? Path.GetFileName(Filename).Replace(".gz", string.Empty) : null)
?? $"extracted_file";
if (Path.DirectorySeparatorChar == '\\')
filename = filename.Replace('/', '\\');
else if (Path.DirectorySeparatorChar == '/')
filename = filename.Replace('\\', '/');
// Ensure the full output directory exists
filename = Path.Combine(outputDirectory, filename);
var directoryName = Path.GetDirectoryName(filename);
if (directoryName != null && !Directory.Exists(directoryName))
Directory.CreateDirectory(directoryName);
// Open the source as a DEFLATE stream
var deflateStream = new DeflateStream(_dataSource, CompressionMode.Decompress, leaveOpen: true);
// Write the file
using var fs = File.Open(filename, FileMode.Create, FileAccess.Write, FileShare.None);
deflateStream.CopyTo(fs);
fs.Flush();
return true;
}
catch (Exception ex)
{
if (includeDebug) Console.Error.WriteLine(ex);
return false;
}
}
#endregion
}
}

View File

@@ -14,18 +14,26 @@ namespace SabreTools.Serialization.Wrappers
#region Constructors
/// <inheritdoc/>
public IRD(Models.IRD.File? model, byte[]? data, int offset)
: base(model, data, offset)
{
// All logic is handled by the base class
}
public IRD(Models.IRD.File model, byte[] data) : base(model, data) { }
/// <inheritdoc/>
public IRD(Models.IRD.File? model, Stream? data)
: base(model, data)
{
// All logic is handled by the base class
}
public IRD(Models.IRD.File model, byte[] data, int offset) : base(model, data, offset) { }
/// <inheritdoc/>
public IRD(Models.IRD.File model, byte[] data, int offset, int length) : base(model, data, offset, length) { }
/// <inheritdoc/>
public IRD(Models.IRD.File model, Stream data) : base(model, data) { }
/// <inheritdoc/>
public IRD(Models.IRD.File model, Stream data, long offset) : base(model, data, offset) { }
/// <inheritdoc/>
public IRD(Models.IRD.File model, Stream data, long offset, long length) : base(model, data, offset, length) { }
#endregion
#region Static Constructors
/// <summary>
/// Create an IRD from a byte array and offset
@@ -64,12 +72,11 @@ namespace SabreTools.Serialization.Wrappers
// Cache the current offset
long currentOffset = data.Position;
var model = Deserializers.IRD.DeserializeStream(data);
var model = new Deserializers.IRD().Deserialize(data);
if (model == null)
return null;
data.Seek(currentOffset, SeekOrigin.Begin);
return new IRD(model, data);
return new IRD(model, data, currentOffset);
}
catch
{

View File

@@ -0,0 +1,127 @@
using System;
using System.IO;
using SabreTools.IO.Compression.Blast;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Wrappers
{
/// <remarks>
/// Reference (de)compressor: https://www.sac.sk/download/pack/icomp95.zip
/// </remarks>
/// <see href="https://github.com/wfr/unshieldv3"/>
public partial class InstallShieldArchiveV3 : IExtractable
{
/// <inheritdoc/>
public bool Extract(string outputDirectory, bool includeDebug)
{
// Get the file count
int fileCount = Files.Length;
if (fileCount == 0)
return false;
// Loop through and extract all files to the output
bool allExtracted = true;
for (int i = 0; i < fileCount; i++)
{
allExtracted &= ExtractFile(i, outputDirectory, includeDebug);
}
return allExtracted;
}
/// <summary>
/// Extract a file from the ISAv3 to an output directory by index
/// </summary>
/// <param name="index">File index to extract</param>
/// <param name="outputDirectory">Output directory to write to</param>
/// <param name="includeDebug">True to include debug data, false otherwise</param>
/// <returns>True if the file extracted, false otherwise</returns>
public bool ExtractFile(int index, string outputDirectory, bool includeDebug)
{
// If the file index is invalid
if (index < 0 || index >= FileCount)
return false;
// Get the file
var file = Files[index];
if (file == null)
return false;
// Create the filename
var filename = file.Name;
if (filename == null)
return false;
// Get the directory index
int dirIndex = FileDirMap[index];
if (dirIndex < 0 || dirIndex >= DirCount)
return false;
// Get the directory name
var dirName = Directories[dirIndex].Name;
if (dirName != null)
filename = Path.Combine(dirName, filename);
// Get and adjust the file offset
long fileOffset = file.Offset + DataStart;
if (fileOffset < 0 || fileOffset >= Length)
return false;
// Get the file sizes
long fileSize = file.CompressedSize;
long outputFileSize = file.UncompressedSize;
// Read the compressed data directly
var compressedData = ReadRangeFromSource((int)fileOffset, (int)fileSize);
if (compressedData.Length == 0)
return false;
// If the compressed and uncompressed sizes match
byte[] data;
if (fileSize == outputFileSize)
{
data = compressedData;
}
else
{
// Decompress the data
var decomp = Decompressor.Create();
using var outData = new MemoryStream();
decomp.CopyTo(compressedData, outData);
data = outData.ToArray();
}
// If we have an invalid output directory
if (string.IsNullOrEmpty(outputDirectory))
return false;
// Ensure directory separators are consistent
if (Path.DirectorySeparatorChar == '\\')
filename = filename.Replace('/', '\\');
else if (Path.DirectorySeparatorChar == '/')
filename = filename.Replace('\\', '/');
// Ensure the full output directory exists
filename = Path.Combine(outputDirectory, filename);
var directoryName = Path.GetDirectoryName(filename);
if (directoryName != null && !Directory.Exists(directoryName))
Directory.CreateDirectory(directoryName);
// Try to write the data
try
{
// Open the output file for writing
using var fs = File.Open(filename, FileMode.Create, FileAccess.Write, FileShare.None);
fs.Write(data, 0, data.Length);
fs.Flush();
}
catch (Exception ex)
{
if (includeDebug) Console.Error.WriteLine(ex);
return false;
}
return true;
}
}
}
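The stored-vs-compressed branch above is the crux of the V3 format: entries whose compressed and uncompressed sizes match were stored as-is, everything else is PKWARE-implode data that the Blast decompressor expands. A sketch isolating that decision, reusing only the Decompressor calls visible in the diff; the wrapper class name is hypothetical:

using System.IO;
using SabreTools.IO.Compression.Blast;

internal static class Isv3Expansion
{
    public static byte[] ExpandEntry(byte[] compressedData, long compressedSize, long expandedSize)
    {
        // Equal sizes: the entry was stored without compression
        if (compressedSize == expandedSize)
            return compressedData;

        // Otherwise expand via Blast into an in-memory buffer
        var decomp = Decompressor.Create();
        using var outData = new MemoryStream();
        decomp.CopyTo(compressedData, outData);
        return outData.ToArray();
    }
}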

View File

@@ -1,10 +1,6 @@
using System;
using System.Collections.Generic;
using System.IO;
using SabreTools.IO.Compression.Blast;
using SabreTools.IO.Extensions;
using SabreTools.Models.InstallShieldArchiveV3;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Wrappers
{
@@ -12,7 +8,7 @@ namespace SabreTools.Serialization.Wrappers
/// Reference (de)compressor: https://www.sac.sk/download/pack/icomp95.zip
/// </remarks>
/// <see href="https://github.com/wfr/unshieldv3"/>
public partial class InstallShieldArchiveV3 : WrapperBase<Archive>, IExtractable
public partial class InstallShieldArchiveV3 : WrapperBase<Archive>
{
#region Descriptive Properties
@@ -112,18 +108,26 @@ namespace SabreTools.Serialization.Wrappers
#region Constructors
/// <inheritdoc/>
public InstallShieldArchiveV3(Archive? model, byte[]? data, int offset)
: base(model, data, offset)
{
// All logic is handled by the base class
}
public InstallShieldArchiveV3(Archive model, byte[] data) : base(model, data) { }
/// <inheritdoc/>
public InstallShieldArchiveV3(Archive? model, Stream? data)
: base(model, data)
{
// All logic is handled by the base class
}
public InstallShieldArchiveV3(Archive model, byte[] data, int offset) : base(model, data, offset) { }
/// <inheritdoc/>
public InstallShieldArchiveV3(Archive model, byte[] data, int offset, int length) : base(model, data, offset, length) { }
/// <inheritdoc/>
public InstallShieldArchiveV3(Archive model, Stream data) : base(model, data) { }
/// <inheritdoc/>
public InstallShieldArchiveV3(Archive model, Stream data, long offset) : base(model, data, offset) { }
/// <inheritdoc/>
public InstallShieldArchiveV3(Archive model, Stream data, long offset, long length) : base(model, data, offset, length) { }
#endregion
#region Static Constructors
/// <summary>
/// Create an InstallShield Archive V3 from a byte array and offset
@@ -162,12 +166,11 @@ namespace SabreTools.Serialization.Wrappers
// Cache the current offset
long currentOffset = data.Position;
var model = Deserializers.InstallShieldArchiveV3.DeserializeStream(data);
var model = new Deserializers.InstallShieldArchiveV3().Deserialize(data);
if (model == null)
return null;
data.Seek(currentOffset, SeekOrigin.Begin);
return new InstallShieldArchiveV3(model, data);
return new InstallShieldArchiveV3(model, data, currentOffset);
}
catch
{
@@ -176,122 +179,5 @@ namespace SabreTools.Serialization.Wrappers
}
#endregion
#region Extraction
/// <inheritdoc/>
public bool Extract(string outputDirectory, bool includeDebug)
{
// Get the file count
int fileCount = Files.Length;
if (fileCount == 0)
return false;
// Loop through and extract all files to the output
bool allExtracted = true;
for (int i = 0; i < fileCount; i++)
{
allExtracted &= ExtractFile(i, outputDirectory, includeDebug);
}
return allExtracted;
}
/// <summary>
/// Extract a file from the ISAv3 to an output directory by index
/// </summary>
/// <param name="index">File index to extract</param>
/// <param name="outputDirectory">Output directory to write to</param>
/// <param name="includeDebug">True to include debug data, false otherwise</param>
/// <returns>True if the file extracted, false otherwise</returns>
public bool ExtractFile(int index, string outputDirectory, bool includeDebug)
{
// If the files index is invalid
if (index < 0 || index >= FileCount)
return false;
// Get the file
var file = Files[index];
if (file == null)
return false;
// Create the filename
var filename = file.Name;
if (filename == null)
return false;
// Get the directory index
int dirIndex = FileDirMap[index];
if (dirIndex < 0 || dirIndex > DirCount)
return false;
// Get the directory name
var dirName = Directories[dirIndex].Name;
if (dirName != null)
filename = Path.Combine(dirName, filename);
// Get and adjust the file offset
long fileOffset = file.Offset + DataStart;
if (fileOffset < 0 || fileOffset >= Length)
return false;
// Get the file sizes
long fileSize = file.CompressedSize;
long outputFileSize = file.UncompressedSize;
// Read the compressed data directly
var compressedData = _dataSource.ReadFrom((int)fileOffset, (int)fileSize, retainPosition: true);
if (compressedData == null)
return false;
// If the compressed and uncompressed sizes match
byte[] data;
if (fileSize == outputFileSize)
{
data = compressedData;
}
else
{
// Decompress the data
var decomp = Decompressor.Create();
using var outData = new MemoryStream();
decomp.CopyTo(compressedData, outData);
data = outData.ToArray();
}
// If we have an invalid output directory
if (string.IsNullOrEmpty(outputDirectory))
return false;
// Ensure directory separators are consistent
if (Path.DirectorySeparatorChar == '\\')
filename = filename.Replace('/', '\\');
else if (Path.DirectorySeparatorChar == '/')
filename = filename.Replace('\\', '/');
// Ensure the full output directory exists
filename = Path.Combine(outputDirectory, filename);
var directoryName = Path.GetDirectoryName(filename);
if (directoryName != null && !System.IO.Directory.Exists(directoryName))
System.IO.Directory.CreateDirectory(directoryName);
// Try to write the data
try
{
// Open the output file for writing
using Stream fs = System.IO.File.OpenWrite(filename);
fs.Write(data, 0, data.Length);
fs.Flush();
}
catch (Exception ex)
{
if (includeDebug) Console.Error.WriteLine(ex);
return false;
}
return false;
}
#endregion
}
}

View File

@@ -0,0 +1,838 @@
using System;
using System.IO;
using System.Text.RegularExpressions;
using SabreTools.Hashing;
using SabreTools.IO.Compression.zlib;
using SabreTools.Models.InstallShieldCabinet;
using SabreTools.Serialization.Interfaces;
using static SabreTools.Models.InstallShieldCabinet.Constants;
namespace SabreTools.Serialization.Wrappers
{
public partial class InstallShieldCabinet : WrapperBase<Cabinet>, IExtractable
{
#region Extension Properties
/// <summary>
/// Reference to the next cabinet header
/// </summary>
/// <remarks>Only used in multi-file</remarks>
public InstallShieldCabinet? Next { get; set; }
/// <summary>
/// Reference to the previous cabinet header
/// </summary>
/// <remarks>Only used in multi-file</remarks>
public InstallShieldCabinet? Prev { get; set; }
/// <summary>
/// Volume index ID, 0 for headers
/// </summary>
/// <remarks>Only used in multi-file</remarks>
public ushort VolumeID { get; set; }
#endregion
#region Extraction State
/// <summary>
/// Base filename path for related CAB files
/// </summary>
internal string? FilenamePattern { get; set; }
#endregion
#region Constants
/// <summary>
/// Default buffer size
/// </summary>
private const int BUFFER_SIZE = 64 * 1024;
/// <summary>
/// Maximum size of the window in bits
/// </summary>
private const int MAX_WBITS = 15;
#endregion
#region Cabinet Set
/// <summary>
/// Open a cabinet set for reading, if possible
/// </summary>
/// <param name="pattern">Filename pattern for matching cabinet files</param>
/// <returns>Wrapper representing the set, null on error</returns>
public static InstallShieldCabinet? OpenSet(string? pattern)
{
// An invalid pattern means no cabinet files
if (string.IsNullOrEmpty(pattern))
return null;
// Create a placeholder wrapper for output
InstallShieldCabinet? set = null;
// Loop until there are no parts left
bool iterate = true;
InstallShieldCabinet? previous = null;
for (ushort i = 1; iterate; i++)
{
var file = OpenFileForReading(pattern, i, HEADER_SUFFIX);
if (file != null)
iterate = false;
else
file = OpenFileForReading(pattern, i, CABINET_SUFFIX);
if (file == null)
break;
var current = Create(file);
if (current == null)
break;
current.VolumeID = i;
if (previous != null)
{
previous.Next = current;
current.Prev = previous;
}
else
{
set = current;
}
// Advance the chain so later volumes link to the most recent one
previous = current;
}
// Set the pattern, if possible
if (set != null)
set.FilenamePattern = pattern;
return set;
}
/// <summary>
/// Open the numbered cabinet set volume
/// </summary>
/// <param name="volumeId">Volume ID, 1-indexed</param>
/// <param name="volumeStream">Stream backing the opened volume, null on error</param>
/// <returns>Wrapper representing the volume on success, null otherwise</returns>
public InstallShieldCabinet? OpenVolume(ushort volumeId, out Stream? volumeStream)
{
// Normalize the volume ID for odd cases
if (volumeId == ushort.MinValue || volumeId == ushort.MaxValue)
volumeId = 1;
// Try to open the file as a stream
volumeStream = OpenFileForReading(FilenamePattern, volumeId, CABINET_SUFFIX);
if (volumeStream == null)
{
Console.Error.WriteLine($"Failed to open input cabinet file {volumeId}");
return null;
}
// Try to parse the stream into a cabinet
var volume = Create(volumeStream);
if (volume == null)
{
Console.Error.WriteLine($"Failed to parse input cabinet file {volumeId}");
return null;
}
// Set the volume ID and return
volume.VolumeID = volumeId;
return volume;
}
/// <summary>
/// Open a cabinet file for reading
/// </summary>
/// <param name="index">Cabinet part index to be opened</param>
/// <param name="suffix">Cabinet files suffix (e.g. `.cab`)</param>
/// <returns>A Stream representing the cabinet part, null on error</returns>
public Stream? OpenFileForReading(int index, string suffix)
=> OpenFileForReading(FilenamePattern, index, suffix);
/// <summary>
/// Create the generic filename pattern to look for from the input filename
/// </summary>
/// <returns>String representing the filename pattern for a cabinet set, null on error</returns>
private static string? CreateFilenamePattern(string filename)
{
string? pattern = null;
if (string.IsNullOrEmpty(filename))
return pattern;
string? directory = Path.GetDirectoryName(Path.GetFullPath(filename));
if (directory != null)
pattern = Path.Combine(directory, Path.GetFileNameWithoutExtension(filename));
else
pattern = Path.GetFileNameWithoutExtension(filename);
return new Regex(@"\d+$").Replace(pattern, string.Empty);
}
/// <summary>
/// Open a cabinet file for reading
/// </summary>
/// <param name="pattern">Filename pattern for matching cabinet files</param>
/// <param name="index">Cabinet part index to be opened</param>
/// <param name="suffix">Cabinet files suffix (e.g. `.cab`)</param>
/// <returns>A Stream representing the cabinet part, null on error</returns>
private static Stream? OpenFileForReading(string? pattern, int index, string suffix)
{
// An invalid pattern means no cabinet files
if (string.IsNullOrEmpty(pattern))
return null;
// Attempt lower-case extension
string filename = $"{pattern}{index}.{suffix}";
if (File.Exists(filename))
return File.Open(filename, FileMode.Open, FileAccess.Read, FileShare.ReadWrite);
// Attempt upper-case extension
filename = $"{pattern}{index}.{suffix.ToUpperInvariant()}";
if (File.Exists(filename))
return File.Open(filename, FileMode.Open, FileAccess.Read, FileShare.ReadWrite);
return null;
}
#endregion
#region Extraction
/// <inheritdoc/>
public bool Extract(string outputDirectory, bool includeDebug)
{
// Open the full set if possible
var cabinet = this;
if (Filename != null)
{
// Get the name of the first cabinet file or header
string pattern = CreateFilenamePattern(Filename)!;
bool cabinetHeaderExists = File.Exists(pattern + "1.hdr");
bool shouldScanCabinet = cabinetHeaderExists
? Filename.Equals(pattern + "1.hdr", StringComparison.OrdinalIgnoreCase)
: Filename.Equals(pattern + "1.cab", StringComparison.OrdinalIgnoreCase);
// If we have anything but the first file
if (!shouldScanCabinet)
return false;
// Open the set from the pattern
cabinet = OpenSet(pattern);
}
// If the cabinet set could not be opened
if (cabinet == null)
return false;
try
{
for (int i = 0; i < cabinet.FileCount; i++)
{
try
{
// Check if the file is valid first
if (!cabinet.FileIsValid(i))
continue;
// Ensure directory separators are consistent
string filename = cabinet.GetFileName(i) ?? $"BAD_FILENAME{i}";
if (Path.DirectorySeparatorChar == '\\')
filename = filename.Replace('/', '\\');
else if (Path.DirectorySeparatorChar == '/')
filename = filename.Replace('\\', '/');
// Ensure the full output directory exists
filename = Path.Combine(outputDirectory, filename);
var directoryName = Path.GetDirectoryName(filename);
if (directoryName != null && !Directory.Exists(directoryName))
Directory.CreateDirectory(directoryName);
cabinet.FileSave(i, filename, includeDebug);
}
catch (Exception ex)
{
if (includeDebug) Console.Error.WriteLine(ex);
}
}
return true;
}
catch (Exception ex)
{
if (includeDebug) Console.Error.WriteLine(ex);
return false;
}
}
/// <summary>
/// Save the file at the given index to the filename specified
/// </summary>
public bool FileSave(int index, string filename, bool includeDebug, bool useOld = false)
{
// Get the file descriptor
if (!TryGetFileDescriptor(index, out var fileDescriptor) || fileDescriptor == null)
return false;
// If the file is split
if (fileDescriptor.LinkFlags == LinkFlags.LINK_PREV)
return FileSave((int)fileDescriptor.LinkPrevious, filename, includeDebug, useOld);
// Get the reader at the index
var reader = Reader.Create(this, index, fileDescriptor);
if (reader == null)
return false;
// Create the output file and hasher
using var fs = File.Open(filename, FileMode.Create, FileAccess.Write, FileShare.None);
var md5 = new HashWrapper(HashType.MD5);
long readBytesLeft = (long)GetReadableBytes(fileDescriptor);
long writeBytesLeft = (long)GetWritableBytes(fileDescriptor);
byte[] inputBuffer;
byte[] outputBuffer = new byte[BUFFER_SIZE];
long totalWritten = 0;
// Read while there are bytes remaining
while (readBytesLeft > 0 && writeBytesLeft > 0)
{
long bytesToWrite = BUFFER_SIZE;
int result;
// Handle compressed files
#if NET20 || NET35
if ((fileDescriptor.Flags & FileFlags.FILE_COMPRESSED) != 0)
#else
if (fileDescriptor.Flags.HasFlag(FileFlags.FILE_COMPRESSED))
#endif
{
// Attempt to read the length value
byte[] lengthArr = new byte[sizeof(ushort)];
if (!reader.Read(lengthArr, 0, lengthArr.Length))
{
Console.Error.WriteLine($"Failed to read {lengthArr.Length} bytes of file {index} ({GetFileName(index)}) from input cabinet file {fileDescriptor.Volume}");
reader.Dispose();
fs?.Close();
return false;
}
// Attempt to read the specified number of bytes
ushort bytesToRead = BitConverter.ToUInt16(lengthArr, 0);
inputBuffer = new byte[BUFFER_SIZE + 1];
if (!reader.Read(inputBuffer, 0, bytesToRead))
{
Console.Error.WriteLine($"Failed to read {bytesToRead} bytes of file {index} ({GetFileName(index)}) from input cabinet file {fileDescriptor.Volume}");
reader.Dispose();
fs?.Close();
return false;
}
// Add a null byte to make inflate happy
inputBuffer[bytesToRead] = 0;
ulong readBytes = (ulong)(bytesToRead + 1);
// Uncompress into a buffer
if (useOld)
result = UncompressOld(outputBuffer, ref bytesToWrite, inputBuffer, ref readBytes);
else
result = Uncompress(outputBuffer, ref bytesToWrite, inputBuffer, ref readBytes);
// Accept Z_OK, and tolerate Z_DATA_ERROR as a known false positive
if (result != zlibConst.Z_OK && result != zlibConst.Z_DATA_ERROR)
{
Console.Error.WriteLine($"Decompression failed with code {result.ToZlibConstName()}. bytes_to_read={bytesToRead}, volume={fileDescriptor.Volume}, read_bytes={readBytes}");
reader.Dispose();
fs?.Close();
return false;
}
// Set remaining bytes
readBytesLeft -= 2;
readBytesLeft -= bytesToRead;
}
// Handle uncompressed files
else
{
bytesToWrite = Math.Min(readBytesLeft, BUFFER_SIZE);
if (!reader.Read(outputBuffer, 0, (int)bytesToWrite))
{
Console.Error.WriteLine($"Failed to read {bytesToWrite} bytes from input cabinet file {fileDescriptor.Volume}");
reader.Dispose();
fs?.Close();
return false;
}
// Set remaining bytes
readBytesLeft -= (uint)bytesToWrite;
}
// Hash and write the next block
bytesToWrite = Math.Min(bytesToWrite, writeBytesLeft);
md5.Process(outputBuffer, 0, (int)bytesToWrite);
fs?.Write(outputBuffer, 0, (int)bytesToWrite);
totalWritten += bytesToWrite;
writeBytesLeft -= bytesToWrite;
}
// Validate the number of bytes written
if ((long)fileDescriptor.ExpandedSize != totalWritten)
if (includeDebug) Console.WriteLine($"Expanded size of file {index} ({GetFileName(index)}) expected to be {fileDescriptor.ExpandedSize}, but was {totalWritten}");
// Finalize output values
md5.Terminate();
reader?.Dispose();
fs?.Close();
// Validate the data written, if required
if (MajorVersion >= 6)
{
string expectedMd5 = BitConverter.ToString(fileDescriptor.MD5!);
expectedMd5 = expectedMd5.ToLowerInvariant().Replace("-", string.Empty);
string? actualMd5 = md5.CurrentHashString;
if (actualMd5 == null || actualMd5 != expectedMd5)
{
Console.Error.WriteLine($"MD5 checksum failure for file {index} ({GetFileName(index)})");
return false;
}
}
return true;
}
/// <summary>
/// Save the file at the given index to the filename specified as raw
/// </summary>
public bool FileSaveRaw(int index, string filename)
{
// Get the file descriptor
if (!TryGetFileDescriptor(index, out var fileDescriptor) || fileDescriptor == null)
return false;
// If the file is split
if (fileDescriptor.LinkFlags == LinkFlags.LINK_PREV)
return FileSaveRaw((int)fileDescriptor.LinkPrevious, filename);
// Get the reader at the index
var reader = Reader.Create(this, index, fileDescriptor);
if (reader == null)
return false;
// Create the output file
using var fs = File.Open(filename, FileMode.Create, FileAccess.Write, FileShare.None);
ulong bytesLeft = GetReadableBytes(fileDescriptor);
byte[] outputBuffer = new byte[BUFFER_SIZE];
// Read while there are bytes remaining
while (bytesLeft > 0)
{
ulong bytesToWrite = Math.Min(bytesLeft, BUFFER_SIZE);
if (!reader.Read(outputBuffer, 0, (int)bytesToWrite))
{
Console.Error.WriteLine($"Failed to read {bytesToWrite} bytes from input cabinet file {fileDescriptor.Volume}");
reader.Dispose();
fs?.Close();
return false;
}
// Set remaining bytes
bytesLeft -= (uint)bytesToWrite;
// Write the next block
fs.Write(outputBuffer, 0, (int)bytesToWrite);
}
// Finalize output values
reader.Dispose();
fs?.Close();
return true;
}
/// <summary>
/// Uncompress a source byte array to a destination
/// </summary>
private unsafe static int Uncompress(byte[] dest, ref long destLen, byte[] source, ref ulong sourceLen)
{
fixed (byte* sourcePtr = source)
fixed (byte* destPtr = dest)
{
var stream = new ZLib.z_stream_s
{
next_in = sourcePtr,
avail_in = (uint)sourceLen,
next_out = destPtr,
avail_out = (uint)destLen,
};
// make second parameter negative to disable checksum verification
int err = ZLib.inflateInit2_(stream, -MAX_WBITS, ZLib.zlibVersion(), source.Length);
if (err != zlibConst.Z_OK)
return err;
err = ZLib.inflate(stream, 1);
if (err != zlibConst.Z_OK && err != zlibConst.Z_STREAM_END)
{
ZLib.inflateEnd(stream);
return err;
}
destLen = stream.total_out;
sourceLen = stream.total_in;
return ZLib.inflateEnd(stream);
}
}
/// <summary>
/// Uncompress a source byte array to a destination (old version)
/// </summary>
private unsafe static int UncompressOld(byte[] dest, ref long destLen, byte[] source, ref ulong sourceLen)
{
fixed (byte* sourcePtr = source)
fixed (byte* destPtr = dest)
{
var stream = new ZLib.z_stream_s
{
next_in = sourcePtr,
avail_in = (uint)sourceLen,
next_out = destPtr,
avail_out = (uint)destLen,
};
destLen = 0;
sourceLen = 0;
// make second parameter negative to disable checksum verification
int err = ZLib.inflateInit2_(stream, -MAX_WBITS, ZLib.zlibVersion(), source.Length);
if (err != zlibConst.Z_OK)
return err;
while (stream.avail_in > 1)
{
err = ZLib.inflate(stream, 1);
if (err != zlibConst.Z_OK)
{
ZLib.inflateEnd(stream);
return err;
}
}
destLen = stream.total_out;
sourceLen = stream.total_in;
return ZLib.inflateEnd(stream);
}
}
#endregion
#region Obfuscation
/// <summary>
/// Deobfuscate a buffer
/// </summary>
public static void Deobfuscate(byte[] buffer, long size, ref uint offset)
{
offset = Deobfuscate(buffer, size, offset);
}
/// <summary>
/// Deobfuscate a buffer with a seed value
/// </summary>
/// <remarks>Seed is 0 at file start</remarks>
public static uint Deobfuscate(byte[] buffer, long size, uint seed)
{
for (int i = 0; size > 0; size--, i++, seed++)
{
buffer[i] = (byte)(ROR8(buffer[i] ^ 0xd5, 2) - (seed % 0x47));
}
return seed;
}
/// <summary>
/// Obfuscate a buffer
/// </summary>
public static void Obfuscate(byte[] buffer, long size, ref uint offset)
{
offset = Obfuscate(buffer, size, offset);
}
/// <summary>
/// Obfuscate a buffer with a seed value
/// </summary>
/// <remarks>Seed is 0 at file start</remarks>
public static uint Obfuscate(byte[] buffer, long size, uint seed)
{
for (int i = 0; size > 0; size--, i++, seed++)
{
// Exact inverse of Deobfuscate above: add the seed back, rotate left, then XOR
buffer[i] = (byte)(ROL8((int)((buffer[i] + (seed % 0x47)) & 0xFF), 2) ^ 0xd5);
}
return seed;
}
/// <summary>
/// Rotate Right 8
/// </summary>
private static int ROR8(int x, byte n) => (x >> n) | (x << (8 - n));
/// <summary>
/// Rotate Left 8
/// </summary>
private static int ROL8(int x, byte n) => (x << n) | (x >> (8 - n));
#endregion
#region Helper Classes
/// <summary>
/// Helper to read a single file from a cabinet set
/// </summary>
private class Reader : IDisposable
{
#region Private Instance Variables
/// <summary>
/// Cabinet file to read from
/// </summary>
private readonly InstallShieldCabinet _cabinet;
/// <summary>
/// Currently selected index
/// </summary>
private readonly uint _index;
/// <summary>
/// File descriptor defining the currently selected index
/// </summary>
private readonly FileDescriptor _fileDescriptor;
/// <summary>
/// Offset in the data where the file exists
/// </summary>
private ulong _dataOffset;
/// <summary>
/// Number of bytes left in the current volume
/// </summary>
private ulong _volumeBytesLeft;
/// <summary>
/// Handle to the current volume stream
/// </summary>
private Stream? _volumeFile;
/// <summary>
/// Current volume header
/// </summary>
private VolumeHeader? _volumeHeader;
/// <summary>
/// Current volume ID
/// </summary>
private ushort _volumeId;
/// <summary>
/// Offset for obfuscation seed
/// </summary>
private uint _obfuscationOffset;
#endregion
#region Constructors
private Reader(InstallShieldCabinet cabinet, uint index, FileDescriptor fileDescriptor)
{
_cabinet = cabinet;
_index = index;
_fileDescriptor = fileDescriptor;
}
#endregion
/// <summary>
/// Create a new <see cref="Reader"/> from an existing cabinet, index, and file descriptor
/// </summary>
public static Reader? Create(InstallShieldCabinet cabinet, int index, FileDescriptor fileDescriptor)
{
var reader = new Reader(cabinet, (uint)index, fileDescriptor);
for (; ; )
{
// If the volume is invalid
if (!reader.OpenVolume(fileDescriptor.Volume))
{
Console.Error.WriteLine($"Failed to open volume {fileDescriptor.Volume}");
return null;
}
else if (reader._volumeFile == null || reader._volumeHeader == null)
{
Console.Error.WriteLine($"Volume {fileDescriptor.Volume} is invalid");
return null;
}
// Start with the correct volume for IS5 cabinets
if (reader._cabinet.MajorVersion <= 5 && index > (int)reader._volumeHeader.LastFileIndex)
{
// Normalize the volume ID for odd cases
if (fileDescriptor.Volume == ushort.MinValue || fileDescriptor.Volume == ushort.MaxValue)
fileDescriptor.Volume = 1;
fileDescriptor.Volume++;
continue;
}
break;
}
return reader;
}
/// <summary>
/// Dispose of the current object
/// </summary>
public void Dispose()
{
_volumeFile?.Close();
}
#region Reading
/// <summary>
/// Read a certain number of bytes from the current volume
/// </summary>
public bool Read(byte[] buffer, int start, long size)
{
long bytesLeft = size;
while (bytesLeft > 0)
{
// Open the next volume, if necessary
if (_volumeBytesLeft == 0)
{
if (!OpenNextVolume(out _))
return false;
}
// Get the number of bytes to read from this volume
int bytesToRead = (int)Math.Min(bytesLeft, (long)_volumeBytesLeft);
if (bytesToRead == 0)
break;
// Read as much as possible from this volume
if (bytesToRead != _volumeFile!.Read(buffer, start, bytesToRead))
return false;
// Set the number of bytes left
bytesLeft -= bytesToRead;
_volumeBytesLeft -= (uint)bytesToRead;
}
#if NET20 || NET35
if ((_fileDescriptor.Flags & FileFlags.FILE_OBFUSCATED) != 0)
#else
if (_fileDescriptor.Flags.HasFlag(FileFlags.FILE_OBFUSCATED))
#endif
Deobfuscate(buffer, size, ref _obfuscationOffset);
return true;
}
/// <summary>
/// Open the next volume based on the current index
/// </summary>
private bool OpenNextVolume(out ushort nextVolume)
{
nextVolume = (ushort)(_volumeId + 1);
return OpenVolume(nextVolume);
}
/// <summary>
/// Open the volume at the given index
/// </summary>
private bool OpenVolume(ushort volume)
{
// Read the volume from the cabinet set
var next = _cabinet.OpenVolume(volume, out var volumeStream);
if (next?.VolumeHeader == null || volumeStream == null)
{
Console.Error.WriteLine($"Failed to open input cabinet file {volume}");
return false;
}
// Assign the next items
_volumeFile?.Close();
_volumeFile = volumeStream;
_volumeHeader = next.VolumeHeader;
// Enable support for split archives for IS5
if (_cabinet.MajorVersion == 5)
{
if (_index < (_cabinet.FileCount - 1)
&& _index == _volumeHeader.LastFileIndex
&& _volumeHeader.LastFileSizeCompressed != _fileDescriptor.CompressedSize)
{
_fileDescriptor.Flags |= FileFlags.FILE_SPLIT;
}
else if (_index > 0
&& _index == _volumeHeader.FirstFileIndex
&& _volumeHeader.FirstFileSizeCompressed != _fileDescriptor.CompressedSize)
{
_fileDescriptor.Flags |= FileFlags.FILE_SPLIT;
}
}
ulong volumeBytesLeftCompressed, volumeBytesLeftExpanded;
#if NET20 || NET35
if ((_fileDescriptor.Flags & FileFlags.FILE_SPLIT) != 0)
#else
if (_fileDescriptor.Flags.HasFlag(FileFlags.FILE_SPLIT))
#endif
{
if (_index == _volumeHeader.LastFileIndex && _volumeHeader.LastFileOffset != 0x7FFFFFFF)
{
// This entry can also be the first file in the volume
_dataOffset = _volumeHeader.LastFileOffset;
volumeBytesLeftExpanded = _volumeHeader.LastFileSizeExpanded;
volumeBytesLeftCompressed = _volumeHeader.LastFileSizeCompressed;
}
else if (_index == _volumeHeader.FirstFileIndex)
{
_dataOffset = _volumeHeader.FirstFileOffset;
volumeBytesLeftExpanded = _volumeHeader.FirstFileSizeExpanded;
volumeBytesLeftCompressed = _volumeHeader.FirstFileSizeCompressed;
}
else
{
return true;
}
}
else
{
_dataOffset = _fileDescriptor.DataOffset;
volumeBytesLeftExpanded = _fileDescriptor.ExpandedSize;
volumeBytesLeftCompressed = _fileDescriptor.CompressedSize;
}
#if NET20 || NET35
if ((_fileDescriptor.Flags & FileFlags.FILE_COMPRESSED) != 0)
#else
if (_fileDescriptor.Flags.HasFlag(FileFlags.FILE_COMPRESSED))
#endif
_volumeBytesLeft = volumeBytesLeftCompressed;
else
_volumeBytesLeft = volumeBytesLeftExpanded;
_volumeFile.Seek((long)_dataOffset, SeekOrigin.Begin);
_volumeId = volume;
return true;
}
#endregion
}
#endregion
}
}
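For context, the ref-seed overloads above exist so a caller can deobfuscate a payload in chunks: the seed advances by one per processed byte, so consecutive calls resume at the correct position. A minimal sketch, with a hypothetical input file and chunk split (assumes at least 16 bytes of data):

using System;
using System.IO;
using SabreTools.Serialization.Wrappers;

byte[] data = File.ReadAllBytes("obfuscated.bin"); // hypothetical input
byte[] head = new byte[16];
byte[] tail = new byte[data.Length - head.Length];
Array.Copy(data, 0, head, 0, head.Length);
Array.Copy(data, head.Length, tail, 0, tail.Length);

uint seed = 0; // seed is 0 at file start, per the remarks above
InstallShieldCabinet.Deobfuscate(head, head.Length, ref seed);
InstallShieldCabinet.Deobfuscate(tail, tail.Length, ref seed); // resumes at byte 16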


@@ -1,15 +1,11 @@
using System;
using System.IO;
using System.Text.RegularExpressions;
using SabreTools.Hashing;
using SabreTools.IO.Compression.zlib;
using SabreTools.Models.InstallShieldCabinet;
using SabreTools.Serialization.Interfaces;
using static SabreTools.Models.InstallShieldCabinet.Constants;
using SabreTools.Serialization.Extensions;
namespace SabreTools.Serialization.Wrappers
{
public partial class InstallShieldCabinet : WrapperBase<Cabinet>, IExtractable
public partial class InstallShieldCabinet : WrapperBase<Cabinet>
{
#region Descriptive Properties
@@ -68,64 +64,31 @@ namespace SabreTools.Serialization.Wrappers
/// <inheritdoc cref="Cabinet.VolumeHeader"/>
public VolumeHeader? VolumeHeader => Model.VolumeHeader;
/// <summary>
/// Reference to the next cabinet header
/// </summary>
/// <remarks>Only used in multi-file</remarks>
public InstallShieldCabinet? Next { get; set; }
/// <summary>
/// Reference to the previous cabinet header
/// </summary>
/// <remarks>Only used in multi-file</remarks>
public InstallShieldCabinet? Prev { get; set; }
/// <summary>
/// Volume index ID, 0 for headers
/// </summary>
/// <remarks>Only used in multi-file</remarks>
public ushort VolumeID { get; set; }
#endregion
#region Extraction State
/// <summary>
/// Base filename path for related CAB files
/// </summary>
internal string? FilenamePattern { get; set; }
#endregion
#region Constants
/// <summary>
/// Default buffer size
/// </summary>
private const int BUFFER_SIZE = 64 * 1024;
/// <summary>
/// Maximum size of the window in bits
/// </summary>
private const int MAX_WBITS = 15;
#endregion
#region Constructors
/// <inheritdoc/>
public InstallShieldCabinet(Cabinet? model, byte[]? data, int offset)
: base(model, data, offset)
{
// All logic is handled by the base class
}
public InstallShieldCabinet(Cabinet model, byte[] data) : base(model, data) { }
/// <inheritdoc/>
public InstallShieldCabinet(Cabinet? model, Stream? data)
: base(model, data)
{
// All logic is handled by the base class
}
public InstallShieldCabinet(Cabinet model, byte[] data, int offset) : base(model, data, offset) { }
/// <inheritdoc/>
public InstallShieldCabinet(Cabinet model, byte[] data, int offset, int length) : base(model, data, offset, length) { }
/// <inheritdoc/>
public InstallShieldCabinet(Cabinet model, Stream data) : base(model, data) { }
/// <inheritdoc/>
public InstallShieldCabinet(Cabinet model, Stream data, long offset) : base(model, data, offset) { }
/// <inheritdoc/>
public InstallShieldCabinet(Cabinet model, Stream data, long offset, long length) : base(model, data, offset, length) { }
#endregion
#region Static Constructors
/// <summary>
/// Create an InstallShield Cabinet from a byte array and offset
@@ -164,12 +127,11 @@ namespace SabreTools.Serialization.Wrappers
// Cache the current offset
long currentOffset = data.Position;
var model = Deserializers.InstallShieldCabinet.DeserializeStream(data);
var model = new Deserializers.InstallShieldCabinet().Deserialize(data);
if (model == null)
return null;
data.Seek(currentOffset, SeekOrigin.Begin);
return new InstallShieldCabinet(model, data);
return new InstallShieldCabinet(model, data, currentOffset);
}
catch
{
@@ -179,148 +141,6 @@ namespace SabreTools.Serialization.Wrappers
#endregion
#region Cabinet Set
/// <summary>
/// Open a cabinet set for reading, if possible
/// </summary>
/// <param name="pattern">Filename pattern for matching cabinet files</param>
/// <returns>Wrapper representing the set, null on error</returns>
public static InstallShieldCabinet? OpenSet(string? pattern)
{
// An invalid pattern means no cabinet files
if (string.IsNullOrEmpty(pattern))
return null;
// Create a placeholder wrapper for output
InstallShieldCabinet? set = null;
// Loop until there are no parts left
bool iterate = true;
InstallShieldCabinet? previous = null;
for (ushort i = 1; iterate; i++)
{
var file = OpenFileForReading(pattern, i, HEADER_SUFFIX);
if (file != null)
iterate = false;
else
file = OpenFileForReading(pattern, i, CABINET_SUFFIX);
if (file == null)
break;
var current = Create(file);
if (current == null)
break;
current.VolumeID = i;
if (previous != null)
{
previous.Next = current;
current.Prev = previous;
}
else
{
set = current;
}
// Advance so the next volume links to this part, not the first
previous = current;
}
// Set the pattern, if possible
if (set != null)
set.FilenamePattern = pattern;
return set;
}
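// Hypothetical usage of OpenSet: pass the shared stem of the volume names
// (e.g. @"C:\installers\data" for data1.cab, data2.cab, ...), then walk the
// chain through the Next references populated above:
//
//   var set = InstallShieldCabinet.OpenSet(@"C:\installers\data");
//   for (var part = set; part != null; part = part.Next)
//       Console.WriteLine(part.VolumeID);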
/// <summary>
/// Open the numbered cabinet set volume
/// </summary>
/// <param name="volumeId">Volume ID, 1-indexed</param>
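/// <param name="volumeStream">Stream for the opened volume on success, null otherwise</param>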
/// <returns>Wrapper representing the volume on success, null otherwise</returns>
public InstallShieldCabinet? OpenVolume(ushort volumeId, out Stream? volumeStream)
{
// Normalize the volume ID for odd cases
if (volumeId == ushort.MinValue || volumeId == ushort.MaxValue)
volumeId = 1;
// Try to open the file as a stream
volumeStream = OpenFileForReading(FilenamePattern, volumeId, CABINET_SUFFIX);
if (volumeStream == null)
{
Console.Error.WriteLine($"Failed to open input cabinet file {volumeId}");
return null;
}
// Try to parse the stream into a cabinet
var volume = Create(volumeStream);
if (volume == null)
{
Console.Error.WriteLine($"Failed to open input cabinet file {volumeId}");
return null;
}
// Set the volume ID and return
volume.VolumeID = volumeId;
return volume;
}
/// <summary>
/// Open a cabinet file for reading
/// </summary>
/// <param name="index">Cabinet part index to be opened</param>
/// <param name="suffix">Cabinet files suffix (e.g. `.cab`)</param>
/// <returns>A Stream representing the cabinet part, null on error</returns>
public Stream? OpenFileForReading(int index, string suffix)
=> OpenFileForReading(FilenamePattern, index, suffix);
/// <summary>
/// Create the generic filename pattern to look for from the input filename
/// </summary>
/// <returns>String representing the filename pattern for a cabinet set, null on error</returns>
private static string? CreateFilenamePattern(string filename)
{
if (string.IsNullOrEmpty(filename))
return null;
string? pattern;
string? directory = Path.GetDirectoryName(Path.GetFullPath(filename));
if (directory != null)
pattern = Path.Combine(directory, Path.GetFileNameWithoutExtension(filename));
else
pattern = Path.GetFileNameWithoutExtension(filename);
return new Regex(@"\d+$").Replace(pattern, string.Empty);
}
/// <summary>
/// Open a cabinet file for reading
/// </summary>
/// <param name="pattern">Filename pattern for matching cabinet files</param>
/// <param name="index">Cabinet part index to be opened</param>
/// <param name="suffix">Cabinet files suffix (e.g. `.cab`)</param>
/// <returns>A Stream representing the cabinet part, null on error</returns>
private static Stream? OpenFileForReading(string? pattern, int index, string suffix)
{
// An invalid pattern means no cabinet files
if (string.IsNullOrEmpty(pattern))
return null;
// Attempt lower-case extension
string filename = $"{pattern}{index}.{suffix}";
if (File.Exists(filename))
return File.Open(filename, FileMode.Open, FileAccess.Read, FileShare.ReadWrite);
// Attempt upper-case extension
filename = $"{pattern}{index}.{suffix.ToUpperInvariant()}";
if (File.Exists(filename))
return File.Open(filename, FileMode.Open, FileAccess.Read, FileShare.ReadWrite);
return null;
}
#endregion
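// To make the pattern logic above concrete: CreateFilenamePattern strips the
// trailing digits from the input name, and OpenFileForReading then probes
// "{pattern}{index}.{suffix}" in both extension cases. A hypothetical
// walkthrough (paths are illustrative only):
//
//   string input = @"C:\installers\data1.cab";
//   string stem = Path.Combine(
//       Path.GetDirectoryName(Path.GetFullPath(input))!,
//       Path.GetFileNameWithoutExtension(input));      // "C:\installers\data1"
//   string pattern = Regex.Replace(stem, @"\d+$", ""); // "C:\installers\data"
//   // Volume 2 is probed as "C:\installers\data2.cab", then "C:\installers\data2.CAB"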
#region Component
/// <summary>
@@ -374,336 +194,6 @@ namespace SabreTools.Serialization.Wrappers
#endregion
#region Extraction
/// <inheritdoc/>
public bool Extract(string outputDirectory, bool includeDebug)
{
// Open the full set if possible
var cabinet = this;
if (Filename != null)
{
// Get the name of the first cabinet file or header
string pattern = CreateFilenamePattern(Filename)!;
bool cabinetHeaderExists = File.Exists(pattern + "1.hdr");
bool shouldScanCabinet = cabinetHeaderExists
? Filename.Equals(pattern + "1.hdr", StringComparison.OrdinalIgnoreCase)
: Filename.Equals(pattern + "1.cab", StringComparison.OrdinalIgnoreCase);
// If we have anything but the first file
if (!shouldScanCabinet)
return false;
// Open the set from the pattern
cabinet = OpenSet(pattern);
}
// If the cabinet set could not be opened
if (cabinet == null)
return false;
try
{
for (int i = 0; i < cabinet.FileCount; i++)
{
try
{
// Check if the file is valid first
if (!cabinet.FileIsValid(i))
continue;
// Ensure directory separators are consistent
string filename = cabinet.GetFileName(i) ?? $"BAD_FILENAME{i}";
if (Path.DirectorySeparatorChar == '\\')
filename = filename.Replace('/', '\\');
else if (Path.DirectorySeparatorChar == '/')
filename = filename.Replace('\\', '/');
// Ensure the full output directory exists
filename = Path.Combine(outputDirectory, filename);
var directoryName = Path.GetDirectoryName(filename);
if (directoryName != null && !Directory.Exists(directoryName))
Directory.CreateDirectory(directoryName);
cabinet.FileSave(i, filename);
}
catch (Exception ex)
{
if (includeDebug) Console.Error.WriteLine(ex);
}
}
return true;
}
catch (Exception ex)
{
if (includeDebug) Console.Error.WriteLine(ex);
return false;
}
}
/// <summary>
/// Save the file at the given index to the filename specified
/// </summary>
public bool FileSave(int index, string filename, bool useOld = false)
{
// Get the file descriptor
if (!TryGetFileDescriptor(index, out var fileDescriptor) || fileDescriptor == null)
return false;
// If the file is split
if (fileDescriptor.LinkFlags == LinkFlags.LINK_PREV)
return FileSave((int)fileDescriptor.LinkPrevious, filename, useOld);
// Get the reader at the index
var reader = Reader.Create(this, index, fileDescriptor);
if (reader == null)
return false;
// Create the output file and hasher
FileStream output = File.OpenWrite(filename);
var md5 = new HashWrapper(HashType.MD5);
long readBytesLeft = (long)GetReadableBytes(fileDescriptor);
long writeBytesLeft = (long)GetWritableBytes(fileDescriptor);
byte[] inputBuffer;
byte[] outputBuffer = new byte[BUFFER_SIZE];
long totalWritten = 0;
// Read while there are bytes remaining
while (readBytesLeft > 0 && writeBytesLeft > 0)
{
long bytesToWrite = BUFFER_SIZE;
int result;
// Handle compressed files
#if NET20 || NET35
if ((fileDescriptor.Flags & FileFlags.FILE_COMPRESSED) != 0)
#else
if (fileDescriptor.Flags.HasFlag(FileFlags.FILE_COMPRESSED))
#endif
{
// Attempt to read the length value
byte[] lengthArr = new byte[sizeof(ushort)];
if (!reader.Read(lengthArr, 0, lengthArr.Length))
{
Console.Error.WriteLine($"Failed to read {lengthArr.Length} bytes of file {index} ({GetFileName(index)}) from input cabinet file {fileDescriptor.Volume}");
reader.Dispose();
output?.Close();
return false;
}
// Attempt to read the specified number of bytes
ushort bytesToRead = BitConverter.ToUInt16(lengthArr, 0);
inputBuffer = new byte[BUFFER_SIZE + 1];
if (!reader.Read(inputBuffer, 0, bytesToRead))
{
Console.Error.WriteLine($"Failed to read {lengthArr.Length} bytes of file {index} ({GetFileName(index)}) from input cabinet file {fileDescriptor.Volume}");
reader.Dispose();
output?.Close();
return false;
}
// Add a null byte to make inflate happy
inputBuffer[bytesToRead] = 0;
ulong readBytes = (ulong)(bytesToRead + 1);
// Uncompress into a buffer
if (useOld)
result = UncompressOld(outputBuffer, ref bytesToWrite, inputBuffer, ref readBytes);
else
result = Uncompress(outputBuffer, ref bytesToWrite, inputBuffer, ref readBytes);
// Accept Z_OK, and Z_DATA_ERROR as a known false positive; fail on anything else
if (result != zlibConst.Z_OK && result != zlibConst.Z_DATA_ERROR)
{
Console.Error.WriteLine($"Decompression failed with code {result.ToZlibConstName()}. bytes_to_read={bytesToRead}, volume={fileDescriptor.Volume}, read_bytes={readBytes}");
reader.Dispose();
output?.Close();
return false;
}
// Set remaining bytes
readBytesLeft -= 2;
readBytesLeft -= bytesToRead;
}
// Handle uncompressed files
else
{
bytesToWrite = Math.Min(readBytesLeft, BUFFER_SIZE);
if (!reader.Read(outputBuffer, 0, (int)bytesToWrite))
{
Console.Error.WriteLine($"Failed to write {bytesToWrite} bytes from input cabinet file {fileDescriptor.Volume}");
reader.Dispose();
output?.Close();
return false;
}
// Set remaining bytes
readBytesLeft -= (uint)bytesToWrite;
}
// Hash and write the next block
bytesToWrite = Math.Min(bytesToWrite, writeBytesLeft);
md5.Process(outputBuffer, 0, (int)bytesToWrite);
output?.Write(outputBuffer, 0, (int)bytesToWrite);
totalWritten += bytesToWrite;
writeBytesLeft -= bytesToWrite;
}
// Validate the number of bytes written
if ((long)fileDescriptor.ExpandedSize != totalWritten)
Console.WriteLine($"Expanded size of file {index} ({GetFileName(index)}) expected to be {fileDescriptor.ExpandedSize}, but was {totalWritten}");
// Finalize output values
md5.Terminate();
reader?.Dispose();
output?.Close();
// Validate the data written, if required
if (MajorVersion >= 6)
{
string expectedMd5 = BitConverter.ToString(fileDescriptor.MD5!);
expectedMd5 = expectedMd5.ToLowerInvariant().Replace("-", string.Empty);
string? actualMd5 = md5.CurrentHashString;
if (actualMd5 == null || actualMd5 != expectedMd5)
{
Console.Error.WriteLine($"MD5 checksum failure for file {index} ({GetFileName(index)})");
return false;
}
}
return true;
}
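// For reference, compressed entries above are stored as framed blocks:
//   [ushort blockLength][blockLength bytes of raw deflate data] ...
// which is why FileSave reads two length bytes, then the block itself, and
// decrements readBytesLeft by 2 + bytesToRead. A hypothetical single-frame read:
//
//   byte[] len = new byte[sizeof(ushort)];
//   reader.Read(len, 0, len.Length);
//   ushort blockLen = BitConverter.ToUInt16(len, 0);
//   byte[] block = new byte[blockLen + 1];
//   reader.Read(block, 0, blockLen);
//   block[blockLen] = 0; // trailing null byte, matching the inflate setup below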
/// <summary>
/// Save the file at the given index to the filename specified as raw
/// </summary>
public bool FileSaveRaw(int index, string filename)
{
// Get the file descriptor
if (!TryGetFileDescriptor(index, out var fileDescriptor) || fileDescriptor == null)
return false;
// If the file is split
if (fileDescriptor.LinkFlags == LinkFlags.LINK_PREV)
return FileSaveRaw((int)fileDescriptor.LinkPrevious, filename);
// Get the reader at the index
var reader = Reader.Create(this, index, fileDescriptor);
if (reader == null)
return false;
// Create the output file
FileStream output = File.OpenWrite(filename);
ulong bytesLeft = GetReadableBytes(fileDescriptor);
byte[] outputBuffer = new byte[BUFFER_SIZE];
// Read while there are bytes remaining
while (bytesLeft > 0)
{
ulong bytesToWrite = Math.Min(bytesLeft, BUFFER_SIZE);
if (!reader.Read(outputBuffer, 0, (int)bytesToWrite))
{
Console.Error.WriteLine($"Failed to read {bytesToWrite} bytes from input cabinet file {fileDescriptor.Volume}");
reader.Dispose();
output?.Close();
return false;
}
// Set remaining bytes
bytesLeft -= (uint)bytesToWrite;
// Write the next block
output.Write(outputBuffer, 0, (int)bytesToWrite);
}
// Finalize output values
reader.Dispose();
output?.Close();
return true;
}
/// <summary>
/// Uncompress a source byte array to a destination
/// </summary>
private unsafe static int Uncompress(byte[] dest, ref long destLen, byte[] source, ref ulong sourceLen)
{
fixed (byte* sourcePtr = source)
fixed (byte* destPtr = dest)
{
var stream = new ZLib.z_stream_s
{
next_in = sourcePtr,
avail_in = (uint)sourceLen,
next_out = destPtr,
avail_out = (uint)destLen,
};
// Negative windowBits selects raw deflate (no zlib header or checksum)
int err = ZLib.inflateInit2_(stream, -MAX_WBITS, ZLib.zlibVersion(), source.Length);
if (err != zlibConst.Z_OK)
return err;
err = ZLib.inflate(stream, 1);
if (err != zlibConst.Z_OK && err != zlibConst.Z_STREAM_END)
{
ZLib.inflateEnd(stream);
return err;
}
destLen = stream.total_out;
sourceLen = stream.total_in;
return ZLib.inflateEnd(stream);
}
}
/// <summary>
/// Uncompress a source byte array to a destination (old version)
/// </summary>
private unsafe static int UncompressOld(byte[] dest, ref long destLen, byte[] source, ref ulong sourceLen)
{
fixed (byte* sourcePtr = source)
fixed (byte* destPtr = dest)
{
var stream = new ZLib.z_stream_s
{
next_in = sourcePtr,
avail_in = (uint)sourceLen,
next_out = destPtr,
avail_out = (uint)destLen,
};
destLen = 0;
sourceLen = 0;
// Negative windowBits selects raw deflate (no zlib header or checksum)
int err = ZLib.inflateInit2_(stream, -MAX_WBITS, ZLib.zlibVersion(), source.Length);
if (err != zlibConst.Z_OK)
return err;
while (stream.avail_in > 1)
{
err = ZLib.inflate(stream, 1);
if (err != zlibConst.Z_OK)
{
ZLib.inflateEnd(stream);
return err;
}
}
destLen = stream.total_out;
sourceLen = stream.total_in;
return ZLib.inflateEnd(stream);
}
}
#endregion
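// Because inflateInit2_ is called with negative windowBits above, each block
// is raw deflate data with no zlib header or checksum. A managed sketch of
// the same step (buffer names are hypothetical, not part of this class):
//
//   using var source = new MemoryStream(inputBuffer, 0, compressedLength);
//   using var inflater = new System.IO.Compression.DeflateStream(
//       source, System.IO.Compression.CompressionMode.Decompress);
//   int produced = inflater.Read(outputBuffer, 0, outputBuffer.Length);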
#region File
/// <summary>
@@ -882,312 +372,5 @@ namespace SabreTools.Serialization.Wrappers
=> GetFileGroupFromFile(index)?.Name;
#endregion
#region Obfuscation
/// <summary>
/// Deobfuscate a buffer
/// </summary>
public static void Deobfuscate(byte[] buffer, long size, ref uint offset)
{
offset = Deobfuscate(buffer, size, offset);
}
/// <summary>
/// Deobfuscate a buffer with a seed value
/// </summary>
/// <remarks>Seed is 0 at file start</remarks>
public static uint Deobfuscate(byte[] buffer, long size, uint seed)
{
for (int i = 0; size > 0; size--, i++, seed++)
{
buffer[i] = (byte)(ROR8(buffer[i] ^ 0xd5, 2) - (seed % 0x47));
}
return seed;
}
/// <summary>
/// Obfuscate a buffer
/// </summary>
public static void Obfuscate(byte[] buffer, long size, ref uint offset)
{
offset = Obfuscate(buffer, size, offset);
}
/// <summary>
/// Obfuscate a buffer with a seed value
/// </summary>
/// <remarks>Seed is 0 at file start</remarks>
public static uint Obfuscate(byte[] buffer, long size, uint seed)
{
for (int i = 0; size > 0; size--, i++, seed++)
{
buffer[i] = (byte)(ROL8(buffer[i] ^ 0xd5, 2) + (seed % 0x47));
}
return seed;
}
/// <summary>
/// Rotate Right 8
/// </summary>
private static int ROR8(int x, byte n) => (x >> n) | (x << (8 - n));
/// <summary>
/// Rotate Left 8
/// </summary>
private static int ROL8(int x, byte n) => (x << n) | (x >> (8 - n));
#endregion
#region Helper Classes
/// <summary>
/// Helper to read a single file from a cabinet set
/// </summary>
private class Reader : IDisposable
{
#region Private Instance Variables
/// <summary>
/// Cabinet file to read from
/// </summary>
private readonly InstallShieldCabinet _cabinet;
/// <summary>
/// Currently selected index
/// </summary>
private readonly uint _index;
/// <summary>
/// File descriptor defining the currently selected index
/// </summary>
private readonly FileDescriptor _fileDescriptor;
/// <summary>
/// Offset in the data where the file exists
/// </summary>
private ulong _dataOffset;
/// <summary>
/// Number of bytes left in the current volume
/// </summary>
private ulong _volumeBytesLeft;
/// <summary>
/// Handle to the current volume stream
/// </summary>
private Stream? _volumeFile;
/// <summary>
/// Current volume header
/// </summary>
private VolumeHeader? _volumeHeader;
/// <summary>
/// Current volume ID
/// </summary>
private ushort _volumeId;
/// <summary>
/// Offset for obfuscation seed
/// </summary>
private uint _obfuscationOffset;
#endregion
#region Constructors
private Reader(InstallShieldCabinet cabinet, uint index, FileDescriptor fileDescriptor)
{
_cabinet = cabinet;
_index = index;
_fileDescriptor = fileDescriptor;
}
#endregion
/// <summary>
/// Create a new <see cref="Reader"/> from an existing cabinet, index, and file descriptor
/// </summary>
public static Reader? Create(InstallShieldCabinet cabinet, int index, FileDescriptor fileDescriptor)
{
var reader = new Reader(cabinet, (uint)index, fileDescriptor);
while (true)
{
// If the volume is invalid
if (!reader.OpenVolume(fileDescriptor.Volume))
{
Console.Error.WriteLine($"Failed to open volume {fileDescriptor.Volume}");
return null;
}
else if (reader._volumeFile == null || reader._volumeHeader == null)
{
Console.Error.WriteLine($"Volume {fileDescriptor.Volume} is invalid");
return null;
}
// Start with the correct volume for IS5 cabinets
if (reader._cabinet.MajorVersion <= 5 && index > (int)reader._volumeHeader.LastFileIndex)
{
// Normalize the volume ID for odd cases
if (fileDescriptor.Volume == ushort.MinValue || fileDescriptor.Volume == ushort.MaxValue)
fileDescriptor.Volume = 1;
fileDescriptor.Volume++;
continue;
}
break;
}
return reader;
}
/// <summary>
/// Dispose of the current object
/// </summary>
public void Dispose()
{
_volumeFile?.Close();
}
#region Reading
/// <summary>
/// Read a certain number of bytes from the current volume
/// </summary>
public bool Read(byte[] buffer, int start, long size)
{
long bytesLeft = size;
while (bytesLeft > 0)
{
// Open the next volume, if necessary
if (_volumeBytesLeft == 0)
{
if (!OpenNextVolume(out _))
return false;
}
// Get the number of bytes to read from this volume
int bytesToRead = (int)Math.Min(bytesLeft, (long)_volumeBytesLeft);
if (bytesToRead == 0)
break;
// Read as much as possible from this volume
if (bytesToRead != _volumeFile!.Read(buffer, start, bytesToRead))
return false;
// Set the number of bytes left
bytesLeft -= bytesToRead;
_volumeBytesLeft -= (uint)bytesToRead;
}
#if NET20 || NET35
if ((_fileDescriptor.Flags & FileFlags.FILE_OBFUSCATED) != 0)
#else
if (_fileDescriptor.Flags.HasFlag(FileFlags.FILE_OBFUSCATED))
#endif
Deobfuscate(buffer, size, ref _obfuscationOffset);
return true;
}
/// <summary>
/// Open the next volume based on the current index
/// </summary>
private bool OpenNextVolume(out ushort nextVolume)
{
nextVolume = (ushort)(_volumeId + 1);
return OpenVolume(nextVolume);
}
/// <summary>
/// Open the volume at the given index
/// </summary>
private bool OpenVolume(ushort volume)
{
// Read the volume from the cabinet set
var next = _cabinet.OpenVolume(volume, out var volumeStream);
if (next?.VolumeHeader == null || volumeStream == null)
{
Console.Error.WriteLine($"Failed to open input cabinet file {volume}");
return false;
}
// Assign the next items
_volumeFile?.Close();
_volumeFile = volumeStream;
_volumeHeader = next.VolumeHeader;
// Enable support for split archives for IS5
if (_cabinet.MajorVersion == 5)
{
if (_index < (_cabinet.FileCount - 1)
&& _index == _volumeHeader.LastFileIndex
&& _volumeHeader.LastFileSizeCompressed != _fileDescriptor.CompressedSize)
{
_fileDescriptor.Flags |= FileFlags.FILE_SPLIT;
}
else if (_index > 0
&& _index == _volumeHeader.FirstFileIndex
&& _volumeHeader.FirstFileSizeCompressed != _fileDescriptor.CompressedSize)
{
_fileDescriptor.Flags |= FileFlags.FILE_SPLIT;
}
}
ulong volumeBytesLeftCompressed, volumeBytesLeftExpanded;
#if NET20 || NET35
if ((_fileDescriptor.Flags & FileFlags.FILE_SPLIT) != 0)
#else
if (_fileDescriptor.Flags.HasFlag(FileFlags.FILE_SPLIT))
#endif
{
if (_index == _volumeHeader.LastFileIndex && _volumeHeader.LastFileOffset != 0x7FFFFFFF)
{
// This entry can also be the first file in the volume
_dataOffset = _volumeHeader.LastFileOffset;
volumeBytesLeftExpanded = _volumeHeader.LastFileSizeExpanded;
volumeBytesLeftCompressed = _volumeHeader.LastFileSizeCompressed;
}
else if (_index == _volumeHeader.FirstFileIndex)
{
_dataOffset = _volumeHeader.FirstFileOffset;
volumeBytesLeftExpanded = _volumeHeader.FirstFileSizeExpanded;
volumeBytesLeftCompressed = _volumeHeader.FirstFileSizeCompressed;
}
else
{
return true;
}
}
else
{
_dataOffset = _fileDescriptor.DataOffset;
volumeBytesLeftExpanded = _fileDescriptor.ExpandedSize;
volumeBytesLeftCompressed = _fileDescriptor.CompressedSize;
}
#if NET20 || NET35
if ((_fileDescriptor.Flags & FileFlags.FILE_COMPRESSED) != 0)
#else
if (_fileDescriptor.Flags.HasFlag(FileFlags.FILE_COMPRESSED))
#endif
_volumeBytesLeft = volumeBytesLeftCompressed;
else
_volumeBytesLeft = volumeBytesLeftExpanded;
_volumeFile.Seek((long)_dataOffset, SeekOrigin.Begin);
_volumeId = volume;
return true;
}
#endregion
}
#endregion
}
}
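The Create change above swaps the static deserializer helper for an instance method and threads the starting offset into the wrapper. A sketch of the new call shape, with a hypothetical input path:

using System.IO;
using SabreTools.Serialization;
using SabreTools.Serialization.Wrappers;

using Stream data = File.OpenRead("data1.hdr"); // hypothetical input
long currentOffset = data.Position;
var model = new Deserializers.InstallShieldCabinet().Deserialize(data);
data.Seek(currentOffset, SeekOrigin.Begin);
var wrapper = model == null ? null : new InstallShieldCabinet(model, data, currentOffset);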


@@ -0,0 +1,91 @@
using System.IO;
using SabreTools.Models.LDSCRYPT;
namespace SabreTools.Serialization.Wrappers
{
/// <summary>
/// This is a shell wrapper: it performs no actual parsing
/// and serves as a placeholder for types that typically
/// do not have models.
/// </summary>
public class LDSCRYPT : WrapperBase<EncryptedFile>
{
#region Descriptive Properties
/// <inheritdoc/>
public override string DescriptionString => "Link Data Security encrypted file";
#endregion
#region Constructors
/// <inheritdoc/>
public LDSCRYPT(EncryptedFile model, byte[] data) : base(model, data) { }
/// <inheritdoc/>
public LDSCRYPT(EncryptedFile model, byte[] data, int offset) : base(model, data, offset) { }
/// <inheritdoc/>
public LDSCRYPT(EncryptedFile model, byte[] data, int offset, int length) : base(model, data, offset, length) { }
/// <inheritdoc/>
public LDSCRYPT(EncryptedFile model, Stream data) : base(model, data) { }
/// <inheritdoc/>
public LDSCRYPT(EncryptedFile model, Stream data, long offset) : base(model, data, offset) { }
/// <inheritdoc/>
public LDSCRYPT(EncryptedFile model, Stream data, long offset, long length) : base(model, data, offset, length) { }
#endregion
#region Static Constructors
/// <summary>
/// Create a LDSCRYPT file from a byte array and offset
/// </summary>
/// <param name="data">Byte array representing the archive</param>
/// <param name="offset">Offset within the array to parse</param>
/// <returns>A LDSCRYPT wrapper on success, null on failure</returns>
public static LDSCRYPT? Create(byte[]? data, int offset)
{
// If the data is invalid
if (data == null || data.Length == 0)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and use that
var dataStream = new MemoryStream(data, offset, data.Length - offset);
return Create(dataStream);
}
/// <summary>
/// Create a LDSCRYPT file from a Stream
/// </summary>
/// <param name="data">Stream representing the archive</param>
/// <returns>A LDSCRYPT wrapper on success, null on failure</returns>
public static LDSCRYPT? Create(Stream? data)
{
// If the data is invalid
if (data == null || !data.CanRead)
return null;
return new LDSCRYPT(new EncryptedFile(), data);
}
#endregion
#region JSON Export
#if NETCOREAPP
/// <inheritdoc/>
public override string ExportJSON() => throw new System.NotImplementedException();
#endif
#endregion
}
}
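Since LDSCRYPT performs no parsing, Create only validates its inputs before wrapping them. A usage sketch, with a hypothetical file name:

using System;
using System.IO;
using SabreTools.Serialization.Wrappers;

byte[] data = File.ReadAllBytes("encrypted.bin"); // hypothetical input
var wrapper = LDSCRYPT.Create(data, 0);
if (wrapper != null)
    Console.WriteLine(wrapper.DescriptionString); // "Link Data Security encrypted file"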


@@ -0,0 +1,66 @@
using System;
using System.IO;
using SabreTools.IO.Compression.SZDD;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Wrappers
{
public partial class LZKWAJ : IExtractable
{
/// <inheritdoc/>
public bool Extract(string outputDirectory, bool includeDebug)
{
// Get the length of the compressed data
long compressedSize = Length - DataOffset;
if (compressedSize <= 0)
return false;
// Read in the data as an array
byte[]? contents = ReadRangeFromSource(DataOffset, (int)compressedSize);
if (contents == null || contents.Length == 0)
return false;
// Get the decompressor
var decompressor = Decompressor.CreateKWAJ(contents, (ushort)CompressionType);
if (decompressor == null)
return false;
// If we have an invalid output directory
if (string.IsNullOrEmpty(outputDirectory))
return false;
// Create the full output path
string filename = FileName ?? "tempfile";
if (FileExtension != null)
filename += $".{FileExtension}";
// Ensure directory separators are consistent
if (Path.DirectorySeparatorChar == '\\')
filename = filename.Replace('/', '\\');
else if (Path.DirectorySeparatorChar == '/')
filename = filename.Replace('\\', '/');
// Ensure the full output directory exists
filename = Path.Combine(outputDirectory, filename);
var directoryName = Path.GetDirectoryName(filename);
if (directoryName != null && !Directory.Exists(directoryName))
Directory.CreateDirectory(directoryName);
// Try to write the data
try
{
// Open the output file for writing
using var fs = File.Open(filename, FileMode.Create, FileAccess.Write, FileShare.None);
decompressor.CopyTo(fs);
fs.Flush();
}
catch (Exception ex)
{
if (includeDebug) Console.Error.WriteLine(ex);
return false;
}
return true;
}
}
}
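With extraction moved into the partial class above, the end-to-end flow stays short. A usage sketch, with a hypothetical input path:

using System.IO;
using SabreTools.Serialization.Wrappers;

using Stream stream = File.OpenRead("SETUP.KW_"); // hypothetical KWAJ input
var kwaj = LZKWAJ.Create(stream);
bool extracted = kwaj?.Extract("output", includeDebug: false) ?? false;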


@@ -1,13 +1,9 @@
using System;
using System.IO;
using SabreTools.IO.Compression.SZDD;
using SabreTools.IO.Extensions;
using SabreTools.Models.LZ;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Wrappers
{
public class LZKWAJ : WrapperBase<KWAJFile>, IExtractable
public partial class LZKWAJ : WrapperBase<KWAJFile>
{
#region Descriptive Properties
@@ -35,18 +31,26 @@ namespace SabreTools.Serialization.Wrappers
#region Constructors
/// <inheritdoc/>
public LZKWAJ(KWAJFile? model, byte[]? data, int offset)
: base(model, data, offset)
{
// All logic is handled by the base class
}
public LZKWAJ(KWAJFile model, byte[] data) : base(model, data) { }
/// <inheritdoc/>
public LZKWAJ(KWAJFile? model, Stream? data)
: base(model, data)
{
// All logic is handled by the base class
}
public LZKWAJ(KWAJFile model, byte[] data, int offset) : base(model, data, offset) { }
/// <inheritdoc/>
public LZKWAJ(KWAJFile model, byte[] data, int offset, int length) : base(model, data, offset, length) { }
/// <inheritdoc/>
public LZKWAJ(KWAJFile model, Stream data) : base(model, data) { }
/// <inheritdoc/>
public LZKWAJ(KWAJFile model, Stream data, long offset) : base(model, data, offset) { }
/// <inheritdoc/>
public LZKWAJ(KWAJFile model, Stream data, long offset, long length) : base(model, data, offset, length) { }
#endregion
#region Static Constructors
/// <summary>
/// Create an LZ (KWAJ variant) from a byte array and offset
@@ -85,12 +89,11 @@ namespace SabreTools.Serialization.Wrappers
// Cache the current offset
long currentOffset = data.Position;
var model = Deserializers.LZKWAJ.DeserializeStream(data);
var model = new Deserializers.LZKWAJ().Deserialize(data);
if (model == null)
return null;
data.Seek(currentOffset, SeekOrigin.Begin);
return new LZKWAJ(model, data);
return new LZKWAJ(model, data, currentOffset);
}
catch
{
@@ -99,65 +102,5 @@ namespace SabreTools.Serialization.Wrappers
}
#endregion
#region Extraction
/// <inheritdoc/>
public bool Extract(string outputDirectory, bool includeDebug)
{
// Get the length of the compressed data
long compressedSize = Length - DataOffset;
if (compressedSize <= 0)
return false;
// Read in the data as an array
byte[]? contents = _dataSource.ReadFrom(DataOffset, (int)compressedSize, retainPosition: true);
if (contents == null)
return false;
// Get the decompressor
var decompressor = Decompressor.CreateKWAJ(contents, CompressionType);
if (decompressor == null)
return false;
// If we have an invalid output directory
if (string.IsNullOrEmpty(outputDirectory))
return false;
// Create the full output path
string filename = FileName ?? "tempfile";
if (FileExtension != null)
filename += $".{FileExtension}";
// Ensure directory separators are consistent
if (Path.DirectorySeparatorChar == '\\')
filename = filename.Replace('/', '\\');
else if (Path.DirectorySeparatorChar == '/')
filename = filename.Replace('\\', '/');
// Ensure the full output directory exists
filename = Path.Combine(outputDirectory, filename);
var directoryName = Path.GetDirectoryName(filename);
if (directoryName != null && !Directory.Exists(directoryName))
Directory.CreateDirectory(directoryName);
// Try to write the data
try
{
// Open the output file for writing
using Stream fs = File.OpenWrite(filename);
decompressor.CopyTo(fs);
fs.Flush();
}
catch (Exception ex)
{
if (includeDebug) Console.Error.WriteLine(ex);
return false;
}
return true;
}
#endregion
}
}

Some files were not shown because too many files have changed in this diff.