Compare commits

...

61 Commits
1.4.3 ... 1.5.4

Author SHA1 Message Date
Matt Nadareski
3bf78c78e3 Bump version 2024-04-23 21:53:36 -04:00
Matt Nadareski
e38ecaec4c Fix build 2024-04-23 21:49:57 -04:00
Matt Nadareski
af40c78b56 Handle more directly-marshalled types 2024-04-23 21:45:54 -04:00
Matt Nadareski
12b206f9fa Update SabreTools.Models 2024-04-23 21:07:42 -04:00
Matt Nadareski
2cc51ba089 Fix build, oops 2024-04-23 21:05:50 -04:00
Matt Nadareski
52f0846d5d Add and fix some deserializers 2024-04-23 21:00:10 -04:00
Matt Nadareski
3fa8848e77 Make more parsing methods public 2024-04-23 15:38:33 -04:00
Matt Nadareski
41276e3d7e Port more helpers 2024-04-23 15:38:21 -04:00
Matt Nadareski
4cef93c95e Port some accessors from UnshieldSharp 2024-04-23 15:27:07 -04:00
Matt Nadareski
cdd999ee03 Fix other instances of string reading 2024-04-23 15:18:18 -04:00
Matt Nadareski
4f253323db Fix IS-CAB deserialization 2024-04-23 15:14:18 -04:00
Matt Nadareski
351f749e20 Add ISAv3 deserializer 2024-04-23 15:12:42 -04:00
Matt Nadareski
1e83fc4b9a Update packages 2024-04-23 15:12:39 -04:00
Matt Nadareski
c532bd1063 Bump version 2024-04-18 12:16:02 -04:00
Matt Nadareski
e4631a8176 Update SabreTools.IO 2024-04-18 12:04:54 -04:00
Matt Nadareski
ee8dad0c87 Bump version 2024-04-17 13:00:07 -04:00
Matt Nadareski
4163b2f22a Create non-typed variants of IWrapper and WrapperBase 2024-04-17 12:28:08 -04:00
Matt Nadareski
6aaf3afa38 Fix namespace issues 2024-04-17 11:52:22 -04:00
Matt Nadareski
2a7eb44281 Bump version 2024-04-17 11:45:26 -04:00
Matt Nadareski
08bbc93793 Update SabreTools.IO 2024-04-17 11:44:58 -04:00
Matt Nadareski
789478df13 Update SabreTools.IO 2024-04-16 13:16:59 -04:00
Matt Nadareski
cd4f1c9d97 Merge pull request #7 from SabreTools/deserializer-base
Deserializer base
2024-04-04 14:17:57 -04:00
Matt Nadareski
1a2e9fb942 Add PlayJ playlist wrapper 2024-04-04 14:15:27 -04:00
Matt Nadareski
d0865739de Add PIC wrapper 2024-04-04 14:13:03 -04:00
Matt Nadareski
0696bbab72 Add MoPaQ wrapper 2024-04-04 14:06:59 -04:00
Matt Nadareski
4b4c17ac24 Make model type inherent to interface 2024-04-04 14:03:02 -04:00
Matt Nadareski
d768172da1 Update README 2024-04-04 12:03:19 -04:00
Matt Nadareski
2a4d24309d Create base class for serializers 2024-04-04 12:03:19 -04:00
Matt Nadareski
bc01ce4552 Enforce IStreamDeserializer in base class 2024-04-04 12:03:19 -04:00
Matt Nadareski
ef0efe66bd Migrate IFileDeserializer implementation to base class 2024-04-04 12:03:19 -04:00
Matt Nadareski
7c21f65723 Migrate IByteSerializer implementation to base class 2024-04-04 12:03:19 -04:00
Matt Nadareski
cec53e907f Move static deserializers to base class 2024-04-04 12:03:19 -04:00
Matt Nadareski
006ced0430 Bump version 2024-04-04 12:00:53 -04:00
Matt Nadareski
bee6c0ba11 Add byte deserializers for remaining stream deserializers 2024-04-03 23:25:54 -04:00
Matt Nadareski
3cb880ff3f Update SabreTools.Models 2024-04-03 22:54:30 -04:00
Matt Nadareski
f1d54e4a14 Add XML byte deserialization 2024-04-03 22:41:49 -04:00
Matt Nadareski
f4aaed7f9c Move overlay extension method 2024-04-03 22:31:24 -04:00
Matt Nadareski
46ad76c5d2 Add flag for long/short SeparatedValue writing 2024-04-03 22:26:23 -04:00
Matt Nadareski
8f731cebc8 Add array constants for AttractMode 2024-04-03 22:21:39 -04:00
Matt Nadareski
6dbf9dacd6 Add flag for long/short AttractMode writing 2024-04-03 22:18:05 -04:00
Matt Nadareski
94ab760c67 Remove unnecessary code 2024-04-03 21:57:14 -04:00
Matt Nadareski
4ed3880bad Use SabreTools.Hashing for hash types 2024-04-03 21:56:21 -04:00
Matt Nadareski
ff8dcd30a5 Move some constants to the deserializer 2024-04-03 21:46:35 -04:00
Matt Nadareski
8ced91d0fa Move some constants to the deserializer 2024-04-03 21:44:02 -04:00
Matt Nadareski
f2c6fa2b8e Convert string serialization to new framework 2024-04-03 21:42:08 -04:00
Matt Nadareski
9b1bacd167 Move serializers to new organization 2024-04-03 21:27:50 -04:00
Matt Nadareski
964c97200c Move stream deserializers to new organization 2024-04-03 20:55:02 -04:00
Matt Nadareski
a3f3384ac9 Move file deserializers to new organization 2024-04-03 17:27:08 -04:00
Matt Nadareski
8b546fbf27 Start splitting serialization differently for clarity 2024-04-03 16:41:54 -04:00
Matt Nadareski
15da711087 Rename IByteSerializer to IByteDeserializer 2024-04-03 16:37:26 -04:00
Matt Nadareski
bfee7dd449 Use better naming 2024-04-03 16:35:54 -04:00
Matt Nadareski
9de2a91e80 Fix inheritdoc 2024-04-03 16:15:48 -04:00
Matt Nadareski
bf50c801b2 Simplify some static deserializers 2024-04-03 16:00:18 -04:00
Matt Nadareski
c19a4a94f4 Use new static stream serializer 2024-04-03 15:50:06 -04:00
Matt Nadareski
b6fe94116b Add static serializers for IStreamSerializer 2024-04-03 15:43:36 -04:00
Matt Nadareski
bcf604c773 Use new static stream deserializer 2024-04-03 15:23:49 -04:00
Matt Nadareski
74984a9114 Add static deserializers for IStreamSerializer 2024-04-03 15:22:35 -04:00
Matt Nadareski
8c1e241286 Add static serializers for IFileSerializer 2024-04-03 14:58:06 -04:00
Matt Nadareski
f666d737cb Add static deserializers for IFileSerializer 2024-04-03 14:28:47 -04:00
Matt Nadareski
a01609f1d1 Add static serializers for IByteSerializer 2024-04-03 14:06:23 -04:00
Matt Nadareski
8ca9ccaf00 Handle invalid texture counts 2024-04-02 16:55:43 -04:00
306 changed files with 4563 additions and 6555 deletions

View File

@@ -4,26 +4,30 @@ This library comprises of serializers that both read and write from files and st
Find the link to the Nuget package [here](https://www.nuget.org/packages/SabreTools.Serialization).
## `SabreTools.Serialization.Bytes`
## Interfaces
This namespace comprises of deserializers that take byte arrays to convert into models.
Below is a table representing the various interfaces that are implemented within this library.
## `SabreTools.Serialization.CrossModel`
| Interface Name | Source Type | Destination Type |
| --- | --- | --- |
| `IByteDeserializer` | `byte[]?` | Model |
| `IByteSerializer` | Model | `byte[]?` |
| `IFileDeserializer` | `string?` path | Model |
| `IFileSerializer` | Model | `string?` path |
| `IModelSerializer` | Model | Model |
| `IStreamDeserializer` | `Stream?` | Model |
| `IStreamSerializer` | Model | `Stream?` |
| `IStringDeserializer` | `string?` representation | Model |
| `IStringSerializer` | Model | `string?` representation |
| `IWrapper` | N/A | N/A |
This namespace comprises of serializers and deserializers that convert models to other common ones. This is mainly used for metadata files converting to and from a common, `Dictionary`-based model.
## Namespaces
## `SabreTools.Serialization.Files`
Below is a table of all namespaces within the library and what they represent
This namespace comprises of serializers and deserializers that can convert to and from files on disk. Most of the serializers are symmetric, but this is not guaranteed. Unimplemented methods will throw `NotImplementedException`.
## `SabreTools.Serialization.Streams`
This namespace comprises of serializers and deserializers that can convert to and from any type of stream. Most of the serializers are symmetric, but this is not guaranteed. Unimplemented methods will throw `NotImplementedException`.
## `SabreTools.Serialization.Strings`
This namespace comprises of serializers and deserializers that can convert to and from strings. Most of the serializers are symmetric, but this is not guaranteed. Unimplemented methods will throw `NotImplementedException`.
## `SabreTools.Serialization.Wrappers`
This namespace comprises of wrapping classes that include keeping a reference to the source of each serializable model. Some of the wrappers may also include what are referred to as "extension properties", which are generated properties derived from either parts of the model or the underlying source.
| Namespace | Description |
| --- | --- |
| `SabreTools.Serialization.CrossModel` | Convert between models; mainly used for metadata files converting to and from a common, `Dictionary`-based model |
| `SabreTools.Serialization.Deserializers` | Convert from external sources to models |
| `SabreTools.Serialization.Serializers` | Convert from models to external sources |
| `SabreTools.Serialization.Wrappers` | Classes that wrap serialization and models to allow for including extension properties |

View File

@@ -1,14 +0,0 @@
namespace SabreTools.Serialization
{
/// <summary>
/// Separated value serializer/deserializer for AttractMode romlists
/// </summary>
public static class AttractMode
{
public const string HeaderWithoutRomname = "#Name;Title;Emulator;CloneOf;Year;Manufacturer;Category;Players;Rotation;Control;Status;DisplayCount;DisplayType;AltRomname;AltTitle;Extra;Buttons";
public const int HeaderWithoutRomnameCount = 17;
public const string HeaderWithRomname = "#Romname;Title;Emulator;Cloneof;Year;Manufacturer;Category;Players;Rotation;Control;Status;DisplayCount;DisplayType;AltRomname;AltTitle;Extra;Buttons;Favourite;Tags;PlayedCount;PlayedTime;FileIsAvailable";
public const int HeaderWithRomnameCount = 22;
}
}

View File

@@ -1,25 +0,0 @@
using System.IO;
using SabreTools.Models.AACS;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Bytes
{
public partial class AACS : IByteSerializer<MediaKeyBlock>
{
/// <inheritdoc/>
public MediaKeyBlock? Deserialize(byte[]? data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return new Streams.AACS().Deserialize(dataStream);
}
}
}

View File

@@ -1,25 +0,0 @@
using System.IO;
using SabreTools.Models.BDPlus;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Bytes
{
public partial class BDPlus : IByteSerializer<SVM>
{
/// <inheritdoc/>
public SVM? Deserialize(byte[]? data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return new Streams.BDPlus().Deserialize(dataStream);
}
}
}

View File

@@ -1,25 +0,0 @@
using System.IO;
using SabreTools.Models.BFPK;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Bytes
{
public partial class BFPK : IByteSerializer<Archive>
{
/// <inheritdoc/>
public Archive? Deserialize(byte[]? data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return new Streams.BFPK().Deserialize(dataStream);
}
}
}

View File

@@ -1,24 +0,0 @@
using System.IO;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Bytes
{
public partial class BSP : IByteSerializer<Models.BSP.File>
{
/// <inheritdoc/>
public Models.BSP.File? Deserialize(byte[]? data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return new Streams.BSP().Deserialize(dataStream);
}
}
}

View File

@@ -1,25 +0,0 @@
using System.IO;
using SabreTools.Models.CFB;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Bytes
{
public partial class CFB : IByteSerializer<Binary>
{
/// <inheritdoc/>
public Binary? Deserialize(byte[]? data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return new Streams.CFB().Deserialize(dataStream);
}
}
}

View File

@@ -1,24 +0,0 @@
using System.IO;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Bytes
{
public partial class CIA : IByteSerializer<Models.N3DS.CIA>
{
/// <inheritdoc/>
public Models.N3DS.CIA? Deserialize(byte[]? data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return new Streams.CIA().Deserialize(dataStream);
}
}
}

View File

@@ -1,24 +0,0 @@
using System.IO;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Bytes
{
public partial class GCF : IByteSerializer<Models.GCF.File>
{
/// <inheritdoc/>
public Models.GCF.File? Deserialize(byte[]? data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return new Streams.GCF().Deserialize(dataStream);
}
}
}

View File

@@ -1,24 +0,0 @@
using System.IO;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Bytes
{
public partial class IRD : IByteSerializer<Models.IRD.File>
{
/// <inheritdoc/>
public Models.IRD.File? Deserialize(byte[]? data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return new Streams.IRD().Deserialize(dataStream);
}
}
}

View File

@@ -1,26 +0,0 @@
using System.IO;
using SabreTools.Models.InstallShieldCabinet;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Bytes
{
// TODO: Add multi-cabinet reading
public partial class InstallShieldCabinet : IByteSerializer<Cabinet>
{
/// <inheritdoc/>
public Cabinet? Deserialize(byte[]? data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return new Streams.InstallShieldCabinet().Deserialize(dataStream);
}
}
}

View File

@@ -1,25 +0,0 @@
using System.IO;
using SabreTools.Models.LinearExecutable;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Bytes
{
public partial class LinearExecutable : IByteSerializer<Executable>
{
/// <inheritdoc/>
public Executable? Deserialize(byte[]? data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return new Streams.LinearExecutable().Deserialize(dataStream);
}
}
}

View File

@@ -1,25 +0,0 @@
using System.IO;
using SabreTools.Models.MSDOS;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Bytes
{
public partial class MSDOS : IByteSerializer<Executable>
{
/// <inheritdoc/>
public Executable? Deserialize(byte[]? data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return new Streams.MSDOS().Deserialize(dataStream);
}
}
}

View File

@@ -1,26 +0,0 @@
using System.IO;
using SabreTools.Models.MicrosoftCabinet;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Bytes
{
// TODO: Add multi-cabinet reading
public partial class MicrosoftCabinet : IByteSerializer<Cabinet>
{
/// <inheritdoc/>
public Cabinet? Deserialize(byte[]? data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return new Streams.MicrosoftCabinet().Deserialize(dataStream);
}
}
}

View File

@@ -1,25 +0,0 @@
using System.IO;
using SabreTools.Models.MoPaQ;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Bytes
{
public partial class MoPaQ : IByteSerializer<Archive>
{
/// <inheritdoc/>
public Archive? Deserialize(byte[]? data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return new Streams.MoPaQ().Deserialize(dataStream);
}
}
}

View File

@@ -1,25 +0,0 @@
using System.IO;
using SabreTools.Models.N3DS;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Bytes
{
public partial class N3DS : IByteSerializer<Cart>
{
/// <inheritdoc/>
public Cart? Deserialize(byte[]? data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return new Streams.N3DS().Deserialize(dataStream);
}
}
}

View File

@@ -1,24 +0,0 @@
using System.IO;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Bytes
{
public partial class NCF : IByteSerializer<Models.NCF.File>
{
/// <inheritdoc/>
public Models.NCF.File? Deserialize(byte[]? data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return new Streams.NCF().Deserialize(dataStream);
}
}
}

View File

@@ -1,25 +0,0 @@
using System.IO;
using SabreTools.Models.NewExecutable;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Bytes
{
public partial class NewExecutable : IByteSerializer<Executable>
{
/// <inheritdoc/>
public Executable? Deserialize(byte[]? data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return new Streams.NewExecutable().Deserialize(dataStream);
}
}
}

View File

@@ -1,25 +0,0 @@
using System.IO;
using SabreTools.Models.Nitro;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Bytes
{
public partial class Nitro : IByteSerializer<Cart>
{
/// <inheritdoc/>
public Cart? Deserialize(byte[]? data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return new Streams.Nitro().Deserialize(dataStream);
}
}
}

View File

@@ -1,24 +0,0 @@
using System.IO;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Bytes
{
public partial class PAK : IByteSerializer<Models.PAK.File>
{
/// <inheritdoc/>
public Models.PAK.File? Deserialize(byte[]? data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return new Streams.PAK().Deserialize(dataStream);
}
}
}

View File

@@ -1,25 +0,0 @@
using System.IO;
using SabreTools.Models.PFF;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Bytes
{
public partial class PFF : IByteSerializer<Archive>
{
/// <inheritdoc/>
public Archive? Deserialize(byte[]? data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return new Streams.PFF().Deserialize(dataStream);
}
}
}

View File

@@ -1,25 +0,0 @@
using System.IO;
using SabreTools.Models.PlayJ;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Bytes
{
public partial class PlayJAudio : IByteSerializer<AudioFile>
{
/// <inheritdoc/>
public AudioFile? Deserialize(byte[]? data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return new Streams.PlayJAudio().Deserialize(dataStream);
}
}
}

View File

@@ -1,25 +0,0 @@
using System.IO;
using SabreTools.Models.PlayJ;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Bytes
{
public partial class PlayJPlaylist : IByteSerializer<Playlist>
{
/// <inheritdoc/>
public Playlist? Deserialize(byte[]? data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return new Streams.PlayJPlaylist().Deserialize(dataStream);
}
}
}

View File

@@ -1,25 +0,0 @@
using System.IO;
using SabreTools.Models.PortableExecutable;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Bytes
{
public partial class PortableExecutable : IByteSerializer<Executable>
{
/// <inheritdoc/>
public Executable? Deserialize(byte[]? data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return new Streams.PortableExecutable().Deserialize(dataStream);
}
}
}

View File

@@ -1,25 +0,0 @@
using System.IO;
using SabreTools.Models.Quantum;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Bytes
{
public partial class Quantum : IByteSerializer<Archive>
{
/// <inheritdoc/>
public Archive? Deserialize(byte[]? data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return new Streams.Quantum().Deserialize(dataStream);
}
}
}

View File

@@ -1,24 +0,0 @@
using System.IO;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Bytes
{
public partial class SGA : IByteSerializer<Models.SGA.File>
{
/// <inheritdoc/>
public Models.SGA.File? Deserialize(byte[]? data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return new Streams.SGA().Deserialize(dataStream);
}
}
}

View File

@@ -1,24 +0,0 @@
using System.IO;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Bytes
{
public partial class VBSP : IByteSerializer<Models.VBSP.File>
{
/// <inheritdoc/>
public Models.VBSP.File? Deserialize(byte[]? data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return new Streams.VBSP().Deserialize(dataStream);
}
}
}

View File

@@ -1,24 +0,0 @@
using System.IO;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Bytes
{
public partial class VPK : IByteSerializer<Models.VPK.File>
{
/// <inheritdoc/>
public Models.VPK.File? Deserialize(byte[]? data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return new Streams.VPK().Deserialize(dataStream);
}
}
}

View File

@@ -1,24 +0,0 @@
using System.IO;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Bytes
{
public partial class WAD : IByteSerializer<Models.WAD.File>
{
/// <inheritdoc/>
public Models.WAD.File? Deserialize(byte[]? data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return new Streams.WAD().Deserialize(dataStream);
}
}
}

View File

@@ -1,24 +0,0 @@
using System.IO;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Bytes
{
public partial class XZP : IByteSerializer<Models.XZP.File>
{
/// <inheritdoc/>
public Models.XZP.File? Deserialize(byte[]? data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return new Streams.XZP().Deserialize(dataStream);
}
}
}

View File

@@ -1,5 +1,6 @@
using System.Collections.Generic;
using System.Linq;
using SabreTools.Hashing;
using SabreTools.Models.Hashfile;
using SabreTools.Serialization.Interfaces;
@@ -8,10 +9,10 @@ namespace SabreTools.Serialization.CrossModel
public partial class Hashfile : IModelSerializer<Models.Hashfile.Hashfile, Models.Metadata.MetadataFile>
{
/// <inheritdoc/>
public Models.Hashfile.Hashfile? Deserialize(Models.Metadata.MetadataFile? obj) => Deserialize(obj, Hash.CRC);
public Models.Hashfile.Hashfile? Deserialize(Models.Metadata.MetadataFile? obj) => Deserialize(obj, HashType.CRC32);
/// <inheritdoc/>
public Models.Hashfile.Hashfile? Deserialize(Models.Metadata.MetadataFile? obj, Hash hash)
public Models.Hashfile.Hashfile? Deserialize(Models.Metadata.MetadataFile? obj, HashType hash)
{
if (obj == null)
return null;
@@ -73,7 +74,7 @@ namespace SabreTools.Serialization.CrossModel
/// <summary>
/// Convert from <cref="Models.Metadata.MetadataFile"/> to an array of <cref="Models.Hashfile.Hashfile"/>
/// </summary>
public static Models.Hashfile.Hashfile[]? ConvertArrayFromInternalModel(Models.Metadata.MetadataFile? item, Hash hash)
public static Models.Hashfile.Hashfile[]? ConvertArrayFromInternalModel(Models.Metadata.MetadataFile? item, HashType hash)
{
if (item == null)
return null;
@@ -93,7 +94,7 @@ namespace SabreTools.Serialization.CrossModel
/// <summary>
/// Convert from <cref="Models.Metadata.Machine"/> to <cref="Models.Hashfile.Hashfile"/>
/// </summary>
private static Models.Hashfile.Hashfile ConvertMachineFromInternalModel(Models.Metadata.Machine item, Hash hash)
private static Models.Hashfile.Hashfile ConvertMachineFromInternalModel(Models.Metadata.Machine item, HashType hash)
{
var roms = item.Read<Models.Metadata.Rom[]>(Models.Metadata.Machine.RomKey);
if (roms == null)
@@ -101,43 +102,43 @@ namespace SabreTools.Serialization.CrossModel
return new Models.Hashfile.Hashfile
{
SFV = hash == Hash.CRC
SFV = hash == HashType.CRC32 || hash == HashType.CRC32_ISO || hash == HashType.CRC32_Naive || hash == HashType.CRC32_Optimized || hash == HashType.CRC32_Parallel
? roms
.Where(r => r != null)
.Select(ConvertToSFV)
.ToArray()
: null,
MD5 = hash == Hash.MD5
MD5 = hash == HashType.MD5
? roms
.Where(r => r != null)
.Select(ConvertToMD5)
.ToArray()
: null,
SHA1 = hash == Hash.SHA1
SHA1 = hash == HashType.SHA1
? roms
.Where(r => r != null)
.Select(ConvertToSHA1)
.ToArray()
: null,
SHA256 = hash == Hash.SHA256
SHA256 = hash == HashType.SHA256
? roms
.Where(r => r != null)
.Select(ConvertToSHA256)
.ToArray()
: null,
SHA384 = hash == Hash.SHA384
SHA384 = hash == HashType.SHA384
? roms
.Where(r => r != null)
.Select(ConvertToSHA384)
.ToArray()
: null,
SHA512 = hash == Hash.SHA512
SHA512 = hash == HashType.SHA512
? roms
.Where(r => r != null)
.Select(ConvertToSHA512)
.ToArray()
: null,
SpamSum = hash == Hash.SpamSum
SpamSum = hash == HashType.SpamSum
? roms
.Where(r => r != null)
.Select(ConvertToSpamSum)

View File

@@ -2,16 +2,15 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
using SabreTools.IO;
using SabreTools.IO.Extensions;
using SabreTools.Models.AACS;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Streams
namespace SabreTools.Serialization.Deserializers
{
public partial class AACS : IStreamSerializer<MediaKeyBlock>
public class AACS : BaseBinaryDeserializer<MediaKeyBlock>
{
/// <inheritdoc/>
public MediaKeyBlock? Deserialize(Stream? data)
public override MediaKeyBlock? Deserialize(Stream? data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)

View File

@@ -0,0 +1,7 @@
namespace SabreTools.Serialization.Deserializers
{
public class ArchiveDotOrg : XmlFile<Models.ArchiveDotOrg.Files>
{
// All logic taken care of in the base class
}
}

View File

@@ -4,14 +4,23 @@ using System.Linq;
using System.Text;
using SabreTools.IO.Readers;
using SabreTools.Models.AttractMode;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Streams
namespace SabreTools.Serialization.Deserializers
{
public partial class AttractMode : IStreamSerializer<MetadataFile>
public class AttractMode : BaseBinaryDeserializer<MetadataFile>
{
#region Constants
public const int HeaderWithoutRomnameCount = 17;
public const int HeaderWithRomnameCount = 22;
#endregion
#region IStreamDeserializer
/// <inheritdoc/>
public MetadataFile? Deserialize(Stream? data)
public override MetadataFile? Deserialize(Stream? data)
{
// If the stream is null
if (data == null)
@@ -41,7 +50,7 @@ namespace SabreTools.Serialization.Streams
// Parse the line into a row
Row row;
if (reader.Line.Count < Serialization.AttractMode.HeaderWithRomnameCount)
if (reader.Line.Count < HeaderWithRomnameCount)
{
row = new Row
{
@@ -65,8 +74,8 @@ namespace SabreTools.Serialization.Streams
};
// If we have additional fields
if (reader.Line.Count > Serialization.AttractMode.HeaderWithoutRomnameCount)
row.ADDITIONAL_ELEMENTS = reader.Line.Skip(Serialization.AttractMode.HeaderWithoutRomnameCount).ToArray();
if (reader.Line.Count > HeaderWithoutRomnameCount)
row.ADDITIONAL_ELEMENTS = reader.Line.Skip(HeaderWithoutRomnameCount).ToArray();
}
else
{
@@ -92,8 +101,8 @@ namespace SabreTools.Serialization.Streams
};
// If we have additional fields
if (reader.Line.Count > Serialization.AttractMode.HeaderWithRomnameCount)
row.ADDITIONAL_ELEMENTS = reader.Line.Skip(Serialization.AttractMode.HeaderWithRomnameCount).ToArray();
if (reader.Line.Count > HeaderWithRomnameCount)
row.ADDITIONAL_ELEMENTS = reader.Line.Skip(HeaderWithRomnameCount).ToArray();
}
rows.Add(row);
@@ -103,5 +112,7 @@ namespace SabreTools.Serialization.Streams
dat.Row = rows.ToArray();
return dat;
}
#endregion
}
}

View File

@@ -1,16 +1,15 @@
using System.IO;
using System.Text;
using SabreTools.IO;
using SabreTools.IO.Extensions;
using SabreTools.Models.BDPlus;
using SabreTools.Serialization.Interfaces;
using static SabreTools.Models.BDPlus.Constants;
namespace SabreTools.Serialization.Streams
namespace SabreTools.Serialization.Deserializers
{
public partial class BDPlus : IStreamSerializer<SVM>
public class BDPlus : BaseBinaryDeserializer<SVM>
{
/// <inheritdoc/>
public SVM? Deserialize(Stream? data)
public override SVM? Deserialize(Stream? data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)

View File

@@ -1,16 +1,15 @@
using System.IO;
using System.Text;
using SabreTools.IO;
using SabreTools.IO.Extensions;
using SabreTools.Models.BFPK;
using SabreTools.Serialization.Interfaces;
using static SabreTools.Models.BFPK.Constants;
namespace SabreTools.Serialization.Streams
namespace SabreTools.Serialization.Deserializers
{
public partial class BFPK : IStreamSerializer<Archive>
public class BFPK : BaseBinaryDeserializer<Archive>
{
/// <inheritdoc/>
public Archive? Deserialize(Stream? data)
public override Archive? Deserialize(Stream? data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
@@ -71,20 +70,13 @@ namespace SabreTools.Serialization.Streams
/// <returns>Filled header on success, null on error</returns>
private static Header? ParseHeader(Stream data)
{
// TODO: Use marshalling here instead of building
Header header = new Header();
var header = data.ReadType<Header>();
byte[]? magic = data.ReadBytes(4);
if (magic == null)
if (header == null)
return null;
header.Magic = Encoding.ASCII.GetString(magic);
if (header.Magic != SignatureString)
return null;
header.Version = data.ReadInt32();
header.Files = data.ReadInt32();
return header;
}
@@ -93,10 +85,10 @@ namespace SabreTools.Serialization.Streams
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled file entry on success, null on error</returns>
private static FileEntry ParseFileEntry(Stream data)
private static FileEntry? ParseFileEntry(Stream data)
{
// TODO: Use marshalling here instead of building
FileEntry fileEntry = new FileEntry();
var fileEntry = new FileEntry();
fileEntry.NameSize = data.ReadInt32();
if (fileEntry.NameSize > 0)

View File

@@ -1,17 +1,16 @@
using System.IO;
using System.Linq;
using System.Text;
using SabreTools.IO;
using SabreTools.IO.Extensions;
using SabreTools.Models.BSP;
using SabreTools.Serialization.Interfaces;
using static SabreTools.Models.BSP.Constants;
namespace SabreTools.Serialization.Streams
namespace SabreTools.Serialization.Deserializers
{
public partial class BSP : IStreamSerializer<Models.BSP.File>
public class BSP : BaseBinaryDeserializer<Models.BSP.File>
{
/// <inheritdoc/>
public Models.BSP.File? Deserialize(Stream? data)
public override Models.BSP.File? Deserialize(Stream? data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
@@ -48,6 +47,9 @@ namespace SabreTools.Serialization.Streams
for (int i = 0; i < HL_BSP_LUMP_COUNT; i++)
{
var lump = ParseLump(data);
if (lump == null)
return null;
file.Lumps[i] = lump;
}
@@ -103,13 +105,13 @@ namespace SabreTools.Serialization.Streams
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Level header on success, null on error</returns>
/// <remarks>Only recognized versions are 29 and 30</remarks>
private static Header? ParseHeader(Stream data)
{
// TODO: Use marshalling here instead of building
Header header = new Header();
var header = data.ReadType<Header>();
// Only recognized versions are 29 and 30
header.Version = data.ReadUInt32();
if (header == null)
return null;
if (header.Version != 29 && header.Version != 30)
return null;
@@ -121,15 +123,9 @@ namespace SabreTools.Serialization.Streams
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled lump on success, null on error</returns>
private static Lump ParseLump(Stream data)
private static Lump? ParseLump(Stream data)
{
// TODO: Use marshalling here instead of building
Lump lump = new Lump();
lump.Offset = data.ReadUInt32();
lump.Length = data.ReadUInt32();
return lump;
return data.ReadType<Lump>();
}
/// <summary>
@@ -140,7 +136,7 @@ namespace SabreTools.Serialization.Streams
private static TextureHeader ParseTextureHeader(Stream data)
{
// TODO: Use marshalling here instead of building
TextureHeader textureHeader = new TextureHeader();
var textureHeader = new TextureHeader();
textureHeader.TextureCount = data.ReadUInt32();
@@ -149,6 +145,8 @@ namespace SabreTools.Serialization.Streams
for (int i = 0; i < textureHeader.TextureCount; i++)
{
offsets[i] = data.ReadUInt32();
if (data.Position >= data.Length)
break;
}
textureHeader.Offsets = offsets;
@@ -165,7 +163,7 @@ namespace SabreTools.Serialization.Streams
private static Texture ParseTexture(Stream data, uint mipmap = 0)
{
// TODO: Use marshalling here instead of building
Texture texture = new Texture();
var texture = new Texture();
byte[]? name = data.ReadBytes(16)?.TakeWhile(c => c != '\0')?.ToArray();
if (name != null)

View File

@@ -0,0 +1,134 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Reflection;
using SabreTools.Serialization.Interfaces;

namespace SabreTools.Serialization.Deserializers
{
    /// <summary>
    /// Base class for all binary deserializers
    /// </summary>
    /// <typeparam name="TModel">Type of the model to deserialize</typeparam>
    /// <remarks>These methods assume there is a concrete implementation of the deserializer for the model available</remarks>
    public abstract class BaseBinaryDeserializer<TModel> :
        IByteDeserializer<TModel>,
        IFileDeserializer<TModel>,
        IStreamDeserializer<TModel>
    {
        #region IByteDeserializer

        /// <inheritdoc/>
        public virtual TModel? Deserialize(byte[]? data, int offset)
        {
            // If the data is invalid
            if (data == null)
                return default;

            // If the offset is out of bounds
            if (offset < 0 || offset >= data.Length)
                return default;

            // Create a memory stream and parse that
            var dataStream = new MemoryStream(data, offset, data.Length - offset);
            return DeserializeStream(dataStream);
        }

        #endregion

        #region IFileDeserializer

        /// <inheritdoc/>
        public virtual TModel? Deserialize(string? path)
        {
            using var stream = PathProcessor.OpenStream(path);
            return DeserializeStream(stream);
        }

        #endregion

        #region IStreamDeserializer

        /// <inheritdoc/>
        public abstract TModel? Deserialize(Stream? data);

        #endregion

        #region Static Implementations

        /// <inheritdoc cref="IByteDeserializer.Deserialize(byte[]?, int)"/>
        public static TModel? DeserializeBytes(byte[]? data, int offset)
        {
            var deserializer = GetType<IByteDeserializer<TModel>>();
            if (deserializer == null)
                return default;

            return deserializer.Deserialize(data, offset);
        }

        /// <inheritdoc cref="IFileDeserializer.Deserialize(string?)"/>
        public static TModel? DeserializeFile(string? path)
        {
            var deserializer = GetType<IFileDeserializer<TModel>>();
            if (deserializer == null)
                return default;

            return deserializer.Deserialize(path);
        }

        /// <inheritdoc cref="IStreamDeserializer.Deserialize(Stream?)"/>
        public static TModel? DeserializeStream(Stream? data)
        {
            var deserializer = GetType<IStreamDeserializer<TModel>>();
            if (deserializer == null)
                return default;

            return deserializer.Deserialize(data);
        }

        #endregion

        #region Helpers

        /// <summary>
        /// Get a constructed instance of a type, if possible
        /// </summary>
        /// <typeparam name="TDeserializer">Deserializer type to construct</typeparam>
        /// <returns>Deserializer of the requested type, null on error</returns>
        private static TDeserializer? GetType<TDeserializer>()
        {
            var assembly = Assembly.GetExecutingAssembly();
            if (assembly == null)
                return default;

            // If not all types can be loaded, use the ones that could be
            List<Type> assemblyTypes = [];
            try
            {
                assemblyTypes = assembly.GetTypes().ToList<Type>();
            }
            catch (ReflectionTypeLoadException rtle)
            {
                assemblyTypes = rtle.Types.Where(t => t != null)!.ToList<Type>();
            }

            // Loop through all types
            foreach (Type type in assemblyTypes)
            {
                // Skip types that cannot be instantiated: interfaces, abstract
                // classes, and open generic definitions (including this base
                // class itself, which Activator.CreateInstance would reject)
                if (!type.IsClass || type.IsAbstract || type.ContainsGenericParameters)
                    continue;

                // Require assignability to the closed generic interface; a
                // name-based interface lookup would also match deserializers
                // closed over a different model type and fail the cast below
                if (!typeof(TDeserializer).IsAssignableFrom(type))
                    continue;

                // Try to create a concrete instance of the type; skip types
                // that lack a usable parameterless constructor
                try
                {
                    var instance = (TDeserializer?)Activator.CreateInstance(type);
                    if (instance != null)
                        return instance;
                }
                catch (MissingMethodException)
                {
                    continue;
                }
            }

            return default;
        }

        #endregion
    }
}

View File

@@ -2,17 +2,16 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
using SabreTools.IO;
using SabreTools.IO.Extensions;
using SabreTools.Models.CFB;
using SabreTools.Serialization.Interfaces;
using static SabreTools.Models.CFB.Constants;
namespace SabreTools.Serialization.Streams
namespace SabreTools.Serialization.Deserializers
{
public partial class CFB : IStreamSerializer<Binary>
public class CFB : BaseBinaryDeserializer<Binary>
{
/// <inheritdoc/>
public Binary? Deserialize(Stream? data)
public override Binary? Deserialize(Stream? data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
@@ -43,7 +42,7 @@ namespace SabreTools.Serialization.Streams
#region DIFAT Sector Numbers
// Create a DIFAT sector table
var difatSectors = new List<SectorNumber?>();
var difatSectors = new List<SectorNumber>();
// Add the sectors from the header
if (fileHeader.DIFAT != null)
@@ -85,7 +84,7 @@ namespace SabreTools.Serialization.Streams
#region FAT Sector Numbers
// Create a FAT sector table
var fatSectors = new List<SectorNumber?>();
var fatSectors = new List<SectorNumber>();
// Loop through and add the FAT sectors
currentSector = binary.DIFATSectorNumbers[0];
@@ -123,7 +122,7 @@ namespace SabreTools.Serialization.Streams
#region Mini FAT Sector Numbers
// Create a mini FAT sector table
var miniFatSectors = new List<SectorNumber?>();
var miniFatSectors = new List<SectorNumber>();
// Loop through and add the mini FAT sectors
currentSector = (SectorNumber)fileHeader.FirstMiniFATSectorLocation;
@@ -234,49 +233,23 @@ namespace SabreTools.Serialization.Streams
/// <returns>Filled file header on success, null on error</returns>
private static FileHeader? ParseFileHeader(Stream data)
{
// TODO: Use marshalling here instead of building
FileHeader header = new FileHeader();
var header = data.ReadType<FileHeader>();
header.Signature = data.ReadUInt64();
if (header == null)
return null;
if (header.Signature != SignatureUInt64)
return null;
header.CLSID = data.ReadGuid();
header.MinorVersion = data.ReadUInt16();
header.MajorVersion = data.ReadUInt16();
header.ByteOrder = data.ReadUInt16();
if (header.ByteOrder != 0xFFFE)
return null;
header.SectorShift = data.ReadUInt16();
if (header.MajorVersion == 3 && header.SectorShift != 0x0009)
return null;
else if (header.MajorVersion == 4 && header.SectorShift != 0x000C)
return null;
header.MiniSectorShift = data.ReadUInt16();
header.Reserved = data.ReadBytes(6);
header.NumberOfDirectorySectors = data.ReadUInt32();
if (header.MajorVersion == 3 && header.NumberOfDirectorySectors != 0)
return null;
header.NumberOfFATSectors = data.ReadUInt32();
header.FirstDirectorySectorLocation = data.ReadUInt32();
header.TransactionSignatureNumber = data.ReadUInt32();
header.MiniStreamCutoffSize = data.ReadUInt32();
if (header.MiniStreamCutoffSize != 0x00001000)
return null;
header.FirstMiniFATSectorLocation = data.ReadUInt32();
header.NumberOfMiniFATSectors = data.ReadUInt32();
header.FirstDIFATSectorLocation = data.ReadUInt32();
header.NumberOfDIFATSectors = data.ReadUInt32();
header.DIFAT = new SectorNumber?[109];
for (int i = 0; i < header.DIFAT.Length; i++)
{
header.DIFAT[i] = (SectorNumber)data.ReadUInt32();
}
// Skip rest of sector for version 4
if (header.MajorVersion == 4)
_ = data.ReadBytes(3584);
@@ -290,11 +263,11 @@ namespace SabreTools.Serialization.Streams
/// <param name="data">Stream to parse</param>
/// <param name="sectorShift">Sector shift from the header</param>
/// <returns>Filled sector full of sector numbers on success, null on error</returns>
private static SectorNumber?[] ParseSectorNumbers(Stream data, ushort sectorShift)
private static SectorNumber[] ParseSectorNumbers(Stream data, ushort sectorShift)
{
// TODO: Use marshalling here instead of building
int sectorCount = (int)(Math.Pow(2, sectorShift) / sizeof(uint));
var sectorNumbers = new SectorNumber?[sectorCount];
var sectorNumbers = new SectorNumber[sectorCount];
for (int i = 0; i < sectorNumbers.Length; i++)
{
@@ -316,7 +289,7 @@ namespace SabreTools.Serialization.Streams
// TODO: Use marshalling here instead of building
const int directoryEntrySize = 64 + 2 + 1 + 1 + 4 + 4 + 4 + 16 + 4 + 8 + 8 + 4 + 8;
int sectorCount = (int)(Math.Pow(2, sectorShift) / directoryEntrySize);
DirectoryEntry[] directoryEntries = new DirectoryEntry[sectorCount];
var directoryEntries = new DirectoryEntry[sectorCount];
for (int i = 0; i < directoryEntries.Length; i++)
{
@@ -336,26 +309,17 @@ namespace SabreTools.Serialization.Streams
/// <param name="data">Stream to parse</param>
/// <param name="majorVersion">Major version from the header</param>
/// <returns>Filled directory entry on success, null on error</returns>
private static DirectoryEntry ParseDirectoryEntry(Stream data, ushort majorVersion)
private static DirectoryEntry? ParseDirectoryEntry(Stream data, ushort majorVersion)
{
// TODO: Use marshalling here instead of building
DirectoryEntry directoryEntry = new DirectoryEntry();
var directoryEntry = data.ReadType<DirectoryEntry>();
byte[]? name = data.ReadBytes(64);
if (name != null)
directoryEntry.Name = Encoding.Unicode.GetString(name).TrimEnd('\0');
directoryEntry.NameLength = data.ReadUInt16();
directoryEntry.ObjectType = (ObjectType)data.ReadByteValue();
directoryEntry.ColorFlag = (ColorFlag)data.ReadByteValue();
directoryEntry.LeftSiblingID = (StreamID)data.ReadUInt32();
directoryEntry.RightSiblingID = (StreamID)data.ReadUInt32();
directoryEntry.ChildID = (StreamID)data.ReadUInt32();
directoryEntry.CLSID = data.ReadGuid();
directoryEntry.StateBits = data.ReadUInt32();
directoryEntry.CreationTime = data.ReadUInt64();
directoryEntry.ModifiedTime = data.ReadUInt64();
directoryEntry.StartingSectorLocation = data.ReadUInt32();
directoryEntry.StreamSize = data.ReadUInt64();
if (directoryEntry == null)
return null;
// TEMPORARY FIX FOR ASCII -> UNICODE
directoryEntry.Name = Encoding.Unicode.GetString(Encoding.ASCII.GetBytes(directoryEntry.Name!));
// Handle version 3 entries
if (majorVersion == 3)
directoryEntry.StreamSize &= 0x0000FFFF;

View File

@@ -1,16 +1,15 @@
using System;
using System.IO;
using System.Text;
using SabreTools.IO;
using SabreTools.IO.Extensions;
using SabreTools.Models.N3DS;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Streams
namespace SabreTools.Serialization.Deserializers
{
public partial class CIA : IStreamSerializer<Models.N3DS.CIA>
public class CIA : BaseBinaryDeserializer<Models.N3DS.CIA>
{
/// <inheritdoc/>
public Models.N3DS.CIA? Deserialize(Stream? data)
public override Models.N3DS.CIA? Deserialize(Stream? data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
@@ -146,22 +145,9 @@ namespace SabreTools.Serialization.Streams
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled CIA header on success, null on error</returns>
private static CIAHeader ParseCIAHeader(Stream data)
public static CIAHeader? ParseCIAHeader(Stream data)
{
// TODO: Use marshalling here instead of building
CIAHeader ciaHeader = new CIAHeader();
ciaHeader.HeaderSize = data.ReadUInt32();
ciaHeader.Type = data.ReadUInt16();
ciaHeader.Version = data.ReadUInt16();
ciaHeader.CertificateChainSize = data.ReadUInt32();
ciaHeader.TicketSize = data.ReadUInt32();
ciaHeader.TMDFileSize = data.ReadUInt32();
ciaHeader.MetaSize = data.ReadUInt32();
ciaHeader.ContentSize = data.ReadUInt64();
ciaHeader.ContentIndex = data.ReadBytes(0x2000);
return ciaHeader;
return data.ReadType<CIAHeader>();
}
/// <summary>
@@ -169,7 +155,7 @@ namespace SabreTools.Serialization.Streams
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled certificate on success, null on error</returns>
private static Certificate? ParseCertificate(Stream data)
public static Certificate? ParseCertificate(Stream data)
{
// TODO: Use marshalling here instead of building
Certificate certificate = new Certificate();
@@ -245,7 +231,7 @@ namespace SabreTools.Serialization.Streams
/// <param name="data">Stream to parse</param>
/// <param name="fromCdn">Indicates if the ticket is from CDN</param>
/// <returns>Filled ticket on success, null on error</returns>
private static Ticket? ParseTicket(Stream data, bool fromCdn = false)
public static Ticket? ParseTicket(Stream data, bool fromCdn = false)
{
// TODO: Use marshalling here instead of building
Ticket ticket = new Ticket();
@@ -350,10 +336,10 @@ namespace SabreTools.Serialization.Streams
/// <param name="data">Stream to parse</param>
/// <param name="fromCdn">Indicates if the ticket is from CDN</param>
/// <returns>Filled title metadata on success, null on error</returns>
private static TitleMetadata? ParseTitleMetadata(Stream data, bool fromCdn = false)
public static TitleMetadata? ParseTitleMetadata(Stream data, bool fromCdn = false)
{
// TODO: Use marshalling here instead of building
TitleMetadata titleMetadata = new TitleMetadata();
var titleMetadata = new TitleMetadata();
titleMetadata.SignatureType = (SignatureType)data.ReadUInt32();
switch (titleMetadata.SignatureType)
@@ -421,11 +407,19 @@ namespace SabreTools.Serialization.Streams
titleMetadata.ContentInfoRecords = new ContentInfoRecord[64];
for (int i = 0; i < 64; i++)
{
var contentInfoRecord = ParseContentInfoRecord(data);
if (contentInfoRecord == null)
return null;
titleMetadata.ContentInfoRecords[i] = ParseContentInfoRecord(data);
}
titleMetadata.ContentChunkRecords = new ContentChunkRecord[titleMetadata.ContentCount];
for (int i = 0; i < titleMetadata.ContentCount; i++)
{
var contentChunkRecord = ParseContentChunkRecord(data);
if (contentChunkRecord == null)
return null;
titleMetadata.ContentChunkRecords[i] = ParseContentChunkRecord(data);
}
@@ -451,16 +445,9 @@ namespace SabreTools.Serialization.Streams
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled content info record on success, null on error</returns>
private static ContentInfoRecord ParseContentInfoRecord(Stream data)
public static ContentInfoRecord? ParseContentInfoRecord(Stream data)
{
// TODO: Use marshalling here instead of building
ContentInfoRecord contentInfoRecord = new ContentInfoRecord();
contentInfoRecord.ContentIndexOffset = data.ReadUInt16();
contentInfoRecord.ContentCommandCount = data.ReadUInt16();
contentInfoRecord.UnhashedContentRecordsSHA256Hash = data.ReadBytes(0x20);
return contentInfoRecord;
return data.ReadType<ContentInfoRecord>();
}
/// <summary>
@@ -468,18 +455,9 @@ namespace SabreTools.Serialization.Streams
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled content chunk record on success, null on error</returns>
private static ContentChunkRecord ParseContentChunkRecord(Stream data)
public static ContentChunkRecord? ParseContentChunkRecord(Stream data)
{
// TODO: Use marshalling here instead of building
ContentChunkRecord contentChunkRecord = new ContentChunkRecord();
contentChunkRecord.ContentId = data.ReadUInt32();
contentChunkRecord.ContentIndex = (ContentIndex)data.ReadUInt16();
contentChunkRecord.ContentType = (TMDContentType)data.ReadUInt16();
contentChunkRecord.ContentSize = data.ReadUInt64();
contentChunkRecord.SHA256Hash = data.ReadBytes(0x20);
return contentChunkRecord;
return data.ReadType<ContentChunkRecord>();
}
/// <summary>
@@ -487,18 +465,9 @@ namespace SabreTools.Serialization.Streams
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled meta data on success, null on error</returns>
private static MetaData ParseMetaData(Stream data)
public static MetaData? ParseMetaData(Stream data)
{
// TODO: Use marshalling here instead of building
MetaData metaData = new MetaData();
metaData.TitleIDDependencyList = data.ReadBytes(0x180);
metaData.Reserved1 = data.ReadBytes(0x180);
metaData.CoreVersion = data.ReadUInt32();
metaData.Reserved2 = data.ReadBytes(0xFC);
metaData.IconData = data.ReadBytes(0x36C0);
return metaData;
return data.ReadType<MetaData>();
}
}
}

View File

@@ -0,0 +1,32 @@
using System.IO;
using System.Text;

namespace SabreTools.Serialization.Deserializers
{
    /// <summary>
    /// Deserializer for Xbox Catalog.js files.
    /// </summary>
    /// <remarks>
    /// Catalog.js is JSON encoded as UTF-16 LE, so every overload forwards to
    /// the encoding-aware base method with <see cref="Encoding.Unicode"/>
    /// (the cached little-endian UTF-16 encoding, equivalent to
    /// <c>new UnicodeEncoding()</c> but without a per-call allocation).
    /// </remarks>
    public class Catalog : JsonFile<Models.Xbox.Catalog>
    {
        #region IByteDeserializer

        /// <remarks>Catalog.js file is encoded as UTF-16 LE</remarks>
        public override Models.Xbox.Catalog? Deserialize(byte[]? data, int offset)
            => Deserialize(data, offset, Encoding.Unicode);

        #endregion

        #region IFileDeserializer

        /// <remarks>Catalog.js file is encoded as UTF-16 LE</remarks>
        public override Models.Xbox.Catalog? Deserialize(string? path)
            => Deserialize(path, Encoding.Unicode);

        #endregion

        #region IStreamDeserializer

        /// <remarks>Catalog.js file is encoded as UTF-16 LE</remarks>
        public override Models.Xbox.Catalog? Deserialize(Stream? data)
            => Deserialize(data, Encoding.Unicode);

        #endregion
    }
}

View File

@@ -4,14 +4,76 @@ using System.IO;
using System.Text;
using SabreTools.IO.Readers;
using SabreTools.Models.ClrMamePro;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Streams
namespace SabreTools.Serialization.Deserializers
{
public partial class ClrMamePro : IStreamSerializer<MetadataFile>
public class ClrMamePro : BaseBinaryDeserializer<MetadataFile>
{
#region IByteDeserializer
/// <inheritdoc cref="IByteDeserializer.Deserialize(byte[]?, int)"/>
public static MetadataFile? DeserializeBytes(byte[]? data, int offset, bool quotes = true)
{
var deserializer = new ClrMamePro();
return deserializer.Deserialize(data, offset, quotes);
}
/// <inheritdoc/>
public MetadataFile? Deserialize(Stream? data) => Deserialize(data, true);
public override MetadataFile? Deserialize(byte[]? data, int offset)
=> Deserialize(data, offset, true);
/// <inheritdoc/>
public MetadataFile? Deserialize(byte[]? data, int offset, bool quotes)
{
// If the data is invalid
if (data == null)
return default;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return default;
// Create a memory stream and parse that
var dataStream = new MemoryStream(data, offset, data.Length - offset);
return DeserializeStream(dataStream, quotes);
}
#endregion
#region IFileDeserializer
/// <inheritdoc cref="IFileDeserializer.Deserialize(string?)"/>
public static MetadataFile? DeserializeFile(string? path, bool quotes = true)
{
var deserializer = new ClrMamePro();
return deserializer.Deserialize(path, quotes);
}
/// <inheritdoc/>
public override MetadataFile? Deserialize(string? path)
=> Deserialize(path, true);
/// <inheritdoc/>
public MetadataFile? Deserialize(string? path, bool quotes)
{
using var stream = PathProcessor.OpenStream(path);
return DeserializeStream(stream, quotes);
}
#endregion
#region IStreamDeserializer
/// <inheritdoc cref="IStreamDeserializer.Deserialize(Stream?)"/>
public static MetadataFile? DeserializeStream(Stream? data, bool quotes = true)
{
var deserializer = new ClrMamePro();
return deserializer.Deserialize(data, quotes);
}
/// <inheritdoc/>
public override MetadataFile? Deserialize(Stream? data)
=> Deserialize(data, true);
/// <inheritdoc cref="Deserialize(Stream)"/>
public MetadataFile? Deserialize(Stream? data, bool quotes)
@@ -891,5 +953,7 @@ namespace SabreTools.Serialization.Streams
driver.ADDITIONAL_ELEMENTS = itemAdditional.ToArray();
return driver;
}
#endregion
}
}

View File

@@ -3,16 +3,15 @@ using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text.RegularExpressions;
using SabreTools.IO;
using SabreTools.IO.Extensions;
using SabreTools.Models.CueSheets;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Streams
namespace SabreTools.Serialization.Deserializers
{
public partial class CueSheet : IStreamSerializer<Models.CueSheets.CueSheet>
public class CueSheet : BaseBinaryDeserializer<Models.CueSheets.CueSheet>
{
/// <inheritdoc/>
public Models.CueSheets.CueSheet? Deserialize(Stream? data)
public override Models.CueSheets.CueSheet? Deserialize(Stream? data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
@@ -513,26 +512,15 @@ namespace SabreTools.Serialization.Streams
/// <returns>CueFileType, if possible</returns>
private static CueFileType GetFileType(string? fileType)
{
switch (fileType?.ToLowerInvariant())
return (fileType?.ToLowerInvariant()) switch
{
case "binary":
return CueFileType.BINARY;
case "motorola":
return CueFileType.MOTOROLA;
case "aiff":
return CueFileType.AIFF;
case "wave":
return CueFileType.WAVE;
case "mp3":
return CueFileType.MP3;
default:
return CueFileType.BINARY;
}
"binary" => CueFileType.BINARY,
"motorola" => CueFileType.MOTOROLA,
"aiff" => CueFileType.AIFF,
"wave" => CueFileType.WAVE,
"mp3" => CueFileType.MP3,
_ => CueFileType.BINARY,
};
}
/// <summary>
@@ -542,35 +530,18 @@ namespace SabreTools.Serialization.Streams
/// <returns>CueTrackDataType, if possible (default AUDIO)</returns>
private static CueTrackDataType GetDataType(string? dataType)
{
switch (dataType?.ToLowerInvariant())
return (dataType?.ToLowerInvariant()) switch
{
case "audio":
return CueTrackDataType.AUDIO;
case "cdg":
return CueTrackDataType.CDG;
case "mode1/2048":
return CueTrackDataType.MODE1_2048;
case "mode1/2352":
return CueTrackDataType.MODE1_2352;
case "mode2/2336":
return CueTrackDataType.MODE2_2336;
case "mode2/2352":
return CueTrackDataType.MODE2_2352;
case "cdi/2336":
return CueTrackDataType.CDI_2336;
case "cdi/2352":
return CueTrackDataType.CDI_2352;
default:
return CueTrackDataType.AUDIO;
}
"audio" => CueTrackDataType.AUDIO,
"cdg" => CueTrackDataType.CDG,
"mode1/2048" => CueTrackDataType.MODE1_2048,
"mode1/2352" => CueTrackDataType.MODE1_2352,
"mode2/2336" => CueTrackDataType.MODE2_2336,
"mode2/2352" => CueTrackDataType.MODE2_2352,
"cdi/2336" => CueTrackDataType.CDI_2336,
"cdi/2352" => CueTrackDataType.CDI_2352,
_ => CueTrackDataType.AUDIO,
};
}
/// <summary>
@@ -619,4 +590,4 @@ namespace SabreTools.Serialization.Streams
#endregion
}
}
}

View File

@@ -3,14 +3,13 @@ using System.IO;
using System.Text;
using SabreTools.IO.Readers;
using SabreTools.Models.DosCenter;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Streams
namespace SabreTools.Serialization.Deserializers
{
public partial class DosCenter : IStreamSerializer<MetadataFile>
public class DosCenter : BaseBinaryDeserializer<MetadataFile>
{
/// <inheritdoc/>
public MetadataFile? Deserialize(Stream? data)
public override MetadataFile? Deserialize(Stream? data)
{
// If the stream is null
if (data == null)

View File

@@ -4,14 +4,13 @@ using System.Linq;
using System.Text;
using SabreTools.IO.Readers;
using SabreTools.Models.EverdriveSMDB;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Streams
namespace SabreTools.Serialization.Deserializers
{
public partial class EverdriveSMDB : IStreamSerializer<MetadataFile>
public class EverdriveSMDB : BaseBinaryDeserializer<MetadataFile>
{
/// <inheritdoc/>
public MetadataFile? Deserialize(Stream? data)
public override MetadataFile? Deserialize(Stream? data)
{
// If the stream is null
if (data == null)

View File

@@ -1,16 +1,15 @@
using System.Collections.Generic;
using System.IO;
using System.Text;
using SabreTools.IO;
using SabreTools.IO.Extensions;
using SabreTools.Models.GCF;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Streams
namespace SabreTools.Serialization.Deserializers
{
public partial class GCF : IStreamSerializer<Models.GCF.File>
public class GCF : BaseBinaryDeserializer<Models.GCF.File>
{
/// <inheritdoc/>
public Models.GCF.File? Deserialize(Stream? data)
public override Models.GCF.File? Deserialize(Stream? data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
@@ -59,6 +58,9 @@ namespace SabreTools.Serialization.Streams
for (int i = 0; i < blockEntryHeader.BlockCount; i++)
{
var blockEntry = ParseBlockEntry(data);
if (blockEntry == null)
return null;
file.BlockEntries[i] = blockEntry;
}
@@ -85,6 +87,9 @@ namespace SabreTools.Serialization.Streams
for (int i = 0; i < fragmentationMapHeader.BlockCount; i++)
{
var fragmentationMap = ParseFragmentationMap(data);
if (fragmentationMap == null)
return null;
file.FragmentationMaps[i] = fragmentationMap;
}
@@ -116,6 +121,9 @@ namespace SabreTools.Serialization.Streams
for (int i = 0; i < file.BlockEntryMapHeader.BlockCount; i++)
{
var blockEntryMap = ParseBlockEntryMap(data);
if (blockEntryMap == null)
return null;
file.BlockEntryMaps[i] = blockEntryMap;
}
}
@@ -146,6 +154,9 @@ namespace SabreTools.Serialization.Streams
for (int i = 0; i < directoryHeader.ItemCount; i++)
{
var directoryEntry = ParseDirectoryEntry(data);
if (directoryEntry == null)
return null;
file.DirectoryEntries[i] = directoryEntry;
}
@@ -168,7 +179,7 @@ namespace SabreTools.Serialization.Streams
while (data.Position < directoryNamesEnd)
{
long nameOffset = data.Position - directoryNamesStart;
string? directoryName = data.ReadString(Encoding.ASCII);
string? directoryName = data.ReadNullTerminatedAnsiString();
if (data.Position > directoryNamesEnd)
{
data.Seek(-directoryName?.Length ?? 0, SeekOrigin.Current);
@@ -181,13 +192,6 @@ namespace SabreTools.Serialization.Streams
file.DirectoryNames[nameOffset] = directoryName;
}
// Loop and assign to entries
foreach (var directoryEntry in file.DirectoryEntries)
{
if (directoryEntry != null)
directoryEntry.Name = file.DirectoryNames[directoryEntry.NameOffset];
}
}
#endregion
@@ -201,6 +205,9 @@ namespace SabreTools.Serialization.Streams
for (int i = 0; i < directoryHeader.Info1Count; i++)
{
var directoryInfo1Entry = ParseDirectoryInfo1Entry(data);
if (directoryInfo1Entry == null)
return null;
file.DirectoryInfo1Entries[i] = directoryInfo1Entry;
}
@@ -215,6 +222,9 @@ namespace SabreTools.Serialization.Streams
for (int i = 0; i < directoryHeader.ItemCount; i++)
{
var directoryInfo2Entry = ParseDirectoryInfo2Entry(data);
if (directoryInfo2Entry == null)
return null;
file.DirectoryInfo2Entries[i] = directoryInfo2Entry;
}
@@ -229,6 +239,9 @@ namespace SabreTools.Serialization.Streams
for (int i = 0; i < directoryHeader.CopyCount; i++)
{
var directoryCopyEntry = ParseDirectoryCopyEntry(data);
if (directoryCopyEntry == null)
return null;
file.DirectoryCopyEntries[i] = directoryCopyEntry;
}
@@ -243,6 +256,9 @@ namespace SabreTools.Serialization.Streams
for (int i = 0; i < directoryHeader.LocalCount; i++)
{
var directoryLocalEntry = ParseDirectoryLocalEntry(data);
if (directoryLocalEntry == null)
return null;
file.DirectoryLocalEntries[i] = directoryLocalEntry;
}
@@ -275,6 +291,9 @@ namespace SabreTools.Serialization.Streams
for (int i = 0; i < directoryHeader.ItemCount; i++)
{
var directoryMapEntry = ParseDirectoryMapEntry(data);
if (directoryMapEntry == null)
return null;
file.DirectoryMapEntries[i] = directoryMapEntry;
}
@@ -316,6 +335,9 @@ namespace SabreTools.Serialization.Streams
for (int i = 0; i < checksumMapHeader.ItemCount; i++)
{
var checksumMapEntry = ParseChecksumMapEntry(data);
if (checksumMapEntry == null)
return null;
file.ChecksumMapEntries[i] = checksumMapEntry;
}
@@ -330,6 +352,9 @@ namespace SabreTools.Serialization.Streams
for (int i = 0; i < checksumMapHeader.ChecksumCount; i++)
{
var checksumEntry = ParseChecksumEntry(data);
if (checksumEntry == null)
return null;
file.ChecksumEntries[i] = checksumEntry;
}
@@ -360,30 +385,17 @@ namespace SabreTools.Serialization.Streams
/// <returns>Filled Half-Life Game Cache on success, null on error</returns>
private static Header? ParseHeader(Stream data)
{
// TODO: Use marshalling here instead of building
Header header = new Header();
var header = data.ReadType<Header>();
header.Dummy0 = data.ReadUInt32();
if (header == null)
return null;
if (header.Dummy0 != 0x00000001)
return null;
header.MajorVersion = data.ReadUInt32();
if (header.MajorVersion != 0x00000001)
return null;
header.MinorVersion = data.ReadUInt32();
if (header.MinorVersion != 3 && header.MinorVersion != 5 && header.MinorVersion != 6)
return null;
header.CacheID = data.ReadUInt32();
header.LastVersionPlayed = data.ReadUInt32();
header.Dummy1 = data.ReadUInt32();
header.Dummy2 = data.ReadUInt32();
header.FileSize = data.ReadUInt32();
header.BlockSize = data.ReadUInt32();
header.BlockCount = data.ReadUInt32();
header.Dummy3 = data.ReadUInt32();
return header;
}
@@ -392,21 +404,9 @@ namespace SabreTools.Serialization.Streams
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache block entry header on success, null on error</returns>
private static BlockEntryHeader ParseBlockEntryHeader(Stream data)
private static BlockEntryHeader? ParseBlockEntryHeader(Stream data)
{
// TODO: Use marshalling here instead of building
BlockEntryHeader blockEntryHeader = new BlockEntryHeader();
blockEntryHeader.BlockCount = data.ReadUInt32();
blockEntryHeader.BlocksUsed = data.ReadUInt32();
blockEntryHeader.Dummy0 = data.ReadUInt32();
blockEntryHeader.Dummy1 = data.ReadUInt32();
blockEntryHeader.Dummy2 = data.ReadUInt32();
blockEntryHeader.Dummy3 = data.ReadUInt32();
blockEntryHeader.Dummy4 = data.ReadUInt32();
blockEntryHeader.Checksum = data.ReadUInt32();
return blockEntryHeader;
return data.ReadType<BlockEntryHeader>();
}
/// <summary>
@@ -414,20 +414,9 @@ namespace SabreTools.Serialization.Streams
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache block entry on success, null on error</returns>
private static BlockEntry ParseBlockEntry(Stream data)
private static BlockEntry? ParseBlockEntry(Stream data)
{
// TODO: Use marshalling here instead of building
BlockEntry blockEntry = new BlockEntry();
blockEntry.EntryFlags = data.ReadUInt32();
blockEntry.FileDataOffset = data.ReadUInt32();
blockEntry.FileDataSize = data.ReadUInt32();
blockEntry.FirstDataBlockIndex = data.ReadUInt32();
blockEntry.NextBlockEntryIndex = data.ReadUInt32();
blockEntry.PreviousBlockEntryIndex = data.ReadUInt32();
blockEntry.DirectoryIndex = data.ReadUInt32();
return blockEntry;
return data.ReadType<BlockEntry>();
}
/// <summary>
@@ -435,17 +424,9 @@ namespace SabreTools.Serialization.Streams
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache fragmentation map header on success, null on error</returns>
private static FragmentationMapHeader ParseFragmentationMapHeader(Stream data)
private static FragmentationMapHeader? ParseFragmentationMapHeader(Stream data)
{
// TODO: Use marshalling here instead of building
FragmentationMapHeader fragmentationMapHeader = new FragmentationMapHeader();
fragmentationMapHeader.BlockCount = data.ReadUInt32();
fragmentationMapHeader.FirstUnusedEntry = data.ReadUInt32();
fragmentationMapHeader.Terminator = data.ReadUInt32();
fragmentationMapHeader.Checksum = data.ReadUInt32();
return fragmentationMapHeader;
return data.ReadType<FragmentationMapHeader>();
}
/// <summary>
@@ -453,14 +434,9 @@ namespace SabreTools.Serialization.Streams
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache fragmentation map on success, null on error</returns>
private static FragmentationMap ParseFragmentationMap(Stream data)
private static FragmentationMap? ParseFragmentationMap(Stream data)
{
// TODO: Use marshalling here instead of building
FragmentationMap fragmentationMap = new FragmentationMap();
fragmentationMap.NextDataBlockIndex = data.ReadUInt32();
return fragmentationMap;
return data.ReadType<FragmentationMap>();
}
/// <summary>
@@ -468,18 +444,9 @@ namespace SabreTools.Serialization.Streams
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache block entry map header on success, null on error</returns>
private static BlockEntryMapHeader ParseBlockEntryMapHeader(Stream data)
private static BlockEntryMapHeader? ParseBlockEntryMapHeader(Stream data)
{
// TODO: Use marshalling here instead of building
BlockEntryMapHeader blockEntryMapHeader = new BlockEntryMapHeader();
blockEntryMapHeader.BlockCount = data.ReadUInt32();
blockEntryMapHeader.FirstBlockEntryIndex = data.ReadUInt32();
blockEntryMapHeader.LastBlockEntryIndex = data.ReadUInt32();
blockEntryMapHeader.Dummy0 = data.ReadUInt32();
blockEntryMapHeader.Checksum = data.ReadUInt32();
return blockEntryMapHeader;
return data.ReadType<BlockEntryMapHeader>();
}
/// <summary>
@@ -487,15 +454,9 @@ namespace SabreTools.Serialization.Streams
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache block entry map on success, null on error</returns>
private static BlockEntryMap ParseBlockEntryMap(Stream data)
private static BlockEntryMap? ParseBlockEntryMap(Stream data)
{
// TODO: Use marshalling here instead of building
BlockEntryMap blockEntryMap = new BlockEntryMap();
blockEntryMap.PreviousBlockEntryIndex = data.ReadUInt32();
blockEntryMap.NextBlockEntryIndex = data.ReadUInt32();
return blockEntryMap;
return data.ReadType<BlockEntryMap>();
}
/// <summary>
@@ -503,27 +464,9 @@ namespace SabreTools.Serialization.Streams
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache directory header on success, null on error</returns>
private static DirectoryHeader ParseDirectoryHeader(Stream data)
private static DirectoryHeader? ParseDirectoryHeader(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryHeader directoryHeader = new DirectoryHeader();
directoryHeader.Dummy0 = data.ReadUInt32();
directoryHeader.CacheID = data.ReadUInt32();
directoryHeader.LastVersionPlayed = data.ReadUInt32();
directoryHeader.ItemCount = data.ReadUInt32();
directoryHeader.FileCount = data.ReadUInt32();
directoryHeader.Dummy1 = data.ReadUInt32();
directoryHeader.DirectorySize = data.ReadUInt32();
directoryHeader.NameSize = data.ReadUInt32();
directoryHeader.Info1Count = data.ReadUInt32();
directoryHeader.CopyCount = data.ReadUInt32();
directoryHeader.LocalCount = data.ReadUInt32();
directoryHeader.Dummy2 = data.ReadUInt32();
directoryHeader.Dummy3 = data.ReadUInt32();
directoryHeader.Checksum = data.ReadUInt32();
return directoryHeader;
return data.ReadType<DirectoryHeader>();
}
/// <summary>
@@ -531,20 +474,9 @@ namespace SabreTools.Serialization.Streams
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache directory entry on success, null on error</returns>
private static DirectoryEntry ParseDirectoryEntry(Stream data)
private static DirectoryEntry? ParseDirectoryEntry(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryEntry directoryEntry = new DirectoryEntry();
directoryEntry.NameOffset = data.ReadUInt32();
directoryEntry.ItemSize = data.ReadUInt32();
directoryEntry.ChecksumIndex = data.ReadUInt32();
directoryEntry.DirectoryFlags = (HL_GCF_FLAG)data.ReadUInt32();
directoryEntry.ParentIndex = data.ReadUInt32();
directoryEntry.NextIndex = data.ReadUInt32();
directoryEntry.FirstIndex = data.ReadUInt32();
return directoryEntry;
return data.ReadType<DirectoryEntry>();
}
/// <summary>
@@ -552,14 +484,9 @@ namespace SabreTools.Serialization.Streams
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache directory info 1 entry on success, null on error</returns>
private static DirectoryInfo1Entry ParseDirectoryInfo1Entry(Stream data)
private static DirectoryInfo1Entry? ParseDirectoryInfo1Entry(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryInfo1Entry directoryInfo1Entry = new DirectoryInfo1Entry();
directoryInfo1Entry.Dummy0 = data.ReadUInt32();
return directoryInfo1Entry;
return data.ReadType<DirectoryInfo1Entry>();
}
/// <summary>
@@ -567,14 +494,9 @@ namespace SabreTools.Serialization.Streams
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache directory info 2 entry on success, null on error</returns>
private static DirectoryInfo2Entry ParseDirectoryInfo2Entry(Stream data)
private static DirectoryInfo2Entry? ParseDirectoryInfo2Entry(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryInfo2Entry directoryInfo2Entry = new DirectoryInfo2Entry();
directoryInfo2Entry.Dummy0 = data.ReadUInt32();
return directoryInfo2Entry;
return data.ReadType<DirectoryInfo2Entry>();
}
/// <summary>
@@ -582,14 +504,9 @@ namespace SabreTools.Serialization.Streams
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache directory copy entry on success, null on error</returns>
private static DirectoryCopyEntry ParseDirectoryCopyEntry(Stream data)
private static DirectoryCopyEntry? ParseDirectoryCopyEntry(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryCopyEntry directoryCopyEntry = new DirectoryCopyEntry();
directoryCopyEntry.DirectoryIndex = data.ReadUInt32();
return directoryCopyEntry;
return data.ReadType<DirectoryCopyEntry>();
}
/// <summary>
@@ -597,14 +514,9 @@ namespace SabreTools.Serialization.Streams
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache directory local entry on success, null on error</returns>
private static DirectoryLocalEntry ParseDirectoryLocalEntry(Stream data)
private static DirectoryLocalEntry? ParseDirectoryLocalEntry(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryLocalEntry directoryLocalEntry = new DirectoryLocalEntry();
directoryLocalEntry.DirectoryIndex = data.ReadUInt32();
return directoryLocalEntry;
return data.ReadType<DirectoryLocalEntry>();
}
/// <summary>
@@ -614,14 +526,12 @@ namespace SabreTools.Serialization.Streams
/// <returns>Filled Half-Life Game Cache directory map header on success, null on error</returns>
private static DirectoryMapHeader? ParseDirectoryMapHeader(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryMapHeader directoryMapHeader = new DirectoryMapHeader();
var directoryMapHeader = data.ReadType<DirectoryMapHeader>();
directoryMapHeader.Dummy0 = data.ReadUInt32();
if (directoryMapHeader == null)
return null;
if (directoryMapHeader.Dummy0 != 0x00000001)
return null;
directoryMapHeader.Dummy1 = data.ReadUInt32();
if (directoryMapHeader.Dummy1 != 0x00000000)
return null;
@@ -633,14 +543,9 @@ namespace SabreTools.Serialization.Streams
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache directory map entry on success, null on error</returns>
private static DirectoryMapEntry ParseDirectoryMapEntry(Stream data)
private static DirectoryMapEntry? ParseDirectoryMapEntry(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryMapEntry directoryMapEntry = new DirectoryMapEntry();
directoryMapEntry.FirstBlockIndex = data.ReadUInt32();
return directoryMapEntry;
return data.ReadType<DirectoryMapEntry>();
}
/// <summary>
@@ -650,15 +555,13 @@ namespace SabreTools.Serialization.Streams
/// <returns>Filled Half-Life Game Cache checksum header on success, null on error</returns>
private static ChecksumHeader? ParseChecksumHeader(Stream data)
{
// TODO: Use marshalling here instead of building
ChecksumHeader checksumHeader = new ChecksumHeader();
var checksumHeader = data.ReadType<ChecksumHeader>();
checksumHeader.Dummy0 = data.ReadUInt32();
if (checksumHeader == null)
return null;
if (checksumHeader.Dummy0 != 0x00000001)
return null;
checksumHeader.ChecksumSize = data.ReadUInt32();
return checksumHeader;
}
@@ -669,20 +572,15 @@ namespace SabreTools.Serialization.Streams
/// <returns>Filled Half-Life Game Cache checksum map header on success, null on error</returns>
private static ChecksumMapHeader? ParseChecksumMapHeader(Stream data)
{
// TODO: Use marshalling here instead of building
ChecksumMapHeader checksumMapHeader = new ChecksumMapHeader();
var checksumMapHeader = data.ReadType<ChecksumMapHeader>();
checksumMapHeader.Dummy0 = data.ReadUInt32();
if (checksumMapHeader == null)
return null;
if (checksumMapHeader.Dummy0 != 0x14893721)
return null;
checksumMapHeader.Dummy1 = data.ReadUInt32();
if (checksumMapHeader.Dummy1 != 0x00000001)
return null;
checksumMapHeader.ItemCount = data.ReadUInt32();
checksumMapHeader.ChecksumCount = data.ReadUInt32();
return checksumMapHeader;
}
@@ -691,15 +589,9 @@ namespace SabreTools.Serialization.Streams
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache checksum map entry on success, null on error</returns>
private static ChecksumMapEntry ParseChecksumMapEntry(Stream data)
private static ChecksumMapEntry? ParseChecksumMapEntry(Stream data)
{
// TODO: Use marshalling here instead of building
ChecksumMapEntry checksumMapEntry = new ChecksumMapEntry();
checksumMapEntry.ChecksumCount = data.ReadUInt32();
checksumMapEntry.FirstChecksumIndex = data.ReadUInt32();
return checksumMapEntry;
return data.ReadType<ChecksumMapEntry>();
}
/// <summary>
@@ -707,14 +599,9 @@ namespace SabreTools.Serialization.Streams
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache checksum entry on success, null on error</returns>
private static ChecksumEntry ParseChecksumEntry(Stream data)
private static ChecksumEntry? ParseChecksumEntry(Stream data)
{
// TODO: Use marshalling here instead of building
ChecksumEntry checksumEntry = new ChecksumEntry();
checksumEntry.Checksum = data.ReadUInt32();
return checksumEntry;
return data.ReadType<ChecksumEntry>();
}
/// <summary>
@@ -723,10 +610,10 @@ namespace SabreTools.Serialization.Streams
/// <param name="data">Stream to parse</param>
/// <param name="minorVersion">Minor version field from the header</param>
/// <returns>Filled Half-Life Game Cache data block header on success, null on error</returns>
private static DataBlockHeader ParseDataBlockHeader(Stream data, uint minorVersion)
private static DataBlockHeader? ParseDataBlockHeader(Stream data, uint minorVersion)
{
// TODO: Use marshalling here instead of building
DataBlockHeader dataBlockHeader = new DataBlockHeader();
var dataBlockHeader = new DataBlockHeader();
// In version 3 the DataBlockHeader is missing the LastVersionPlayed field.
if (minorVersion >= 5)

View File

@@ -2,18 +2,82 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using SabreTools.Hashing;
using SabreTools.Models.Hashfile;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Streams
namespace SabreTools.Serialization.Deserializers
{
public partial class Hashfile : IStreamSerializer<Models.Hashfile.Hashfile>
// TODO: Create variants for the implemented types
public class Hashfile : BaseBinaryDeserializer<Models.Hashfile.Hashfile>
{
#region IByteDeserializer
/// <inheritdoc cref="IByteDeserializer.Deserialize(byte[]?, int)"/>
public static Models.Hashfile.Hashfile? DeserializeBytes(byte[]? data, int offset, HashType hash = HashType.CRC32)
{
var deserializer = new Hashfile();
return deserializer.Deserialize(data, offset, hash);
}
/// <inheritdoc/>
public Models.Hashfile.Hashfile? Deserialize(Stream? data) => Deserialize(data, Hash.CRC);
public override Models.Hashfile.Hashfile? Deserialize(byte[]? data, int offset)
=> Deserialize(data, offset, HashType.CRC32);
/// <inheritdoc/>
public Models.Hashfile.Hashfile? Deserialize(byte[]? data, int offset, HashType hash)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
var dataStream = new MemoryStream(data, offset, data.Length - offset);
return DeserializeStream(dataStream, hash);
}
#endregion
#region IFileDeserializer
/// <inheritdoc cref="IFileDeserializer.Deserialize(string?)"/>
public static Models.Hashfile.Hashfile? DeserializeFile(string? path, HashType hash = HashType.CRC32)
{
var deserializer = new Hashfile();
return deserializer.Deserialize(path, hash);
}
/// <inheritdoc/>
public override Models.Hashfile.Hashfile? Deserialize(string? path)
=> Deserialize(path, HashType.CRC32);
/// <inheritdoc/>
public Models.Hashfile.Hashfile? Deserialize(string? path, HashType hash)
{
using var stream = PathProcessor.OpenStream(path);
return DeserializeStream(stream, hash);
}
#endregion
#region IStreamDeserializer
/// <inheritdoc cref="IStreamDeserializer.Deserialize(Stream?)"/>
public static Models.Hashfile.Hashfile? DeserializeStream(Stream? data, HashType hash = HashType.CRC32)
{
var deserializer = new Hashfile();
return deserializer.Deserialize(data, hash);
}
/// <inheritdoc/>
public override Models.Hashfile.Hashfile? Deserialize(Stream? data)
=> Deserialize(data, HashType.CRC32);
/// <inheritdoc cref="Deserialize(Stream)"/>
public Models.Hashfile.Hashfile? Deserialize(Stream? data, Hash hash)
public Models.Hashfile.Hashfile? Deserialize(Stream? data, HashType hash)
{
// If the stream is null
if (data == null)
@@ -41,7 +105,11 @@ namespace SabreTools.Serialization.Streams
// Parse the line into a hash
switch (hash)
{
case Hash.CRC:
case HashType.CRC32:
case HashType.CRC32_ISO:
case HashType.CRC32_Naive:
case HashType.CRC32_Optimized:
case HashType.CRC32_Parallel:
var sfv = new SFV
{
#if NETFRAMEWORK
@@ -54,7 +122,7 @@ namespace SabreTools.Serialization.Streams
};
hashes.Add(sfv);
break;
case Hash.MD5:
case HashType.MD5:
var md5 = new MD5
{
Hash = lineParts[0],
@@ -66,7 +134,7 @@ namespace SabreTools.Serialization.Streams
};
hashes.Add(md5);
break;
case Hash.SHA1:
case HashType.SHA1:
var sha1 = new SHA1
{
Hash = lineParts[0],
@@ -78,7 +146,7 @@ namespace SabreTools.Serialization.Streams
};
hashes.Add(sha1);
break;
case Hash.SHA256:
case HashType.SHA256:
var sha256 = new SHA256
{
Hash = lineParts[0],
@@ -90,7 +158,7 @@ namespace SabreTools.Serialization.Streams
};
hashes.Add(sha256);
break;
case Hash.SHA384:
case HashType.SHA384:
var sha384 = new SHA384
{
Hash = lineParts[0],
@@ -102,7 +170,7 @@ namespace SabreTools.Serialization.Streams
};
hashes.Add(sha384);
break;
case Hash.SHA512:
case HashType.SHA512:
var sha512 = new SHA512
{
Hash = lineParts[0],
@@ -114,7 +182,7 @@ namespace SabreTools.Serialization.Streams
};
hashes.Add(sha512);
break;
case Hash.SpamSum:
case HashType.SpamSum:
var spamSum = new SpamSum
{
Hash = lineParts[0],
@@ -132,30 +200,36 @@ namespace SabreTools.Serialization.Streams
// Assign the hashes to the hashfile and return
switch (hash)
{
case Hash.CRC:
case HashType.CRC32:
case HashType.CRC32_ISO:
case HashType.CRC32_Naive:
case HashType.CRC32_Optimized:
case HashType.CRC32_Parallel:
dat.SFV = hashes.Cast<SFV>().ToArray();
break;
case Hash.MD5:
case HashType.MD5:
dat.MD5 = hashes.Cast<MD5>().ToArray();
break;
case Hash.SHA1:
case HashType.SHA1:
dat.SHA1 = hashes.Cast<SHA1>().ToArray();
break;
case Hash.SHA256:
case HashType.SHA256:
dat.SHA256 = hashes.Cast<SHA256>().ToArray();
break;
case Hash.SHA384:
case HashType.SHA384:
dat.SHA384 = hashes.Cast<SHA384>().ToArray();
break;
case Hash.SHA512:
case HashType.SHA512:
dat.SHA512 = hashes.Cast<SHA512>().ToArray();
break;
case Hash.SpamSum:
case HashType.SpamSum:
dat.SpamSum = hashes.Cast<SpamSum>().ToArray();
break;
}
dat.ADDITIONAL_ELEMENTS = [.. additional];
return dat;
}
#endregion
}
}

View File

@@ -1,15 +1,13 @@
using System;
using System.IO;
using System.Text;
using SabreTools.IO;
using SabreTools.Serialization.Interfaces;
using SabreTools.IO.Extensions;
namespace SabreTools.Serialization.Streams
namespace SabreTools.Serialization.Deserializers
{
public partial class IRD : IStreamSerializer<Models.IRD.File>
public class IRD : BaseBinaryDeserializer<Models.IRD.File>
{
/// <inheritdoc/>
public Models.IRD.File? Deserialize(Stream? data)
public override Models.IRD.File? Deserialize(Stream? data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
@@ -22,7 +20,7 @@ namespace SabreTools.Serialization.Streams
// Cache the current offset
int initialOffset = (int)data.Position;
// Create a new media key block to fill
// Create a new IRD to fill
var ird = new Models.IRD.File();
ird.Magic = data.ReadBytes(4);

View File

@@ -0,0 +1,137 @@
using System.Collections.Generic;
using System.IO;
using SabreTools.IO.Extensions;
using SabreTools.Models.InstallShieldArchiveV3;
namespace SabreTools.Serialization.Deserializers
{
    /// <summary>
    /// Deserializer for InstallShield Archive V3 (Z) files.
    /// Reads the header, then the directory table at the header's TOC address,
    /// then one file entry per file in each directory.
    /// </summary>
    public class InstallShieldArchiveV3 : BaseBinaryDeserializer<Archive>
    {
        /// <inheritdoc/>
        /// <param name="data">Stream to parse</param>
        /// <returns>Filled archive on success, null on error</returns>
        public override Archive? Deserialize(Stream? data)
        {
            // If the data is invalid
            if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
                return null;

            // If the offset is out of bounds
            if (data.Position < 0 || data.Position >= data.Length)
                return null;

            // Create a new archive to fill
            var archive = new Archive();

            #region Header

            // Try to parse the header
            var header = ParseHeader(data);
            if (header == null)
                return null;

            // Set the archive header
            archive.Header = header;

            #endregion

            #region Directories

            // Get the directories offset
            // BUGFIX: directoriesOffset is an unsigned value, so the original
            // `directoriesOffset < 0` guard was always false (compiler warning
            // CS0652); only the upper-bound check is meaningful.
            uint directoriesOffset = header.TocAddress;
            if (directoriesOffset >= data.Length)
                return null;

            // Seek to the directories
            data.Seek(directoriesOffset, SeekOrigin.Begin);

            // Try to parse the directories
            var directories = new List<Models.InstallShieldArchiveV3.Directory>();
            for (int i = 0; i < header.DirCount; i++)
            {
                var directory = ParseDirectory(data, out uint chunkSize);
                if (directory?.Name == null)
                    return null;

                directories.Add(directory);

                // Skip the remainder of this directory chunk. The constant 6
                // presumably covers the fixed-size fields and name-length prefix
                // already consumed by ParseDirectory — TODO confirm against the
                // on-disk format.
                data.Seek(chunkSize - directory.Name.Length - 6, SeekOrigin.Current);
            }

            // Set the directories
            archive.Directories = [.. directories];

            #endregion

            #region Files

            // Try to parse the files; file entries follow the directory table
            // and are grouped by directory (FileCount entries each)
            var files = new List<Models.InstallShieldArchiveV3.File>();
            for (int i = 0; i < archive.Directories.Length; i++)
            {
                var directory = archive.Directories[i];
                for (int j = 0; j < directory.FileCount; j++)
                {
                    var file = ParseFile(data);
                    if (file?.Name == null)
                        return null;

                    files.Add(file);

                    // Skip the remainder of this file chunk. The constant 30
                    // presumably covers the fixed-size fields and name-length
                    // prefix already consumed by ParseFile — TODO confirm.
                    data.Seek(file.ChunkSize - file.Name.Length - 30, SeekOrigin.Current);
                }
            }

            // Set the files
            archive.Files = [.. files];

            #endregion

            return archive;
        }

        /// <summary>
        /// Parse a Stream into a header
        /// </summary>
        /// <param name="data">Stream to parse</param>
        /// <returns>Filled header on success, null on error</returns>
        public static Header? ParseHeader(Stream data)
        {
            var header = data.ReadType<Header>();
            if (header == null)
                return null;

            // Validate the fixed signature value
            if (header.Signature1 != 0x8C655D13) // TODO: Move constant to Models
                return null;

            // The table of contents must lie within the stream
            if (header.TocAddress >= data.Length)
                return null;

            return header;
        }

        /// <summary>
        /// Parse a Stream into a directory
        /// </summary>
        /// <param name="data">Stream to parse</param>
        /// <param name="chunkSize">Total on-disk size of this directory chunk,
        /// used by the caller to seek to the next chunk</param>
        /// <returns>Filled directory on success, null on error</returns>
        public static Models.InstallShieldArchiveV3.Directory? ParseDirectory(Stream data, out uint chunkSize)
        {
            // TODO: Use ReadType when model is fixed
            var directory = new Models.InstallShieldArchiveV3.Directory();
            directory.FileCount = data.ReadUInt16();
            chunkSize = data.ReadUInt16(); // TODO: Add to model and remove from output params
            directory.Name = data.ReadPrefixedAnsiString();
            return directory;
        }

        /// <summary>
        /// Parse a Stream into a file
        /// </summary>
        /// <param name="data">Stream to parse</param>
        /// <returns>Filled file on success, null on error</returns>
        public static Models.InstallShieldArchiveV3.File? ParseFile(Stream data)
        {
            return data.ReadType<Models.InstallShieldArchiveV3.File>();
        }
    }
}

View File

@@ -1,18 +1,16 @@
using System.Collections.Generic;
using System.IO;
using System.Text;
using SabreTools.IO;
using SabreTools.IO.Extensions;
using SabreTools.Models.InstallShieldCabinet;
using SabreTools.Serialization.Interfaces;
using static SabreTools.Models.InstallShieldCabinet.Constants;
namespace SabreTools.Serialization.Streams
namespace SabreTools.Serialization.Deserializers
{
// TODO: Add multi-cabinet reading
public partial class InstallShieldCabinet : IStreamSerializer<Cabinet>
public class InstallShieldCabinet : BaseBinaryDeserializer<Cabinet>
{
/// <inheritdoc/>
public Cabinet? Deserialize(Stream? data)
public override Cabinet? Deserialize(Stream? data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
@@ -334,23 +332,15 @@ namespace SabreTools.Serialization.Streams
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled common header on success, null on error</returns>
private static CommonHeader? ParseCommonHeader(Stream data)
public static CommonHeader? ParseCommonHeader(Stream data)
{
CommonHeader commonHeader = new CommonHeader();
var commonHeader = data.ReadType<CommonHeader>();
byte[]? signature = data.ReadBytes(4);
if (signature == null)
if (commonHeader == null)
return null;
commonHeader.Signature = Encoding.ASCII.GetString(signature);
if (commonHeader.Signature != SignatureString)
return null;
commonHeader.Version = data.ReadUInt32();
commonHeader.VolumeInfo = data.ReadUInt32();
commonHeader.DescriptorOffset = data.ReadUInt32();
commonHeader.DescriptorSize = data.ReadUInt32();
return commonHeader;
}
@@ -360,7 +350,7 @@ namespace SabreTools.Serialization.Streams
/// <param name="data">Stream to parse</param>
/// <param name="majorVersion">Major version of the cabinet</param>
/// <returns>Filled volume header on success, null on error</returns>
private static VolumeHeader ParseVolumeHeader(Stream data, int majorVersion)
public static VolumeHeader ParseVolumeHeader(Stream data, int majorVersion)
{
VolumeHeader volumeHeader = new VolumeHeader();
@@ -407,46 +397,9 @@ namespace SabreTools.Serialization.Streams
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled descriptor on success, null on error</returns>
private static Descriptor ParseDescriptor(Stream data)
public static Descriptor? ParseDescriptor(Stream data)
{
Descriptor descriptor = new Descriptor();
descriptor.StringsOffset = data.ReadUInt32();
descriptor.Reserved0 = data.ReadBytes(4);
descriptor.ComponentListOffset = data.ReadUInt32();
descriptor.FileTableOffset = data.ReadUInt32();
descriptor.Reserved1 = data.ReadBytes(4);
descriptor.FileTableSize = data.ReadUInt32();
descriptor.FileTableSize2 = data.ReadUInt32();
descriptor.DirectoryCount = data.ReadUInt16();
descriptor.Reserved2 = data.ReadBytes(4);
descriptor.Reserved3 = data.ReadBytes(2);
descriptor.Reserved4 = data.ReadBytes(4);
descriptor.FileCount = data.ReadUInt32();
descriptor.FileTableOffset2 = data.ReadUInt32();
descriptor.ComponentTableInfoCount = data.ReadUInt16();
descriptor.ComponentTableOffset = data.ReadUInt32();
descriptor.Reserved5 = data.ReadBytes(4);
descriptor.Reserved6 = data.ReadBytes(4);
descriptor.FileGroupOffsets = new uint[MAX_FILE_GROUP_COUNT];
for (int i = 0; i < descriptor.FileGroupOffsets.Length; i++)
{
descriptor.FileGroupOffsets[i] = data.ReadUInt32();
}
descriptor.ComponentOffsets = new uint[MAX_COMPONENT_COUNT];
for (int i = 0; i < descriptor.ComponentOffsets.Length; i++)
{
descriptor.ComponentOffsets[i] = data.ReadUInt32();
}
descriptor.SetupTypesOffset = data.ReadUInt32();
descriptor.SetupTableOffset = data.ReadUInt32();
descriptor.Reserved7 = data.ReadBytes(4);
descriptor.Reserved8 = data.ReadBytes(4);
return descriptor;
return data.ReadType<Descriptor>();
}
/// <summary>
@@ -456,9 +409,9 @@ namespace SabreTools.Serialization.Streams
/// <param name="majorVersion">Major version of the cabinet</param>
/// <param name="descriptorOffset">Offset of the cabinet descriptor</param>
/// <returns>Filled offset list on success, null on error</returns>
private static OffsetList ParseOffsetList(Stream data, int majorVersion, uint descriptorOffset)
public static OffsetList ParseOffsetList(Stream data, int majorVersion, uint descriptorOffset)
{
OffsetList offsetList = new OffsetList();
var offsetList = new OffsetList();
offsetList.NameOffset = data.ReadUInt32();
offsetList.DescriptorOffset = data.ReadUInt32();
@@ -472,9 +425,9 @@ namespace SabreTools.Serialization.Streams
// Read the string
if (majorVersion >= 17)
offsetList.Name = data.ReadString(Encoding.Unicode);
offsetList.Name = data.ReadNullTerminatedUnicodeString();
else
offsetList.Name = data.ReadString(Encoding.ASCII);
offsetList.Name = data.ReadNullTerminatedAnsiString();
// Seek back to the correct offset
data.Seek(currentOffset, SeekOrigin.Begin);
@@ -489,9 +442,9 @@ namespace SabreTools.Serialization.Streams
/// <param name="majorVersion">Major version of the cabinet</param>
/// <param name="descriptorOffset">Offset of the cabinet descriptor</param>
/// <returns>Filled file group on success, null on error</returns>
private static FileGroup ParseFileGroup(Stream data, int majorVersion, uint descriptorOffset)
public static FileGroup ParseFileGroup(Stream data, int majorVersion, uint descriptorOffset)
{
FileGroup fileGroup = new FileGroup();
var fileGroup = new FileGroup();
fileGroup.NameOffset = data.ReadUInt32();
@@ -534,9 +487,9 @@ namespace SabreTools.Serialization.Streams
// Read the string
if (majorVersion >= 17)
fileGroup.Name = data.ReadString(Encoding.Unicode);
fileGroup.Name = data.ReadNullTerminatedUnicodeString();
else
fileGroup.Name = data.ReadString(Encoding.ASCII);
fileGroup.Name = data.ReadNullTerminatedAnsiString();
}
// Seek back to the correct offset
@@ -552,14 +505,14 @@ namespace SabreTools.Serialization.Streams
/// <param name="majorVersion">Major version of the cabinet</param>
/// <param name="descriptorOffset">Offset of the cabinet descriptor</param>
/// <returns>Filled component on success, null on error</returns>
private static Component ParseComponent(Stream data, int majorVersion, uint descriptorOffset)
public static Component ParseComponent(Stream data, int majorVersion, uint descriptorOffset)
{
Component component = new Component();
var component = new Component();
component.IdentifierOffset = data.ReadUInt32();
component.DescriptorOffset = data.ReadUInt32();
component.DisplayNameOffset = data.ReadUInt32();
component.Reserved0 = data.ReadBytes(2);
component.Reserved0 = data.ReadUInt16();
component.ReservedOffset0 = data.ReadUInt32();
component.ReservedOffset1 = data.ReadUInt32();
component.ComponentIndex = data.ReadUInt16();
@@ -596,9 +549,9 @@ namespace SabreTools.Serialization.Streams
// Read the string
if (majorVersion >= 17)
component.Identifier = data.ReadString(Encoding.Unicode);
component.Identifier = data.ReadNullTerminatedUnicodeString();
else
component.Identifier = data.ReadString(Encoding.ASCII);
component.Identifier = data.ReadNullTerminatedAnsiString();
}
// Read the display name, if possible
@@ -609,9 +562,9 @@ namespace SabreTools.Serialization.Streams
// Read the string
if (majorVersion >= 17)
component.DisplayName = data.ReadString(Encoding.Unicode);
component.DisplayName = data.ReadNullTerminatedUnicodeString();
else
component.DisplayName = data.ReadString(Encoding.ASCII);
component.DisplayName = data.ReadNullTerminatedAnsiString();
}
// Read the name, if possible
@@ -622,9 +575,9 @@ namespace SabreTools.Serialization.Streams
// Read the string
if (majorVersion >= 17)
component.Name = data.ReadString(Encoding.Unicode);
component.Name = data.ReadNullTerminatedUnicodeString();
else
component.Name = data.ReadString(Encoding.ASCII);
component.Name = data.ReadNullTerminatedAnsiString();
}
// Read the CLSID, if possible
@@ -657,9 +610,9 @@ namespace SabreTools.Serialization.Streams
data.Seek(nameOffset + descriptorOffset, SeekOrigin.Begin);
if (majorVersion >= 17)
component.FileGroupNames[j] = data.ReadString(Encoding.Unicode) ?? string.Empty;
component.FileGroupNames[j] = data.ReadNullTerminatedUnicodeString() ?? string.Empty;
else
component.FileGroupNames[j] = data.ReadString(Encoding.ASCII) ?? string.Empty;
component.FileGroupNames[j] = data.ReadNullTerminatedAnsiString() ?? string.Empty;
// Seek back to the original position
data.Seek(preNameOffset, SeekOrigin.Begin);
@@ -678,13 +631,13 @@ namespace SabreTools.Serialization.Streams
/// <param name="data">Stream to parse</param>
/// <param name="majorVersion">Major version of the cabinet</param>
/// <returns>Filled directory name on success, null on error</returns>
private static string? ParseDirectoryName(Stream data, int majorVersion)
public static string? ParseDirectoryName(Stream data, int majorVersion)
{
// Read the string
if (majorVersion >= 17)
return data.ReadString(Encoding.Unicode);
return data.ReadNullTerminatedUnicodeString();
else
return data.ReadString(Encoding.ASCII);
return data.ReadNullTerminatedAnsiString();
}
/// <summary>
@@ -694,7 +647,7 @@ namespace SabreTools.Serialization.Streams
/// <param name="majorVersion">Major version of the cabinet</param>
/// <param name="descriptorOffset">Offset of the cabinet descriptor</param>
/// <returns>Filled file descriptor on success, null on error</returns>
private static FileDescriptor ParseFileDescriptor(Stream data, int majorVersion, uint descriptorOffset)
public static FileDescriptor ParseFileDescriptor(Stream data, int majorVersion, uint descriptorOffset)
{
FileDescriptor fileDescriptor = new FileDescriptor();
@@ -741,9 +694,9 @@ namespace SabreTools.Serialization.Streams
// Read the string
if (majorVersion >= 17)
fileDescriptor.Name = data.ReadString(Encoding.Unicode);
fileDescriptor.Name = data.ReadNullTerminatedUnicodeString();
else
fileDescriptor.Name = data.ReadString(Encoding.ASCII);
fileDescriptor.Name = data.ReadNullTerminatedAnsiString();
}
// Seek back to the correct offset
@@ -758,7 +711,7 @@ namespace SabreTools.Serialization.Streams
/// Get the major version of the cabinet
/// </summary>
/// <remarks>This should live in the wrapper but is needed during parsing</remarks>
private static int GetMajorVersion(CommonHeader commonHeader)
public static int GetMajorVersion(CommonHeader commonHeader)
{
uint majorVersion = commonHeader.Version;
if (majorVersion >> 24 == 1)

View File

@@ -0,0 +1,95 @@
using System.IO;
using System.Text;
using Newtonsoft.Json;
namespace SabreTools.Serialization.Deserializers
{
/// <summary>
/// Base class for other JSON deserializers
/// </summary>
/// <typeparam name="T">Type of object to deserialize to</typeparam>
public class JsonFile<T> : BaseBinaryDeserializer<T>
{
    #region IByteDeserializer

    /// <inheritdoc/>
    public override T? Deserialize(byte[]? data, int offset)
        => Deserialize(data, offset, new UTF8Encoding(false));

    /// <summary>
    /// Deserialize a byte array into <typeparamref name="T"/>
    /// </summary>
    /// <param name="data">Byte array to parse</param>
    /// <param name="offset">Offset into the byte array</param>
    /// <param name="encoding">Encoding to parse text as</param>
    /// <returns>Filled object on success, null on error</returns>
    public T? Deserialize(byte[]? data, int offset, Encoding encoding)
    {
        // If the data is invalid
        if (data == null)
            return default;

        // If the offset is out of bounds
        if (offset < 0 || offset >= data.Length)
            return default;

        // Create a memory stream over the remaining bytes and parse that
        var dataStream = new MemoryStream(data, offset, data.Length - offset);
        return Deserialize(dataStream, encoding);
    }

    #endregion

    #region IFileDeserializer

    /// <inheritdoc/>
    public override T? Deserialize(string? path)
        => Deserialize(path, new UTF8Encoding(false));

    /// <summary>
    /// Deserialize a file into <typeparamref name="T"/>
    /// </summary>
    /// <param name="path">Path to deserialize from</param>
    /// <param name="encoding">Encoding to parse text as</param>
    /// <returns>Filled object on success, null on error</returns>
    public T? Deserialize(string? path, Encoding encoding)
    {
        using var data = PathProcessor.OpenStream(path);
        return Deserialize(data, encoding);
    }

    #endregion

    #region IStreamDeserializer

    /// <inheritdoc/>
    public override T? Deserialize(Stream? data)
        => Deserialize(data, new UTF8Encoding(false));

    /// <summary>
    /// Deserialize a Stream into <typeparamref name="T"/>
    /// </summary>
    /// <param name="data">Stream to parse</param>
    /// <param name="encoding">Text encoding to use</param>
    /// <returns>Filled object on success, null on error</returns>
    public T? Deserialize(Stream? data, Encoding encoding)
    {
        // If the stream is null
        if (data == null)
            return default;

        try
        {
            // Setup the serializer and the reader
            var serializer = JsonSerializer.Create();
            var streamReader = new StreamReader(data, encoding);
            var jsonReader = new JsonTextReader(streamReader);

            // Perform the deserialization and return
            return serializer.Deserialize<T>(jsonReader);
        }
        catch (JsonException)
        {
            // Malformed JSON returns null per the contract above instead of throwing
            return default;
        }
    }

    #endregion
}
}

View File

@@ -1,17 +1,16 @@
using System.Collections.Generic;
using System.IO;
using System.Text;
using SabreTools.IO;
using SabreTools.IO.Extensions;
using SabreTools.Models.LinearExecutable;
using SabreTools.Serialization.Interfaces;
using static SabreTools.Models.LinearExecutable.Constants;
namespace SabreTools.Serialization.Streams
namespace SabreTools.Serialization.Deserializers
{
public partial class LinearExecutable : IStreamSerializer<Executable>
public class LinearExecutable : BaseBinaryDeserializer<Executable>
{
/// <inheritdoc/>
public Executable? Deserialize(Stream? data)
public override Executable? Deserialize(Stream? data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
@@ -426,63 +425,13 @@ namespace SabreTools.Serialization.Streams
/// <returns>Filled information block on success, null on error</returns>
public static InformationBlock? ParseInformationBlock(Stream data)
{
// TODO: Use marshalling here instead of building
var informationBlock = new InformationBlock();
var informationBlock = data.ReadType<InformationBlock>();
byte[]? magic = data.ReadBytes(2);
if (magic == null)
if (informationBlock == null)
return null;
informationBlock.Signature = Encoding.ASCII.GetString(magic);
if (informationBlock.Signature != LESignatureString && informationBlock.Signature != LXSignatureString)
return null;
informationBlock.ByteOrder = (ByteOrder)data.ReadByteValue();
informationBlock.WordOrder = (WordOrder)data.ReadByteValue();
informationBlock.ExecutableFormatLevel = data.ReadUInt32();
informationBlock.CPUType = (CPUType)data.ReadUInt16();
informationBlock.ModuleOS = (OperatingSystem)data.ReadUInt16();
informationBlock.ModuleVersion = data.ReadUInt32();
informationBlock.ModuleTypeFlags = (ModuleFlags)data.ReadUInt32();
informationBlock.ModuleNumberPages = data.ReadUInt32();
informationBlock.InitialObjectCS = data.ReadUInt32();
informationBlock.InitialEIP = data.ReadUInt32();
informationBlock.InitialObjectSS = data.ReadUInt32();
informationBlock.InitialESP = data.ReadUInt32();
informationBlock.MemoryPageSize = data.ReadUInt32();
informationBlock.BytesOnLastPage = data.ReadUInt32();
informationBlock.FixupSectionSize = data.ReadUInt32();
informationBlock.FixupSectionChecksum = data.ReadUInt32();
informationBlock.LoaderSectionSize = data.ReadUInt32();
informationBlock.LoaderSectionChecksum = data.ReadUInt32();
informationBlock.ObjectTableOffset = data.ReadUInt32();
informationBlock.ObjectTableCount = data.ReadUInt32();
informationBlock.ObjectPageMapOffset = data.ReadUInt32();
informationBlock.ObjectIterateDataMapOffset = data.ReadUInt32();
informationBlock.ResourceTableOffset = data.ReadUInt32();
informationBlock.ResourceTableCount = data.ReadUInt32();
informationBlock.ResidentNamesTableOffset = data.ReadUInt32();
informationBlock.EntryTableOffset = data.ReadUInt32();
informationBlock.ModuleDirectivesTableOffset = data.ReadUInt32();
informationBlock.ModuleDirectivesCount = data.ReadUInt32();
informationBlock.FixupPageTableOffset = data.ReadUInt32();
informationBlock.FixupRecordTableOffset = data.ReadUInt32();
informationBlock.ImportedModulesNameTableOffset = data.ReadUInt32();
informationBlock.ImportedModulesCount = data.ReadUInt32();
informationBlock.ImportProcedureNameTableOffset = data.ReadUInt32();
informationBlock.PerPageChecksumTableOffset = data.ReadUInt32();
informationBlock.DataPagesOffset = data.ReadUInt32();
informationBlock.PreloadPageCount = data.ReadUInt32();
informationBlock.NonResidentNamesTableOffset = data.ReadUInt32();
informationBlock.NonResidentNamesTableLength = data.ReadUInt32();
informationBlock.NonResidentNamesTableChecksum = data.ReadUInt32();
informationBlock.AutomaticDataObject = data.ReadUInt32();
informationBlock.DebugInformationOffset = data.ReadUInt32();
informationBlock.DebugInformationLength = data.ReadUInt32();
informationBlock.PreloadInstancePagesNumber = data.ReadUInt32();
informationBlock.DemandInstancePagesNumber = data.ReadUInt32();
informationBlock.ExtraHeapAllocation = data.ReadUInt32();
return informationBlock;
}
@@ -491,19 +440,9 @@ namespace SabreTools.Serialization.Streams
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled object table entry on success, null on error</returns>
public static ObjectTableEntry ParseObjectTableEntry(Stream data)
public static ObjectTableEntry? ParseObjectTableEntry(Stream data)
{
// TODO: Use marshalling here instead of building
var entry = new ObjectTableEntry();
entry.VirtualSegmentSize = data.ReadUInt32();
entry.RelocationBaseAddress = data.ReadUInt32();
entry.ObjectFlags = (ObjectFlags)data.ReadUInt16();
entry.PageTableIndex = data.ReadUInt32();
entry.PageTableEntries = data.ReadUInt32();
entry.Reserved = data.ReadUInt32();
return entry;
return data.ReadType<ObjectTableEntry>();
}
/// <summary>
@@ -511,16 +450,9 @@ namespace SabreTools.Serialization.Streams
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled object page map entry on success, null on error</returns>
public static ObjectPageMapEntry ParseObjectPageMapEntry(Stream data)
public static ObjectPageMapEntry? ParseObjectPageMapEntry(Stream data)
{
// TODO: Use marshalling here instead of building
var entry = new ObjectPageMapEntry();
entry.PageDataOffset = data.ReadUInt32();
entry.DataSize = data.ReadUInt16();
entry.Flags = (ObjectPageFlags)data.ReadUInt16();
return entry;
return data.ReadType<ObjectPageMapEntry>();
}
/// <summary>
@@ -528,18 +460,9 @@ namespace SabreTools.Serialization.Streams
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled resource table entry on success, null on error</returns>
public static ResourceTableEntry ParseResourceTableEntry(Stream data)
public static ResourceTableEntry? ParseResourceTableEntry(Stream data)
{
// TODO: Use marshalling here instead of building
var entry = new ResourceTableEntry();
entry.TypeID = (ResourceTableEntryType)data.ReadUInt32();
entry.NameID = data.ReadUInt16();
entry.ResourceSize = data.ReadUInt32();
entry.ObjectNumber = data.ReadUInt16();
entry.Offset = data.ReadUInt32();
return entry;
return data.ReadType<ResourceTableEntry>();
}
/// <summary>
@@ -632,16 +555,9 @@ namespace SabreTools.Serialization.Streams
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled module format directives table entry on success, null on error</returns>
public static ModuleFormatDirectivesTableEntry ParseModuleFormatDirectivesTableEntry(Stream data)
public static ModuleFormatDirectivesTableEntry? ParseModuleFormatDirectivesTableEntry(Stream data)
{
// TODO: Use marshalling here instead of building
var entry = new ModuleFormatDirectivesTableEntry();
entry.DirectiveNumber = (DirectiveNumber)data.ReadUInt16();
entry.DirectiveDataLength = data.ReadUInt16();
entry.DirectiveDataOffset = data.ReadUInt32();
return entry;
return data.ReadType<ModuleFormatDirectivesTableEntry>();
}
/// <summary>
@@ -649,20 +565,9 @@ namespace SabreTools.Serialization.Streams
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled verify record directive table entry on success, null on error</returns>
public static VerifyRecordDirectiveTableEntry ParseVerifyRecordDirectiveTableEntry(Stream data)
public static VerifyRecordDirectiveTableEntry? ParseVerifyRecordDirectiveTableEntry(Stream data)
{
// TODO: Use marshalling here instead of building
var entry = new VerifyRecordDirectiveTableEntry();
entry.EntryCount = data.ReadUInt16();
entry.OrdinalIndex = data.ReadUInt16();
entry.Version = data.ReadUInt16();
entry.ObjectEntriesCount = data.ReadUInt16();
entry.ObjectNumberInModule = data.ReadUInt16();
entry.ObjectLoadBaseAddress = data.ReadUInt16();
entry.ObjectVirtualAddressSize = data.ReadUInt16();
return entry;
return data.ReadType<VerifyRecordDirectiveTableEntry>();
}
/// <summary>
@@ -670,14 +575,9 @@ namespace SabreTools.Serialization.Streams
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled fix-up page table entry on success, null on error</returns>
public static FixupPageTableEntry ParseFixupPageTableEntry(Stream data)
public static FixupPageTableEntry? ParseFixupPageTableEntry(Stream data)
{
// TODO: Use marshalling here instead of building
var entry = new FixupPageTableEntry();
entry.Offset = data.ReadUInt32();
return entry;
return data.ReadType<FixupPageTableEntry>();
}
/// <summary>
@@ -946,14 +846,9 @@ namespace SabreTools.Serialization.Streams
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled per-page checksum table entry on success, null on error</returns>
public static PerPageChecksumTableEntry ParsePerPageChecksumTableEntry(Stream data)
public static PerPageChecksumTableEntry? ParsePerPageChecksumTableEntry(Stream data)
{
// TODO: Use marshalling here instead of building
var entry = new PerPageChecksumTableEntry();
entry.Checksum = data.ReadUInt32();
return entry;
return data.ReadType<PerPageChecksumTableEntry>();
}
/// <summary>

View File

@@ -3,14 +3,13 @@ using System.Collections.Generic;
using System.IO;
using System.Text;
using SabreTools.Models.Listrom;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Streams
namespace SabreTools.Serialization.Deserializers
{
public partial class Listrom : IStreamSerializer<MetadataFile>
public class Listrom : BaseBinaryDeserializer<MetadataFile>
{
/// <inheritdoc/>
public MetadataFile? Deserialize(Stream? data)
public override MetadataFile? Deserialize(Stream? data)
{
// If the stream is null
if (data == null)

View File

@@ -0,0 +1,7 @@
namespace SabreTools.Serialization.Deserializers
{
/// <summary>
/// XML deserializer producing <see cref="Models.Listxml.Mame"/> objects
/// </summary>
public class Listxml : XmlFile<Models.Listxml.Mame>
{
    // All deserialization logic is handled by the XmlFile<T> base class
}
}

View File

@@ -0,0 +1,7 @@
namespace SabreTools.Serialization.Deserializers
{
/// <summary>
/// XML deserializer producing <see cref="Models.Logiqx.Datafile"/> objects
/// </summary>
public class Logiqx : XmlFile<Models.Logiqx.Datafile>
{
    // All deserialization logic is handled by the XmlFile<T> base class
}
}

View File

@@ -0,0 +1,7 @@
namespace SabreTools.Serialization.Deserializers
{
/// <summary>
/// XML deserializer producing <see cref="Models.Listxml.M1"/> objects
/// </summary>
public class M1 : XmlFile<Models.Listxml.M1>
{
    // All deserialization logic is handled by the XmlFile<T> base class
}
}

View File

@@ -1,16 +1,15 @@
using System.IO;
using System.Text;
using SabreTools.IO;
using SabreTools.IO.Extensions;
using SabreTools.Models.MSDOS;
using SabreTools.Serialization.Interfaces;
using static SabreTools.Models.MSDOS.Constants;
namespace SabreTools.Serialization.Streams
namespace SabreTools.Serialization.Deserializers
{
public partial class MSDOS : IStreamSerializer<Executable>
public class MSDOS : BaseBinaryDeserializer<Executable>
{
/// <inheritdoc/>
public Executable? Deserialize(Stream? data)
public override Executable? Deserialize(Stream? data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
@@ -127,20 +126,31 @@ namespace SabreTools.Serialization.Streams
/// <param name="data">Stream to parse</param>
/// <param name="count">Number of relocation table entries to read</param>
/// <returns>Filled relocation table on success, null on error</returns>
private static RelocationEntry[] ParseRelocationTable(Stream data, int count)
private static RelocationEntry[]? ParseRelocationTable(Stream data, int count)
{
// TODO: Use marshalling here instead of building
var relocationTable = new RelocationEntry[count];
for (int i = 0; i < count; i++)
{
var entry = new RelocationEntry();
entry.Offset = data.ReadUInt16();
entry.Segment = data.ReadUInt16();
var entry = ParseRelocationEntry(data);
if (entry == null)
return null;
relocationTable[i] = entry;
}
return relocationTable;
}
/// <summary>
/// Parse a Stream into a relocation table entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled relocation table entry on success, null on error</returns>
public static RelocationEntry? ParseRelocationEntry(Stream data)
{
return data.ReadType<RelocationEntry>();
}
}
}

View File

@@ -1,17 +1,16 @@
using System.IO;
using System.Text;
using SabreTools.IO;
using SabreTools.IO.Extensions;
using SabreTools.Models.MicrosoftCabinet;
using SabreTools.Serialization.Interfaces;
using static SabreTools.Models.MicrosoftCabinet.Constants;
namespace SabreTools.Serialization.Streams
namespace SabreTools.Serialization.Deserializers
{
// TODO: Add multi-cabinet reading
public partial class MicrosoftCabinet : IStreamSerializer<Cabinet>
public class MicrosoftCabinet : BaseBinaryDeserializer<Cabinet>
{
/// <inheritdoc/>
public Cabinet? Deserialize(Stream? data)
public override Cabinet? Deserialize(Stream? data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
@@ -139,8 +138,8 @@ namespace SabreTools.Serialization.Streams
if (header.Flags.HasFlag(HeaderFlags.PREV_CABINET))
#endif
{
header.CabinetPrev = data.ReadString(Encoding.ASCII);
header.DiskPrev = data.ReadString(Encoding.ASCII);
header.CabinetPrev = data.ReadNullTerminatedAnsiString();
header.DiskPrev = data.ReadNullTerminatedAnsiString();
}
#if NET20 || NET35
@@ -149,8 +148,8 @@ namespace SabreTools.Serialization.Streams
if (header.Flags.HasFlag(HeaderFlags.NEXT_CABINET))
#endif
{
header.CabinetNext = data.ReadString(Encoding.ASCII);
header.DiskNext = data.ReadString(Encoding.ASCII);
header.CabinetNext = data.ReadNullTerminatedAnsiString();
header.DiskNext = data.ReadNullTerminatedAnsiString();
}
return header;
@@ -235,9 +234,9 @@ namespace SabreTools.Serialization.Streams
#else
if (file.Attributes.HasFlag(Models.MicrosoftCabinet.FileAttributes.NAME_IS_UTF))
#endif
file.Name = data.ReadString(Encoding.Unicode);
file.Name = data.ReadNullTerminatedUnicodeString();
else
file.Name = data.ReadString(Encoding.ASCII);
file.Name = data.ReadNullTerminatedAnsiString();
return file;
}

View File

@@ -2,17 +2,16 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
using SabreTools.IO;
using SabreTools.IO.Extensions;
using SabreTools.Models.MoPaQ;
using SabreTools.Serialization.Interfaces;
using static SabreTools.Models.MoPaQ.Constants;
namespace SabreTools.Serialization.Streams
namespace SabreTools.Serialization.Deserializers
{
public partial class MoPaQ : IStreamSerializer<Archive>
public class MoPaQ : BaseBinaryDeserializer<Archive>
{
/// <inheritdoc/>
public Archive? Deserialize(Stream? data)
public override Archive? Deserialize(Stream? data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
@@ -405,20 +404,13 @@ namespace SabreTools.Serialization.Streams
/// <returns>Filled user data on success, null on error</returns>
private static UserData? ParseUserData(Stream data)
{
UserData userData = new UserData();
var userData = data.ReadType<UserData>();
byte[]? signature = data.ReadBytes(4);
if (signature == null)
if (userData == null)
return null;
userData.Signature = Encoding.ASCII.GetString(signature);
if (userData.Signature != UserDataSignatureString)
return null;
userData.UserDataSize = data.ReadUInt32();
userData.HeaderOffset = data.ReadUInt32();
userData.UserDataHeaderSize = data.ReadUInt32();
return userData;
}
@@ -429,7 +421,7 @@ namespace SabreTools.Serialization.Streams
/// <returns>Filled HET table on success, null on error</returns>
private static HetTable? ParseHetTable(Stream data)
{
HetTable hetTable = new HetTable();
var hetTable = new HetTable();
// Common Headers
byte[]? signature = data.ReadBytes(4);
@@ -466,7 +458,7 @@ namespace SabreTools.Serialization.Streams
/// <returns>Filled BET table on success, null on error</returns>
private static BetTable? ParseBetTable(Stream data)
{
BetTable betTable = new BetTable();
var betTable = new BetTable();
// Common Headers
byte[]? signature = data.ReadBytes(4);
@@ -520,18 +512,9 @@ namespace SabreTools.Serialization.Streams
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled hash entry on success, null on error</returns>
private static HashEntry ParseHashEntry(Stream data)
private static HashEntry? ParseHashEntry(Stream data)
{
// TODO: Use marshalling here instead of building
HashEntry hashEntry = new HashEntry();
hashEntry.NameHashPartA = data.ReadUInt32();
hashEntry.NameHashPartB = data.ReadUInt32();
hashEntry.Locale = (Locale)data.ReadUInt16();
hashEntry.Platform = data.ReadUInt16();
hashEntry.BlockIndex = data.ReadUInt32();
return hashEntry;
return data.ReadType<HashEntry>();
}
/// <summary>
@@ -539,16 +522,9 @@ namespace SabreTools.Serialization.Streams
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled block entry on success, null on error</returns>
private static BlockEntry ParseBlockEntry(Stream data)
private static BlockEntry? ParseBlockEntry(Stream data)
{
BlockEntry blockEntry = new BlockEntry();
blockEntry.FilePosition = data.ReadUInt32();
blockEntry.CompressedSize = data.ReadUInt32();
blockEntry.UncompressedSize = data.ReadUInt32();
blockEntry.Flags = (FileFlags)data.ReadUInt32();
return blockEntry;
return data.ReadType<BlockEntry>();
}
/// <summary>
@@ -556,19 +532,9 @@ namespace SabreTools.Serialization.Streams
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled patch info on success, null on error</returns>
private static PatchInfo ParsePatchInfo(Stream data)
private static PatchInfo? ParsePatchInfo(Stream data)
{
// TODO: Use marshalling here instead of building
PatchInfo patchInfo = new PatchInfo();
patchInfo.Length = data.ReadUInt32();
patchInfo.Flags = data.ReadUInt32();
patchInfo.DataSize = data.ReadUInt32();
patchInfo.MD5 = data.ReadBytes(0x10);
// TODO: Fill the sector offset table
return patchInfo;
return data.ReadType<PatchInfo>();
}
#region Helpers

View File

@@ -0,0 +1,412 @@
using System;
using System.IO;
using System.Text;
using SabreTools.IO.Extensions;
using SabreTools.Models.N3DS;
using static SabreTools.Models.N3DS.Constants;
namespace SabreTools.Serialization.Deserializers
{
/// <summary>
/// Deserializer for Nintendo 3DS cart images (NCSD containers)
/// </summary>
public class N3DS : BaseBinaryDeserializer<Cart>
{
    /// <inheritdoc/>
    public override Cart? Deserialize(Stream? data)
    {
        // If the data is invalid
        if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
            return null;

        // If the offset is out of bounds
        if (data.Position < 0 || data.Position >= data.Length)
            return null;

        // Create a new cart image to fill
        var cart = new Cart();

        #region NCSD Header

        // Try to parse the header
        var header = ParseNCSDHeader(data);
        if (header == null)
            return null;

        // Set the cart image header
        cart.Header = header;

        #endregion

        #region Card Info Header

        // Try to parse the card info header
        var cardInfoHeader = ParseCardInfoHeader(data);
        if (cardInfoHeader == null)
            return null;

        // Set the card info header
        cart.CardInfoHeader = cardInfoHeader;

        #endregion

        #region Development Card Info Header

        // Try to parse the development card info header
        var developmentCardInfoHeader = ParseDevelopmentCardInfoHeader(data);
        if (developmentCardInfoHeader == null)
            return null;

        // Set the development card info header
        cart.DevelopmentCardInfoHeader = developmentCardInfoHeader;

        #endregion

        #region Partitions

        // Create the partition table
        cart.Partitions = new NCCHHeader[8];

        // Iterate and build the partitions
        for (int i = 0; i < 8; i++)
        {
            cart.Partitions[i] = ParseNCCHHeader(data);
        }

        #endregion

        // Cache the media unit size for further use
        // (0x200 << MediaUnitSize flag byte, per the NCSD partition flags)
        long mediaUnitSize = 0;
        if (header.PartitionFlags != null)
            mediaUnitSize = (uint)(0x200 * Math.Pow(2, header.PartitionFlags[(int)NCSDFlags.MediaUnitSize]));

        #region Extended Headers

        // Create the extended header table
        cart.ExtendedHeaders = new NCCHExtendedHeader[8];

        // Iterate and build the extended headers
        for (int i = 0; i < 8; i++)
        {
            // If we have an encrypted or invalid partition
            if (cart.Partitions[i]!.MagicID != NCCHMagicNumber)
                continue;

            // If we have no partitions table
            if (cart.Header!.PartitionsTable == null)
                continue;

            // Get the extended header offset
            long offset = (cart.Header.PartitionsTable[i]!.Offset * mediaUnitSize) + 0x200;
            if (offset < 0 || offset >= data.Length)
                continue;

            // Seek to the extended header
            data.Seek(offset, SeekOrigin.Begin);

            // Parse the extended header
            var extendedHeader = ParseNCCHExtendedHeader(data);
            if (extendedHeader != null)
                cart.ExtendedHeaders[i] = extendedHeader;
        }

        #endregion

        #region ExeFS Headers

        // Create the ExeFS header table
        cart.ExeFSHeaders = new ExeFSHeader[8];

        // Iterate and build the ExeFS headers
        for (int i = 0; i < 8; i++)
        {
            // If we have an encrypted or invalid partition
            if (cart.Partitions[i]!.MagicID != NCCHMagicNumber)
                continue;

            // If we have no partitions table
            if (cart.Header!.PartitionsTable == null)
                continue;

            // Get the ExeFS header offset
            long offset = (cart.Header.PartitionsTable[i]!.Offset + cart.Partitions[i]!.ExeFSOffsetInMediaUnits) * mediaUnitSize;
            if (offset < 0 || offset >= data.Length)
                continue;

            // Seek to the ExeFS header
            data.Seek(offset, SeekOrigin.Begin);

            // Parse the ExeFS header
            // NOTE(review): a failed ExeFS header aborts the whole parse while a
            // failed RomFS header below is merely skipped — confirm this asymmetry
            // is intentional
            var exeFsHeader = ParseExeFSHeader(data);
            if (exeFsHeader == null)
                return null;

            cart.ExeFSHeaders[i] = exeFsHeader;
        }

        #endregion

        #region RomFS Headers

        // Create the RomFS header table
        cart.RomFSHeaders = new RomFSHeader[8];

        // Iterate and build the RomFS headers
        for (int i = 0; i < 8; i++)
        {
            // If we have an encrypted or invalid partition
            if (cart.Partitions[i]!.MagicID != NCCHMagicNumber)
                continue;

            // If we have no partitions table
            if (cart.Header!.PartitionsTable == null)
                continue;

            // Get the RomFS header offset
            long offset = (cart.Header.PartitionsTable[i]!.Offset + cart.Partitions[i]!.RomFSOffsetInMediaUnits) * mediaUnitSize;
            if (offset < 0 || offset >= data.Length)
                continue;

            // Seek to the RomFS header
            data.Seek(offset, SeekOrigin.Begin);

            // Parse the RomFS header
            var romFsHeader = ParseRomFSHeader(data);
            if (romFsHeader != null)
                cart.RomFSHeaders[i] = romFsHeader;
        }

        #endregion

        return cart;
    }

    /// <summary>
    /// Parse a Stream into an NCSD header
    /// </summary>
    /// <param name="data">Stream to parse</param>
    /// <returns>Filled NCSD header on success, null on error</returns>
    public static NCSDHeader? ParseNCSDHeader(Stream data)
    {
        // TODO: Use marshalling here instead of building
        var header = new NCSDHeader();

        header.RSA2048Signature = data.ReadBytes(0x100);
        byte[]? magicNumber = data.ReadBytes(4);
        if (magicNumber == null)
            return null;

        header.MagicNumber = Encoding.ASCII.GetString(magicNumber).TrimEnd('\0');
        if (header.MagicNumber != NCSDMagicNumber)
            return null;

        header.ImageSizeInMediaUnits = data.ReadUInt32();
        header.MediaId = data.ReadBytes(8);
        header.PartitionsFSType = (FilesystemType)data.ReadUInt64();
        header.PartitionsCryptType = data.ReadBytes(8);
        header.PartitionsTable = new PartitionTableEntry[8];
        for (int i = 0; i < 8; i++)
        {
            var partitionTableEntry = ParsePartitionTableEntry(data);
            if (partitionTableEntry == null)
                return null;

            header.PartitionsTable[i] = partitionTableEntry;
        }

        // The header tail differs by filesystem type
        if (header.PartitionsFSType == FilesystemType.Normal || header.PartitionsFSType == FilesystemType.None)
        {
            header.ExheaderHash = data.ReadBytes(0x20);
            header.AdditionalHeaderSize = data.ReadUInt32();
            header.SectorZeroOffset = data.ReadUInt32();
            header.PartitionFlags = data.ReadBytes(8);
            header.PartitionIdTable = new ulong[8];
            for (int i = 0; i < 8; i++)
            {
                header.PartitionIdTable[i] = data.ReadUInt64();
            }
            header.Reserved1 = data.ReadBytes(0x20);
            header.Reserved2 = data.ReadBytes(0x0E);
            header.FirmUpdateByte1 = data.ReadByteValue();
            header.FirmUpdateByte2 = data.ReadByteValue();
        }
        else if (header.PartitionsFSType == FilesystemType.FIRM)
        {
            header.Unknown = data.ReadBytes(0x5E);
            header.EncryptedMBR = data.ReadBytes(0x42);
        }

        return header;
    }

    /// <summary>
    /// Parse a Stream into a partition table entry
    /// </summary>
    /// <param name="data">Stream to parse</param>
    /// <returns>Filled partition table entry on success, null on error</returns>
    public static PartitionTableEntry? ParsePartitionTableEntry(Stream data)
    {
        return data.ReadType<PartitionTableEntry>();
    }

    /// <summary>
    /// Parse a Stream into a card info header
    /// </summary>
    /// <param name="data">Stream to parse</param>
    /// <returns>Filled card info header on success, null on error</returns>
    public static CardInfoHeader? ParseCardInfoHeader(Stream data)
    {
        return data.ReadType<CardInfoHeader>();
    }

    /// <summary>
    /// Parse a Stream into a development card info header
    /// </summary>
    /// <param name="data">Stream to parse</param>
    /// <returns>Filled development card info header on success, null on error</returns>
    public static DevelopmentCardInfoHeader? ParseDevelopmentCardInfoHeader(Stream data)
    {
        return data.ReadType<DevelopmentCardInfoHeader>();
    }

    /// <summary>
    /// Parse a Stream into an NCCH header
    /// </summary>
    /// <param name="data">Stream to parse</param>
    /// <param name="skipSignature">Indicates if the signature should be skipped</param>
    /// <returns>Filled NCCH header on success, null on error</returns>
    public static NCCHHeader ParseNCCHHeader(Stream data, bool skipSignature = false)
    {
        // TODO: Use marshalling here instead of building
        var header = new NCCHHeader();

        if (!skipSignature)
            header.RSA2048Signature = data.ReadBytes(0x100);

        byte[]? magicId = data.ReadBytes(4);
        if (magicId != null)
            header.MagicID = Encoding.ASCII.GetString(magicId).TrimEnd('\0');
        header.ContentSizeInMediaUnits = data.ReadUInt32();
        header.PartitionId = data.ReadUInt64();
        header.MakerCode = data.ReadUInt16();
        header.Version = data.ReadUInt16();
        header.VerificationHash = data.ReadUInt32();
        header.ProgramId = data.ReadBytes(8);
        header.Reserved1 = data.ReadBytes(0x10);
        header.LogoRegionHash = data.ReadBytes(0x20);
        byte[]? productCode = data.ReadBytes(0x10);
        if (productCode != null)
            header.ProductCode = Encoding.ASCII.GetString(productCode).TrimEnd('\0');
        header.ExtendedHeaderHash = data.ReadBytes(0x20);
        header.ExtendedHeaderSizeInBytes = data.ReadUInt32();
        header.Reserved2 = data.ReadUInt32();
        header.Flags = ParseNCCHHeaderFlags(data);
        header.PlainRegionOffsetInMediaUnits = data.ReadUInt32();
        header.PlainRegionSizeInMediaUnits = data.ReadUInt32();
        header.LogoRegionOffsetInMediaUnits = data.ReadUInt32();
        header.LogoRegionSizeInMediaUnits = data.ReadUInt32();
        header.ExeFSOffsetInMediaUnits = data.ReadUInt32();
        header.ExeFSSizeInMediaUnits = data.ReadUInt32();
        header.ExeFSHashRegionSizeInMediaUnits = data.ReadUInt32();
        header.Reserved3 = data.ReadUInt32();
        header.RomFSOffsetInMediaUnits = data.ReadUInt32();
        header.RomFSSizeInMediaUnits = data.ReadUInt32();
        header.RomFSHashRegionSizeInMediaUnits = data.ReadUInt32();
        header.Reserved4 = data.ReadUInt32();
        header.ExeFSSuperblockHash = data.ReadBytes(0x20);
        header.RomFSSuperblockHash = data.ReadBytes(0x20);

        return header;
    }

    /// <summary>
    /// Parse a Stream into an NCCH header flags
    /// </summary>
    /// <param name="data">Stream to parse</param>
    /// <returns>Filled NCCH header flags on success, null on error</returns>
    public static NCCHHeaderFlags? ParseNCCHHeaderFlags(Stream data)
    {
        return data.ReadType<NCCHHeaderFlags>();
    }

    /// <summary>
    /// Parse a Stream into an NCCH extended header
    /// </summary>
    /// <param name="data">Stream to parse</param>
    /// <returns>Filled NCCH extended header on success, null on error</returns>
    public static NCCHExtendedHeader? ParseNCCHExtendedHeader(Stream data)
    {
        return data.ReadType<NCCHExtendedHeader>();
    }

    /// <summary>
    /// Parse a Stream into an ExeFS header
    /// </summary>
    /// <param name="data">Stream to parse</param>
    /// <returns>Filled ExeFS header on success, null on error</returns>
    public static ExeFSHeader? ParseExeFSHeader(Stream data)
    {
        // TODO: Use marshalling here instead of building
        var exeFSHeader = new ExeFSHeader();

        exeFSHeader.FileHeaders = new ExeFSFileHeader[10];
        for (int i = 0; i < 10; i++)
        {
            var exeFsFileHeader = ParseExeFSFileHeader(data);
            if (exeFsFileHeader == null)
                return null;

            exeFSHeader.FileHeaders[i] = exeFsFileHeader;
        }
        exeFSHeader.Reserved = data.ReadBytes(0x20);
        exeFSHeader.FileHashes = new byte[10][];
        for (int i = 0; i < 10; i++)
        {
            exeFSHeader.FileHashes[i] = data.ReadBytes(0x20) ?? [];
        }

        return exeFSHeader;
    }

    /// <summary>
    /// Parse a Stream into an ExeFS file header
    /// </summary>
    /// <param name="data">Stream to parse</param>
    /// <returns>Filled ExeFS file header on success, null on error</returns>
    public static ExeFSFileHeader? ParseExeFSFileHeader(Stream data)
    {
        // TODO: Use marshalling here instead of building
        var exeFSFileHeader = new ExeFSFileHeader();

        byte[]? fileName = data.ReadBytes(8);
        if (fileName != null)
            exeFSFileHeader.FileName = Encoding.ASCII.GetString(fileName).TrimEnd('\0');
        exeFSFileHeader.FileOffset = data.ReadUInt32();
        exeFSFileHeader.FileSize = data.ReadUInt32();

        return exeFSFileHeader;
    }

    /// <summary>
    /// Parse a Stream into an RomFS header
    /// </summary>
    /// <param name="data">Stream to parse</param>
    /// <returns>Filled RomFS header on success, null on error</returns>
    public static RomFSHeader? ParseRomFSHeader(Stream data)
    {
        var romFSHeader = data.ReadType<RomFSHeader>();
        if (romFSHeader == null)
            return null;

        // Validate both magic values before accepting the header
        if (romFSHeader.MagicString != RomFSMagicNumber)
            return null;
        if (romFSHeader.MagicNumber != RomFSSecondMagicNumber)
            return null;

        return romFSHeader;
    }
}
}

View File

@@ -1,16 +1,15 @@
using System.Collections.Generic;
using System.IO;
using System.Text;
using SabreTools.IO;
using SabreTools.IO.Extensions;
using SabreTools.Models.NCF;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Streams
namespace SabreTools.Serialization.Deserializers
{
public partial class NCF : IStreamSerializer<Models.NCF.File>
public class NCF : BaseBinaryDeserializer<Models.NCF.File>
{
/// <inheritdoc/>
public Models.NCF.File? Deserialize(Stream? data)
public override Models.NCF.File? Deserialize(Stream? data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
@@ -62,6 +61,9 @@ namespace SabreTools.Serialization.Streams
for (int i = 0; i < directoryHeader.ItemCount; i++)
{
var directoryEntry = ParseDirectoryEntry(data);
if (directoryEntry == null)
return null;
file.DirectoryEntries[i] = directoryEntry;
}
@@ -84,7 +86,7 @@ namespace SabreTools.Serialization.Streams
while (data.Position < directoryNamesEnd)
{
long nameOffset = data.Position - directoryNamesStart;
string? directoryName = data.ReadString(Encoding.ASCII);
string? directoryName = data.ReadNullTerminatedAnsiString();
if (data.Position > directoryNamesEnd)
{
data.Seek(-directoryName?.Length ?? 0, SeekOrigin.Current);
@@ -97,13 +99,6 @@ namespace SabreTools.Serialization.Streams
file.DirectoryNames[nameOffset] = directoryName;
}
// Loop and assign to entries
foreach (var directoryEntry in file.DirectoryEntries)
{
if (directoryEntry != null)
directoryEntry.Name = file.DirectoryNames[directoryEntry.NameOffset];
}
}
#endregion
@@ -117,6 +112,9 @@ namespace SabreTools.Serialization.Streams
for (int i = 0; i < directoryHeader.Info1Count; i++)
{
var directoryInfo1Entry = ParseDirectoryInfo1Entry(data);
if (directoryInfo1Entry == null)
return null;
file.DirectoryInfo1Entries[i] = directoryInfo1Entry;
}
@@ -131,6 +129,9 @@ namespace SabreTools.Serialization.Streams
for (int i = 0; i < directoryHeader.ItemCount; i++)
{
var directoryInfo2Entry = ParseDirectoryInfo2Entry(data);
if (directoryInfo2Entry == null)
return null;
file.DirectoryInfo2Entries[i] = directoryInfo2Entry;
}
@@ -145,6 +146,9 @@ namespace SabreTools.Serialization.Streams
for (int i = 0; i < directoryHeader.CopyCount; i++)
{
var directoryCopyEntry = ParseDirectoryCopyEntry(data);
if (directoryCopyEntry == null)
return null;
file.DirectoryCopyEntries[i] = directoryCopyEntry;
}
@@ -159,6 +163,9 @@ namespace SabreTools.Serialization.Streams
for (int i = 0; i < directoryHeader.LocalCount; i++)
{
var directoryLocalEntry = ParseDirectoryLocalEntry(data);
if (directoryLocalEntry == null)
return null;
file.DirectoryLocalEntries[i] = directoryLocalEntry;
}
@@ -188,6 +195,9 @@ namespace SabreTools.Serialization.Streams
for (int i = 0; i < directoryHeader.ItemCount; i++)
{
var unknownEntry = ParseUnknownEntry(data);
if (unknownEntry == null)
return null;
file.UnknownEntries[i] = unknownEntry;
}
@@ -229,6 +239,9 @@ namespace SabreTools.Serialization.Streams
for (int i = 0; i < checksumMapHeader.ItemCount; i++)
{
var checksumMapEntry = ParseChecksumMapEntry(data);
if (checksumMapEntry == null)
return null;
file.ChecksumMapEntries[i] = checksumMapEntry;
}
@@ -243,6 +256,9 @@ namespace SabreTools.Serialization.Streams
for (int i = 0; i < checksumMapHeader.ChecksumCount; i++)
{
var checksumEntry = ParseChecksumEntry(data);
if (checksumEntry == null)
return null;
file.ChecksumEntries[i] = checksumEntry;
}
@@ -261,30 +277,17 @@ namespace SabreTools.Serialization.Streams
/// <returns>Filled Half-Life No Cache header on success, null on error</returns>
private static Header? ParseHeader(Stream data)
{
// TODO: Use marshalling here instead of building
Header header = new Header();
var header = data.ReadType<Header>();
header.Dummy0 = data.ReadUInt32();
if (header == null)
return null;
if (header.Dummy0 != 0x00000001)
return null;
header.MajorVersion = data.ReadUInt32();
if (header.MajorVersion != 0x00000002)
return null;
header.MinorVersion = data.ReadUInt32();
if (header.MinorVersion != 1)
return null;
header.CacheID = data.ReadUInt32();
header.LastVersionPlayed = data.ReadUInt32();
header.Dummy1 = data.ReadUInt32();
header.Dummy2 = data.ReadUInt32();
header.FileSize = data.ReadUInt32();
header.BlockSize = data.ReadUInt32();
header.BlockCount = data.ReadUInt32();
header.Dummy3 = data.ReadUInt32();
return header;
}
@@ -295,27 +298,13 @@ namespace SabreTools.Serialization.Streams
/// <returns>Filled Half-Life No Cache directory header on success, null on error</returns>
private static DirectoryHeader? ParseDirectoryHeader(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryHeader directoryHeader = new DirectoryHeader();
var directoryHeader = data.ReadType<DirectoryHeader>();
directoryHeader.Dummy0 = data.ReadUInt32();
if (directoryHeader == null)
return null;
if (directoryHeader.Dummy0 != 0x00000004)
return null;
directoryHeader.CacheID = data.ReadUInt32();
directoryHeader.LastVersionPlayed = data.ReadUInt32();
directoryHeader.ItemCount = data.ReadUInt32();
directoryHeader.FileCount = data.ReadUInt32();
directoryHeader.ChecksumDataLength = data.ReadUInt32();
directoryHeader.DirectorySize = data.ReadUInt32();
directoryHeader.NameSize = data.ReadUInt32();
directoryHeader.Info1Count = data.ReadUInt32();
directoryHeader.CopyCount = data.ReadUInt32();
directoryHeader.LocalCount = data.ReadUInt32();
directoryHeader.Dummy1 = data.ReadUInt32();
directoryHeader.Dummy2 = data.ReadUInt32();
directoryHeader.Checksum = data.ReadUInt32();
return directoryHeader;
}
@@ -324,20 +313,9 @@ namespace SabreTools.Serialization.Streams
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life No Cache directory entry on success, null on error</returns>
private static DirectoryEntry ParseDirectoryEntry(Stream data)
private static DirectoryEntry? ParseDirectoryEntry(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryEntry directoryEntry = new DirectoryEntry();
directoryEntry.NameOffset = data.ReadUInt32();
directoryEntry.ItemSize = data.ReadUInt32();
directoryEntry.ChecksumIndex = data.ReadUInt32();
directoryEntry.DirectoryFlags = (HL_NCF_FLAG)data.ReadUInt32();
directoryEntry.ParentIndex = data.ReadUInt32();
directoryEntry.NextIndex = data.ReadUInt32();
directoryEntry.FirstIndex = data.ReadUInt32();
return directoryEntry;
return data.ReadType<DirectoryEntry>();
}
/// <summary>
@@ -345,14 +323,9 @@ namespace SabreTools.Serialization.Streams
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life No Cache directory info 1 entry on success, null on error</returns>
private static DirectoryInfo1Entry ParseDirectoryInfo1Entry(Stream data)
private static DirectoryInfo1Entry? ParseDirectoryInfo1Entry(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryInfo1Entry directoryInfo1Entry = new DirectoryInfo1Entry();
directoryInfo1Entry.Dummy0 = data.ReadUInt32();
return directoryInfo1Entry;
return data.ReadType<DirectoryInfo1Entry>();
}
/// <summary>
@@ -360,14 +333,9 @@ namespace SabreTools.Serialization.Streams
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life No Cache directory info 2 entry on success, null on error</returns>
private static DirectoryInfo2Entry ParseDirectoryInfo2Entry(Stream data)
private static DirectoryInfo2Entry? ParseDirectoryInfo2Entry(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryInfo2Entry directoryInfo2Entry = new DirectoryInfo2Entry();
directoryInfo2Entry.Dummy0 = data.ReadUInt32();
return directoryInfo2Entry;
return data.ReadType<DirectoryInfo2Entry>();
}
/// <summary>
@@ -375,14 +343,9 @@ namespace SabreTools.Serialization.Streams
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life No Cache directory copy entry on success, null on error</returns>
private static DirectoryCopyEntry ParseDirectoryCopyEntry(Stream data)
private static DirectoryCopyEntry? ParseDirectoryCopyEntry(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryCopyEntry directoryCopyEntry = new DirectoryCopyEntry();
directoryCopyEntry.DirectoryIndex = data.ReadUInt32();
return directoryCopyEntry;
return data.ReadType<DirectoryCopyEntry>();
}
/// <summary>
@@ -390,14 +353,9 @@ namespace SabreTools.Serialization.Streams
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life No Cache directory local entry on success, null on error</returns>
private static DirectoryLocalEntry ParseDirectoryLocalEntry(Stream data)
private static DirectoryLocalEntry? ParseDirectoryLocalEntry(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryLocalEntry directoryLocalEntry = new DirectoryLocalEntry();
directoryLocalEntry.DirectoryIndex = data.ReadUInt32();
return directoryLocalEntry;
return data.ReadType<DirectoryLocalEntry>();
}
/// <summary>
@@ -407,14 +365,12 @@ namespace SabreTools.Serialization.Streams
/// <returns>Filled Half-Life No Cache unknown header on success, null on error</returns>
private static UnknownHeader? ParseUnknownHeader(Stream data)
{
// TODO: Use marshalling here instead of building
UnknownHeader unknownHeader = new UnknownHeader();
var unknownHeader = data.ReadType<UnknownHeader>();
unknownHeader.Dummy0 = data.ReadUInt32();
if (unknownHeader == null)
return null;
if (unknownHeader.Dummy0 != 0x00000001)
return null;
unknownHeader.Dummy1 = data.ReadUInt32();
if (unknownHeader.Dummy1 != 0x00000000)
return null;
@@ -426,14 +382,9 @@ namespace SabreTools.Serialization.Streams
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life No Cacheunknown entry on success, null on error</returns>
private static UnknownEntry ParseUnknownEntry(Stream data)
private static UnknownEntry? ParseUnknownEntry(Stream data)
{
// TODO: Use marshalling here instead of building
UnknownEntry unknownEntry = new UnknownEntry();
unknownEntry.Dummy0 = data.ReadUInt32();
return unknownEntry;
return data.ReadType<UnknownEntry>();
}
/// <summary>
@@ -443,15 +394,13 @@ namespace SabreTools.Serialization.Streams
/// <returns>Filled Half-Life No Cache checksum header on success, null on error</returns>
private static ChecksumHeader? ParseChecksumHeader(Stream data)
{
// TODO: Use marshalling here instead of building
ChecksumHeader checksumHeader = new ChecksumHeader();
var checksumHeader = data.ReadType<ChecksumHeader>();
checksumHeader.Dummy0 = data.ReadUInt32();
if (checksumHeader == null)
return null;
if (checksumHeader.Dummy0 != 0x00000001)
return null;
checksumHeader.ChecksumSize = data.ReadUInt32();
return checksumHeader;
}
@@ -462,20 +411,15 @@ namespace SabreTools.Serialization.Streams
/// <returns>Filled Half-Life No Cache checksum map header on success, null on error</returns>
private static ChecksumMapHeader? ParseChecksumMapHeader(Stream data)
{
// TODO: Use marshalling here instead of building
ChecksumMapHeader checksumMapHeader = new ChecksumMapHeader();
var checksumMapHeader = data.ReadType<ChecksumMapHeader>();
checksumMapHeader.Dummy0 = data.ReadUInt32();
if (checksumMapHeader == null)
return null;
if (checksumMapHeader.Dummy0 != 0x14893721)
return null;
checksumMapHeader.Dummy1 = data.ReadUInt32();
if (checksumMapHeader.Dummy1 != 0x00000001)
return null;
checksumMapHeader.ItemCount = data.ReadUInt32();
checksumMapHeader.ChecksumCount = data.ReadUInt32();
return checksumMapHeader;
}
@@ -484,15 +428,9 @@ namespace SabreTools.Serialization.Streams
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life No Cache checksum map entry on success, null on error</returns>
private static ChecksumMapEntry ParseChecksumMapEntry(Stream data)
private static ChecksumMapEntry? ParseChecksumMapEntry(Stream data)
{
// TODO: Use marshalling here instead of building
ChecksumMapEntry checksumMapEntry = new ChecksumMapEntry();
checksumMapEntry.ChecksumCount = data.ReadUInt32();
checksumMapEntry.FirstChecksumIndex = data.ReadUInt32();
return checksumMapEntry;
return data.ReadType<ChecksumMapEntry>();
}
/// <summary>
@@ -500,14 +438,9 @@ namespace SabreTools.Serialization.Streams
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life No Cache checksum entry on success, null on error</returns>
private static ChecksumEntry ParseChecksumEntry(Stream data)
private static ChecksumEntry? ParseChecksumEntry(Stream data)
{
// TODO: Use marshalling here instead of building
ChecksumEntry checksumEntry = new ChecksumEntry();
checksumEntry.Checksum = data.ReadUInt32();
return checksumEntry;
return data.ReadType<ChecksumEntry>();
}
}
}

View File

@@ -1,18 +1,16 @@
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using SabreTools.IO;
using SabreTools.IO.Extensions;
using SabreTools.Models.NewExecutable;
using SabreTools.Serialization.Interfaces;
using static SabreTools.Models.NewExecutable.Constants;
namespace SabreTools.Serialization.Streams
namespace SabreTools.Serialization.Deserializers
{
public partial class NewExecutable : IStreamSerializer<Executable>
public class NewExecutable : BaseBinaryDeserializer<Executable>
{
/// <inheritdoc/>
public Executable? Deserialize(Stream? data)
public override Executable? Deserialize(Stream? data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
@@ -215,48 +213,13 @@ namespace SabreTools.Serialization.Streams
/// <returns>Filled executable header on success, null on error</returns>
public static ExecutableHeader? ParseExecutableHeader(Stream data)
{
// TODO: Use marshalling here instead of building
var header = new ExecutableHeader();
var header = data.ReadType<ExecutableHeader>();
byte[]? magic = data.ReadBytes(2);
if (magic == null)
if (header == null)
return null;
header.Magic = Encoding.ASCII.GetString(magic);
if (header.Magic != SignatureString)
return null;
header.LinkerVersion = data.ReadByteValue();
header.LinkerRevision = data.ReadByteValue();
header.EntryTableOffset = data.ReadUInt16();
header.EntryTableSize = data.ReadUInt16();
header.CrcChecksum = data.ReadUInt32();
header.FlagWord = (HeaderFlag)data.ReadUInt16();
header.AutomaticDataSegmentNumber = data.ReadUInt16();
header.InitialHeapAlloc = data.ReadUInt16();
header.InitialStackAlloc = data.ReadUInt16();
header.InitialCSIPSetting = data.ReadUInt32();
header.InitialSSSPSetting = data.ReadUInt32();
header.FileSegmentCount = data.ReadUInt16();
header.ModuleReferenceTableSize = data.ReadUInt16();
header.NonResidentNameTableSize = data.ReadUInt16();
header.SegmentTableOffset = data.ReadUInt16();
header.ResourceTableOffset = data.ReadUInt16();
header.ResidentNameTableOffset = data.ReadUInt16();
header.ModuleReferenceTableOffset = data.ReadUInt16();
header.ImportedNamesTableOffset = data.ReadUInt16();
header.NonResidentNamesTableOffset = data.ReadUInt32();
header.MovableEntriesCount = data.ReadUInt16();
header.SegmentAlignmentShiftCount = data.ReadUInt16();
header.ResourceEntriesCount = data.ReadUInt16();
header.TargetOperatingSystem = (OperatingSystem)data.ReadByteValue();
header.AdditionalFlags = (OS2Flag)data.ReadByteValue();
header.ReturnThunkOffset = data.ReadUInt16();
header.SegmentReferenceThunkOffset = data.ReadUInt16();
header.MinCodeSwapAreaSize = data.ReadUInt16();
header.WindowsSDKRevision = data.ReadByteValue();
header.WindowsSDKVersion = data.ReadByteValue();
return header;
}
@@ -266,31 +229,40 @@ namespace SabreTools.Serialization.Streams
/// <param name="data">Stream to parse</param>
/// <param name="count">Number of segment table entries to read</param>
/// <returns>Filled segment table on success, null on error</returns>
public static SegmentTableEntry[] ParseSegmentTable(Stream data, int count)
public static SegmentTableEntry[]? ParseSegmentTable(Stream data, int count)
{
// TODO: Use marshalling here instead of building
var segmentTable = new SegmentTableEntry[count];
for (int i = 0; i < count; i++)
{
var entry = new SegmentTableEntry();
entry.Offset = data.ReadUInt16();
entry.Length = data.ReadUInt16();
entry.FlagWord = (SegmentTableEntryFlag)data.ReadUInt16();
entry.MinimumAllocationSize = data.ReadUInt16();
var entry = ParseSegmentTableEntry(data);
if (entry == null)
return null;
segmentTable[i] = entry;
}
return segmentTable;
}
/// <summary>
/// Parse a Stream into a segment table entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled segment table entry on success, null on error</returns>
public static SegmentTableEntry? ParseSegmentTableEntry(Stream data)
{
return data.ReadType<SegmentTableEntry>();
}
/// <summary>
/// Parse a Stream into a resource table
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="count">Number of resource table entries to read</param>
/// <returns>Filled resource table on success, null on error</returns>
public static ResourceTable ParseResourceTable(Stream data, int count)
public static ResourceTable? ParseResourceTable(Stream data, int count)
{
long initialOffset = data.Position;
@@ -309,12 +281,10 @@ namespace SabreTools.Serialization.Streams
for (int j = 0; j < entry.ResourceCount; j++)
{
// TODO: Should we read and store the resource data?
var resource = new ResourceTypeResourceEntry();
resource.Offset = data.ReadUInt16();
resource.Length = data.ReadUInt16();
resource.FlagWord = (ResourceTypeResourceFlag)data.ReadUInt16();
resource.ResourceID = data.ReadUInt16();
resource.Reserved = data.ReadUInt32();
var resource = ParseResourceTypeResourceEntry(data);
if (resource == null)
return null;
entry.Resources[j] = resource;
}
resourceTable.ResourceTypes[i] = entry;
@@ -340,82 +310,157 @@ namespace SabreTools.Serialization.Streams
{
int stringOffset = (int)(stringOffsets[i] + initialOffset);
data.Seek(stringOffset, SeekOrigin.Begin);
var str = new ResourceTypeAndNameString();
str.Length = data.ReadByteValue();
str.Text = data.ReadBytes(str.Length);
var str = ParseResourceTypeAndNameString(data);
if (str == null)
return null;
resourceTable.TypeAndNameStrings[stringOffsets[i]] = str;
}
return resourceTable;
}
/// <summary>
/// Parse a Stream into a resource entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled resource entry on success, null on error</returns>
public static ResourceTypeResourceEntry? ParseResourceTypeResourceEntry(Stream data)
{
// TODO: Should we read and store the resource data?
return data.ReadType<ResourceTypeResourceEntry>();
}
/// <summary>
/// Parse a Stream into a resource type and name string
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled resource type and name string on success, null on error</returns>
public static ResourceTypeAndNameString? ParseResourceTypeAndNameString(Stream data)
{
// TODO: Use marshalling here instead of building
var str = new ResourceTypeAndNameString();
str.Length = data.ReadByteValue();
str.Text = data.ReadBytes(str.Length);
return str;
}
/// <summary>
/// Parse a Stream into a resident-name table
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="endOffset">First address not part of the resident-name table</param>
/// <returns>Filled resident-name table on success, null on error</returns>
public static ResidentNameTableEntry[] ParseResidentNameTable(Stream data, int endOffset)
public static ResidentNameTableEntry[]? ParseResidentNameTable(Stream data, int endOffset)
{
// TODO: Use marshalling here instead of building
var residentNameTable = new List<ResidentNameTableEntry>();
while (data.Position < endOffset)
{
var entry = new ResidentNameTableEntry();
entry.Length = data.ReadByteValue();
entry.NameString = data.ReadBytes(entry.Length);
entry.OrdinalNumber = data.ReadUInt16();
var entry = ParseResidentNameTableEntry(data);
if (entry == null)
return null;
residentNameTable.Add(entry);
}
return [.. residentNameTable];
}
/// <summary>
/// Parse a Stream into a resident-name table entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled resident-name table entry on success, null on error</returns>
public static ResidentNameTableEntry? ParseResidentNameTableEntry(Stream data)
{
// TODO: Use marshalling here instead of building
var entry = new ResidentNameTableEntry();
entry.Length = data.ReadByteValue();
entry.NameString = data.ReadBytes(entry.Length);
entry.OrdinalNumber = data.ReadUInt16();
return entry;
}
/// <summary>
/// Parse a Stream into a module-reference table
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="count">Number of module-reference table entries to read</param>
/// <returns>Filled module-reference table on success, null on error</returns>
public static ModuleReferenceTableEntry[] ParseModuleReferenceTable(Stream data, int count)
public static ModuleReferenceTableEntry[]? ParseModuleReferenceTable(Stream data, int count)
{
// TODO: Use marshalling here instead of building
var moduleReferenceTable = new ModuleReferenceTableEntry[count];
for (int i = 0; i < count; i++)
{
var entry = new ModuleReferenceTableEntry();
entry.Offset = data.ReadUInt16();
var entry = ParseModuleReferenceTableEntry(data);
if (entry == null)
return null;
moduleReferenceTable[i] = entry;
}
return moduleReferenceTable;
}
/// <summary>
/// Parse a Stream into a module-reference table entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled module-reference table entry on success, null on error</returns>
public static ModuleReferenceTableEntry? ParseModuleReferenceTableEntry(Stream data)
{
return data.ReadType<ModuleReferenceTableEntry>();
}
/// <summary>
/// Parse a Stream into an imported-name table
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="endOffset">First address not part of the imported-name table</param>
/// <returns>Filled imported-name table on success, null on error</returns>
public static Dictionary<ushort, ImportedNameTableEntry?> ParseImportedNameTable(Stream data, int endOffset)
public static Dictionary<ushort, ImportedNameTableEntry>? ParseImportedNameTable(Stream data, int endOffset)
{
// TODO: Use marshalling here instead of building
var importedNameTable = new Dictionary<ushort, ImportedNameTableEntry?>();
var importedNameTable = new Dictionary<ushort, ImportedNameTableEntry>();
while (data.Position < endOffset)
{
ushort currentOffset = (ushort)data.Position;
var entry = new ImportedNameTableEntry();
entry.Length = data.ReadByteValue();
entry.NameString = data.ReadBytes(entry.Length);
var entry = ParseImportedNameTableEntry(data);
if (entry == null)
return null;
importedNameTable[currentOffset] = entry;
}
return importedNameTable;
}
/// <summary>
/// Parse a Stream into an imported-name table entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled imported-name table entry on success, null on error</returns>
public static ImportedNameTableEntry? ParseImportedNameTableEntry(Stream data)
{
// TODO: Use marshalling here instead of building
var entry = new ImportedNameTableEntry();
entry.Length = data.ReadByteValue();
entry.NameString = data.ReadBytes(entry.Length);
return entry;
}
/// <summary>
/// Parse a Stream into an entry table
/// </summary>
@@ -461,21 +506,38 @@ namespace SabreTools.Serialization.Streams
/// <param name="data">Stream to parse</param>
/// <param name="endOffset">First address not part of the nonresident-name table</param>
/// <returns>Filled nonresident-name table on success, null on error</returns>
public static NonResidentNameTableEntry[] ParseNonResidentNameTable(Stream data, int endOffset)
public static NonResidentNameTableEntry[]? ParseNonResidentNameTable(Stream data, int endOffset)
{
// TODO: Use marshalling here instead of building
var residentNameTable = new List<NonResidentNameTableEntry>();
while (data.Position < endOffset)
{
var entry = new NonResidentNameTableEntry();
entry.Length = data.ReadByteValue();
entry.NameString = data.ReadBytes(entry.Length);
entry.OrdinalNumber = data.ReadUInt16();
var entry = ParseNonResidentNameTableEntry(data);
if (entry == null)
return null;
residentNameTable.Add(entry);
}
return [.. residentNameTable];
}
/// <summary>
/// Parse a Stream into a nonresident-name table entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled nonresident-name table entry on success, null on error</returns>
public static NonResidentNameTableEntry? ParseNonResidentNameTableEntry(Stream data)
{
// TODO: Use marshalling here instead of building
var entry = new NonResidentNameTableEntry();
entry.Length = data.ReadByteValue();
entry.NameString = data.ReadBytes(entry.Length);
entry.OrdinalNumber = data.ReadUInt16();
return entry;
}
}
}

View File

@@ -0,0 +1,242 @@
using System.Collections.Generic;
using System.IO;
using System.Text;
using SabreTools.IO.Extensions;
using SabreTools.Models.Nitro;
namespace SabreTools.Serialization.Deserializers
{
public class Nitro : BaseBinaryDeserializer<Cart>
{
/// <inheritdoc/>
public override Cart? Deserialize(Stream? data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
return null;
// If the offset is out of bounds
if (data.Position < 0 || data.Position >= data.Length)
return null;
// Cache the current offset
int initialOffset = (int)data.Position;
// Create a new cart image to fill
var cart = new Cart();
#region Header
// Try to parse the header
var header = ParseCommonHeader(data);
if (header == null)
return null;
// Set the cart image header
cart.CommonHeader = header;
#endregion
#region Extended DSi Header
// If we have a DSi-compatible cartridge
if (header.UnitCode == Unitcode.NDSPlusDSi || header.UnitCode == Unitcode.DSi)
{
var extendedDSiHeader = ParseExtendedDSiHeader(data);
if (extendedDSiHeader == null)
return null;
cart.ExtendedDSiHeader = extendedDSiHeader;
}
#endregion
#region Secure Area
// Try to get the secure area offset
long secureAreaOffset = 0x4000;
if (secureAreaOffset > data.Length)
return null;
// Seek to the secure area
data.Seek(secureAreaOffset, SeekOrigin.Begin);
// Read the secure area without processing
cart.SecureArea = data.ReadBytes(0x800);
#endregion
#region Name Table
// Try to get the name table offset
long nameTableOffset = header.FileNameTableOffset;
if (nameTableOffset < 0 || nameTableOffset > data.Length)
return null;
// Seek to the name table
data.Seek(nameTableOffset, SeekOrigin.Begin);
// Try to parse the name table
var nameTable = ParseNameTable(data);
if (nameTable == null)
return null;
// Set the name table
cart.NameTable = nameTable;
#endregion
#region File Allocation Table
// Try to get the file allocation table offset
long fileAllocationTableOffset = header.FileAllocationTableOffset;
if (fileAllocationTableOffset < 0 || fileAllocationTableOffset > data.Length)
return null;
// Seek to the file allocation table
data.Seek(fileAllocationTableOffset, SeekOrigin.Begin);
// Create the file allocation table
var fileAllocationTable = new List<FileAllocationTableEntry>();
// Try to parse the file allocation table
while (data.Position - fileAllocationTableOffset < header.FileAllocationTableLength)
{
var entry = ParseFileAllocationTableEntry(data);
if (entry == null)
return null;
fileAllocationTable.Add(entry);
}
// Set the file allocation table
cart.FileAllocationTable = fileAllocationTable.ToArray();
#endregion
// TODO: Read and optionally parse out the other areas
// Look for offsets and lengths in the header pieces
return cart;
}
/// <summary>
/// Parse a Stream into a common header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled common header on success, null on error</returns>
private static CommonHeader? ParseCommonHeader(Stream data)
{
return data.ReadType<CommonHeader>();
}
/// <summary>
/// Parse a Stream into an extended DSi header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled extended DSi header on success, null on error</returns>
private static ExtendedDSiHeader? ParseExtendedDSiHeader(Stream data)
{
return data.ReadType<ExtendedDSiHeader>();
}
/// <summary>
/// Parse a Stream into a name table
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled name table on success, null on error</returns>
private static NameTable? ParseNameTable(Stream data)
{
// TODO: Use marshalling here instead of building
var nameTable = new NameTable();
// Create a variable-length table
var folderAllocationTable = new List<FolderAllocationTableEntry>();
int entryCount = int.MaxValue;
while (entryCount > 0)
{
var entry = ParseFolderAllocationTableEntry(data);
if (entry == null)
return null;
folderAllocationTable.Add(entry);
// If we have the root entry
if (entryCount == int.MaxValue)
entryCount = (entry.Unknown << 8) | entry.ParentFolderIndex;
// Decrement the entry count
entryCount--;
}
// Assign the folder allocation table
nameTable.FolderAllocationTable = folderAllocationTable.ToArray();
// Create a variable-length table
var nameList = new List<NameListEntry>();
while (true)
{
var entry = ParseNameListEntry(data);
if (entry == null)
break;
nameList.Add(entry);
}
// Assign the name list
nameTable.NameList = nameList.ToArray();
return nameTable;
}
/// <summary>
/// Parse a Stream into a folder allocation table entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled folder allocation table entry on success, null on error</returns>
private static FolderAllocationTableEntry? ParseFolderAllocationTableEntry(Stream data)
{
return data.ReadType<FolderAllocationTableEntry>();
}
/// <summary>
/// Parse a Stream into a name list entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled name list entry on success, null on error</returns>
private static NameListEntry? ParseNameListEntry(Stream data)
{
// TODO: Use marshalling here instead of building
var entry = new NameListEntry();
byte flagAndSize = data.ReadByteValue();
if (flagAndSize == 0xFF)
return null;
entry.Folder = (flagAndSize & 0x80) != 0;
byte size = (byte)(flagAndSize & ~0x80);
if (size > 0)
{
byte[]? name = data.ReadBytes(size);
if (name != null)
entry.Name = Encoding.UTF8.GetString(name);
}
if (entry.Folder)
entry.Index = data.ReadUInt16();
return entry;
}
/// <summary>
/// Parse a Stream into a file allocation table entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled file allocation table entry on success, null on error</returns>
private static FileAllocationTableEntry? ParseFileAllocationTableEntry(Stream data)
{
return data.ReadType<FileAllocationTableEntry>();
}
}
}

View File

@@ -0,0 +1,7 @@
namespace SabreTools.Serialization.Deserializers
{
/// <summary>
/// Deserializer for OfflineList DAT files; all parsing is handled by the XmlFile base class.
/// </summary>
public class OfflineList : XmlFile<Models.OfflineList.Dat>
{
// All logic taken care of in the base class
}
}

View File

@@ -0,0 +1,7 @@
namespace SabreTools.Serialization.Deserializers
{
/// <summary>
/// Deserializer for openMSX software database files; all parsing is handled by the XmlFile base class.
/// </summary>
public class OpenMSX : XmlFile<Models.OpenMSX.SoftwareDb>
{
// All logic taken care of in the base class
}
}

View File

@@ -1,16 +1,14 @@
using System.IO;
using System.Text;
using SabreTools.IO;
using SabreTools.IO.Extensions;
using SabreTools.Models.PAK;
using SabreTools.Serialization.Interfaces;
using static SabreTools.Models.PAK.Constants;
namespace SabreTools.Serialization.Streams
namespace SabreTools.Serialization.Deserializers
{
public partial class PAK : IStreamSerializer<Models.PAK.File>
public class PAK : BaseBinaryDeserializer<Models.PAK.File>
{
/// <inheritdoc/>
public Models.PAK.File? Deserialize(Stream? data)
public override Models.PAK.File? Deserialize(Stream? data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
@@ -55,6 +53,9 @@ namespace SabreTools.Serialization.Streams
for (int i = 0; i < file.DirectoryItems.Length; i++)
{
var directoryItem = ParseDirectoryItem(data);
if (directoryItem == null)
return null;
file.DirectoryItems[i] = directoryItem;
}
@@ -70,20 +71,13 @@ namespace SabreTools.Serialization.Streams
/// <returns>Filled Half-Life Package header on success, null on error</returns>
private static Header? ParseHeader(Stream data)
{
// TODO: Use marshalling here instead of building
Header header = new Header();
var header = data.ReadType<Header>();
byte[]? signature = data.ReadBytes(4);
if (signature == null)
if (header == null)
return null;
header.Signature = Encoding.ASCII.GetString(signature);
if (header.Signature != SignatureString)
return null;
header.DirectoryOffset = data.ReadUInt32();
header.DirectoryLength = data.ReadUInt32();
return header;
}
@@ -92,18 +86,9 @@ namespace SabreTools.Serialization.Streams
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Package directory item on success, null on error</returns>
private static DirectoryItem ParseDirectoryItem(Stream data)
private static DirectoryItem? ParseDirectoryItem(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryItem directoryItem = new DirectoryItem();
byte[]? itemName = data.ReadBytes(56);
if (itemName != null)
directoryItem.ItemName = Encoding.ASCII.GetString(itemName).TrimEnd('\0');
directoryItem.ItemOffset = data.ReadUInt32();
directoryItem.ItemLength = data.ReadUInt32();
return directoryItem;
return data.ReadType<DirectoryItem>();
}
}
}

View File

@@ -1,16 +1,16 @@
using System.IO;
using System.Runtime.InteropServices;
using System.Text;
using SabreTools.IO;
using SabreTools.IO.Extensions;
using SabreTools.Models.PFF;
using SabreTools.Serialization.Interfaces;
using static SabreTools.Models.PFF.Constants;
namespace SabreTools.Serialization.Streams
namespace SabreTools.Serialization.Deserializers
{
public partial class PFF : IStreamSerializer<Archive>
public class PFF : BaseBinaryDeserializer<Archive>
{
/// <inheritdoc/>
public Archive? Deserialize(Stream? data)
public override Archive? Deserialize(Stream? data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
@@ -93,47 +93,27 @@ namespace SabreTools.Serialization.Streams
/// <returns>Filled header on success, null on error</returns>
private static Header? ParseHeader(Stream data)
{
// TODO: Use marshalling here instead of building
Header header = new Header();
header.HeaderSize = data.ReadUInt32();
byte[]? signature = data.ReadBytes(4);
if (signature == null)
var header = data.ReadType<Header>();
if (header == null)
return null;
header.Signature = Encoding.ASCII.GetString(signature);
header.NumberOfFiles = data.ReadUInt32();
header.FileSegmentSize = data.ReadUInt32();
switch (header.Signature)
return header.Signature switch
{
case Version0SignatureString:
if (header.FileSegmentSize != Version0HSegmentSize)
return null;
break;
Version0SignatureString when header.FileSegmentSize != Version0HSegmentSize => null,
Version0SignatureString => header,
case Version2SignatureString:
if (header.FileSegmentSize != Version2SegmentSize)
return null;
break;
Version2SignatureString when header.FileSegmentSize != Version2SegmentSize => null,
Version2SignatureString => header,
// Version 3 can sometimes have Version 2 segment sizes
case Version3SignatureString:
if (header.FileSegmentSize != Version2SegmentSize && header.FileSegmentSize != Version3SegmentSize)
return null;
break;
Version3SignatureString when header.FileSegmentSize != Version2SegmentSize
&& header.FileSegmentSize != Version3SegmentSize => null,
Version3SignatureString => header,
case Version4SignatureString:
if (header.FileSegmentSize != Version4SegmentSize)
return null;
break;
Version4SignatureString when header.FileSegmentSize != Version4SegmentSize => null,
Version4SignatureString => header,
default:
return null;
}
header.FileListOffset = data.ReadUInt32();
return header;
_ => null,
};
}
/// <summary>
@@ -141,18 +121,9 @@ namespace SabreTools.Serialization.Streams
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled footer on success, null on error</returns>
private static Footer ParseFooter(Stream data)
private static Footer? ParseFooter(Stream data)
{
// TODO: Use marshalling here instead of building
Footer footer = new Footer();
footer.SystemIP = data.ReadUInt32();
footer.Reserved = data.ReadUInt32();
byte[]? kingTag = data.ReadBytes(4);
if (kingTag != null)
footer.KingTag = Encoding.ASCII.GetString(kingTag);
return footer;
return data.ReadType<Footer>();
}
/// <summary>
@@ -164,7 +135,7 @@ namespace SabreTools.Serialization.Streams
private static Segment ParseSegment(Stream data, uint segmentSize)
{
// TODO: Use marshalling here instead of building
Segment segment = new Segment();
var segment = new Segment();
segment.Deleted = data.ReadUInt32();
segment.FileLocation = data.ReadUInt32();

View File

@@ -1,17 +1,18 @@
using System.Collections.Generic;
using System.IO;
using System.Text;
using SabreTools.IO;
using SabreTools.IO.Extensions;
using SabreTools.Models.PIC;
using SabreTools.Serialization.Interfaces;
using static SabreTools.Models.PIC.Constants;
namespace SabreTools.Serialization.Streams
namespace SabreTools.Serialization.Deserializers
{
public partial class PIC : IStreamSerializer<DiscInformation>
public class PIC : BaseBinaryDeserializer<DiscInformation>
{
#region IStreamDeserializer
/// <inheritdoc/>
public DiscInformation? Deserialize(Stream? data)
public override DiscInformation? Deserialize(Stream? data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
@@ -105,25 +106,12 @@ namespace SabreTools.Serialization.Streams
/// <returns>Filled disc information unit header on success, null on error</returns>
private static DiscInformationUnitHeader? ParseDiscInformationUnitHeader(Stream data)
{
// TODO: Use marshalling here instead of building
var header = new DiscInformationUnitHeader();
var header = data.ReadType<DiscInformationUnitHeader>();
// We only accept Disc Information units, not Emergency Brake or other
byte[]? dic = data.ReadBytes(2);
if (dic == null)
if (header?.DiscInformationIdentifier != "DI")
return null;
header.DiscInformationIdentifier = Encoding.ASCII.GetString(dic);
if (header.DiscInformationIdentifier != "DI")
return null;
header.DiscInformationFormat = data.ReadByteValue();
header.NumberOfUnitsInBlock = data.ReadByteValue();
header.Reserved0 = data.ReadByteValue();
header.SequenceNumber = data.ReadByteValue();
header.BytesInUse = data.ReadByteValue();
header.Reserved1 = data.ReadByteValue();
return header;
}
@@ -164,17 +152,11 @@ namespace SabreTools.Serialization.Streams
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled disc information unit trailer on success, null on error</returns>
private static DiscInformationUnitTrailer ParseDiscInformationUnitTrailer(Stream data)
private static DiscInformationUnitTrailer? ParseDiscInformationUnitTrailer(Stream data)
{
// TODO: Use marshalling here instead of building
var trailer = new DiscInformationUnitTrailer();
trailer.DiscManufacturerID = data.ReadBytes(6);
trailer.MediaTypeID = data.ReadBytes(3);
trailer.TimeStamp = data.ReadUInt16();
trailer.ProductRevisionNumber = data.ReadByteValue();
return trailer;
return data.ReadType<DiscInformationUnitTrailer>();
}
#endregion
}
}

View File

@@ -1,16 +1,23 @@
using System.IO;
using System.Text;
using SabreTools.IO;
using SabreTools.IO.Extensions;
using SabreTools.Models.PlayJ;
using SabreTools.Serialization.Interfaces;
using static SabreTools.Models.PlayJ.Constants;
namespace SabreTools.Serialization.Streams
namespace SabreTools.Serialization.Deserializers
{
public partial class PlayJAudio : IStreamSerializer<AudioFile>
public class PlayJAudio : BaseBinaryDeserializer<AudioFile>
{
/// <inheritdoc cref="IStreamDeserializer.Deserialize(Stream?)"/>
public static AudioFile? DeserializeStream(Stream? data, long adjust = 0)
{
var deserializer = new PlayJAudio();
return deserializer.Deserialize(data, adjust);
}
/// <inheritdoc/>
public AudioFile? Deserialize(Stream? data) => Deserialize(data, 0);
public override AudioFile? Deserialize(Stream? data)
=> Deserialize(data, 0);
/// <inheritdoc cref="Deserialize(Stream)"/>
/// <param name="adjust">Offset to adjust all seeking by</param>

View File

@@ -1,14 +1,13 @@
using System.IO;
using SabreTools.IO;
using SabreTools.IO.Extensions;
using SabreTools.Models.PlayJ;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Streams
namespace SabreTools.Serialization.Deserializers
{
public partial class PlayJPlaylist : IStreamSerializer<Playlist>
public class PlayJPlaylist : BaseBinaryDeserializer<Playlist>
{
/// <inheritdoc/>
public Playlist? Deserialize(Stream? data)
public override Playlist? Deserialize(Stream? data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
@@ -45,7 +44,7 @@ namespace SabreTools.Serialization.Streams
for (int i = 0; i < playlist.AudioFiles.Length; i++)
{
long currentOffset = data.Position;
var entryHeader = new PlayJAudio().Deserialize(data, currentOffset);
var entryHeader = PlayJAudio.DeserializeStream(data, currentOffset);
if (entryHeader == null)
return null;

View File

@@ -3,17 +3,16 @@ using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using SabreTools.IO;
using SabreTools.IO.Extensions;
using SabreTools.Models.PortableExecutable;
using SabreTools.Serialization.Interfaces;
using static SabreTools.Models.PortableExecutable.Constants;
namespace SabreTools.Serialization.Streams
namespace SabreTools.Serialization.Deserializers
{
public partial class PortableExecutable : IStreamSerializer<Executable>
public class PortableExecutable : BaseBinaryDeserializer<Executable>
{
/// <inheritdoc/>
public Executable? Deserialize(Stream? data)
public override Executable? Deserialize(Stream? data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
@@ -204,7 +203,7 @@ namespace SabreTools.Serialization.Streams
// Try to parse the debug table
data.Seek(debugTableAddress, SeekOrigin.Begin);
int endOffset = (int)(debugTableAddress + optionalHeader.Debug.Size);
var debugTable = ParseDebugTable(data, endOffset, executable.SectionTable);
var debugTable = ParseDebugTable(data, endOffset);
if (debugTable == null)
return null;
@@ -292,20 +291,9 @@ namespace SabreTools.Serialization.Streams
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled executable header on success, null on error</returns>
public static COFFFileHeader ParseCOFFFileHeader(Stream data)
public static COFFFileHeader? ParseCOFFFileHeader(Stream data)
{
// TODO: Use marshalling here instead of building
var fileHeader = new COFFFileHeader();
fileHeader.Machine = (MachineType)data.ReadUInt16();
fileHeader.NumberOfSections = data.ReadUInt16();
fileHeader.TimeDateStamp = data.ReadUInt32();
fileHeader.PointerToSymbolTable = data.ReadUInt32();
fileHeader.NumberOfSymbols = data.ReadUInt32();
fileHeader.SizeOfOptionalHeader = data.ReadUInt16();
fileHeader.Characteristics = (Characteristics)data.ReadUInt16();
return fileHeader;
return data.ReadType<COFFFileHeader>();
}
/// <summary>
@@ -381,97 +369,52 @@ namespace SabreTools.Serialization.Streams
#region Data Directories
if (optionalHeader.NumberOfRvaAndSizes >= 1 && data.Position - initialOffset < optionalSize)
{
optionalHeader.ExportTable = new DataDirectory();
optionalHeader.ExportTable.VirtualAddress = data.ReadUInt32();
optionalHeader.ExportTable.Size = data.ReadUInt32();
}
optionalHeader.ExportTable = data.ReadType<DataDirectory>();
if (optionalHeader.NumberOfRvaAndSizes >= 2 && data.Position - initialOffset < optionalSize)
{
optionalHeader.ImportTable = new DataDirectory();
optionalHeader.ImportTable.VirtualAddress = data.ReadUInt32();
optionalHeader.ImportTable.Size = data.ReadUInt32();
}
optionalHeader.ImportTable = data.ReadType<DataDirectory>();
if (optionalHeader.NumberOfRvaAndSizes >= 3 && data.Position - initialOffset < optionalSize)
{
optionalHeader.ResourceTable = new DataDirectory();
optionalHeader.ResourceTable.VirtualAddress = data.ReadUInt32();
optionalHeader.ResourceTable.Size = data.ReadUInt32();
}
optionalHeader.ResourceTable = data.ReadType<DataDirectory>();
if (optionalHeader.NumberOfRvaAndSizes >= 4 && data.Position - initialOffset < optionalSize)
{
optionalHeader.ExceptionTable = new DataDirectory();
optionalHeader.ExceptionTable.VirtualAddress = data.ReadUInt32();
optionalHeader.ExceptionTable.Size = data.ReadUInt32();
}
optionalHeader.ExceptionTable = data.ReadType<DataDirectory>();
if (optionalHeader.NumberOfRvaAndSizes >= 5 && data.Position - initialOffset < optionalSize)
{
optionalHeader.CertificateTable = new DataDirectory();
optionalHeader.CertificateTable.VirtualAddress = data.ReadUInt32();
optionalHeader.CertificateTable.Size = data.ReadUInt32();
}
optionalHeader.CertificateTable = data.ReadType<DataDirectory>();
if (optionalHeader.NumberOfRvaAndSizes >= 6 && data.Position - initialOffset < optionalSize)
{
optionalHeader.BaseRelocationTable = new DataDirectory();
optionalHeader.BaseRelocationTable.VirtualAddress = data.ReadUInt32();
optionalHeader.BaseRelocationTable.Size = data.ReadUInt32();
}
optionalHeader.BaseRelocationTable = data.ReadType<DataDirectory>();
if (optionalHeader.NumberOfRvaAndSizes >= 7 && data.Position - initialOffset < optionalSize)
{
optionalHeader.Debug = new DataDirectory();
optionalHeader.Debug.VirtualAddress = data.ReadUInt32();
optionalHeader.Debug.Size = data.ReadUInt32();
}
optionalHeader.Debug = data.ReadType<DataDirectory>();
if (optionalHeader.NumberOfRvaAndSizes >= 8 && data.Position - initialOffset < optionalSize)
{
optionalHeader.Architecture = data.ReadUInt64();
}
if (optionalHeader.NumberOfRvaAndSizes >= 9 && data.Position - initialOffset < optionalSize)
{
optionalHeader.GlobalPtr = new DataDirectory();
optionalHeader.GlobalPtr.VirtualAddress = data.ReadUInt32();
optionalHeader.GlobalPtr.Size = data.ReadUInt32();
}
optionalHeader.GlobalPtr = data.ReadType<DataDirectory>();
if (optionalHeader.NumberOfRvaAndSizes >= 10 && data.Position - initialOffset < optionalSize)
{
optionalHeader.ThreadLocalStorageTable = new DataDirectory();
optionalHeader.ThreadLocalStorageTable.VirtualAddress = data.ReadUInt32();
optionalHeader.ThreadLocalStorageTable.Size = data.ReadUInt32();
}
optionalHeader.ThreadLocalStorageTable = data.ReadType<DataDirectory>();
if (optionalHeader.NumberOfRvaAndSizes >= 11 && data.Position - initialOffset < optionalSize)
{
optionalHeader.LoadConfigTable = new DataDirectory();
optionalHeader.LoadConfigTable.VirtualAddress = data.ReadUInt32();
optionalHeader.LoadConfigTable.Size = data.ReadUInt32();
}
optionalHeader.LoadConfigTable = data.ReadType<DataDirectory>();
if (optionalHeader.NumberOfRvaAndSizes >= 12 && data.Position - initialOffset < optionalSize)
{
optionalHeader.BoundImport = new DataDirectory();
optionalHeader.BoundImport.VirtualAddress = data.ReadUInt32();
optionalHeader.BoundImport.Size = data.ReadUInt32();
}
optionalHeader.BoundImport = data.ReadType<DataDirectory>();
if (optionalHeader.NumberOfRvaAndSizes >= 13 && data.Position - initialOffset < optionalSize)
{
optionalHeader.ImportAddressTable = new DataDirectory();
optionalHeader.ImportAddressTable.VirtualAddress = data.ReadUInt32();
optionalHeader.ImportAddressTable.Size = data.ReadUInt32();
}
optionalHeader.ImportAddressTable = data.ReadType<DataDirectory>();
if (optionalHeader.NumberOfRvaAndSizes >= 14 && data.Position - initialOffset < optionalSize)
{
optionalHeader.DelayImportDescriptor = new DataDirectory();
optionalHeader.DelayImportDescriptor.VirtualAddress = data.ReadUInt32();
optionalHeader.DelayImportDescriptor.Size = data.ReadUInt32();
}
optionalHeader.DelayImportDescriptor = data.ReadType<DataDirectory>();
if (optionalHeader.NumberOfRvaAndSizes >= 15 && data.Position - initialOffset < optionalSize)
{
optionalHeader.CLRRuntimeHeader = new DataDirectory();
optionalHeader.CLRRuntimeHeader.VirtualAddress = data.ReadUInt32();
optionalHeader.CLRRuntimeHeader.Size = data.ReadUInt32();
}
optionalHeader.CLRRuntimeHeader = data.ReadType<DataDirectory>();
if (optionalHeader.NumberOfRvaAndSizes >= 16 && data.Position - initialOffset < optionalSize)
{
optionalHeader.Reserved = data.ReadUInt64();
}
#endregion
@@ -696,12 +639,12 @@ namespace SabreTools.Serialization.Streams
while (totalSize > 0 && data.Position < data.Length)
{
long initialPosition = data.Position;
string? str = data.ReadString();
string? str = data.ReadNullTerminatedAnsiString();
strings.Add(str ?? string.Empty);
totalSize -= (uint)(data.Position - initialPosition);
}
coffStringTable.Strings = strings.ToArray();
coffStringTable.Strings = [.. strings];
return coffStringTable;
}
@@ -743,21 +686,9 @@ namespace SabreTools.Serialization.Streams
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled delay-load directory table on success, null on error</returns>
public static DelayLoadDirectoryTable ParseDelayLoadDirectoryTable(Stream data)
public static DelayLoadDirectoryTable? ParseDelayLoadDirectoryTable(Stream data)
{
// TODO: Use marshalling here instead of building
var delayLoadDirectoryTable = new DelayLoadDirectoryTable();
delayLoadDirectoryTable.Attributes = data.ReadUInt32();
delayLoadDirectoryTable.Name = data.ReadUInt32();
delayLoadDirectoryTable.ModuleHandle = data.ReadUInt32();
delayLoadDirectoryTable.DelayImportAddressTable = data.ReadUInt32();
delayLoadDirectoryTable.DelayImportNameTable = data.ReadUInt32();
delayLoadDirectoryTable.BoundDelayImportTable = data.ReadUInt32();
delayLoadDirectoryTable.UnloadDelayImportTable = data.ReadUInt32();
delayLoadDirectoryTable.TimeStamp = data.ReadUInt32();
return delayLoadDirectoryTable;
return data.ReadType<DelayLoadDirectoryTable>();
}
/// <summary>
@@ -806,9 +737,8 @@ namespace SabreTools.Serialization.Streams
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="endOffset">First address not part of the debug table</param>
/// <param name="sections">Section table to use for virtual address translation</param>
/// <returns>Filled debug table on success, null on error</returns>
public static DebugTable ParseDebugTable(Stream data, int endOffset, SectionHeader?[] sections)
public static DebugTable? ParseDebugTable(Stream data, int endOffset)
{
// TODO: Use marshalling here instead of building
var debugTable = new DebugTable();
@@ -817,21 +747,14 @@ namespace SabreTools.Serialization.Streams
while (data.Position < endOffset)
{
var debugDirectoryEntry = new DebugDirectoryEntry();
debugDirectoryEntry.Characteristics = data.ReadUInt32();
debugDirectoryEntry.TimeDateStamp = data.ReadUInt32();
debugDirectoryEntry.MajorVersion = data.ReadUInt16();
debugDirectoryEntry.MinorVersion = data.ReadUInt16();
debugDirectoryEntry.DebugType = (DebugType)data.ReadUInt32();
debugDirectoryEntry.SizeOfData = data.ReadUInt32();
debugDirectoryEntry.AddressOfRawData = data.ReadUInt32();
debugDirectoryEntry.PointerToRawData = data.ReadUInt32();
var debugDirectoryEntry = data.ReadType<DebugDirectoryEntry>();
if (debugDirectoryEntry == null)
return null;
debugDirectoryTable.Add(debugDirectoryEntry);
}
debugTable.DebugDirectoryTable = debugDirectoryTable.ToArray();
debugTable.DebugDirectoryTable = [.. debugDirectoryTable];
// TODO: Should we read the debug data in? Most of it is unformatted or undocumented
// TODO: Implement .debug$F (Object Only) / IMAGE_DEBUG_TYPE_FPO
@@ -845,7 +768,7 @@ namespace SabreTools.Serialization.Streams
/// <param name="data">Stream to parse</param>
/// <param name="sections">Section table to use for virtual address translation</param>
/// <returns>Filled export table on success, null on error</returns>
public static ExportTable ParseExportTable(Stream data, SectionHeader?[] sections)
public static ExportTable? ParseExportTable(Stream data, SectionHeader?[] sections)
{
// TODO: Use marshalling here instead of building
var exportTable = new ExportTable();
@@ -872,7 +795,7 @@ namespace SabreTools.Serialization.Streams
uint nameAddress = exportDirectoryTable.NameRVA.ConvertVirtualAddress(sections);
data.Seek(nameAddress, SeekOrigin.Begin);
string? name = data.ReadString(Encoding.ASCII);
string? name = data.ReadNullTerminatedAnsiString();
exportDirectoryTable.Name = name;
}
@@ -886,11 +809,9 @@ namespace SabreTools.Serialization.Streams
for (int i = 0; i < exportDirectoryTable.AddressTableEntries; i++)
{
var addressTableEntry = new ExportAddressTableEntry();
// TODO: Use the optional header address and length to determine if export or forwarder
addressTableEntry.ExportRVA = data.ReadUInt32();
addressTableEntry.ForwarderRVA = addressTableEntry.ExportRVA;
var addressTableEntry = data.ReadType<ExportAddressTableEntry>();
if (addressTableEntry == null)
return null;
exportAddressTable[i] = addressTableEntry;
}
@@ -945,7 +866,7 @@ namespace SabreTools.Serialization.Streams
exportNameTable.Strings = new string[exportDirectoryTable.NumberOfNamePointers];
for (int i = 0; i < exportDirectoryTable.NumberOfNamePointers; i++)
{
string? str = data.ReadString(Encoding.ASCII);
string? str = data.ReadNullTerminatedAnsiString();
exportNameTable.Strings[i] = str ?? string.Empty;
}
@@ -962,7 +883,7 @@ namespace SabreTools.Serialization.Streams
/// <param name="magic">Optional header magic number indicating PE32 or PE32+</param>
/// <param name="sections">Section table to use for virtual address translation</param>
/// <returns>Filled import table on success, null on error</returns>
public static ImportTable ParseImportTable(Stream data, OptionalHeaderMagicNumber magic, SectionHeader?[] sections)
public static ImportTable? ParseImportTable(Stream data, OptionalHeaderMagicNumber magic, SectionHeader?[] sections)
{
// TODO: Use marshalling here instead of building
var importTable = new ImportTable();
@@ -1007,7 +928,7 @@ namespace SabreTools.Serialization.Streams
uint nameAddress = importDirectoryTableEntry.NameRVA.ConvertVirtualAddress(sections);
data.Seek(nameAddress, SeekOrigin.Begin);
string? name = data.ReadString(Encoding.ASCII);
string? name = data.ReadNullTerminatedAnsiString();
importDirectoryTableEntry.Name = name;
}
@@ -1164,21 +1085,30 @@ namespace SabreTools.Serialization.Streams
int hintNameTableEntryAddress = hintNameTableEntryAddresses[i];
data.Seek(hintNameTableEntryAddress, SeekOrigin.Begin);
var hintNameTableEntry = new HintNameTableEntry();
hintNameTableEntry.Hint = data.ReadUInt16();
hintNameTableEntry.Name = data.ReadString(Encoding.ASCII);
var hintNameTableEntry = ParseHintNameTableEntry(data);
if (hintNameTableEntry == null)
return null;
importHintNameTable.Add(hintNameTableEntry);
}
}
}
importTable.HintNameTable = importHintNameTable.ToArray();
importTable.HintNameTable = [.. importHintNameTable];
return importTable;
}
/// <summary>
/// Parse a Stream into a hint name table entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled hint name table entry on success, null on error</returns>
public static HintNameTableEntry? ParseHintNameTableEntry(Stream data)
{
return data.ReadType<HintNameTableEntry>();
}
/// <summary>
/// Parse a Stream into a resource directory table
/// </summary>

View File

@@ -1,16 +1,15 @@
using System.IO;
using System.Text;
using SabreTools.IO;
using SabreTools.IO.Extensions;
using SabreTools.Models.Quantum;
using SabreTools.Serialization.Interfaces;
using static SabreTools.Models.Quantum.Constants;
namespace SabreTools.Serialization.Streams
namespace SabreTools.Serialization.Deserializers
{
public partial class Quantum : IStreamSerializer<Archive>
public class Quantum : BaseBinaryDeserializer<Archive>
{
/// <inheritdoc/>
public Archive? Deserialize(Stream? data)
public override Archive? Deserialize(Stream? data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
@@ -74,23 +73,13 @@ namespace SabreTools.Serialization.Streams
/// <returns>Filled header on success, null on error</returns>
private static Header? ParseHeader(Stream data)
{
// TODO: Use marshalling here instead of building
Header header = new Header();
var header = data.ReadType<Header>();
byte[]? signature = data.ReadBytes(2);
if (signature == null)
if (header == null)
return null;
header.Signature = Encoding.ASCII.GetString(signature);
if (header.Signature != SignatureString)
return null;
header.MajorVersion = data.ReadByteValue();
header.MinorVersion = data.ReadByteValue();
header.FileCount = data.ReadUInt16();
header.TableSize = data.ReadByteValue();
header.CompressionFlags = data.ReadByteValue();
return header;
}
@@ -103,7 +92,7 @@ namespace SabreTools.Serialization.Streams
private static FileDescriptor ParseFileDescriptor(Stream data, byte minorVersion)
{
// TODO: Use marshalling here instead of building
FileDescriptor fileDescriptor = new FileDescriptor();
var fileDescriptor = new FileDescriptor();
fileDescriptor.FileNameSize = ReadVariableLength(data);
if (fileDescriptor.FileNameSize > 0)

View File

@@ -4,14 +4,13 @@ using System.Linq;
using System.Text;
using SabreTools.IO.Readers;
using SabreTools.Models.RomCenter;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Streams
namespace SabreTools.Serialization.Deserializers
{
public partial class RomCenter : IStreamSerializer<MetadataFile>
public class RomCenter : BaseBinaryDeserializer<MetadataFile>
{
/// <inheritdoc/>
public MetadataFile? Deserialize(Stream? data)
public override MetadataFile? Deserialize(Stream? data)
{
// If the stream is null
if (data == null)

View File

@@ -0,0 +1,35 @@
using System.IO;
using System.Text;
using SabreTools.IO.Extensions;
namespace SabreTools.Serialization.Deserializers
{
    /// <summary>
    /// Deserializer for PlayStation 3 SFB files
    /// </summary>
    public class SFB : BaseBinaryDeserializer<Models.PlayStation3.SFB>
    {
        /// <inheritdoc/>
        public override Models.PlayStation3.SFB? Deserialize(Stream? data)
        {
            // If the data is invalid
            if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
                return null;

            // If the offset is out of bounds
            if (data.Position < 0 || data.Position >= data.Length)
                return null;

            // Deserialize the SFB directly from the stream
            var sfb = data.ReadType<Models.PlayStation3.SFB>();

            // Guard against a failed read or a missing magic field
            // (avoids the NRE that the previous null-forgiving access risked)
            if (sfb?.Magic == null)
                return null;

            // Validate the magic number before returning
            string magic = Encoding.ASCII.GetString(sfb.Magic);
            if (magic != ".SFB")
                return null;

            return sfb;
        }
    }
}

View File

@@ -0,0 +1,91 @@
using System.IO;
using System.Text;
using SabreTools.IO.Extensions;
namespace SabreTools.Serialization.Deserializers
{
    /// <summary>
    /// Deserializer for PlayStation 3 SFO files
    /// </summary>
    public class SFO : BaseBinaryDeserializer<Models.PlayStation3.SFO>
    {
        /// <inheritdoc/>
        public override Models.PlayStation3.SFO? Deserialize(Stream? data)
        {
            // If the data is invalid
            if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
                return null;

            // If the offset is out of bounds
            if (data.Position < 0 || data.Position >= data.Length)
                return null;

            // Create a new SFO to fill
            var sfo = new Models.PlayStation3.SFO();

            #region Header

            // Try to parse the header
            var header = ParseHeader(data);
            if (header == null)
                return null;

            // Assign the header
            sfo.Header = header;

            #endregion

            #region Index Table

            // TODO: Determine how many entries are in the index table

            #endregion

            #region Key Table

            // TODO: Finish implementation

            #endregion

            // Padding
            // TODO: Finish implementation

            #region Data Table

            // TODO: Finish implementation

            #endregion

            return sfo;
        }

        /// <summary>
        /// Parse a Stream into an SFO header
        /// </summary>
        /// <param name="data">Stream to parse</param>
        /// <returns>Filled SFO header on success, null on error</returns>
        public Models.PlayStation3.SFOHeader? ParseHeader(Stream data)
        {
            var sfoHeader = data.ReadType<Models.PlayStation3.SFOHeader>();

            // Guard against a failed read or a missing magic field
            // (avoids the NRE that the previous null-forgiving access risked)
            if (sfoHeader?.Magic == null)
                return null;

            // Validate the magic number before returning
            string magic = Encoding.ASCII.GetString(sfoHeader.Magic);
            if (magic != "\0PSF")
                return null;

            return sfoHeader;
        }

        /// <summary>
        /// Parse a Stream into an SFO index table entry
        /// </summary>
        /// <param name="data">Stream to parse</param>
        /// <returns>Filled SFO index table entry on success, null on error</returns>
        public Models.PlayStation3.SFOIndexTableEntry? ParseIndexTableEntry(Stream data)
            => data.ReadType<Models.PlayStation3.SFOIndexTableEntry>();
    }
}

View File

@@ -1,17 +1,16 @@
using System.Collections.Generic;
using System.IO;
using System.Text;
using SabreTools.IO;
using SabreTools.IO.Extensions;
using SabreTools.Models.SGA;
using SabreTools.Serialization.Interfaces;
using static SabreTools.Models.SGA.Constants;
namespace SabreTools.Serialization.Streams
namespace SabreTools.Serialization.Deserializers
{
public partial class SGA : IStreamSerializer<Models.SGA.File>
public class SGA : BaseBinaryDeserializer<Models.SGA.File>
{
/// <inheritdoc/>
public Models.SGA.File? Deserialize(Stream? data)
public override Models.SGA.File? Deserialize(Stream? data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
@@ -420,7 +419,7 @@ namespace SabreTools.Serialization.Streams
for (int i = 0; i < stringCount; i++)
{
long currentPosition = data.Position - stringTableStart;
strings[currentPosition] = data.ReadString(Encoding.ASCII);
strings[currentPosition] = data.ReadNullTerminatedAnsiString();
}
// Assign the files

View File

@@ -4,14 +4,85 @@ using System.Linq;
using System.Text;
using SabreTools.IO.Readers;
using SabreTools.Models.SeparatedValue;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Streams
namespace SabreTools.Serialization.Deserializers
{
public partial class SeparatedValue : IStreamSerializer<MetadataFile>
// TODO: Create variants for the 3 common types: CSV, SSV, TSV
public class SeparatedValue : BaseBinaryDeserializer<MetadataFile>
{
#region Constants
public const int HeaderWithoutExtendedHashesCount = 14;
public const int HeaderWithExtendedHashesCount = 17;
#endregion
#region IByteDeserializer
/// <inheritdoc cref="IByteDeserializer.Deserialize(byte[]?, int)"/>
public static MetadataFile? DeserializeBytes(byte[]? data, int offset, char delim)
{
var deserializer = new SeparatedValue();
return deserializer.Deserialize(data, offset, delim);
}
/// <inheritdoc/>
public MetadataFile? Deserialize(Stream? data) => Deserialize(data, ',');
public override MetadataFile? Deserialize(byte[]? data, int offset)
=> Deserialize(data, offset, ',');
/// <inheritdoc/>
public MetadataFile? Deserialize(byte[]? data, int offset, char delim)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and parse that
var dataStream = new MemoryStream(data, offset, data.Length - offset);
return DeserializeStream(dataStream, delim);
}
#endregion
#region IFileDeserializer
/// <inheritdoc cref="IFileDeserializer.Deserialize(string?)"/>
public static MetadataFile? DeserializeFile(string? path, char delim = ',')
{
var deserializer = new SeparatedValue();
return deserializer.Deserialize(path, delim);
}
/// <inheritdoc/>
public override MetadataFile? Deserialize(string? path)
=> Deserialize(path, ',');
/// <inheritdoc/>
public MetadataFile? Deserialize(string? path, char delim)
{
using var stream = PathProcessor.OpenStream(path);
return DeserializeStream(stream, delim);
}
#endregion
#region IStreamDeserializer
/// <inheritdoc cref="IStreamDeserializer.Deserialize(Stream?)"/>
public static MetadataFile? DeserializeStream(Stream? data, char delim = ',')
{
var deserializer = new SeparatedValue();
return deserializer.Deserialize(data, delim);
}
/// <inheritdoc/>
public override MetadataFile? Deserialize(Stream? data)
=> Deserialize(data, ',');
/// <inheritdoc cref="Deserialize(Stream)"/>
public MetadataFile? Deserialize(Stream? data, char delim)
@@ -45,7 +116,7 @@ namespace SabreTools.Serialization.Streams
// Parse the line into a row
Row? row = null;
if (reader.Line.Count < Serialization.SeparatedValue.HeaderWithExtendedHashesCount)
if (reader.Line.Count < HeaderWithExtendedHashesCount)
{
row = new Row
{
@@ -66,8 +137,8 @@ namespace SabreTools.Serialization.Streams
};
// If we have additional fields
if (reader.Line.Count > Serialization.SeparatedValue.HeaderWithoutExtendedHashesCount)
row.ADDITIONAL_ELEMENTS = reader.Line.Skip(Serialization.SeparatedValue.HeaderWithoutExtendedHashesCount).ToArray();
if (reader.Line.Count > HeaderWithoutExtendedHashesCount)
row.ADDITIONAL_ELEMENTS = reader.Line.Skip(HeaderWithoutExtendedHashesCount).ToArray();
}
else
{
@@ -93,8 +164,8 @@ namespace SabreTools.Serialization.Streams
};
// If we have additional fields
if (reader.Line.Count > Serialization.SeparatedValue.HeaderWithExtendedHashesCount)
row.ADDITIONAL_ELEMENTS = reader.Line.Skip(Serialization.SeparatedValue.HeaderWithExtendedHashesCount).ToArray();
if (reader.Line.Count > HeaderWithExtendedHashesCount)
row.ADDITIONAL_ELEMENTS = reader.Line.Skip(HeaderWithExtendedHashesCount).ToArray();
}
rows.Add(row);
}
@@ -103,5 +174,7 @@ namespace SabreTools.Serialization.Streams
dat.Row = rows.ToArray();
return dat;
}
#endregion
}
}

View File

@@ -0,0 +1,7 @@
namespace SabreTools.Serialization.Deserializers
{
public class SoftwareList : XmlFile<Models.SoftwareList.SoftwareList>
{
// All logic taken care of in the base class
}
}

View File

@@ -1,16 +1,15 @@
using System.IO;
using System.Text;
using SabreTools.IO;
using SabreTools.IO.Extensions;
using SabreTools.Models.VBSP;
using SabreTools.Serialization.Interfaces;
using static SabreTools.Models.VBSP.Constants;
namespace SabreTools.Serialization.Streams
namespace SabreTools.Serialization.Deserializers
{
public partial class VBSP : IStreamSerializer<Models.VBSP.File>
public class VBSP : BaseBinaryDeserializer<Models.VBSP.File>
{
/// <inheritdoc/>
public Models.VBSP.File? Deserialize(Stream? data)
public override Models.VBSP.File? Deserialize(Stream? data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
@@ -49,7 +48,7 @@ namespace SabreTools.Serialization.Streams
private static Header? ParseHeader(Stream data)
{
// TODO: Use marshalling here instead of building
Header header = new Header();
var header = new Header();
byte[]? signature = data.ReadBytes(4);
if (signature == null)
@@ -66,7 +65,11 @@ namespace SabreTools.Serialization.Streams
header.Lumps = new Lump[HL_VBSP_LUMP_COUNT];
for (int i = 0; i < HL_VBSP_LUMP_COUNT; i++)
{
header.Lumps[i] = ParseLump(data, header.Version);
var lump = ParseLump(data, header.Version);
if (lump == null)
return null;
header.Lumps[i] = lump;
}
header.MapRevision = data.ReadInt32();
@@ -80,19 +83,9 @@ namespace SabreTools.Serialization.Streams
/// <param name="data">Stream to parse</param>
/// <param name="version">VBSP version</param>
/// <returns>Filled Half-Life 2 Level lump on success, null on error</returns>
private static Lump ParseLump(Stream data, int version)
private static Lump? ParseLump(Stream data, int version)
{
// TODO: Use marshalling here instead of building
Lump lump = new Lump();
lump.Offset = data.ReadUInt32();
lump.Length = data.ReadUInt32();
lump.Version = data.ReadUInt32();
lump.FourCC = new char[4];
for (int i = 0; i < 4; i++)
{
lump.FourCC[i] = (char)data.ReadByte();
}
return data.ReadType<Lump>();
// This block was commented out because test VBSPs with header
// version 21 had the values in the "right" order already and
@@ -105,8 +98,8 @@ namespace SabreTools.Serialization.Streams
// lump.Offset = lump.Length;
// lump.Length = temp;
//}
return lump;
//
//return lump
}
}
}

View File

@@ -1,17 +1,15 @@
using System.Collections.Generic;
using System.IO;
using System.Text;
using SabreTools.IO;
using SabreTools.IO.Extensions;
using SabreTools.Models.VPK;
using SabreTools.Serialization.Interfaces;
using static SabreTools.Models.VPK.Constants;
namespace SabreTools.Serialization.Streams
namespace SabreTools.Serialization.Deserializers
{
public partial class VPK : IStreamSerializer<Models.VPK.File>
public class VPK : BaseBinaryDeserializer<Models.VPK.File>
{
/// <inheritdoc/>
public Models.VPK.File? Deserialize(Stream? data)
public override Models.VPK.File? Deserialize(Stream? data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
@@ -32,6 +30,8 @@ namespace SabreTools.Serialization.Streams
// Try to parse the header
// The original version had no signature.
var header = ParseHeader(data);
if (header == null)
return null;
// Set the package header
file.Header = header;
@@ -57,6 +57,8 @@ namespace SabreTools.Serialization.Streams
// Create the directory items tree
var directoryItems = ParseDirectoryItemTree(data);
if (directoryItems == null)
return null;
// Set the directory items
file.DirectoryItems = directoryItems;
@@ -77,10 +79,13 @@ namespace SabreTools.Serialization.Streams
while (data.Position < initialOffset + file.ExtendedHeader.ArchiveHashLength)
{
var archiveHash = ParseArchiveHash(data);
if (archiveHash == null)
return null;
archiveHashes.Add(archiveHash);
}
file.ArchiveHashes = archiveHashes.ToArray();
file.ArchiveHashes = [.. archiveHashes];
}
#endregion
@@ -95,19 +100,15 @@ namespace SabreTools.Serialization.Streams
/// <returns>Filled Valve Package header on success, null on error</returns>
private static Header? ParseHeader(Stream data)
{
// TODO: Use marshalling here instead of building
Header header = new Header();
var header = data.ReadType<Header>();
header.Signature = data.ReadUInt32();
if (header == null)
return null;
if (header.Signature != SignatureUInt32)
return null;
header.Version = data.ReadUInt32();
if (header.Version > 2)
return null;
header.DirectoryLength = data.ReadUInt32();
return header;
}
@@ -116,17 +117,9 @@ namespace SabreTools.Serialization.Streams
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Valve Package extended header on success, null on error</returns>
private static ExtendedHeader ParseExtendedHeader(Stream data)
private static ExtendedHeader? ParseExtendedHeader(Stream data)
{
// TODO: Use marshalling here instead of building
ExtendedHeader extendedHeader = new ExtendedHeader();
extendedHeader.Dummy0 = data.ReadUInt32();
extendedHeader.ArchiveHashLength = data.ReadUInt32();
extendedHeader.ExtraLength = data.ReadUInt32();
extendedHeader.Dummy1 = data.ReadUInt32();
return extendedHeader;
return data.ReadType<ExtendedHeader>();
}
/// <summary>
@@ -134,17 +127,9 @@ namespace SabreTools.Serialization.Streams
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Valve Package archive hash on success, null on error</returns>
private static ArchiveHash ParseArchiveHash(Stream data)
private static ArchiveHash? ParseArchiveHash(Stream data)
{
// TODO: Use marshalling here instead of building
ArchiveHash archiveHash = new ArchiveHash();
archiveHash.ArchiveIndex = data.ReadUInt32();
archiveHash.ArchiveOffset = data.ReadUInt32();
archiveHash.Length = data.ReadUInt32();
archiveHash.Hash = data.ReadBytes(0x10);
return archiveHash;
return data.ReadType<ArchiveHash>();
}
/// <summary>
@@ -152,7 +137,7 @@ namespace SabreTools.Serialization.Streams
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Valve Package directory item tree on success, null on error</returns>
private static DirectoryItem[] ParseDirectoryItemTree(Stream data)
private static DirectoryItem[]? ParseDirectoryItemTree(Stream data)
{
// Create the directory items list
var directoryItems = new List<DirectoryItem>();
@@ -160,7 +145,7 @@ namespace SabreTools.Serialization.Streams
while (true)
{
// Get the extension
string? extensionString = data.ReadString(Encoding.ASCII);
string? extensionString = data.ReadNullTerminatedAnsiString();
if (string.IsNullOrEmpty(extensionString))
break;
@@ -173,7 +158,7 @@ namespace SabreTools.Serialization.Streams
while (true)
{
// Get the path
string? pathString = data.ReadString(Encoding.ASCII);
string? pathString = data.ReadNullTerminatedAnsiString();
if (string.IsNullOrEmpty(pathString))
break;
@@ -186,7 +171,7 @@ namespace SabreTools.Serialization.Streams
while (true)
{
// Get the name
string? nameString = data.ReadString(Encoding.ASCII);
string? nameString = data.ReadNullTerminatedAnsiString();
if (string.IsNullOrEmpty(nameString))
break;
@@ -198,6 +183,8 @@ namespace SabreTools.Serialization.Streams
// Get the directory item
var directoryItem = ParseDirectoryItem(data, extensionString!, pathString!, nameString!);
if (directoryItem == null)
return null;
// Add the directory item
directoryItems.Add(directoryItem);
@@ -213,9 +200,9 @@ namespace SabreTools.Serialization.Streams
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Valve Package directory item on success, null on error</returns>
private static DirectoryItem ParseDirectoryItem(Stream data, string extension, string path, string name)
private static DirectoryItem? ParseDirectoryItem(Stream data, string extension, string path, string name)
{
DirectoryItem directoryItem = new DirectoryItem();
var directoryItem = new DirectoryItem();
directoryItem.Extension = extension;
directoryItem.Path = path;
@@ -223,6 +210,8 @@ namespace SabreTools.Serialization.Streams
// Get the directory entry
var directoryEntry = ParseDirectoryEntry(data);
if (directoryEntry == null)
return null;
// Set the directory entry
directoryItem.DirectoryEntry = directoryEntry;
@@ -268,19 +257,9 @@ namespace SabreTools.Serialization.Streams
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Valve Package directory entry on success, null on error</returns>
private static DirectoryEntry ParseDirectoryEntry(Stream data)
private static DirectoryEntry? ParseDirectoryEntry(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryEntry directoryEntry = new DirectoryEntry();
directoryEntry.CRC = data.ReadUInt32();
directoryEntry.PreloadBytes = data.ReadUInt16();
directoryEntry.ArchiveIndex = data.ReadUInt16();
directoryEntry.EntryOffset = data.ReadUInt32();
directoryEntry.EntryLength = data.ReadUInt32();
directoryEntry.Dummy0 = data.ReadUInt16();
return directoryEntry;
return data.ReadType<DirectoryEntry>();
}
}
}

View File

@@ -1,16 +1,15 @@
using System.IO;
using System.Text;
using SabreTools.IO;
using SabreTools.IO.Extensions;
using SabreTools.Models.WAD;
using SabreTools.Serialization.Interfaces;
using static SabreTools.Models.WAD.Constants;
namespace SabreTools.Serialization.Streams
namespace SabreTools.Serialization.Deserializers
{
public partial class WAD : IStreamSerializer<Models.WAD.File>
public class WAD : BaseBinaryDeserializer<Models.WAD.File>
{
/// <inheritdoc/>
public Models.WAD.File? Deserialize(Stream? data)
public override Models.WAD.File? Deserialize(Stream? data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
@@ -53,6 +52,9 @@ namespace SabreTools.Serialization.Streams
for (int i = 0; i < header.LumpCount; i++)
{
var lump = ParseLump(data);
if (lump == null)
return null;
file.Lumps[i] = lump;
}
@@ -105,20 +107,13 @@ namespace SabreTools.Serialization.Streams
/// <returns>Filled Half-Life Texture Package header on success, null on error</returns>
private static Header? ParseHeader(Stream data)
{
// TODO: Use marshalling here instead of building
Header header = new Header();
var header = data.ReadType<Header>();
byte[]? signature = data.ReadBytes(4);
if (signature == null)
if (header == null)
return null;
header.Signature = Encoding.ASCII.GetString(signature);
if (header.Signature != SignatureString)
return null;
header.LumpCount = data.ReadUInt32();
header.LumpOffset = data.ReadUInt32();
return header;
}
@@ -127,23 +122,9 @@ namespace SabreTools.Serialization.Streams
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Texture Package lump on success, null on error</returns>
private static Lump ParseLump(Stream data)
private static Lump? ParseLump(Stream data)
{
// TODO: Use marshalling here instead of building
Lump lump = new Lump();
lump.Offset = data.ReadUInt32();
lump.DiskLength = data.ReadUInt32();
lump.Length = data.ReadUInt32();
lump.Type = data.ReadByteValue();
lump.Compression = data.ReadByteValue();
lump.Padding0 = data.ReadByteValue();
lump.Padding1 = data.ReadByteValue();
byte[]? name = data.ReadBytes(16);
if (name != null)
lump.Name = Encoding.ASCII.GetString(name).TrimEnd('\0');
return lump;
return data.ReadType<Lump>();
}
/// <summary>
@@ -183,7 +164,7 @@ namespace SabreTools.Serialization.Streams
lumpInfo.Width = data.ReadUInt32();
lumpInfo.Height = data.ReadUInt32();
lumpInfo.PixelOffset = data.ReadUInt32();
_ = data.ReadBytes(12); // Unknown data
lumpInfo.UnknownData = data.ReadBytes(12);
// Cache the current offset
long currentOffset = data.Position;

View File

@@ -1,9 +1,19 @@
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Strings
namespace SabreTools.Serialization.Deserializers
{
public partial class XMID : IStringSerializer<Models.Xbox.XMID>
public partial class XMID :
IStringDeserializer<Models.Xbox.XMID>
{
#region IStringDeserializer
/// <inheritdoc cref="IStringDeserializer.Deserialize(string?)"/>
public static Models.Xbox.XMID? DeserializeString(string? str)
{
var deserializer = new XMID();
return deserializer.Deserialize(str);
}
/// <inheritdoc/>
public Models.Xbox.XMID? Deserialize(string? str)
{
@@ -36,5 +46,7 @@ namespace SabreTools.Serialization.Strings
return xmid;
}
#endregion
}
}

View File

@@ -1,16 +1,15 @@
using System.IO;
using System.Text;
using SabreTools.IO;
using SabreTools.IO.Extensions;
using SabreTools.Models.XZP;
using SabreTools.Serialization.Interfaces;
using static SabreTools.Models.XZP.Constants;
namespace SabreTools.Serialization.Streams
namespace SabreTools.Serialization.Deserializers
{
public partial class XZP : IStreamSerializer<Models.XZP.File>
public class XZP : BaseBinaryDeserializer<Models.XZP.File>
{
/// <inheritdoc/>
public Models.XZP.File? Deserialize(Stream? data)
public override Models.XZP.File? Deserialize(Stream? data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
@@ -47,6 +46,9 @@ namespace SabreTools.Serialization.Streams
for (int i = 0; i < header.DirectoryEntryCount; i++)
{
var directoryEntry = ParseDirectoryEntry(data);
if (directoryEntry == null)
return null;
file.DirectoryEntries[i] = directoryEntry;
}
@@ -63,6 +65,9 @@ namespace SabreTools.Serialization.Streams
for (int i = 0; i < header.PreloadDirectoryEntryCount; i++)
{
var directoryEntry = ParseDirectoryEntry(data);
if (directoryEntry == null)
return null;
file.PreloadDirectoryEntries[i] = directoryEntry;
}
}
@@ -80,6 +85,9 @@ namespace SabreTools.Serialization.Streams
for (int i = 0; i < header.PreloadDirectoryEntryCount; i++)
{
var directoryMapping = ParseDirectoryMapping(data);
if (directoryMapping == null)
return null;
file.PreloadDirectoryMappings[i] = directoryMapping;
}
}
@@ -136,29 +144,15 @@ namespace SabreTools.Serialization.Streams
/// <returns>Filled XBox Package File header on success, null on error</returns>
private static Header? ParseHeader(Stream data)
{
// TODO: Use marshalling here instead of building
Header header = new Header();
var header = data.ReadType<Header>();
byte[]? signature = data.ReadBytes(4);
if (signature == null)
if (header == null)
return null;
header.Signature = Encoding.ASCII.GetString(signature);
if (header.Signature != HeaderSignatureString)
return null;
header.Version = data.ReadUInt32();
if (header.Version != 6)
return null;
header.PreloadDirectoryEntryCount = data.ReadUInt32();
header.DirectoryEntryCount = data.ReadUInt32();
header.PreloadBytes = data.ReadUInt32();
header.HeaderLength = data.ReadUInt32();
header.DirectoryItemCount = data.ReadUInt32();
header.DirectoryItemOffset = data.ReadUInt32();
header.DirectoryItemLength = data.ReadUInt32();
return header;
}
@@ -167,16 +161,9 @@ namespace SabreTools.Serialization.Streams
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled XBox Package File directory entry on success, null on error</returns>
private static DirectoryEntry ParseDirectoryEntry(Stream data)
private static DirectoryEntry? ParseDirectoryEntry(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryEntry directoryEntry = new DirectoryEntry();
directoryEntry.FileNameCRC = data.ReadUInt32();
directoryEntry.EntryLength = data.ReadUInt32();
directoryEntry.EntryOffset = data.ReadUInt32();
return directoryEntry;
return data.ReadType<DirectoryEntry>();
}
/// <summary>
@@ -184,14 +171,9 @@ namespace SabreTools.Serialization.Streams
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled XBox Package File directory mapping on success, null on error</returns>
private static DirectoryMapping ParseDirectoryMapping(Stream data)
private static DirectoryMapping? ParseDirectoryMapping(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryMapping directoryMapping = new DirectoryMapping();
directoryMapping.PreloadDirectoryEntryIndex = data.ReadUInt16();
return directoryMapping;
return data.ReadType<DirectoryMapping>();
}
/// <summary>
@@ -215,7 +197,7 @@ namespace SabreTools.Serialization.Streams
data.Seek(directoryItem.NameOffset, SeekOrigin.Begin);
// Read the name
directoryItem.Name = data.ReadString(Encoding.ASCII);
directoryItem.Name = data.ReadNullTerminatedAnsiString();
// Seek back to the right position
data.Seek(currentPosition, SeekOrigin.Begin);
@@ -230,15 +212,10 @@ namespace SabreTools.Serialization.Streams
/// <returns>Filled XBox Package File footer on success, null on error</returns>
private static Footer? ParseFooter(Stream data)
{
// TODO: Use marshalling here instead of building
Footer footer = new Footer();
var footer = data.ReadType<Footer>();
footer.FileLength = data.ReadUInt32();
byte[]? signature = data.ReadBytes(4);
if (signature == null)
if (footer == null)
return null;
footer.Signature = Encoding.ASCII.GetString(signature);
if (footer.Signature != FooterSignatureString)
return null;

View File

@@ -1,9 +1,19 @@
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Strings
namespace SabreTools.Serialization.Deserializers
{
public partial class XeMID : IStringSerializer<Models.Xbox.XeMID>
public partial class XeMID :
IStringDeserializer<Models.Xbox.XeMID>
{
#region IStringDeserializer
/// <inheritdoc cref="IStringDeserializer.Deserialize(string?)"/>
public static Models.Xbox.XeMID? DeserializeString(string? str)
{
var deserializer = new XeMID();
return deserializer.Deserialize(str);
}
/// <inheritdoc/>
public Models.Xbox.XeMID? Deserialize(string? str)
{
@@ -60,5 +70,7 @@ namespace SabreTools.Serialization.Strings
return xemid;
}
#endregion
}
}

View File

@@ -2,18 +2,17 @@ using System.IO;
using System.Xml;
using System.Xml.Schema;
using System.Xml.Serialization;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Streams
namespace SabreTools.Serialization.Deserializers
{
/// <summary>
/// Base class for other XML serializers
/// Base class for other XML deserializers
/// </summary>
/// <typeparam name="T"></typeparam>
public partial class XmlFile<T> : IStreamSerializer<T>
public class XmlFile<T> : BaseBinaryDeserializer<T>
{
/// <inheritdoc/>
public T? Deserialize(Stream? data)
public override T? Deserialize(Stream? data)
{
// If the stream is null
if (data == null)

View File

@@ -1,13 +0,0 @@
namespace SabreTools.Serialization
{
public enum Hash
{
CRC,
MD5,
SHA1,
SHA256,
SHA384,
SHA512,
SpamSum,
}
}

View File

@@ -4,7 +4,7 @@ using System.IO;
using System.Linq;
using System.Text;
using System.Xml.Serialization;
using SabreTools.IO;
using SabreTools.IO.Extensions;
using SabreTools.Models.PortableExecutable;
namespace SabreTools.Serialization
@@ -92,18 +92,56 @@ namespace SabreTools.Serialization
return -1;
}
#region Debug
/// <summary>
/// Read debug data as an NB10 Program Database
/// </summary>
/// <param name="data">Data to parse into a database</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>A filled NB10 Program Database on success, null on error</returns>
public static NB10ProgramDatabase? AsNB10ProgramDatabase(this byte[] data, ref int offset)
{
var nb10ProgramDatabase = data.ReadType<NB10ProgramDatabase>(ref offset);
if (nb10ProgramDatabase == null)
return null;
if (nb10ProgramDatabase.Signature != 0x3031424E)
return null;
return nb10ProgramDatabase;
}
/// <summary>
/// Read debug data as an RSDS Program Database
/// </summary>
/// <param name="data">Data to parse into a database</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>A filled RSDS Program Database on success, null on error</returns>
public static RSDSProgramDatabase? AsRSDSProgramDatabase(this byte[] data, ref int offset)
{
var rsdsProgramDatabase = data.ReadType<RSDSProgramDatabase>(ref offset);
if (rsdsProgramDatabase == null)
return null;
if (rsdsProgramDatabase.Signature != 0x53445352)
return null;
return rsdsProgramDatabase;
}
#endregion
#region Overlay
/// <summary>
/// Read overlay data as a SecuROM AddD overlay data
/// </summary>
/// <param name="data">Data to parse into overlay data</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>A filled SecuROM AddD overlay data on success, null on error</returns>
public static SecuROMAddD? AsSecuROMAddD(this byte[]? data, ref int offset)
public static SecuROMAddD? AsSecuROMAddD(this byte[] data, ref int offset)
{
// If we have data that's invalid, we can't do anything
if (data == null)
return null;
// Read in the table
var addD = new SecuROMAddD();
@@ -114,7 +152,7 @@ namespace SabreTools.Serialization
int originalOffset = offset;
addD.EntryCount = data.ReadUInt32(ref offset);
addD.Version = data.ReadString(ref offset, Encoding.ASCII);
addD.Version = data.ReadNullTerminatedAnsiString(ref offset);
if (string.IsNullOrEmpty(addD.Version))
offset = originalOffset + 0x10;
@@ -134,18 +172,9 @@ namespace SabreTools.Serialization
addD.Entries = new SecuROMAddDEntry[addD.EntryCount];
for (int i = 0; i < addD.EntryCount; i++)
{
var addDEntry = new SecuROMAddDEntry();
addDEntry.PhysicalOffset = data.ReadUInt32(ref offset);
addDEntry.Length = data.ReadUInt32(ref offset);
addDEntry.Unknown08h = data.ReadUInt32(ref offset);
addDEntry.Unknown0Ch = data.ReadUInt32(ref offset);
addDEntry.Unknown10h = data.ReadUInt32(ref offset);
addDEntry.Unknown14h = data.ReadUInt32(ref offset);
addDEntry.Unknown18h = data.ReadUInt32(ref offset);
addDEntry.Unknown1Ch = data.ReadUInt32(ref offset);
addDEntry.FileName = data.ReadString(ref offset, Encoding.ASCII);
addDEntry.Unknown2Ch = data.ReadUInt32(ref offset);
var addDEntry = data.ReadType<SecuROMAddDEntry>(ref offset);
if (addDEntry == null)
return null;
addD.Entries[i] = addDEntry;
}
@@ -153,61 +182,6 @@ namespace SabreTools.Serialization
return addD;
}
#region Debug
/// <summary>
/// Read debug data as an NB10 Program Database
/// </summary>
/// <param name="data">Data to parse into a database</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>A filled NB10 Program Database on success, null on error</returns>
public static NB10ProgramDatabase? AsNB10ProgramDatabase(this byte[] data, ref int offset)
{
// If we have data that's invalid, we can't do anything
if (data == null)
return null;
var nb10ProgramDatabase = new NB10ProgramDatabase();
nb10ProgramDatabase.Signature = data.ReadUInt32(ref offset);
if (nb10ProgramDatabase.Signature != 0x3031424E)
return null;
nb10ProgramDatabase.Offset = data.ReadUInt32(ref offset);
nb10ProgramDatabase.Timestamp = data.ReadUInt32(ref offset);
nb10ProgramDatabase.Age = data.ReadUInt32(ref offset);
nb10ProgramDatabase.PdbFileName = data.ReadString(ref offset, Encoding.ASCII); // TODO: Actually null-terminated UTF-8?
return nb10ProgramDatabase;
}
/// <summary>
/// Read debug data as an RSDS Program Database
/// </summary>
/// <param name="data">Data to parse into a database</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>A filled RSDS Program Database on success, null on error</returns>
public static RSDSProgramDatabase? AsRSDSProgramDatabase(this byte[]? data, ref int offset)
{
// If we have data that's invalid, we can't do anything
if (data == null)
return null;
var rsdsProgramDatabase = new RSDSProgramDatabase();
rsdsProgramDatabase.Signature = data.ReadUInt32(ref offset);
if (rsdsProgramDatabase.Signature != 0x53445352)
return null;
var guid = data.ReadBytes(ref offset, 0x10);
if (guid != null)
rsdsProgramDatabase.GUID = new Guid(guid);
rsdsProgramDatabase.Age = data.ReadUInt32(ref offset);
rsdsProgramDatabase.PathAndFileName = data.ReadString(ref offset, Encoding.ASCII); // TODO: Actually null-terminated UTF-8
return rsdsProgramDatabase;
}
#endregion
// TODO: Implement other resource types from https://learn.microsoft.com/en-us/windows/win32/menurc/resource-file-formats
@@ -219,7 +193,7 @@ namespace SabreTools.Serialization
/// <param name="data">Data to parse into a resource header</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>A filled resource header on success, null on error</returns>
public static ResourceHeader? AsResourceHeader(this byte[]? data, ref int offset)
public static ResourceHeader? AsResourceHeader(this byte[] data, ref int offset)
{
// If we have data that's invalid, we can't do anything
if (data == null)
@@ -263,12 +237,9 @@ namespace SabreTools.Serialization
// Read in the table
for (int i = 0; i < count; i++)
{
var acceleratorTableEntry = new AcceleratorTableEntry();
acceleratorTableEntry.Flags = (AcceleratorTableFlags)entry.Data.ReadUInt16(ref offset);
acceleratorTableEntry.Ansi = entry.Data.ReadUInt16(ref offset);
acceleratorTableEntry.Id = entry.Data.ReadUInt16(ref offset);
acceleratorTableEntry.Padding = entry.Data.ReadUInt16(ref offset);
var acceleratorTableEntry = entry.Data.ReadType<AcceleratorTableEntry>(ref offset);
if (acceleratorTableEntry == null)
return null;
table[i] = acceleratorTableEntry;
}
@@ -355,7 +326,7 @@ namespace SabreTools.Serialization
offset += sizeof(ushort);
// Read the menu resource as a string
dialogTemplateExtended.MenuResource = entry.Data.ReadString(ref offset, Encoding.Unicode);
dialogTemplateExtended.MenuResource = entry.Data.ReadNullTerminatedUnicodeString(ref offset);
// Align to the WORD boundary if we're not at the end
if (offset < entry.Data.Length)
@@ -391,7 +362,7 @@ namespace SabreTools.Serialization
offset += sizeof(ushort);
// Read the class resource as a string
dialogTemplateExtended.ClassResource = entry.Data.ReadString(ref offset, Encoding.Unicode);
dialogTemplateExtended.ClassResource = entry.Data.ReadNullTerminatedUnicodeString(ref offset);
// Align to the WORD boundary if we're not at the end
if (offset < entry.Data.Length)
@@ -422,7 +393,7 @@ namespace SabreTools.Serialization
else
{
// Read the title resource as a string
dialogTemplateExtended.TitleResource = entry.Data.ReadString(ref offset, Encoding.Unicode);
dialogTemplateExtended.TitleResource = entry.Data.ReadNullTerminatedUnicodeString(ref offset);
// Align to the WORD boundary if we're not at the end
if (offset < entry.Data.Length)
@@ -447,7 +418,7 @@ namespace SabreTools.Serialization
dialogTemplateExtended.Weight = entry.Data.ReadUInt16(ref offset);
dialogTemplateExtended.Italic = entry.Data.ReadByte(ref offset);
dialogTemplateExtended.CharSet = entry.Data.ReadByte(ref offset);
dialogTemplateExtended.Typeface = entry.Data.ReadString(ref offset, Encoding.Unicode);
dialogTemplateExtended.Typeface = entry.Data.ReadNullTerminatedUnicodeString(ref offset);
}
// Align to the DWORD boundary if we're not at the end
@@ -503,7 +474,7 @@ namespace SabreTools.Serialization
offset += sizeof(ushort);
// Read the class resource as a string
dialogItemTemplate.ClassResource = entry.Data.ReadString(ref offset, Encoding.Unicode);
dialogItemTemplate.ClassResource = entry.Data.ReadNullTerminatedUnicodeString(ref offset);
// Align to the WORD boundary if we're not at the end
if (offset < entry.Data.Length)
@@ -533,7 +504,7 @@ namespace SabreTools.Serialization
else
{
// Read the title resource as a string
dialogItemTemplate.TitleResource = entry.Data.ReadString(ref offset, Encoding.Unicode);
dialogItemTemplate.TitleResource = entry.Data.ReadNullTerminatedUnicodeString(ref offset);
// Align to the WORD boundary if we're not at the end
if (offset < entry.Data.Length)
@@ -605,7 +576,7 @@ namespace SabreTools.Serialization
offset += sizeof(ushort);
// Read the menu resource as a string
dialogTemplate.MenuResource = entry.Data.ReadString(ref offset, Encoding.Unicode);
dialogTemplate.MenuResource = entry.Data.ReadNullTerminatedUnicodeString(ref offset);
// Align to the WORD boundary if we're not at the end
if (offset < entry.Data.Length)
@@ -641,7 +612,7 @@ namespace SabreTools.Serialization
offset += sizeof(ushort);
// Read the class resource as a string
dialogTemplate.ClassResource = entry.Data.ReadString(ref offset, Encoding.Unicode);
dialogTemplate.ClassResource = entry.Data.ReadNullTerminatedUnicodeString(ref offset);
// Align to the WORD boundary if we're not at the end
if (offset < entry.Data.Length)
@@ -672,7 +643,7 @@ namespace SabreTools.Serialization
else
{
// Read the title resource as a string
dialogTemplate.TitleResource = entry.Data.ReadString(ref offset, Encoding.Unicode);
dialogTemplate.TitleResource = entry.Data.ReadNullTerminatedUnicodeString(ref offset);
// Align to the WORD boundary if we're not at the end
if (offset < entry.Data.Length)
@@ -696,7 +667,7 @@ namespace SabreTools.Serialization
dialogTemplate.PointSizeValue = entry.Data.ReadUInt16(ref offset);
// Read the font name as a string
dialogTemplate.Typeface = entry.Data.ReadString(ref offset, Encoding.Unicode);
dialogTemplate.Typeface = entry.Data.ReadNullTerminatedUnicodeString(ref offset);
}
// Align to the DWORD boundary if we're not at the end
@@ -751,7 +722,7 @@ namespace SabreTools.Serialization
offset += sizeof(ushort);
// Read the class resource as a string
dialogItemTemplate.ClassResource = entry.Data.ReadString(ref offset, Encoding.Unicode);
dialogItemTemplate.ClassResource = entry.Data.ReadNullTerminatedUnicodeString(ref offset);
// Align to the WORD boundary if we're not at the end
if (offset < entry.Data.Length)
@@ -781,7 +752,7 @@ namespace SabreTools.Serialization
else
{
// Read the title resource as a string
dialogItemTemplate.TitleResource = entry.Data.ReadString(ref offset, Encoding.Unicode);
dialogItemTemplate.TitleResource = entry.Data.ReadNullTerminatedUnicodeString(ref offset);
// Align to the WORD boundary if we're not at the end
if (offset < entry.Data.Length)
@@ -881,8 +852,8 @@ namespace SabreTools.Serialization
dirEntry.Entry.Reserved = entry.Data.ReadUInt32(ref offset);
// TODO: Determine how to read these two? Immediately after?
dirEntry.Entry.DeviceName = entry.Data.ReadString(ref offset);
dirEntry.Entry.FaceName = entry.Data.ReadString(ref offset);
dirEntry.Entry.DeviceName = entry.Data.ReadNullTerminatedAnsiString(ref offset);
dirEntry.Entry.FaceName = entry.Data.ReadNullTerminatedAnsiString(ref offset);
fontGroupHeader.DE[i] = dirEntry;
}
@@ -916,11 +887,9 @@ namespace SabreTools.Serialization
{
#region Extended menu header
var menuHeaderExtended = new MenuHeaderExtended();
menuHeaderExtended.Version = entry.Data.ReadUInt16(ref offset);
menuHeaderExtended.Offset = entry.Data.ReadUInt16(ref offset);
menuHeaderExtended.HelpID = entry.Data.ReadUInt32(ref offset);
var menuHeaderExtended = entry.Data.ReadType<MenuHeaderExtended>(ref offset);
if (menuHeaderExtended == null)
return null;
menuResource.ExtendedMenuHeader = menuHeaderExtended;
@@ -936,13 +905,9 @@ namespace SabreTools.Serialization
while (offset < entry.Data.Length)
{
var extendedMenuItem = new MenuItemExtended();
extendedMenuItem.ItemType = (MenuFlags)entry.Data.ReadUInt32(ref offset);
extendedMenuItem.State = (MenuFlags)entry.Data.ReadUInt32(ref offset);
extendedMenuItem.ID = entry.Data.ReadUInt32(ref offset);
extendedMenuItem.Flags = (MenuFlags)entry.Data.ReadUInt32(ref offset);
extendedMenuItem.MenuText = entry.Data.ReadString(ref offset, Encoding.Unicode);
var extendedMenuItem = entry.Data.ReadType<MenuItemExtended>(ref offset);
if (extendedMenuItem == null)
return null;
// Align to the DWORD boundary if we're not at the end
if (offset < entry.Data.Length)
@@ -963,10 +928,9 @@ namespace SabreTools.Serialization
{
#region Menu header
var menuHeader = new MenuHeader();
menuHeader.Version = entry.Data.ReadUInt16(ref offset);
menuHeader.HeaderSize = entry.Data.ReadUInt16(ref offset);
var menuHeader = entry.Data.ReadType<MenuHeader>(ref offset);
if (menuHeader == null)
return null;
menuResource.MenuHeader = menuHeader;
@@ -978,28 +942,19 @@ namespace SabreTools.Serialization
while (offset < entry.Data.Length)
{
var menuItem = new MenuItem();
// Determine if this is a popup
int flagsOffset = offset;
var initialFlags = (MenuFlags)entry.Data.ReadUInt16(ref flagsOffset);
MenuItem? menuItem;
#if NET20 || NET35
if ((initialFlags & MenuFlags.MF_POPUP) != 0)
#else
if (initialFlags.HasFlag(MenuFlags.MF_POPUP))
#endif
{
menuItem.PopupItemType = (MenuFlags)entry.Data.ReadUInt32(ref offset);
menuItem.PopupState = (MenuFlags)entry.Data.ReadUInt32(ref offset);
menuItem.PopupID = entry.Data.ReadUInt32(ref offset);
menuItem.PopupResInfo = (MenuFlags)entry.Data.ReadUInt32(ref offset);
menuItem.PopupMenuText = entry.Data.ReadString(ref offset, Encoding.Unicode);
}
menuItem = entry.Data.ReadType<PopupMenuItem>(ref offset);
else
{
menuItem.NormalResInfo = (MenuFlags)entry.Data.ReadUInt16(ref offset);
menuItem.NormalMenuText = entry.Data.ReadString(ref offset, Encoding.Unicode);
}
menuItem = entry.Data.ReadType<NormalMenuItem>(ref offset);
// Align to the DWORD boundary if we're not at the end
if (offset < entry.Data.Length)
@@ -1008,6 +963,9 @@ namespace SabreTools.Serialization
_ = entry.Data.ReadByte(ref offset);
}
if (menuItem == null)
return null;
menuItems.Add(menuItem);
}
@@ -1044,11 +1002,9 @@ namespace SabreTools.Serialization
for (int i = 0; i < messageResourceData.NumberOfBlocks; i++)
{
var messageResourceBlock = new MessageResourceBlock();
messageResourceBlock.LowId = entry.Data.ReadUInt32(ref offset);
messageResourceBlock.HighId = entry.Data.ReadUInt32(ref offset);
messageResourceBlock.OffsetToEntries = entry.Data.ReadUInt32(ref offset);
var messageResourceBlock = entry.Data.ReadType<MessageResourceBlock>(ref offset);
if (messageResourceBlock == null)
return null;
messageResourceBlocks.Add(messageResourceBlock);
}
@@ -1154,7 +1110,7 @@ namespace SabreTools.Serialization
versionInfo.Length = entry.Data.ReadUInt16(ref offset);
versionInfo.ValueLength = entry.Data.ReadUInt16(ref offset);
versionInfo.ResourceType = (VersionResourceType)entry.Data.ReadUInt16(ref offset);
versionInfo.Key = entry.Data.ReadString(ref offset, Encoding.Unicode);
versionInfo.Key = entry.Data.ReadNullTerminatedUnicodeString(ref offset);
if (versionInfo.Key != "VS_VERSION_INFO")
return null;
@@ -1164,23 +1120,13 @@ namespace SabreTools.Serialization
// Read fixed file info
if (versionInfo.ValueLength > 0)
{
var fixedFileInfo = new FixedFileInfo();
fixedFileInfo.Signature = entry.Data.ReadUInt32(ref offset);
var fixedFileInfo = entry.Data.ReadType<FixedFileInfo>(ref offset);
if (fixedFileInfo == null)
return null;
if (fixedFileInfo.Signature != 0xFEEF04BD)
return null;
fixedFileInfo.StrucVersion = entry.Data.ReadUInt32(ref offset);
fixedFileInfo.FileVersionMS = entry.Data.ReadUInt32(ref offset);
fixedFileInfo.FileVersionLS = entry.Data.ReadUInt32(ref offset);
fixedFileInfo.ProductVersionMS = entry.Data.ReadUInt32(ref offset);
fixedFileInfo.ProductVersionLS = entry.Data.ReadUInt32(ref offset);
fixedFileInfo.FileFlagsMask = entry.Data.ReadUInt32(ref offset);
fixedFileInfo.FileFlags = (FixedFileInfoFlags)(entry.Data.ReadUInt32(ref offset) & fixedFileInfo.FileFlagsMask);
fixedFileInfo.FileOS = (FixedFileInfoOS)entry.Data.ReadUInt32(ref offset);
fixedFileInfo.FileType = (FixedFileInfoFileType)entry.Data.ReadUInt32(ref offset);
fixedFileInfo.FileSubtype = (FixedFileInfoFileSubtype)entry.Data.ReadUInt32(ref offset);
fixedFileInfo.FileDateMS = entry.Data.ReadUInt32(ref offset);
fixedFileInfo.FileDateLS = entry.Data.ReadUInt32(ref offset);
versionInfo.Value = fixedFileInfo;
}
@@ -1196,7 +1142,7 @@ namespace SabreTools.Serialization
int currentOffset = offset;
offset += 6;
string? nextKey = entry.Data.ReadString(ref offset, Encoding.Unicode);
string? nextKey = entry.Data.ReadNullTerminatedUnicodeString(ref offset);
offset = currentOffset;
if (nextKey == "StringFileInfo")
@@ -1218,7 +1164,7 @@ namespace SabreTools.Serialization
int currentOffset = offset;
offset += 6;
string? nextKey = entry.Data.ReadString(ref offset, Encoding.Unicode);
string? nextKey = entry.Data.ReadNullTerminatedUnicodeString(ref offset);
offset = currentOffset;
if (nextKey == "StringFileInfo")
@@ -1242,7 +1188,7 @@ namespace SabreTools.Serialization
/// <param name="data">Data to parse into a string file info</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>A filled string file info resource on success, null on error</returns>
private static StringFileInfo? AsStringFileInfo(byte[] data, ref int offset)
public static StringFileInfo? AsStringFileInfo(byte[] data, ref int offset)
{
var stringFileInfo = new StringFileInfo();
@@ -1252,7 +1198,7 @@ namespace SabreTools.Serialization
stringFileInfo.Length = data.ReadUInt16(ref offset);
stringFileInfo.ValueLength = data.ReadUInt16(ref offset);
stringFileInfo.ResourceType = (VersionResourceType)data.ReadUInt16(ref offset);
stringFileInfo.Key = data.ReadString(ref offset, Encoding.Unicode);
stringFileInfo.Key = data.ReadNullTerminatedUnicodeString(ref offset);
if (stringFileInfo.Key != "StringFileInfo")
{
offset -= 6 + ((stringFileInfo.Key?.Length ?? 0 + 1) * 2);
@@ -1274,7 +1220,7 @@ namespace SabreTools.Serialization
stringTable.Length = data.ReadUInt16(ref offset);
stringTable.ValueLength = data.ReadUInt16(ref offset);
stringTable.ResourceType = (VersionResourceType)data.ReadUInt16(ref offset);
stringTable.Key = data.ReadString(ref offset, Encoding.Unicode);
stringTable.Key = data.ReadNullTerminatedUnicodeString(ref offset);
// Align to the DWORD boundary if we're not at the end
if (offset < data.Length)
@@ -1292,7 +1238,7 @@ namespace SabreTools.Serialization
stringData.Length = data.ReadUInt16(ref offset);
stringData.ValueLength = data.ReadUInt16(ref offset);
stringData.ResourceType = (VersionResourceType)data.ReadUInt16(ref offset);
stringData.Key = data.ReadString(ref offset, Encoding.Unicode);
stringData.Key = data.ReadNullTerminatedUnicodeString(ref offset);
// Align to the DWORD boundary if we're not at the end
if (offset < data.Length)
@@ -1337,7 +1283,7 @@ namespace SabreTools.Serialization
/// <param name="data">Data to parse into a var file info</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>A filled var file info resource on success, null on error</returns>
private static VarFileInfo? AsVarFileInfo(byte[] data, ref int offset)
public static VarFileInfo? AsVarFileInfo(byte[] data, ref int offset)
{
var varFileInfo = new VarFileInfo();
@@ -1347,7 +1293,7 @@ namespace SabreTools.Serialization
varFileInfo.Length = data.ReadUInt16(ref offset);
varFileInfo.ValueLength = data.ReadUInt16(ref offset);
varFileInfo.ResourceType = (VersionResourceType)data.ReadUInt16(ref offset);
varFileInfo.Key = data.ReadString(ref offset, Encoding.Unicode);
varFileInfo.Key = data.ReadNullTerminatedUnicodeString(ref offset);
if (varFileInfo.Key != "VarFileInfo")
return null;
@@ -1366,7 +1312,7 @@ namespace SabreTools.Serialization
varData.Length = data.ReadUInt16(ref offset);
varData.ValueLength = data.ReadUInt16(ref offset);
varData.ResourceType = (VersionResourceType)data.ReadUInt16(ref offset);
varData.Key = data.ReadString(ref offset, Encoding.Unicode);
varData.Key = data.ReadNullTerminatedUnicodeString(ref offset);
if (varData.Key != "Translation")
{
offset -= 6 + ((varData.Key?.Length ?? 0 + 1) * 2);

View File

@@ -1,14 +0,0 @@
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Files
{
public partial class AACS : IFileSerializer<Models.AACS.MediaKeyBlock>
{
/// <inheritdoc/>
public Models.AACS.MediaKeyBlock? Deserialize(string? path)
{
using var stream = PathProcessor.OpenStream(path);
return new Streams.AACS().Deserialize(stream);
}
}
}

View File

@@ -1,22 +0,0 @@
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Files
{
public partial class AACS : IFileSerializer<Models.AACS.MediaKeyBlock>
{
/// <inheritdoc/>
public bool Serialize(Models.AACS.MediaKeyBlock? obj, string? path)
{
if (string.IsNullOrEmpty(path))
return false;
using var stream = new Streams.AACS().Serialize(obj);
if (stream == null)
return false;
using var fs = System.IO.File.OpenWrite(path);
stream.CopyTo(fs);
return true;
}
}
}

View File

@@ -1,7 +0,0 @@
namespace SabreTools.Serialization.Files
{
public partial class ArchiveDotOrg : XmlFile<Models.ArchiveDotOrg.Files>
{
// All serialization logic is in the base class
}
}

View File

@@ -1,7 +0,0 @@
namespace SabreTools.Serialization.Files
{
public partial class ArchiveDotOrg : XmlFile<Models.ArchiveDotOrg.Files>
{
// All serialization logic is in the base class
}
}

View File

@@ -1,15 +0,0 @@
using SabreTools.Models.AttractMode;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Files
{
public partial class AttractMode : IFileSerializer<MetadataFile>
{
/// <inheritdoc/>
public MetadataFile? Deserialize(string? path)
{
using var stream = PathProcessor.OpenStream(path);
return new Streams.AttractMode().Deserialize(stream);
}
}
}

View File

@@ -1,24 +0,0 @@
using System.IO;
using SabreTools.Models.AttractMode;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Files
{
public partial class AttractMode : IFileSerializer<MetadataFile>
{
/// <inheritdoc/>
public bool Serialize(MetadataFile? obj, string? path)
{
if (string.IsNullOrEmpty(path))
return false;
using var stream = new Streams.AttractMode().Serialize(obj);
if (stream == null)
return false;
using var fs = File.OpenWrite(path);
stream.CopyTo(fs);
return true;
}
}
}

View File

@@ -1,14 +0,0 @@
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Files
{
public partial class BDPlus : IFileSerializer<Models.BDPlus.SVM>
{
/// <inheritdoc/>
public Models.BDPlus.SVM? Deserialize(string? path)
{
using var stream = PathProcessor.OpenStream(path);
return new Streams.BDPlus().Deserialize(stream);
}
}
}

View File

@@ -1,22 +0,0 @@
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Files
{
public partial class BDPlus : IFileSerializer<Models.BDPlus.SVM>
{
/// <inheritdoc/>
public bool Serialize(Models.BDPlus.SVM? obj, string? path)
{
if (string.IsNullOrEmpty(path))
return false;
using var stream = new Streams.BDPlus().Serialize(obj);
if (stream == null)
return false;
using var fs = System.IO.File.OpenWrite(path);
stream.CopyTo(fs);
return true;
}
}
}

View File

@@ -1,14 +0,0 @@
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Files
{
public partial class BFPK : IFileSerializer<Models.BFPK.Archive>
{
/// <inheritdoc/>
public Models.BFPK.Archive? Deserialize(string? path)
{
using var stream = PathProcessor.OpenStream(path);
return new Streams.BFPK().Deserialize(stream);
}
}
}

View File

@@ -1,22 +0,0 @@
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Files
{
public partial class BFPK : IFileSerializer<Models.BFPK.Archive>
{
/// <inheritdoc/>
public bool Serialize(Models.BFPK.Archive? obj, string? path)
{
if (string.IsNullOrEmpty(path))
return false;
using var stream = new Streams.BFPK().Serialize(obj);
if (stream == null)
return false;
using var fs = System.IO.File.OpenWrite(path);
stream.CopyTo(fs);
return true;
}
}
}

View File

@@ -1,14 +0,0 @@
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Files
{
public partial class BSP : IFileSerializer<Models.BSP.File>
{
/// <inheritdoc/>
public Models.BSP.File? Deserialize(string? path)
{
using var stream = PathProcessor.OpenStream(path);
return new Streams.BSP().Deserialize(stream);
}
}
}

View File

@@ -1,22 +0,0 @@
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Files
{
public partial class BSP : IFileSerializer<Models.BSP.File>
{
/// <inheritdoc/>
public bool Serialize(Models.BSP.File? obj, string? path)
{
if (string.IsNullOrEmpty(path))
return false;
using var stream = new Streams.BSP().Serialize(obj);
if (stream == null)
return false;
using var fs = System.IO.File.OpenWrite(path);
stream.CopyTo(fs);
return true;
}
}
}

View File

@@ -1,14 +0,0 @@
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Files
{
public partial class CFB : IFileSerializer<Models.CFB.Binary>
{
/// <inheritdoc/>
public Models.CFB.Binary? Deserialize(string? path)
{
using var stream = PathProcessor.OpenStream(path);
return new Streams.CFB().Deserialize(stream);
}
}
}

Some files were not shown because too many files have changed in this diff Show More