Compare commits

17 Commits
1.5.1 ... 1.5.4

Author  SHA1  Message  Date
Matt Nadareski  3bf78c78e3  Bump version  2024-04-23 21:53:36 -04:00
Matt Nadareski  e38ecaec4c  Fix build  2024-04-23 21:49:57 -04:00
Matt Nadareski  af40c78b56  Handle more directly-marshalled types  2024-04-23 21:45:54 -04:00
Matt Nadareski  12b206f9fa  Update SabreTools.Models  2024-04-23 21:07:42 -04:00
Matt Nadareski  2cc51ba089  Fix build, oops  2024-04-23 21:05:50 -04:00
Matt Nadareski  52f0846d5d  Add and fix some deserializers  2024-04-23 21:00:10 -04:00
Matt Nadareski  3fa8848e77  Make more parsing methods public  2024-04-23 15:38:33 -04:00
Matt Nadareski  41276e3d7e  Port more helpers  2024-04-23 15:38:21 -04:00
Matt Nadareski  4cef93c95e  Port some accessors from UnshieldSharp  2024-04-23 15:27:07 -04:00
Matt Nadareski  cdd999ee03  Fix other instances of string reading  2024-04-23 15:18:18 -04:00
Matt Nadareski  4f253323db  Fix IS-CAB deserialization  2024-04-23 15:14:18 -04:00
Matt Nadareski  351f749e20  Add ISAv3 deserializer  2024-04-23 15:12:42 -04:00
Matt Nadareski  1e83fc4b9a  Update packages  2024-04-23 15:12:39 -04:00
Matt Nadareski  c532bd1063  Bump version  2024-04-18 12:16:02 -04:00
Matt Nadareski  e4631a8176  Update SabreTools.IO  2024-04-18 12:04:54 -04:00
Matt Nadareski  ee8dad0c87  Bump version  2024-04-17 13:00:07 -04:00
Matt Nadareski  4163b2f22a  Create non-typed variants of IWrapper and WrapperBase  2024-04-17 12:28:08 -04:00
36 changed files with 1493 additions and 2073 deletions

View File

@@ -70,20 +70,13 @@ namespace SabreTools.Serialization.Deserializers
/// <returns>Filled header on success, null on error</returns>
private static Header? ParseHeader(Stream data)
{
// TODO: Use marshalling here instead of building
Header header = new Header();
var header = data.ReadType<Header>();
byte[]? magic = data.ReadBytes(4);
if (magic == null)
if (header == null)
return null;
header.Magic = Encoding.ASCII.GetString(magic);
if (header.Magic != SignatureString)
return null;
header.Version = data.ReadInt32();
header.Files = data.ReadInt32();
return header;
}
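
This hunk shows the change that recurs throughout this compare: hand-built field-by-field reads are replaced with a single data.ReadType<Header>() call, and only the checks that marshalling cannot express (signature strings, version ranges) remain as explicit validation. The real ReadType<T> lives in SabreTools.IO.Extensions; the snippet below is only a minimal sketch of what a marshal-based read of a sequentially laid out type could look like, not the library's actual implementation.

using System.IO;
using System.Runtime.InteropServices;

public static class ReadTypeSketch
{
    // Sketch only: the real ReadType<T> may handle enums, strings, arrays, and
    // endianness differently, and may read from a buffered copy of the stream.
    public static T? ReadTypeSketched<T>(this Stream data) where T : class
    {
        int size = Marshal.SizeOf(typeof(T));
        byte[] buffer = new byte[size];
        if (data.Read(buffer, 0, size) < size)
            return null;

        var handle = GCHandle.Alloc(buffer, GCHandleType.Pinned);
        try
        {
            // Map the raw bytes onto the model type's sequential layout
            return (T?)Marshal.PtrToStructure(handle.AddrOfPinnedObject(), typeof(T));
        }
        finally
        {
            handle.Free();
        }
    }
}
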
@@ -92,10 +85,10 @@ namespace SabreTools.Serialization.Deserializers
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled file entry on success, null on error</returns>
private static FileEntry ParseFileEntry(Stream data)
private static FileEntry? ParseFileEntry(Stream data)
{
// TODO: Use marshalling here instead of building
FileEntry fileEntry = new FileEntry();
var fileEntry = new FileEntry();
fileEntry.NameSize = data.ReadInt32();
if (fileEntry.NameSize > 0)

View File

@@ -47,6 +47,9 @@ namespace SabreTools.Serialization.Deserializers
for (int i = 0; i < HL_BSP_LUMP_COUNT; i++)
{
var lump = ParseLump(data);
if (lump == null)
return null;
file.Lumps[i] = lump;
}
@@ -102,13 +105,13 @@ namespace SabreTools.Serialization.Deserializers
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Level header on success, null on error</returns>
/// <remarks>Only recognized versions are 29 and 30</remarks>
private static Header? ParseHeader(Stream data)
{
// TODO: Use marshalling here instead of building
Header header = new Header();
var header = data.ReadType<Header>();
// Only recognized versions are 29 and 30
header.Version = data.ReadUInt32();
if (header == null)
return null;
if (header.Version != 29 && header.Version != 30)
return null;
@@ -120,15 +123,9 @@ namespace SabreTools.Serialization.Deserializers
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled lump on success, null on error</returns>
private static Lump ParseLump(Stream data)
private static Lump? ParseLump(Stream data)
{
// TODO: Use marshalling here instead of building
Lump lump = new Lump();
lump.Offset = data.ReadUInt32();
lump.Length = data.ReadUInt32();
return lump;
return data.ReadType<Lump>();
}
/// <summary>
@@ -139,7 +136,7 @@ namespace SabreTools.Serialization.Deserializers
private static TextureHeader ParseTextureHeader(Stream data)
{
// TODO: Use marshalling here instead of building
TextureHeader textureHeader = new TextureHeader();
var textureHeader = new TextureHeader();
textureHeader.TextureCount = data.ReadUInt32();
@@ -166,7 +163,7 @@ namespace SabreTools.Serialization.Deserializers
private static Texture ParseTexture(Stream data, uint mipmap = 0)
{
// TODO: Use marshalling here instead of building
Texture texture = new Texture();
var texture = new Texture();
byte[]? name = data.ReadBytes(16)?.TakeWhile(c => c != '\0')?.ToArray();
if (name != null)

View File

@@ -42,7 +42,7 @@ namespace SabreTools.Serialization.Deserializers
#region DIFAT Sector Numbers
// Create a DIFAT sector table
var difatSectors = new List<SectorNumber?>();
var difatSectors = new List<SectorNumber>();
// Add the sectors from the header
if (fileHeader.DIFAT != null)
@@ -84,7 +84,7 @@ namespace SabreTools.Serialization.Deserializers
#region FAT Sector Numbers
// Create a FAT sector table
var fatSectors = new List<SectorNumber?>();
var fatSectors = new List<SectorNumber>();
// Loop through and add the FAT sectors
currentSector = binary.DIFATSectorNumbers[0];
@@ -122,7 +122,7 @@ namespace SabreTools.Serialization.Deserializers
#region Mini FAT Sector Numbers
// Create a mini FAT sector table
var miniFatSectors = new List<SectorNumber?>();
var miniFatSectors = new List<SectorNumber>();
// Loop through and add the mini FAT sectors
currentSector = (SectorNumber)fileHeader.FirstMiniFATSectorLocation;
@@ -233,49 +233,23 @@ namespace SabreTools.Serialization.Deserializers
/// <returns>Filled file header on success, null on error</returns>
private static FileHeader? ParseFileHeader(Stream data)
{
// TODO: Use marshalling here instead of building
FileHeader header = new FileHeader();
var header = data.ReadType<FileHeader>();
header.Signature = data.ReadUInt64();
if (header == null)
return null;
if (header.Signature != SignatureUInt64)
return null;
header.CLSID = data.ReadGuid();
header.MinorVersion = data.ReadUInt16();
header.MajorVersion = data.ReadUInt16();
header.ByteOrder = data.ReadUInt16();
if (header.ByteOrder != 0xFFFE)
return null;
header.SectorShift = data.ReadUInt16();
if (header.MajorVersion == 3 && header.SectorShift != 0x0009)
return null;
else if (header.MajorVersion == 4 && header.SectorShift != 0x000C)
return null;
header.MiniSectorShift = data.ReadUInt16();
header.Reserved = data.ReadBytes(6);
header.NumberOfDirectorySectors = data.ReadUInt32();
if (header.MajorVersion == 3 && header.NumberOfDirectorySectors != 0)
return null;
header.NumberOfFATSectors = data.ReadUInt32();
header.FirstDirectorySectorLocation = data.ReadUInt32();
header.TransactionSignatureNumber = data.ReadUInt32();
header.MiniStreamCutoffSize = data.ReadUInt32();
if (header.MiniStreamCutoffSize != 0x00001000)
return null;
header.FirstMiniFATSectorLocation = data.ReadUInt32();
header.NumberOfMiniFATSectors = data.ReadUInt32();
header.FirstDIFATSectorLocation = data.ReadUInt32();
header.NumberOfDIFATSectors = data.ReadUInt32();
header.DIFAT = new SectorNumber?[109];
for (int i = 0; i < header.DIFAT.Length; i++)
{
header.DIFAT[i] = (SectorNumber)data.ReadUInt32();
}
// Skip rest of sector for version 4
if (header.MajorVersion == 4)
_ = data.ReadBytes(3584);
@@ -289,11 +263,11 @@ namespace SabreTools.Serialization.Deserializers
/// <param name="data">Stream to parse</param>
/// <param name="sectorShift">Sector shift from the header</param>
/// <returns>Filled sector full of sector numbers on success, null on error</returns>
private static SectorNumber?[] ParseSectorNumbers(Stream data, ushort sectorShift)
private static SectorNumber[] ParseSectorNumbers(Stream data, ushort sectorShift)
{
// TODO: Use marshalling here instead of building
int sectorCount = (int)(Math.Pow(2, sectorShift) / sizeof(uint));
var sectorNumbers = new SectorNumber?[sectorCount];
var sectorNumbers = new SectorNumber[sectorCount];
for (int i = 0; i < sectorNumbers.Length; i++)
{
@@ -315,7 +289,7 @@ namespace SabreTools.Serialization.Deserializers
// TODO: Use marshalling here instead of building
const int directoryEntrySize = 64 + 2 + 1 + 1 + 4 + 4 + 4 + 16 + 4 + 8 + 8 + 4 + 8;
int sectorCount = (int)(Math.Pow(2, sectorShift) / directoryEntrySize);
DirectoryEntry[] directoryEntries = new DirectoryEntry[sectorCount];
var directoryEntries = new DirectoryEntry[sectorCount];
for (int i = 0; i < directoryEntries.Length; i++)
{
@@ -335,26 +309,17 @@ namespace SabreTools.Serialization.Deserializers
/// <param name="data">Stream to parse</param>
/// <param name="majorVersion">Major version from the header</param>
/// <returns>Filled directory entry on success, null on error</returns>
private static DirectoryEntry ParseDirectoryEntry(Stream data, ushort majorVersion)
private static DirectoryEntry? ParseDirectoryEntry(Stream data, ushort majorVersion)
{
// TODO: Use marshalling here instead of building
DirectoryEntry directoryEntry = new DirectoryEntry();
var directoryEntry = data.ReadType<DirectoryEntry>();
byte[]? name = data.ReadBytes(64);
if (name != null)
directoryEntry.Name = Encoding.Unicode.GetString(name).TrimEnd('\0');
directoryEntry.NameLength = data.ReadUInt16();
directoryEntry.ObjectType = (ObjectType)data.ReadByteValue();
directoryEntry.ColorFlag = (ColorFlag)data.ReadByteValue();
directoryEntry.LeftSiblingID = (StreamID)data.ReadUInt32();
directoryEntry.RightSiblingID = (StreamID)data.ReadUInt32();
directoryEntry.ChildID = (StreamID)data.ReadUInt32();
directoryEntry.CLSID = data.ReadGuid();
directoryEntry.StateBits = data.ReadUInt32();
directoryEntry.CreationTime = data.ReadUInt64();
directoryEntry.ModifiedTime = data.ReadUInt64();
directoryEntry.StartingSectorLocation = data.ReadUInt32();
directoryEntry.StreamSize = data.ReadUInt64();
if (directoryEntry == null)
return null;
// TEMPORARY FIX FOR ASCII -> UNICODE
directoryEntry.Name = Encoding.Unicode.GetString(Encoding.ASCII.GetBytes(directoryEntry.Name!));
// Handle version 3 entries
if (majorVersion == 3)
directoryEntry.StreamSize &= 0x0000FFFF;
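
The surviving checks above pin down the sector arithmetic: version 3 requires SectorShift 0x0009 and version 4 requires 0x000C, so a sector is 2^SectorShift bytes and a FAT sector holds 2^SectorShift / 4 four-byte sector numbers. An equivalent integer form of the ParseSectorNumbers computation (a sketch, not the project's code):

// Equivalent to (int)(Math.Pow(2, sectorShift) / sizeof(uint)) for the valid shifts
int sectorSize = 1 << sectorShift;                 // 0x0009 -> 512 bytes (v3), 0x000C -> 4096 bytes (v4)
int numbersPerSector = sectorSize / sizeof(uint);  // 128 sector numbers (v3), 1024 (v4)
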

View File

@@ -145,22 +145,9 @@ namespace SabreTools.Serialization.Deserializers
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled CIA header on success, null on error</returns>
private static CIAHeader ParseCIAHeader(Stream data)
public static CIAHeader? ParseCIAHeader(Stream data)
{
// TODO: Use marshalling here instead of building
CIAHeader ciaHeader = new CIAHeader();
ciaHeader.HeaderSize = data.ReadUInt32();
ciaHeader.Type = data.ReadUInt16();
ciaHeader.Version = data.ReadUInt16();
ciaHeader.CertificateChainSize = data.ReadUInt32();
ciaHeader.TicketSize = data.ReadUInt32();
ciaHeader.TMDFileSize = data.ReadUInt32();
ciaHeader.MetaSize = data.ReadUInt32();
ciaHeader.ContentSize = data.ReadUInt64();
ciaHeader.ContentIndex = data.ReadBytes(0x2000);
return ciaHeader;
return data.ReadType<CIAHeader>();
}
/// <summary>
@@ -168,7 +155,7 @@ namespace SabreTools.Serialization.Deserializers
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled certificate on success, null on error</returns>
private static Certificate? ParseCertificate(Stream data)
public static Certificate? ParseCertificate(Stream data)
{
// TODO: Use marshalling here instead of building
Certificate certificate = new Certificate();
@@ -244,7 +231,7 @@ namespace SabreTools.Serialization.Deserializers
/// <param name="data">Stream to parse</param>
/// <param name="fromCdn">Indicates if the ticket is from CDN</param>
/// <returns>Filled ticket on success, null on error</returns>
private static Ticket? ParseTicket(Stream data, bool fromCdn = false)
public static Ticket? ParseTicket(Stream data, bool fromCdn = false)
{
// TODO: Use marshalling here instead of building
Ticket ticket = new Ticket();
@@ -349,10 +336,10 @@ namespace SabreTools.Serialization.Deserializers
/// <param name="data">Stream to parse</param>
/// <param name="fromCdn">Indicates if the ticket is from CDN</param>
/// <returns>Filled title metadata on success, null on error</returns>
private static TitleMetadata? ParseTitleMetadata(Stream data, bool fromCdn = false)
public static TitleMetadata? ParseTitleMetadata(Stream data, bool fromCdn = false)
{
// TODO: Use marshalling here instead of building
TitleMetadata titleMetadata = new TitleMetadata();
var titleMetadata = new TitleMetadata();
titleMetadata.SignatureType = (SignatureType)data.ReadUInt32();
switch (titleMetadata.SignatureType)
@@ -420,11 +407,19 @@ namespace SabreTools.Serialization.Deserializers
titleMetadata.ContentInfoRecords = new ContentInfoRecord[64];
for (int i = 0; i < 64; i++)
{
var contentInfoRecord = ParseContentInfoRecord(data);
if (contentInfoRecord == null)
return null;
titleMetadata.ContentInfoRecords[i] = ParseContentInfoRecord(data);
}
titleMetadata.ContentChunkRecords = new ContentChunkRecord[titleMetadata.ContentCount];
for (int i = 0; i < titleMetadata.ContentCount; i++)
{
var contentChunkRecord = ParseContentChunkRecord(data);
if (contentChunkRecord == null)
return null;
titleMetadata.ContentChunkRecords[i] = ParseContentChunkRecord(data);
}
@@ -450,16 +445,9 @@ namespace SabreTools.Serialization.Deserializers
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled content info record on success, null on error</returns>
private static ContentInfoRecord ParseContentInfoRecord(Stream data)
public static ContentInfoRecord? ParseContentInfoRecord(Stream data)
{
// TODO: Use marshalling here instead of building
ContentInfoRecord contentInfoRecord = new ContentInfoRecord();
contentInfoRecord.ContentIndexOffset = data.ReadUInt16();
contentInfoRecord.ContentCommandCount = data.ReadUInt16();
contentInfoRecord.UnhashedContentRecordsSHA256Hash = data.ReadBytes(0x20);
return contentInfoRecord;
return data.ReadType<ContentInfoRecord>();
}
/// <summary>
@@ -467,18 +455,9 @@ namespace SabreTools.Serialization.Deserializers
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled content chunk record on success, null on error</returns>
private static ContentChunkRecord ParseContentChunkRecord(Stream data)
public static ContentChunkRecord? ParseContentChunkRecord(Stream data)
{
// TODO: Use marshalling here instead of building
ContentChunkRecord contentChunkRecord = new ContentChunkRecord();
contentChunkRecord.ContentId = data.ReadUInt32();
contentChunkRecord.ContentIndex = (ContentIndex)data.ReadUInt16();
contentChunkRecord.ContentType = (TMDContentType)data.ReadUInt16();
contentChunkRecord.ContentSize = data.ReadUInt64();
contentChunkRecord.SHA256Hash = data.ReadBytes(0x20);
return contentChunkRecord;
return data.ReadType<ContentChunkRecord>();
}
/// <summary>
@@ -486,18 +465,9 @@ namespace SabreTools.Serialization.Deserializers
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled meta data on success, null on error</returns>
private static MetaData ParseMetaData(Stream data)
public static MetaData? ParseMetaData(Stream data)
{
// TODO: Use marshalling here instead of building
MetaData metaData = new MetaData();
metaData.TitleIDDependencyList = data.ReadBytes(0x180);
metaData.Reserved1 = data.ReadBytes(0x180);
metaData.CoreVersion = data.ReadUInt32();
metaData.Reserved2 = data.ReadBytes(0xFC);
metaData.IconData = data.ReadBytes(0x36C0);
return metaData;
return data.ReadType<MetaData>();
}
}
}
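
With ParseCIAHeader and the related methods now public, callers can parse individual CIA structures without running the full deserializer. A hypothetical usage (the deserializer class name N3DS and the file path are assumptions; only the method signature and the ContentSize field come from this diff):

// Illustrative only: "N3DS" and the path are placeholders not shown in this diff.
using var stream = System.IO.File.OpenRead("example.cia");
var ciaHeader = SabreTools.Serialization.Deserializers.N3DS.ParseCIAHeader(stream);
if (ciaHeader != null)
    System.Console.WriteLine($"Content size: {ciaHeader.ContentSize} bytes");
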

View File

@@ -58,6 +58,9 @@ namespace SabreTools.Serialization.Deserializers
for (int i = 0; i < blockEntryHeader.BlockCount; i++)
{
var blockEntry = ParseBlockEntry(data);
if (blockEntry == null)
return null;
file.BlockEntries[i] = blockEntry;
}
@@ -84,6 +87,9 @@ namespace SabreTools.Serialization.Deserializers
for (int i = 0; i < fragmentationMapHeader.BlockCount; i++)
{
var fragmentationMap = ParseFragmentationMap(data);
if (fragmentationMap == null)
return null;
file.FragmentationMaps[i] = fragmentationMap;
}
@@ -115,6 +121,9 @@ namespace SabreTools.Serialization.Deserializers
for (int i = 0; i < file.BlockEntryMapHeader.BlockCount; i++)
{
var blockEntryMap = ParseBlockEntryMap(data);
if (blockEntryMap == null)
return null;
file.BlockEntryMaps[i] = blockEntryMap;
}
}
@@ -145,6 +154,9 @@ namespace SabreTools.Serialization.Deserializers
for (int i = 0; i < directoryHeader.ItemCount; i++)
{
var directoryEntry = ParseDirectoryEntry(data);
if (directoryEntry == null)
return null;
file.DirectoryEntries[i] = directoryEntry;
}
@@ -167,7 +179,7 @@ namespace SabreTools.Serialization.Deserializers
while (data.Position < directoryNamesEnd)
{
long nameOffset = data.Position - directoryNamesStart;
string? directoryName = data.ReadString(Encoding.ASCII);
string? directoryName = data.ReadNullTerminatedAnsiString();
if (data.Position > directoryNamesEnd)
{
data.Seek(-directoryName?.Length ?? 0, SeekOrigin.Current);
@@ -180,13 +192,6 @@ namespace SabreTools.Serialization.Deserializers
file.DirectoryNames[nameOffset] = directoryName;
}
// Loop and assign to entries
foreach (var directoryEntry in file.DirectoryEntries)
{
if (directoryEntry != null)
directoryEntry.Name = file.DirectoryNames[directoryEntry.NameOffset];
}
}
#endregion
@@ -200,6 +205,9 @@ namespace SabreTools.Serialization.Deserializers
for (int i = 0; i < directoryHeader.Info1Count; i++)
{
var directoryInfo1Entry = ParseDirectoryInfo1Entry(data);
if (directoryInfo1Entry == null)
return null;
file.DirectoryInfo1Entries[i] = directoryInfo1Entry;
}
@@ -214,6 +222,9 @@ namespace SabreTools.Serialization.Deserializers
for (int i = 0; i < directoryHeader.ItemCount; i++)
{
var directoryInfo2Entry = ParseDirectoryInfo2Entry(data);
if (directoryInfo2Entry == null)
return null;
file.DirectoryInfo2Entries[i] = directoryInfo2Entry;
}
@@ -228,6 +239,9 @@ namespace SabreTools.Serialization.Deserializers
for (int i = 0; i < directoryHeader.CopyCount; i++)
{
var directoryCopyEntry = ParseDirectoryCopyEntry(data);
if (directoryCopyEntry == null)
return null;
file.DirectoryCopyEntries[i] = directoryCopyEntry;
}
@@ -242,6 +256,9 @@ namespace SabreTools.Serialization.Deserializers
for (int i = 0; i < directoryHeader.LocalCount; i++)
{
var directoryLocalEntry = ParseDirectoryLocalEntry(data);
if (directoryLocalEntry == null)
return null;
file.DirectoryLocalEntries[i] = directoryLocalEntry;
}
@@ -274,6 +291,9 @@ namespace SabreTools.Serialization.Deserializers
for (int i = 0; i < directoryHeader.ItemCount; i++)
{
var directoryMapEntry = ParseDirectoryMapEntry(data);
if (directoryMapEntry == null)
return null;
file.DirectoryMapEntries[i] = directoryMapEntry;
}
@@ -315,6 +335,9 @@ namespace SabreTools.Serialization.Deserializers
for (int i = 0; i < checksumMapHeader.ItemCount; i++)
{
var checksumMapEntry = ParseChecksumMapEntry(data);
if (checksumMapEntry == null)
return null;
file.ChecksumMapEntries[i] = checksumMapEntry;
}
@@ -329,6 +352,9 @@ namespace SabreTools.Serialization.Deserializers
for (int i = 0; i < checksumMapHeader.ChecksumCount; i++)
{
var checksumEntry = ParseChecksumEntry(data);
if (checksumEntry == null)
return null;
file.ChecksumEntries[i] = checksumEntry;
}
@@ -359,30 +385,17 @@ namespace SabreTools.Serialization.Deserializers
/// <returns>Filled Half-Life Game Cache on success, null on error</returns>
private static Header? ParseHeader(Stream data)
{
// TODO: Use marshalling here instead of building
Header header = new Header();
var header = data.ReadType<Header>();
header.Dummy0 = data.ReadUInt32();
if (header == null)
return null;
if (header.Dummy0 != 0x00000001)
return null;
header.MajorVersion = data.ReadUInt32();
if (header.MajorVersion != 0x00000001)
return null;
header.MinorVersion = data.ReadUInt32();
if (header.MinorVersion != 3 && header.MinorVersion != 5 && header.MinorVersion != 6)
return null;
header.CacheID = data.ReadUInt32();
header.LastVersionPlayed = data.ReadUInt32();
header.Dummy1 = data.ReadUInt32();
header.Dummy2 = data.ReadUInt32();
header.FileSize = data.ReadUInt32();
header.BlockSize = data.ReadUInt32();
header.BlockCount = data.ReadUInt32();
header.Dummy3 = data.ReadUInt32();
return header;
}
@@ -391,21 +404,9 @@ namespace SabreTools.Serialization.Deserializers
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache block entry header on success, null on error</returns>
private static BlockEntryHeader ParseBlockEntryHeader(Stream data)
private static BlockEntryHeader? ParseBlockEntryHeader(Stream data)
{
// TODO: Use marshalling here instead of building
BlockEntryHeader blockEntryHeader = new BlockEntryHeader();
blockEntryHeader.BlockCount = data.ReadUInt32();
blockEntryHeader.BlocksUsed = data.ReadUInt32();
blockEntryHeader.Dummy0 = data.ReadUInt32();
blockEntryHeader.Dummy1 = data.ReadUInt32();
blockEntryHeader.Dummy2 = data.ReadUInt32();
blockEntryHeader.Dummy3 = data.ReadUInt32();
blockEntryHeader.Dummy4 = data.ReadUInt32();
blockEntryHeader.Checksum = data.ReadUInt32();
return blockEntryHeader;
return data.ReadType<BlockEntryHeader>();
}
/// <summary>
@@ -413,20 +414,9 @@ namespace SabreTools.Serialization.Deserializers
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache block entry on success, null on error</returns>
private static BlockEntry ParseBlockEntry(Stream data)
private static BlockEntry? ParseBlockEntry(Stream data)
{
// TODO: Use marshalling here instead of building
BlockEntry blockEntry = new BlockEntry();
blockEntry.EntryFlags = data.ReadUInt32();
blockEntry.FileDataOffset = data.ReadUInt32();
blockEntry.FileDataSize = data.ReadUInt32();
blockEntry.FirstDataBlockIndex = data.ReadUInt32();
blockEntry.NextBlockEntryIndex = data.ReadUInt32();
blockEntry.PreviousBlockEntryIndex = data.ReadUInt32();
blockEntry.DirectoryIndex = data.ReadUInt32();
return blockEntry;
return data.ReadType<BlockEntry>();
}
/// <summary>
@@ -434,17 +424,9 @@ namespace SabreTools.Serialization.Deserializers
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache fragmentation map header on success, null on error</returns>
private static FragmentationMapHeader ParseFragmentationMapHeader(Stream data)
private static FragmentationMapHeader? ParseFragmentationMapHeader(Stream data)
{
// TODO: Use marshalling here instead of building
FragmentationMapHeader fragmentationMapHeader = new FragmentationMapHeader();
fragmentationMapHeader.BlockCount = data.ReadUInt32();
fragmentationMapHeader.FirstUnusedEntry = data.ReadUInt32();
fragmentationMapHeader.Terminator = data.ReadUInt32();
fragmentationMapHeader.Checksum = data.ReadUInt32();
return fragmentationMapHeader;
return data.ReadType<FragmentationMapHeader>();
}
/// <summary>
@@ -452,14 +434,9 @@ namespace SabreTools.Serialization.Deserializers
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache fragmentation map on success, null on error</returns>
private static FragmentationMap ParseFragmentationMap(Stream data)
private static FragmentationMap? ParseFragmentationMap(Stream data)
{
// TODO: Use marshalling here instead of building
FragmentationMap fragmentationMap = new FragmentationMap();
fragmentationMap.NextDataBlockIndex = data.ReadUInt32();
return fragmentationMap;
return data.ReadType<FragmentationMap>();
}
/// <summary>
@@ -467,18 +444,9 @@ namespace SabreTools.Serialization.Deserializers
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache block entry map header on success, null on error</returns>
private static BlockEntryMapHeader ParseBlockEntryMapHeader(Stream data)
private static BlockEntryMapHeader? ParseBlockEntryMapHeader(Stream data)
{
// TODO: Use marshalling here instead of building
BlockEntryMapHeader blockEntryMapHeader = new BlockEntryMapHeader();
blockEntryMapHeader.BlockCount = data.ReadUInt32();
blockEntryMapHeader.FirstBlockEntryIndex = data.ReadUInt32();
blockEntryMapHeader.LastBlockEntryIndex = data.ReadUInt32();
blockEntryMapHeader.Dummy0 = data.ReadUInt32();
blockEntryMapHeader.Checksum = data.ReadUInt32();
return blockEntryMapHeader;
return data.ReadType<BlockEntryMapHeader>();
}
/// <summary>
@@ -486,15 +454,9 @@ namespace SabreTools.Serialization.Deserializers
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache block entry map on success, null on error</returns>
private static BlockEntryMap ParseBlockEntryMap(Stream data)
private static BlockEntryMap? ParseBlockEntryMap(Stream data)
{
// TODO: Use marshalling here instead of building
BlockEntryMap blockEntryMap = new BlockEntryMap();
blockEntryMap.PreviousBlockEntryIndex = data.ReadUInt32();
blockEntryMap.NextBlockEntryIndex = data.ReadUInt32();
return blockEntryMap;
return data.ReadType<BlockEntryMap>();
}
/// <summary>
@@ -502,27 +464,9 @@ namespace SabreTools.Serialization.Deserializers
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache directory header on success, null on error</returns>
private static DirectoryHeader ParseDirectoryHeader(Stream data)
private static DirectoryHeader? ParseDirectoryHeader(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryHeader directoryHeader = new DirectoryHeader();
directoryHeader.Dummy0 = data.ReadUInt32();
directoryHeader.CacheID = data.ReadUInt32();
directoryHeader.LastVersionPlayed = data.ReadUInt32();
directoryHeader.ItemCount = data.ReadUInt32();
directoryHeader.FileCount = data.ReadUInt32();
directoryHeader.Dummy1 = data.ReadUInt32();
directoryHeader.DirectorySize = data.ReadUInt32();
directoryHeader.NameSize = data.ReadUInt32();
directoryHeader.Info1Count = data.ReadUInt32();
directoryHeader.CopyCount = data.ReadUInt32();
directoryHeader.LocalCount = data.ReadUInt32();
directoryHeader.Dummy2 = data.ReadUInt32();
directoryHeader.Dummy3 = data.ReadUInt32();
directoryHeader.Checksum = data.ReadUInt32();
return directoryHeader;
return data.ReadType<DirectoryHeader>();
}
/// <summary>
@@ -530,20 +474,9 @@ namespace SabreTools.Serialization.Deserializers
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache directory entry on success, null on error</returns>
private static DirectoryEntry ParseDirectoryEntry(Stream data)
private static DirectoryEntry? ParseDirectoryEntry(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryEntry directoryEntry = new DirectoryEntry();
directoryEntry.NameOffset = data.ReadUInt32();
directoryEntry.ItemSize = data.ReadUInt32();
directoryEntry.ChecksumIndex = data.ReadUInt32();
directoryEntry.DirectoryFlags = (HL_GCF_FLAG)data.ReadUInt32();
directoryEntry.ParentIndex = data.ReadUInt32();
directoryEntry.NextIndex = data.ReadUInt32();
directoryEntry.FirstIndex = data.ReadUInt32();
return directoryEntry;
return data.ReadType<DirectoryEntry>();
}
/// <summary>
@@ -551,14 +484,9 @@ namespace SabreTools.Serialization.Deserializers
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache directory info 1 entry on success, null on error</returns>
private static DirectoryInfo1Entry ParseDirectoryInfo1Entry(Stream data)
private static DirectoryInfo1Entry? ParseDirectoryInfo1Entry(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryInfo1Entry directoryInfo1Entry = new DirectoryInfo1Entry();
directoryInfo1Entry.Dummy0 = data.ReadUInt32();
return directoryInfo1Entry;
return data.ReadType<DirectoryInfo1Entry>();
}
/// <summary>
@@ -566,14 +494,9 @@ namespace SabreTools.Serialization.Deserializers
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache directory info 2 entry on success, null on error</returns>
private static DirectoryInfo2Entry ParseDirectoryInfo2Entry(Stream data)
private static DirectoryInfo2Entry? ParseDirectoryInfo2Entry(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryInfo2Entry directoryInfo2Entry = new DirectoryInfo2Entry();
directoryInfo2Entry.Dummy0 = data.ReadUInt32();
return directoryInfo2Entry;
return data.ReadType<DirectoryInfo2Entry>();
}
/// <summary>
@@ -581,14 +504,9 @@ namespace SabreTools.Serialization.Deserializers
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache directory copy entry on success, null on error</returns>
private static DirectoryCopyEntry ParseDirectoryCopyEntry(Stream data)
private static DirectoryCopyEntry? ParseDirectoryCopyEntry(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryCopyEntry directoryCopyEntry = new DirectoryCopyEntry();
directoryCopyEntry.DirectoryIndex = data.ReadUInt32();
return directoryCopyEntry;
return data.ReadType<DirectoryCopyEntry>();
}
/// <summary>
@@ -596,14 +514,9 @@ namespace SabreTools.Serialization.Deserializers
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache directory local entry on success, null on error</returns>
private static DirectoryLocalEntry ParseDirectoryLocalEntry(Stream data)
private static DirectoryLocalEntry? ParseDirectoryLocalEntry(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryLocalEntry directoryLocalEntry = new DirectoryLocalEntry();
directoryLocalEntry.DirectoryIndex = data.ReadUInt32();
return directoryLocalEntry;
return data.ReadType<DirectoryLocalEntry>();
}
/// <summary>
@@ -613,14 +526,12 @@ namespace SabreTools.Serialization.Deserializers
/// <returns>Filled Half-Life Game Cache directory map header on success, null on error</returns>
private static DirectoryMapHeader? ParseDirectoryMapHeader(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryMapHeader directoryMapHeader = new DirectoryMapHeader();
var directoryMapHeader = data.ReadType<DirectoryMapHeader>();
directoryMapHeader.Dummy0 = data.ReadUInt32();
if (directoryMapHeader == null)
return null;
if (directoryMapHeader.Dummy0 != 0x00000001)
return null;
directoryMapHeader.Dummy1 = data.ReadUInt32();
if (directoryMapHeader.Dummy1 != 0x00000000)
return null;
@@ -632,14 +543,9 @@ namespace SabreTools.Serialization.Deserializers
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache directory map entry on success, null on error</returns>
private static DirectoryMapEntry ParseDirectoryMapEntry(Stream data)
private static DirectoryMapEntry? ParseDirectoryMapEntry(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryMapEntry directoryMapEntry = new DirectoryMapEntry();
directoryMapEntry.FirstBlockIndex = data.ReadUInt32();
return directoryMapEntry;
return data.ReadType<DirectoryMapEntry>();
}
/// <summary>
@@ -649,15 +555,13 @@ namespace SabreTools.Serialization.Deserializers
/// <returns>Filled Half-Life Game Cache checksum header on success, null on error</returns>
private static ChecksumHeader? ParseChecksumHeader(Stream data)
{
// TODO: Use marshalling here instead of building
ChecksumHeader checksumHeader = new ChecksumHeader();
var checksumHeader = data.ReadType<ChecksumHeader>();
checksumHeader.Dummy0 = data.ReadUInt32();
if (checksumHeader == null)
return null;
if (checksumHeader.Dummy0 != 0x00000001)
return null;
checksumHeader.ChecksumSize = data.ReadUInt32();
return checksumHeader;
}
@@ -668,20 +572,15 @@ namespace SabreTools.Serialization.Deserializers
/// <returns>Filled Half-Life Game Cache checksum map header on success, null on error</returns>
private static ChecksumMapHeader? ParseChecksumMapHeader(Stream data)
{
// TODO: Use marshalling here instead of building
ChecksumMapHeader checksumMapHeader = new ChecksumMapHeader();
var checksumMapHeader = data.ReadType<ChecksumMapHeader>();
checksumMapHeader.Dummy0 = data.ReadUInt32();
if (checksumMapHeader == null)
return null;
if (checksumMapHeader.Dummy0 != 0x14893721)
return null;
checksumMapHeader.Dummy1 = data.ReadUInt32();
if (checksumMapHeader.Dummy1 != 0x00000001)
return null;
checksumMapHeader.ItemCount = data.ReadUInt32();
checksumMapHeader.ChecksumCount = data.ReadUInt32();
return checksumMapHeader;
}
@@ -690,15 +589,9 @@ namespace SabreTools.Serialization.Deserializers
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache checksum map entry on success, null on error</returns>
private static ChecksumMapEntry ParseChecksumMapEntry(Stream data)
private static ChecksumMapEntry? ParseChecksumMapEntry(Stream data)
{
// TODO: Use marshalling here instead of building
ChecksumMapEntry checksumMapEntry = new ChecksumMapEntry();
checksumMapEntry.ChecksumCount = data.ReadUInt32();
checksumMapEntry.FirstChecksumIndex = data.ReadUInt32();
return checksumMapEntry;
return data.ReadType<ChecksumMapEntry>();
}
/// <summary>
@@ -706,14 +599,9 @@ namespace SabreTools.Serialization.Deserializers
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache checksum entry on success, null on error</returns>
private static ChecksumEntry ParseChecksumEntry(Stream data)
private static ChecksumEntry? ParseChecksumEntry(Stream data)
{
// TODO: Use marshalling here instead of building
ChecksumEntry checksumEntry = new ChecksumEntry();
checksumEntry.Checksum = data.ReadUInt32();
return checksumEntry;
return data.ReadType<ChecksumEntry>();
}
/// <summary>
@@ -722,10 +610,10 @@ namespace SabreTools.Serialization.Deserializers
/// <param name="data">Stream to parse</param>
/// <param name="minorVersion">Minor version field from the header</param>
/// <returns>Filled Half-Life Game Cache data block header on success, null on error</returns>
private static DataBlockHeader ParseDataBlockHeader(Stream data, uint minorVersion)
private static DataBlockHeader? ParseDataBlockHeader(Stream data, uint minorVersion)
{
// TODO: Use marshalling here instead of building
DataBlockHeader dataBlockHeader = new DataBlockHeader();
var dataBlockHeader = new DataBlockHeader();
// In version 3 the DataBlockHeader is missing the LastVersionPlayed field.
if (minorVersion >= 5)
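
ParseDataBlockHeader keeps its hand-built form because the layout itself is version-dependent, which a single fixed-layout ReadType<DataBlockHeader>() cannot express. A heavily hedged sketch of the idea (the field order is an assumption; only LastVersionPlayed and the minorVersion >= 5 check come from this diff):

// Sketch only: version 3 caches omit LastVersionPlayed, so it is read conditionally.
var dataBlockHeader = new DataBlockHeader();
if (minorVersion >= 5)
    dataBlockHeader.LastVersionPlayed = data.ReadUInt32();
// ... the remaining DataBlockHeader fields are read unconditionally ...
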

View File

@@ -20,7 +20,7 @@ namespace SabreTools.Serialization.Deserializers
// Cache the current offset
int initialOffset = (int)data.Position;
// Create a new media key block to fill
// Create a new IRD to fill
var ird = new Models.IRD.File();
ird.Magic = data.ReadBytes(4);

View File

@@ -0,0 +1,137 @@
using System.Collections.Generic;
using System.IO;
using SabreTools.IO.Extensions;
using SabreTools.Models.InstallShieldArchiveV3;
namespace SabreTools.Serialization.Deserializers
{
public class InstallShieldArchiveV3 : BaseBinaryDeserializer<Archive>
{
public override Archive? Deserialize(Stream? data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
return null;
// If the offset is out of bounds
if (data.Position < 0 || data.Position >= data.Length)
return null;
// Cache the current offset
int initialOffset = (int)data.Position;
// Create a new archive to fill
var archive = new Archive();
#region Header
// Try to parse the header
var header = ParseHeader(data);
if (header == null)
return null;
// Set the archive header
archive.Header = header;
#endregion
#region Directories
// Get the directories offset
uint directoriesOffset = header.TocAddress;
if (directoriesOffset < 0 || directoriesOffset >= data.Length)
return null;
// Seek to the directories
data.Seek(directoriesOffset, SeekOrigin.Begin);
// Try to parse the directories
var directories = new List<Models.InstallShieldArchiveV3.Directory>();
for (int i = 0; i < header.DirCount; i++)
{
var directory = ParseDirectory(data, out uint chunkSize);
if (directory?.Name == null)
return null;
directories.Add(directory);
data.Seek(chunkSize - directory.Name.Length - 6, SeekOrigin.Current);
}
// Set the directories
archive.Directories = [.. directories];
#endregion
#region Files
// Try to parse the files
var files = new List<Models.InstallShieldArchiveV3.File>();
for (int i = 0; i < archive.Directories.Length; i++)
{
var directory = archive.Directories[i];
for (int j = 0; j < directory.FileCount; j++)
{
var file = ParseFile(data);
if (file?.Name == null)
return null;
files.Add(file);
data.Seek(file.ChunkSize - file.Name.Length - 30, SeekOrigin.Current);
}
}
// Set the files
archive.Files = [.. files];
#endregion
return archive;
}
/// <summary>
/// Parse a Stream into a header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled header on success, null on error</returns>
public static Header? ParseHeader(Stream data)
{
var header = data.ReadType<Header>();
if (header == null)
return null;
if (header.Signature1 != 0x8C655D13) // TODO: Move constant to Models
return null;
if (header.TocAddress >= data.Length)
return null;
return header;
}
/// <summary>
/// Parse a Stream into a directory
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled directory on success, null on error</returns>
public static Models.InstallShieldArchiveV3.Directory? ParseDirectory(Stream data, out uint chunkSize)
{
// TODO: Use ReadType when model is fixed
var directory = new Models.InstallShieldArchiveV3.Directory();
directory.FileCount = data.ReadUInt16();
chunkSize = data.ReadUInt16(); // TODO: Add to model and remove from output params
directory.Name = data.ReadPrefixedAnsiString();
return directory;
}
/// <summary>
/// Parse a Stream into a file
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled file on success, null on error</returns>
public static Models.InstallShieldArchiveV3.File? ParseFile(Stream data)
{
return data.ReadType<Models.InstallShieldArchiveV3.File>();
}
}
}
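
A short usage sketch for the new InstallShieldArchiveV3 deserializer (the archive path is a placeholder; the Deserialize signature and the Archive properties come from the file above):

// Illustrative only; the archive path is a placeholder.
using var stream = System.IO.File.OpenRead("SETUP.Z");
var archive = new SabreTools.Serialization.Deserializers.InstallShieldArchiveV3().Deserialize(stream);
if (archive?.Directories != null && archive.Files != null)
    System.Console.WriteLine($"{archive.Files.Length} file(s) across {archive.Directories.Length} directories");
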

View File

@@ -1,6 +1,5 @@
using System.Collections.Generic;
using System.IO;
using System.Text;
using SabreTools.IO.Extensions;
using SabreTools.Models.InstallShieldCabinet;
using static SabreTools.Models.InstallShieldCabinet.Constants;
@@ -333,23 +332,15 @@ namespace SabreTools.Serialization.Deserializers
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled common header on success, null on error</returns>
private static CommonHeader? ParseCommonHeader(Stream data)
public static CommonHeader? ParseCommonHeader(Stream data)
{
CommonHeader commonHeader = new CommonHeader();
var commonHeader = data.ReadType<CommonHeader>();
byte[]? signature = data.ReadBytes(4);
if (signature == null)
if (commonHeader == null)
return null;
commonHeader.Signature = Encoding.ASCII.GetString(signature);
if (commonHeader.Signature != SignatureString)
return null;
commonHeader.Version = data.ReadUInt32();
commonHeader.VolumeInfo = data.ReadUInt32();
commonHeader.DescriptorOffset = data.ReadUInt32();
commonHeader.DescriptorSize = data.ReadUInt32();
return commonHeader;
}
@@ -359,7 +350,7 @@ namespace SabreTools.Serialization.Deserializers
/// <param name="data">Stream to parse</param>
/// <param name="majorVersion">Major version of the cabinet</param>
/// <returns>Filled volume header on success, null on error</returns>
private static VolumeHeader ParseVolumeHeader(Stream data, int majorVersion)
public static VolumeHeader ParseVolumeHeader(Stream data, int majorVersion)
{
VolumeHeader volumeHeader = new VolumeHeader();
@@ -406,46 +397,9 @@ namespace SabreTools.Serialization.Deserializers
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled descriptor on success, null on error</returns>
private static Descriptor ParseDescriptor(Stream data)
public static Descriptor? ParseDescriptor(Stream data)
{
Descriptor descriptor = new Descriptor();
descriptor.StringsOffset = data.ReadUInt32();
descriptor.Reserved0 = data.ReadBytes(4);
descriptor.ComponentListOffset = data.ReadUInt32();
descriptor.FileTableOffset = data.ReadUInt32();
descriptor.Reserved1 = data.ReadBytes(4);
descriptor.FileTableSize = data.ReadUInt32();
descriptor.FileTableSize2 = data.ReadUInt32();
descriptor.DirectoryCount = data.ReadUInt16();
descriptor.Reserved2 = data.ReadBytes(4);
descriptor.Reserved3 = data.ReadBytes(2);
descriptor.Reserved4 = data.ReadBytes(4);
descriptor.FileCount = data.ReadUInt32();
descriptor.FileTableOffset2 = data.ReadUInt32();
descriptor.ComponentTableInfoCount = data.ReadUInt16();
descriptor.ComponentTableOffset = data.ReadUInt32();
descriptor.Reserved5 = data.ReadBytes(4);
descriptor.Reserved6 = data.ReadBytes(4);
descriptor.FileGroupOffsets = new uint[MAX_FILE_GROUP_COUNT];
for (int i = 0; i < descriptor.FileGroupOffsets.Length; i++)
{
descriptor.FileGroupOffsets[i] = data.ReadUInt32();
}
descriptor.ComponentOffsets = new uint[MAX_COMPONENT_COUNT];
for (int i = 0; i < descriptor.ComponentOffsets.Length; i++)
{
descriptor.ComponentOffsets[i] = data.ReadUInt32();
}
descriptor.SetupTypesOffset = data.ReadUInt32();
descriptor.SetupTableOffset = data.ReadUInt32();
descriptor.Reserved7 = data.ReadBytes(4);
descriptor.Reserved8 = data.ReadBytes(4);
return descriptor;
return data.ReadType<Descriptor>();
}
/// <summary>
@@ -455,9 +409,9 @@ namespace SabreTools.Serialization.Deserializers
/// <param name="majorVersion">Major version of the cabinet</param>
/// <param name="descriptorOffset">Offset of the cabinet descriptor</param>
/// <returns>Filled offset list on success, null on error</returns>
private static OffsetList ParseOffsetList(Stream data, int majorVersion, uint descriptorOffset)
public static OffsetList ParseOffsetList(Stream data, int majorVersion, uint descriptorOffset)
{
OffsetList offsetList = new OffsetList();
var offsetList = new OffsetList();
offsetList.NameOffset = data.ReadUInt32();
offsetList.DescriptorOffset = data.ReadUInt32();
@@ -471,9 +425,9 @@ namespace SabreTools.Serialization.Deserializers
// Read the string
if (majorVersion >= 17)
offsetList.Name = data.ReadString(Encoding.Unicode);
offsetList.Name = data.ReadNullTerminatedUnicodeString();
else
offsetList.Name = data.ReadString(Encoding.ASCII);
offsetList.Name = data.ReadNullTerminatedAnsiString();
// Seek back to the correct offset
data.Seek(currentOffset, SeekOrigin.Begin);
@@ -488,9 +442,9 @@ namespace SabreTools.Serialization.Deserializers
/// <param name="majorVersion">Major version of the cabinet</param>
/// <param name="descriptorOffset">Offset of the cabinet descriptor</param>
/// <returns>Filled file group on success, null on error</returns>
private static FileGroup ParseFileGroup(Stream data, int majorVersion, uint descriptorOffset)
public static FileGroup ParseFileGroup(Stream data, int majorVersion, uint descriptorOffset)
{
FileGroup fileGroup = new FileGroup();
var fileGroup = new FileGroup();
fileGroup.NameOffset = data.ReadUInt32();
@@ -533,9 +487,9 @@ namespace SabreTools.Serialization.Deserializers
// Read the string
if (majorVersion >= 17)
fileGroup.Name = data.ReadString(Encoding.Unicode);
fileGroup.Name = data.ReadNullTerminatedUnicodeString();
else
fileGroup.Name = data.ReadString(Encoding.ASCII);
fileGroup.Name = data.ReadNullTerminatedUnicodeString();
}
// Seek back to the correct offset
@@ -551,14 +505,14 @@ namespace SabreTools.Serialization.Deserializers
/// <param name="majorVersion">Major version of the cabinet</param>
/// <param name="descriptorOffset">Offset of the cabinet descriptor</param>
/// <returns>Filled component on success, null on error</returns>
private static Component ParseComponent(Stream data, int majorVersion, uint descriptorOffset)
public static Component ParseComponent(Stream data, int majorVersion, uint descriptorOffset)
{
Component component = new Component();
var component = new Component();
component.IdentifierOffset = data.ReadUInt32();
component.DescriptorOffset = data.ReadUInt32();
component.DisplayNameOffset = data.ReadUInt32();
component.Reserved0 = data.ReadBytes(2);
component.Reserved0 = data.ReadUInt16();
component.ReservedOffset0 = data.ReadUInt32();
component.ReservedOffset1 = data.ReadUInt32();
component.ComponentIndex = data.ReadUInt16();
@@ -595,9 +549,9 @@ namespace SabreTools.Serialization.Deserializers
// Read the string
if (majorVersion >= 17)
component.Identifier = data.ReadString(Encoding.Unicode);
component.Identifier = data.ReadNullTerminatedUnicodeString();
else
component.Identifier = data.ReadString(Encoding.ASCII);
component.Identifier = data.ReadNullTerminatedAnsiString();
}
// Read the display name, if possible
@@ -608,9 +562,9 @@ namespace SabreTools.Serialization.Deserializers
// Read the string
if (majorVersion >= 17)
component.DisplayName = data.ReadString(Encoding.Unicode);
component.DisplayName = data.ReadNullTerminatedUnicodeString();
else
component.DisplayName = data.ReadString(Encoding.ASCII);
component.DisplayName = data.ReadNullTerminatedAnsiString();
}
// Read the name, if possible
@@ -621,9 +575,9 @@ namespace SabreTools.Serialization.Deserializers
// Read the string
if (majorVersion >= 17)
component.Name = data.ReadString(Encoding.Unicode);
component.Name = data.ReadNullTerminatedUnicodeString();
else
component.Name = data.ReadString(Encoding.ASCII);
component.Name = data.ReadNullTerminatedAnsiString();
}
// Read the CLSID, if possible
@@ -656,9 +610,9 @@ namespace SabreTools.Serialization.Deserializers
data.Seek(nameOffset + descriptorOffset, SeekOrigin.Begin);
if (majorVersion >= 17)
component.FileGroupNames[j] = data.ReadString(Encoding.Unicode) ?? string.Empty;
component.FileGroupNames[j] = data.ReadNullTerminatedUnicodeString() ?? string.Empty;
else
component.FileGroupNames[j] = data.ReadString(Encoding.ASCII) ?? string.Empty;
component.FileGroupNames[j] = data.ReadNullTerminatedAnsiString() ?? string.Empty;
// Seek back to the original position
data.Seek(preNameOffset, SeekOrigin.Begin);
@@ -677,13 +631,13 @@ namespace SabreTools.Serialization.Deserializers
/// <param name="data">Stream to parse</param>
/// <param name="majorVersion">Major version of the cabinet</param>
/// <returns>Filled directory name on success, null on error</returns>
private static string? ParseDirectoryName(Stream data, int majorVersion)
public static string? ParseDirectoryName(Stream data, int majorVersion)
{
// Read the string
if (majorVersion >= 17)
return data.ReadString(Encoding.Unicode);
return data.ReadNullTerminatedUnicodeString();
else
return data.ReadString(Encoding.ASCII);
return data.ReadNullTerminatedAnsiString();
}
/// <summary>
@@ -693,7 +647,7 @@ namespace SabreTools.Serialization.Deserializers
/// <param name="majorVersion">Major version of the cabinet</param>
/// <param name="descriptorOffset">Offset of the cabinet descriptor</param>
/// <returns>Filled file descriptor on success, null on error</returns>
private static FileDescriptor ParseFileDescriptor(Stream data, int majorVersion, uint descriptorOffset)
public static FileDescriptor ParseFileDescriptor(Stream data, int majorVersion, uint descriptorOffset)
{
FileDescriptor fileDescriptor = new FileDescriptor();
@@ -740,9 +694,9 @@ namespace SabreTools.Serialization.Deserializers
// Read the string
if (majorVersion >= 17)
fileDescriptor.Name = data.ReadString(Encoding.Unicode);
fileDescriptor.Name = data.ReadNullTerminatedUnicodeString();
else
fileDescriptor.Name = data.ReadString(Encoding.ASCII);
fileDescriptor.Name = data.ReadNullTerminatedAnsiString();
}
// Seek back to the correct offset
@@ -757,7 +711,7 @@ namespace SabreTools.Serialization.Deserializers
/// Get the major version of the cabinet
/// </summary>
/// <remarks>This should live in the wrapper but is needed during parsing</remarks>
private static int GetMajorVersion(CommonHeader commonHeader)
public static int GetMajorVersion(CommonHeader commonHeader)
{
uint majorVersion = commonHeader.Version;
if (majorVersion >> 24 == 1)

View File

@@ -425,63 +425,13 @@ namespace SabreTools.Serialization.Deserializers
/// <returns>Filled information block on success, null on error</returns>
public static InformationBlock? ParseInformationBlock(Stream data)
{
// TODO: Use marshalling here instead of building
var informationBlock = new InformationBlock();
var informationBlock = data.ReadType<InformationBlock>();
byte[]? magic = data.ReadBytes(2);
if (magic == null)
if (informationBlock == null)
return null;
informationBlock.Signature = Encoding.ASCII.GetString(magic);
if (informationBlock.Signature != LESignatureString && informationBlock.Signature != LXSignatureString)
return null;
informationBlock.ByteOrder = (ByteOrder)data.ReadByteValue();
informationBlock.WordOrder = (WordOrder)data.ReadByteValue();
informationBlock.ExecutableFormatLevel = data.ReadUInt32();
informationBlock.CPUType = (CPUType)data.ReadUInt16();
informationBlock.ModuleOS = (OperatingSystem)data.ReadUInt16();
informationBlock.ModuleVersion = data.ReadUInt32();
informationBlock.ModuleTypeFlags = (ModuleFlags)data.ReadUInt32();
informationBlock.ModuleNumberPages = data.ReadUInt32();
informationBlock.InitialObjectCS = data.ReadUInt32();
informationBlock.InitialEIP = data.ReadUInt32();
informationBlock.InitialObjectSS = data.ReadUInt32();
informationBlock.InitialESP = data.ReadUInt32();
informationBlock.MemoryPageSize = data.ReadUInt32();
informationBlock.BytesOnLastPage = data.ReadUInt32();
informationBlock.FixupSectionSize = data.ReadUInt32();
informationBlock.FixupSectionChecksum = data.ReadUInt32();
informationBlock.LoaderSectionSize = data.ReadUInt32();
informationBlock.LoaderSectionChecksum = data.ReadUInt32();
informationBlock.ObjectTableOffset = data.ReadUInt32();
informationBlock.ObjectTableCount = data.ReadUInt32();
informationBlock.ObjectPageMapOffset = data.ReadUInt32();
informationBlock.ObjectIterateDataMapOffset = data.ReadUInt32();
informationBlock.ResourceTableOffset = data.ReadUInt32();
informationBlock.ResourceTableCount = data.ReadUInt32();
informationBlock.ResidentNamesTableOffset = data.ReadUInt32();
informationBlock.EntryTableOffset = data.ReadUInt32();
informationBlock.ModuleDirectivesTableOffset = data.ReadUInt32();
informationBlock.ModuleDirectivesCount = data.ReadUInt32();
informationBlock.FixupPageTableOffset = data.ReadUInt32();
informationBlock.FixupRecordTableOffset = data.ReadUInt32();
informationBlock.ImportedModulesNameTableOffset = data.ReadUInt32();
informationBlock.ImportedModulesCount = data.ReadUInt32();
informationBlock.ImportProcedureNameTableOffset = data.ReadUInt32();
informationBlock.PerPageChecksumTableOffset = data.ReadUInt32();
informationBlock.DataPagesOffset = data.ReadUInt32();
informationBlock.PreloadPageCount = data.ReadUInt32();
informationBlock.NonResidentNamesTableOffset = data.ReadUInt32();
informationBlock.NonResidentNamesTableLength = data.ReadUInt32();
informationBlock.NonResidentNamesTableChecksum = data.ReadUInt32();
informationBlock.AutomaticDataObject = data.ReadUInt32();
informationBlock.DebugInformationOffset = data.ReadUInt32();
informationBlock.DebugInformationLength = data.ReadUInt32();
informationBlock.PreloadInstancePagesNumber = data.ReadUInt32();
informationBlock.DemandInstancePagesNumber = data.ReadUInt32();
informationBlock.ExtraHeapAllocation = data.ReadUInt32();
return informationBlock;
}
@@ -490,19 +440,9 @@ namespace SabreTools.Serialization.Deserializers
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled object table entry on success, null on error</returns>
public static ObjectTableEntry ParseObjectTableEntry(Stream data)
public static ObjectTableEntry? ParseObjectTableEntry(Stream data)
{
// TODO: Use marshalling here instead of building
var entry = new ObjectTableEntry();
entry.VirtualSegmentSize = data.ReadUInt32();
entry.RelocationBaseAddress = data.ReadUInt32();
entry.ObjectFlags = (ObjectFlags)data.ReadUInt16();
entry.PageTableIndex = data.ReadUInt32();
entry.PageTableEntries = data.ReadUInt32();
entry.Reserved = data.ReadUInt32();
return entry;
return data.ReadType<ObjectTableEntry>();
}
/// <summary>
@@ -510,16 +450,9 @@ namespace SabreTools.Serialization.Deserializers
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled object page map entry on success, null on error</returns>
public static ObjectPageMapEntry ParseObjectPageMapEntry(Stream data)
public static ObjectPageMapEntry? ParseObjectPageMapEntry(Stream data)
{
// TODO: Use marshalling here instead of building
var entry = new ObjectPageMapEntry();
entry.PageDataOffset = data.ReadUInt32();
entry.DataSize = data.ReadUInt16();
entry.Flags = (ObjectPageFlags)data.ReadUInt16();
return entry;
return data.ReadType<ObjectPageMapEntry>();
}
/// <summary>
@@ -527,18 +460,9 @@ namespace SabreTools.Serialization.Deserializers
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled resource table entry on success, null on error</returns>
public static ResourceTableEntry ParseResourceTableEntry(Stream data)
public static ResourceTableEntry? ParseResourceTableEntry(Stream data)
{
// TODO: Use marshalling here instead of building
var entry = new ResourceTableEntry();
entry.TypeID = (ResourceTableEntryType)data.ReadUInt32();
entry.NameID = data.ReadUInt16();
entry.ResourceSize = data.ReadUInt32();
entry.ObjectNumber = data.ReadUInt16();
entry.Offset = data.ReadUInt32();
return entry;
return data.ReadType<ResourceTableEntry>();
}
/// <summary>
@@ -631,16 +555,9 @@ namespace SabreTools.Serialization.Deserializers
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled module format directives table entry on success, null on error</returns>
public static ModuleFormatDirectivesTableEntry ParseModuleFormatDirectivesTableEntry(Stream data)
public static ModuleFormatDirectivesTableEntry? ParseModuleFormatDirectivesTableEntry(Stream data)
{
// TODO: Use marshalling here instead of building
var entry = new ModuleFormatDirectivesTableEntry();
entry.DirectiveNumber = (DirectiveNumber)data.ReadUInt16();
entry.DirectiveDataLength = data.ReadUInt16();
entry.DirectiveDataOffset = data.ReadUInt32();
return entry;
return data.ReadType<ModuleFormatDirectivesTableEntry>();
}
/// <summary>
@@ -648,20 +565,9 @@ namespace SabreTools.Serialization.Deserializers
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled verify record directive table entry on success, null on error</returns>
public static VerifyRecordDirectiveTableEntry ParseVerifyRecordDirectiveTableEntry(Stream data)
public static VerifyRecordDirectiveTableEntry? ParseVerifyRecordDirectiveTableEntry(Stream data)
{
// TODO: Use marshalling here instead of building
var entry = new VerifyRecordDirectiveTableEntry();
entry.EntryCount = data.ReadUInt16();
entry.OrdinalIndex = data.ReadUInt16();
entry.Version = data.ReadUInt16();
entry.ObjectEntriesCount = data.ReadUInt16();
entry.ObjectNumberInModule = data.ReadUInt16();
entry.ObjectLoadBaseAddress = data.ReadUInt16();
entry.ObjectVirtualAddressSize = data.ReadUInt16();
return entry;
return data.ReadType<VerifyRecordDirectiveTableEntry>();
}
/// <summary>
@@ -669,14 +575,9 @@ namespace SabreTools.Serialization.Deserializers
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled fix-up page table entry on success, null on error</returns>
public static FixupPageTableEntry ParseFixupPageTableEntry(Stream data)
public static FixupPageTableEntry? ParseFixupPageTableEntry(Stream data)
{
// TODO: Use marshalling here instead of building
var entry = new FixupPageTableEntry();
entry.Offset = data.ReadUInt32();
return entry;
return data.ReadType<FixupPageTableEntry>();
}
/// <summary>
@@ -945,14 +846,9 @@ namespace SabreTools.Serialization.Deserializers
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled per-page checksum table entry on success, null on error</returns>
public static PerPageChecksumTableEntry ParsePerPageChecksumTableEntry(Stream data)
public static PerPageChecksumTableEntry? ParsePerPageChecksumTableEntry(Stream data)
{
// TODO: Use marshalling here instead of building
var entry = new PerPageChecksumTableEntry();
entry.Checksum = data.ReadUInt32();
return entry;
return data.ReadType<PerPageChecksumTableEntry>();
}
/// <summary>

View File

@@ -126,20 +126,31 @@ namespace SabreTools.Serialization.Deserializers
/// <param name="data">Stream to parse</param>
/// <param name="count">Number of relocation table entries to read</param>
/// <returns>Filled relocation table on success, null on error</returns>
private static RelocationEntry[] ParseRelocationTable(Stream data, int count)
private static RelocationEntry[]? ParseRelocationTable(Stream data, int count)
{
// TODO: Use marshalling here instead of building
var relocationTable = new RelocationEntry[count];
for (int i = 0; i < count; i++)
{
var entry = new RelocationEntry();
entry.Offset = data.ReadUInt16();
entry.Segment = data.ReadUInt16();
var entry = ParseRelocationEntry(data);
if (entry == null)
return null;
relocationTable[i] = entry;
}
return relocationTable;
}
/// <summary>
/// Parse a Stream into a relocation table entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled relocation table entry on success, null on error</returns>
public static RelocationEntry? ParseRelocationEntry(Stream data)
{
return data.ReadType<RelocationEntry>();
}
}
}
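With the per-entry parser now public, a single relocation record can be read out of an arbitrary buffer without touching the rest of the executable. A small usage sketch follows; the byte values are invented, and "MSDOS" is a guess at the enclosing deserializer class since the hunk only shows the namespace.

using System;
using System.IO;
using SabreTools.Serialization.Deserializers;

public static class RelocationEntryDemo
{
    public static void Main()
    {
        // Hypothetical 4-byte entry: Offset = 0x0010, Segment = 0x0002 (little-endian).
        byte[] raw = new byte[] { 0x10, 0x00, 0x02, 0x00 };
        using var stream = new MemoryStream(raw);

        var entry = MSDOS.ParseRelocationEntry(stream);
        if (entry != null)
            Console.WriteLine($"{entry.Segment:X4}:{entry.Offset:X4}");
    }
}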

View File

@@ -138,8 +138,8 @@ namespace SabreTools.Serialization.Deserializers
if (header.Flags.HasFlag(HeaderFlags.PREV_CABINET))
#endif
{
header.CabinetPrev = data.ReadString(Encoding.ASCII);
header.DiskPrev = data.ReadString(Encoding.ASCII);
header.CabinetPrev = data.ReadNullTerminatedAnsiString();
header.DiskPrev = data.ReadNullTerminatedAnsiString();
}
#if NET20 || NET35
@@ -148,8 +148,8 @@ namespace SabreTools.Serialization.Deserializers
if (header.Flags.HasFlag(HeaderFlags.NEXT_CABINET))
#endif
{
header.CabinetNext = data.ReadString(Encoding.ASCII);
header.DiskNext = data.ReadString(Encoding.ASCII);
header.CabinetNext = data.ReadNullTerminatedAnsiString();
header.DiskNext = data.ReadNullTerminatedAnsiString();
}
return header;
@@ -234,9 +234,9 @@ namespace SabreTools.Serialization.Deserializers
#else
if (file.Attributes.HasFlag(Models.MicrosoftCabinet.FileAttributes.NAME_IS_UTF))
#endif
file.Name = data.ReadString(Encoding.Unicode);
file.Name = data.ReadNullTerminatedUnicodeString();
else
file.Name = data.ReadString(Encoding.ASCII);
file.Name = data.ReadNullTerminatedAnsiString();
return file;
}
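The CabinetPrev/DiskPrev and file-name fields are null-terminated in the cabinet format, which is why the encoding-based ReadString calls give way to the null-terminated helpers. A minimal standalone sketch of what the ANSI variant presumably does (the real SabreTools.IO implementation is not shown in this diff):

using System.Collections.Generic;
using System.IO;
using System.Text;

internal static class NullTerminatedReadSketch
{
    // Collect bytes until a 0x00 terminator (or end of stream), then decode.
    // A Unicode variant would read two-byte code units until 0x0000 instead.
    public static string? ReadNullTerminatedAnsiStringSketch(this Stream data)
    {
        if (data.Position >= data.Length)
            return null;

        var bytes = new List<byte>();
        int b;
        while ((b = data.ReadByte()) > 0)
            bytes.Add((byte)b);

        return Encoding.ASCII.GetString(bytes.ToArray());
    }
}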

View File

@@ -404,20 +404,13 @@ namespace SabreTools.Serialization.Deserializers
/// <returns>Filled user data on success, null on error</returns>
private static UserData? ParseUserData(Stream data)
{
UserData userData = new UserData();
var userData = data.ReadType<UserData>();
byte[]? signature = data.ReadBytes(4);
if (signature == null)
if (userData == null)
return null;
userData.Signature = Encoding.ASCII.GetString(signature);
if (userData.Signature != UserDataSignatureString)
return null;
userData.UserDataSize = data.ReadUInt32();
userData.HeaderOffset = data.ReadUInt32();
userData.UserDataHeaderSize = data.ReadUInt32();
return userData;
}
@@ -428,7 +421,7 @@ namespace SabreTools.Serialization.Deserializers
/// <returns>Filled HET table on success, null on error</returns>
private static HetTable? ParseHetTable(Stream data)
{
HetTable hetTable = new HetTable();
var hetTable = new HetTable();
// Common Headers
byte[]? signature = data.ReadBytes(4);
@@ -465,7 +458,7 @@ namespace SabreTools.Serialization.Deserializers
/// <returns>Filled BET table on success, null on error</returns>
private static BetTable? ParseBetTable(Stream data)
{
BetTable betTable = new BetTable();
var betTable = new BetTable();
// Common Headers
byte[]? signature = data.ReadBytes(4);
@@ -519,18 +512,9 @@ namespace SabreTools.Serialization.Deserializers
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled hash entry on success, null on error</returns>
private static HashEntry ParseHashEntry(Stream data)
private static HashEntry? ParseHashEntry(Stream data)
{
// TODO: Use marshalling here instead of building
HashEntry hashEntry = new HashEntry();
hashEntry.NameHashPartA = data.ReadUInt32();
hashEntry.NameHashPartB = data.ReadUInt32();
hashEntry.Locale = (Locale)data.ReadUInt16();
hashEntry.Platform = data.ReadUInt16();
hashEntry.BlockIndex = data.ReadUInt32();
return hashEntry;
return data.ReadType<HashEntry>();
}
/// <summary>
@@ -538,16 +522,9 @@ namespace SabreTools.Serialization.Deserializers
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled block entry on success, null on error</returns>
private static BlockEntry ParseBlockEntry(Stream data)
private static BlockEntry? ParseBlockEntry(Stream data)
{
BlockEntry blockEntry = new BlockEntry();
blockEntry.FilePosition = data.ReadUInt32();
blockEntry.CompressedSize = data.ReadUInt32();
blockEntry.UncompressedSize = data.ReadUInt32();
blockEntry.Flags = (FileFlags)data.ReadUInt32();
return blockEntry;
return data.ReadType<BlockEntry>();
}
/// <summary>
@@ -555,19 +532,9 @@ namespace SabreTools.Serialization.Deserializers
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled patch info on success, null on error</returns>
private static PatchInfo ParsePatchInfo(Stream data)
private static PatchInfo? ParsePatchInfo(Stream data)
{
// TODO: Use marshalling here instead of building
PatchInfo patchInfo = new PatchInfo();
patchInfo.Length = data.ReadUInt32();
patchInfo.Flags = data.ReadUInt32();
patchInfo.DataSize = data.ReadUInt32();
patchInfo.MD5 = data.ReadBytes(0x10);
// TODO: Fill the sector offset table
return patchInfo;
return data.ReadType<PatchInfo>();
}
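Collapsing ParseUserData into one ReadType call implies the four-character signature is itself part of the marshalled layout, with the UserDataSignatureString comparison still running afterwards. One way such a field could be declared is sketched below; whether SabreTools.Models uses this exact attribute combination is an assumption.

using System.Runtime.InteropServices;

// Hypothetical layout only: a fixed four-character ANSI signature followed
// by the three 32-bit fields, so one marshalled read fills the whole record.
[StructLayout(LayoutKind.Sequential, CharSet = CharSet.Ansi)]
public sealed class UserDataSketch
{
    [MarshalAs(UnmanagedType.ByValTStr, SizeConst = 4)]
    public string? Signature;

    public uint UserDataSize;
    public uint HeaderOffset;
    public uint UserDataHeaderSize;
}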
#region Helpers

View File

@@ -137,7 +137,11 @@ namespace SabreTools.Serialization.Deserializers
data.Seek(offset, SeekOrigin.Begin);
// Parse the ExeFS header
cart.ExeFSHeaders[i] = ParseExeFSHeader(data);
var exeFsHeader = ParseExeFSHeader(data);
if (exeFsHeader == null)
return null;
cart.ExeFSHeaders[i] = exeFsHeader;
}
#endregion
@@ -182,7 +186,7 @@ namespace SabreTools.Serialization.Deserializers
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled NCSD header on success, null on error</returns>
private static NCSDHeader? ParseNCSDHeader(Stream data)
public static NCSDHeader? ParseNCSDHeader(Stream data)
{
// TODO: Use marshalling here instead of building
var header = new NCSDHeader();
@@ -204,7 +208,11 @@ namespace SabreTools.Serialization.Deserializers
header.PartitionsTable = new PartitionTableEntry[8];
for (int i = 0; i < 8; i++)
{
header.PartitionsTable[i] = ParsePartitionTableEntry(data);
var partitionTableEntry = ParsePartitionTableEntry(data);
if (partitionTableEntry == null)
return null;
header.PartitionsTable[i] = partitionTableEntry;
}
if (header.PartitionsFSType == FilesystemType.Normal || header.PartitionsFSType == FilesystemType.None)
@@ -239,15 +247,9 @@ namespace SabreTools.Serialization.Deserializers
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled partition table entry on success, null on error</returns>
private static PartitionTableEntry ParsePartitionTableEntry(Stream data)
public static PartitionTableEntry? ParsePartitionTableEntry(Stream data)
{
// TODO: Use marshalling here instead of building
var partitionTableEntry = new PartitionTableEntry();
partitionTableEntry.Offset = data.ReadUInt32();
partitionTableEntry.Length = data.ReadUInt32();
return partitionTableEntry;
return data.ReadType<PartitionTableEntry>();
}
/// <summary>
@@ -255,24 +257,9 @@ namespace SabreTools.Serialization.Deserializers
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled card info header on success, null on error</returns>
private static CardInfoHeader ParseCardInfoHeader(Stream data)
public static CardInfoHeader? ParseCardInfoHeader(Stream data)
{
// TODO: Use marshalling here instead of building
var cardInfoHeader = new CardInfoHeader();
cardInfoHeader.WritableAddressMediaUnits = data.ReadUInt32();
cardInfoHeader.CardInfoBitmask = data.ReadUInt32();
cardInfoHeader.Reserved1 = data.ReadBytes(0xF8);
cardInfoHeader.FilledSize = data.ReadUInt32();
cardInfoHeader.Reserved2 = data.ReadBytes(0x0C);
cardInfoHeader.TitleVersion = data.ReadUInt16();
cardInfoHeader.CardRevision = data.ReadUInt16();
cardInfoHeader.Reserved3 = data.ReadBytes(0x0C);
cardInfoHeader.CVerTitleID = data.ReadBytes(8);
cardInfoHeader.CVerVersionNumber = data.ReadUInt16();
cardInfoHeader.Reserved4 = data.ReadBytes(0xCD6);
return cardInfoHeader;
return data.ReadType<CardInfoHeader>();
}
/// <summary>
@@ -280,46 +267,9 @@ namespace SabreTools.Serialization.Deserializers
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled development card info header on success, null on error</returns>
private static DevelopmentCardInfoHeader? ParseDevelopmentCardInfoHeader(Stream data)
public static DevelopmentCardInfoHeader? ParseDevelopmentCardInfoHeader(Stream data)
{
// TODO: Use marshalling here instead of building
var developmentCardInfoHeader = new DevelopmentCardInfoHeader();
developmentCardInfoHeader.InitialData = ParseInitialData(data);
if (developmentCardInfoHeader.InitialData == null)
return null;
developmentCardInfoHeader.CardDeviceReserved1 = data.ReadBytes(0x200);
developmentCardInfoHeader.TitleKey = data.ReadBytes(0x10);
developmentCardInfoHeader.CardDeviceReserved2 = data.ReadBytes(0x1BF0);
developmentCardInfoHeader.TestData = ParseTestData(data);
if (developmentCardInfoHeader.TestData == null)
return null;
return developmentCardInfoHeader;
}
/// <summary>
/// Parse a Stream into an initial data
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled initial data on success, null on error</returns>
private static InitialData? ParseInitialData(Stream data)
{
// TODO: Use marshalling here instead of building
var initialData = new InitialData();
initialData.CardSeedKeyY = data.ReadBytes(0x10);
initialData.EncryptedCardSeed = data.ReadBytes(0x10);
initialData.CardSeedAESMAC = data.ReadBytes(0x10);
initialData.CardSeedNonce = data.ReadBytes(0xC);
initialData.Reserved = data.ReadBytes(0xC4);
initialData.BackupHeader = ParseNCCHHeader(data, true);
if (initialData.BackupHeader == null)
return null;
return initialData;
return data.ReadType<DevelopmentCardInfoHeader>();
}
/// <summary>
@@ -328,7 +278,7 @@ namespace SabreTools.Serialization.Deserializers
/// <param name="data">Stream to parse</param>
/// <param name="skipSignature">Indicates if the signature should be skipped</param>
/// <returns>Filled NCCH header on success, null on error</returns>
internal static NCCHHeader ParseNCCHHeader(Stream data, bool skipSignature = false)
public static NCCHHeader ParseNCCHHeader(Stream data, bool skipSignature = false)
{
// TODO: Use marshalling here instead of building
var header = new NCCHHeader();
@@ -352,7 +302,7 @@ namespace SabreTools.Serialization.Deserializers
header.ProductCode = Encoding.ASCII.GetString(productCode).TrimEnd('\0');
header.ExtendedHeaderHash = data.ReadBytes(0x20);
header.ExtendedHeaderSizeInBytes = data.ReadUInt32();
header.Reserved2 = data.ReadBytes(4);
header.Reserved2 = data.ReadUInt32();
header.Flags = ParseNCCHHeaderFlags(data);
header.PlainRegionOffsetInMediaUnits = data.ReadUInt32();
header.PlainRegionSizeInMediaUnits = data.ReadUInt32();
@@ -361,11 +311,11 @@ namespace SabreTools.Serialization.Deserializers
header.ExeFSOffsetInMediaUnits = data.ReadUInt32();
header.ExeFSSizeInMediaUnits = data.ReadUInt32();
header.ExeFSHashRegionSizeInMediaUnits = data.ReadUInt32();
header.Reserved3 = data.ReadBytes(4);
header.Reserved3 = data.ReadUInt32();
header.RomFSOffsetInMediaUnits = data.ReadUInt32();
header.RomFSSizeInMediaUnits = data.ReadUInt32();
header.RomFSHashRegionSizeInMediaUnits = data.ReadUInt32();
header.Reserved4 = data.ReadBytes(4);
header.Reserved4 = data.ReadUInt32();
header.ExeFSSuperblockHash = data.ReadBytes(0x20);
header.RomFSSuperblockHash = data.ReadBytes(0x20);
@@ -377,46 +327,9 @@ namespace SabreTools.Serialization.Deserializers
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled NCCH header flags on success, null on error</returns>
private static NCCHHeaderFlags ParseNCCHHeaderFlags(Stream data)
public static NCCHHeaderFlags? ParseNCCHHeaderFlags(Stream data)
{
// TODO: Use marshalling here instead of building
var headerFlags = new NCCHHeaderFlags();
headerFlags.Reserved0 = data.ReadByteValue();
headerFlags.Reserved1 = data.ReadByteValue();
headerFlags.Reserved2 = data.ReadByteValue();
headerFlags.CryptoMethod = (CryptoMethod)data.ReadByteValue();
headerFlags.ContentPlatform = (ContentPlatform)data.ReadByteValue();
headerFlags.MediaPlatformIndex = (ContentType)data.ReadByteValue();
headerFlags.ContentUnitSize = data.ReadByteValue();
headerFlags.BitMasks = (BitMasks)data.ReadByteValue();
return headerFlags;
}
/// <summary>
/// Parse a Stream into an initial data
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled initial data on success, null on error</returns>
private static TestData ParseTestData(Stream data)
{
// TODO: Use marshalling here instead of building
var testData = new TestData();
// TODO: Validate some of the values
testData.Signature = data.ReadBytes(8);
testData.AscendingByteSequence = data.ReadBytes(0x1F8);
testData.DescendingByteSequence = data.ReadBytes(0x200);
testData.Filled00 = data.ReadBytes(0x200);
testData.FilledFF = data.ReadBytes(0x200);
testData.Filled0F = data.ReadBytes(0x200);
testData.FilledF0 = data.ReadBytes(0x200);
testData.Filled55 = data.ReadBytes(0x200);
testData.FilledAA = data.ReadBytes(0x1FF);
testData.FinalByte = data.ReadByteValue();
return testData;
return data.ReadType<NCCHHeaderFlags>();
}
/// <summary>
@@ -424,203 +337,9 @@ namespace SabreTools.Serialization.Deserializers
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled NCCH extended header on success, null on error</returns>
private static NCCHExtendedHeader? ParseNCCHExtendedHeader(Stream data)
public static NCCHExtendedHeader? ParseNCCHExtendedHeader(Stream data)
{
// TODO: Use marshalling here instead of building
var extendedHeader = new NCCHExtendedHeader();
extendedHeader.SCI = ParseSystemControlInfo(data);
if (extendedHeader.SCI == null)
return null;
extendedHeader.ACI = ParseAccessControlInfo(data);
if (extendedHeader.ACI == null)
return null;
extendedHeader.AccessDescSignature = data.ReadBytes(0x100);
extendedHeader.NCCHHDRPublicKey = data.ReadBytes(0x100);
extendedHeader.ACIForLimitations = ParseAccessControlInfo(data);
if (extendedHeader.ACI == null)
return null;
return extendedHeader;
}
/// <summary>
/// Parse a Stream into a system control info
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled system control info on success, null on error</returns>
private static SystemControlInfo ParseSystemControlInfo(Stream data)
{
// TODO: Use marshalling here instead of building
var systemControlInfo = new SystemControlInfo();
byte[]? applicationTitle = data.ReadBytes(8);
if (applicationTitle != null)
systemControlInfo.ApplicationTitle = Encoding.ASCII.GetString(applicationTitle).TrimEnd('\0');
systemControlInfo.Reserved1 = data.ReadBytes(5);
systemControlInfo.Flag = data.ReadByteValue();
systemControlInfo.RemasterVersion = data.ReadUInt16();
systemControlInfo.TextCodeSetInfo = ParseCodeSetInfo(data);
systemControlInfo.StackSize = data.ReadUInt32();
systemControlInfo.ReadOnlyCodeSetInfo = ParseCodeSetInfo(data);
systemControlInfo.Reserved2 = data.ReadBytes(4);
systemControlInfo.DataCodeSetInfo = ParseCodeSetInfo(data);
systemControlInfo.BSSSize = data.ReadUInt32();
systemControlInfo.DependencyModuleList = new ulong[48];
for (int i = 0; i < 48; i++)
{
systemControlInfo.DependencyModuleList[i] = data.ReadUInt64();
}
systemControlInfo.SystemInfo = ParseSystemInfo(data);
return systemControlInfo;
}
/// <summary>
/// Parse a Stream into a code set info
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled code set info on success, null on error</returns>
private static CodeSetInfo ParseCodeSetInfo(Stream data)
{
// TODO: Use marshalling here instead of building
var codeSetInfo = new CodeSetInfo();
codeSetInfo.Address = data.ReadUInt32();
codeSetInfo.PhysicalRegionSizeInPages = data.ReadUInt32();
codeSetInfo.SizeInBytes = data.ReadUInt32();
return codeSetInfo;
}
/// <summary>
/// Parse a Stream into a system info
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled system info on success, null on error</returns>
private static SystemInfo ParseSystemInfo(Stream data)
{
// TODO: Use marshalling here instead of building
var systemInfo = new SystemInfo();
systemInfo.SaveDataSize = data.ReadUInt64();
systemInfo.JumpID = data.ReadUInt64();
systemInfo.Reserved = data.ReadBytes(0x30);
return systemInfo;
}
/// <summary>
/// Parse a Stream into an access control info
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled access control info on success, null on error</returns>
private static AccessControlInfo ParseAccessControlInfo(Stream data)
{
// TODO: Use marshalling here instead of building
var accessControlInfo = new AccessControlInfo();
accessControlInfo.ARM11LocalSystemCapabilities = ParseARM11LocalSystemCapabilities(data);
accessControlInfo.ARM11KernelCapabilities = ParseARM11KernelCapabilities(data);
accessControlInfo.ARM9AccessControl = ParseARM9AccessControl(data);
return accessControlInfo;
}
/// <summary>
/// Parse a Stream into an ARM11 local system capabilities
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled ARM11 local system capabilities on success, null on error</returns>
private static ARM11LocalSystemCapabilities ParseARM11LocalSystemCapabilities(Stream data)
{
// TODO: Use marshalling here instead of building
var arm11LocalSystemCapabilities = new ARM11LocalSystemCapabilities();
arm11LocalSystemCapabilities.ProgramID = data.ReadUInt64();
arm11LocalSystemCapabilities.CoreVersion = data.ReadUInt32();
arm11LocalSystemCapabilities.Flag1 = (ARM11LSCFlag1)data.ReadByteValue();
arm11LocalSystemCapabilities.Flag2 = (ARM11LSCFlag2)data.ReadByteValue();
arm11LocalSystemCapabilities.Flag0 = (ARM11LSCFlag0)data.ReadByteValue();
arm11LocalSystemCapabilities.Priority = data.ReadByteValue();
arm11LocalSystemCapabilities.ResourceLimitDescriptors = new ushort[16];
for (int i = 0; i < 16; i++)
{
arm11LocalSystemCapabilities.ResourceLimitDescriptors[i] = data.ReadUInt16();
}
arm11LocalSystemCapabilities.StorageInfo = ParseStorageInfo(data);
arm11LocalSystemCapabilities.ServiceAccessControl = new ulong[32];
for (int i = 0; i < 32; i++)
{
arm11LocalSystemCapabilities.ServiceAccessControl[i] = data.ReadUInt64();
}
arm11LocalSystemCapabilities.ExtendedServiceAccessControl = new ulong[2];
for (int i = 0; i < 2; i++)
{
arm11LocalSystemCapabilities.ExtendedServiceAccessControl[i] = data.ReadUInt64();
}
arm11LocalSystemCapabilities.Reserved = data.ReadBytes(0x0F);
arm11LocalSystemCapabilities.ResourceLimitCategory = (ResourceLimitCategory)data.ReadByteValue();
return arm11LocalSystemCapabilities;
}
/// <summary>
/// Parse a Stream into a storage info
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled storage info on success, null on error</returns>
private static StorageInfo ParseStorageInfo(Stream data)
{
// TODO: Use marshalling here instead of building
var storageInfo = new StorageInfo();
storageInfo.ExtdataID = data.ReadUInt64();
storageInfo.SystemSavedataIDs = data.ReadBytes(8);
storageInfo.StorageAccessibleUniqueIDs = data.ReadBytes(8);
storageInfo.FileSystemAccessInfo = data.ReadBytes(7);
storageInfo.OtherAttributes = (StorageInfoOtherAttributes)data.ReadByteValue();
return storageInfo;
}
/// <summary>
/// Parse a Stream into an ARM11 kernel capabilities
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled ARM11 kernel capabilities on success, null on error</returns>
private static ARM11KernelCapabilities ParseARM11KernelCapabilities(Stream data)
{
// TODO: Use marshalling here instead of building
var arm11KernelCapabilities = new ARM11KernelCapabilities();
arm11KernelCapabilities.Descriptors = new uint[28];
for (int i = 0; i < 28; i++)
{
arm11KernelCapabilities.Descriptors[i] = data.ReadUInt32();
}
arm11KernelCapabilities.Reserved = data.ReadBytes(0x10);
return arm11KernelCapabilities;
}
/// <summary>
/// Parse a Stream into an ARM11 access control
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled ARM11 access control on success, null on error</returns>
private static ARM9AccessControl ParseARM9AccessControl(Stream data)
{
// TODO: Use marshalling here instead of building
var arm9AccessControl = new ARM9AccessControl();
arm9AccessControl.Descriptors = data.ReadBytes(15);
arm9AccessControl.DescriptorVersion = data.ReadByteValue();
return arm9AccessControl;
return data.ReadType<NCCHExtendedHeader>();
}
/// <summary>
@@ -628,7 +347,7 @@ namespace SabreTools.Serialization.Deserializers
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled ExeFS header on success, null on error</returns>
private static ExeFSHeader ParseExeFSHeader(Stream data)
public static ExeFSHeader? ParseExeFSHeader(Stream data)
{
// TODO: Use marshalling here instead of building
var exeFSHeader = new ExeFSHeader();
@@ -636,7 +355,11 @@ namespace SabreTools.Serialization.Deserializers
exeFSHeader.FileHeaders = new ExeFSFileHeader[10];
for (int i = 0; i < 10; i++)
{
exeFSHeader.FileHeaders[i] = ParseExeFSFileHeader(data);
var exeFsFileHeader = ParseExeFSFileHeader(data);
if (exeFsFileHeader == null)
return null;
exeFSHeader.FileHeaders[i] = exeFsFileHeader;
}
exeFSHeader.Reserved = data.ReadBytes(0x20);
exeFSHeader.FileHashes = new byte[10][];
@@ -653,7 +376,7 @@ namespace SabreTools.Serialization.Deserializers
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled ExeFS file header on success, null on error</returns>
private static ExeFSFileHeader ParseExeFSFileHeader(Stream data)
public static ExeFSFileHeader? ParseExeFSFileHeader(Stream data)
{
// TODO: Use marshalling here instead of building
var exeFSFileHeader = new ExeFSFileHeader();
@@ -672,39 +395,17 @@ namespace SabreTools.Serialization.Deserializers
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled RomFS header on success, null on error</returns>
private static RomFSHeader? ParseRomFSHeader(Stream data)
public static RomFSHeader? ParseRomFSHeader(Stream data)
{
// TODO: Use marshalling here instead of building
var romFSHeader = new RomFSHeader();
var romFSHeader = data.ReadType<RomFSHeader>();
byte[]? magicString = data.ReadBytes(4);
if (magicString == null)
if (romFSHeader == null)
return null;
romFSHeader.MagicString = Encoding.ASCII.GetString(magicString).TrimEnd('\0');
if (romFSHeader.MagicString != RomFSMagicNumber)
return null;
romFSHeader.MagicNumber = data.ReadUInt32();
if (romFSHeader.MagicNumber != RomFSSecondMagicNumber)
return null;
romFSHeader.MasterHashSize = data.ReadUInt32();
romFSHeader.Level1LogicalOffset = data.ReadUInt64();
romFSHeader.Level1HashdataSize = data.ReadUInt64();
romFSHeader.Level1BlockSizeLog2 = data.ReadUInt32();
romFSHeader.Reserved1 = data.ReadBytes(4);
romFSHeader.Level2LogicalOffset = data.ReadUInt64();
romFSHeader.Level2HashdataSize = data.ReadUInt64();
romFSHeader.Level2BlockSizeLog2 = data.ReadUInt32();
romFSHeader.Reserved2 = data.ReadBytes(4);
romFSHeader.Level3LogicalOffset = data.ReadUInt64();
romFSHeader.Level3HashdataSize = data.ReadUInt64();
romFSHeader.Level3BlockSizeLog2 = data.ReadUInt32();
romFSHeader.Reserved3 = data.ReadBytes(4);
romFSHeader.Reserved4 = data.ReadBytes(4);
romFSHeader.OptionalInfoSize = data.ReadUInt32();
return romFSHeader;
}
}
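DevelopmentCardInfoHeader is the largest collapse in this set: the nested InitialData and TestData blocks now have to be consumed by the same marshalled read, which works because nested formatted types are laid out inline rather than stored as references. A tiny self-contained illustration of that layout rule, using invented types:

using System;
using System.Runtime.InteropServices;

[StructLayout(LayoutKind.Sequential)]
public struct InnerSketch
{
    public uint A;
    public uint B;
}

[StructLayout(LayoutKind.Sequential)]
public struct OuterSketch
{
    public InnerSketch First;   // embedded inline, not stored as a reference
    public ulong Tail;
}

public static class NestedLayoutDemo
{
    public static void Main()
    {
        // 4 + 4 (inner) + 8 = 16 bytes, so a single marshalled read of
        // OuterSketch also covers the nested InnerSketch fields.
        Console.WriteLine(Marshal.SizeOf<OuterSketch>());   // 16
    }
}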

View File

@@ -61,6 +61,9 @@ namespace SabreTools.Serialization.Deserializers
for (int i = 0; i < directoryHeader.ItemCount; i++)
{
var directoryEntry = ParseDirectoryEntry(data);
if (directoryEntry == null)
return null;
file.DirectoryEntries[i] = directoryEntry;
}
@@ -83,7 +86,7 @@ namespace SabreTools.Serialization.Deserializers
while (data.Position < directoryNamesEnd)
{
long nameOffset = data.Position - directoryNamesStart;
string? directoryName = data.ReadString(Encoding.ASCII);
string? directoryName = data.ReadNullTerminatedAnsiString();
if (data.Position > directoryNamesEnd)
{
data.Seek(-directoryName?.Length ?? 0, SeekOrigin.Current);
@@ -96,13 +99,6 @@ namespace SabreTools.Serialization.Deserializers
file.DirectoryNames[nameOffset] = directoryName;
}
// Loop and assign to entries
foreach (var directoryEntry in file.DirectoryEntries)
{
if (directoryEntry != null)
directoryEntry.Name = file.DirectoryNames[directoryEntry.NameOffset];
}
}
#endregion
@@ -116,6 +112,9 @@ namespace SabreTools.Serialization.Deserializers
for (int i = 0; i < directoryHeader.Info1Count; i++)
{
var directoryInfo1Entry = ParseDirectoryInfo1Entry(data);
if (directoryInfo1Entry == null)
return null;
file.DirectoryInfo1Entries[i] = directoryInfo1Entry;
}
@@ -130,6 +129,9 @@ namespace SabreTools.Serialization.Deserializers
for (int i = 0; i < directoryHeader.ItemCount; i++)
{
var directoryInfo2Entry = ParseDirectoryInfo2Entry(data);
if (directoryInfo2Entry == null)
return null;
file.DirectoryInfo2Entries[i] = directoryInfo2Entry;
}
@@ -144,6 +146,9 @@ namespace SabreTools.Serialization.Deserializers
for (int i = 0; i < directoryHeader.CopyCount; i++)
{
var directoryCopyEntry = ParseDirectoryCopyEntry(data);
if (directoryCopyEntry == null)
return null;
file.DirectoryCopyEntries[i] = directoryCopyEntry;
}
@@ -158,6 +163,9 @@ namespace SabreTools.Serialization.Deserializers
for (int i = 0; i < directoryHeader.LocalCount; i++)
{
var directoryLocalEntry = ParseDirectoryLocalEntry(data);
if (directoryLocalEntry == null)
return null;
file.DirectoryLocalEntries[i] = directoryLocalEntry;
}
@@ -187,6 +195,9 @@ namespace SabreTools.Serialization.Deserializers
for (int i = 0; i < directoryHeader.ItemCount; i++)
{
var unknownEntry = ParseUnknownEntry(data);
if (unknownEntry == null)
return null;
file.UnknownEntries[i] = unknownEntry;
}
@@ -228,6 +239,9 @@ namespace SabreTools.Serialization.Deserializers
for (int i = 0; i < checksumMapHeader.ItemCount; i++)
{
var checksumMapEntry = ParseChecksumMapEntry(data);
if (checksumMapEntry == null)
return null;
file.ChecksumMapEntries[i] = checksumMapEntry;
}
@@ -242,6 +256,9 @@ namespace SabreTools.Serialization.Deserializers
for (int i = 0; i < checksumMapHeader.ChecksumCount; i++)
{
var checksumEntry = ParseChecksumEntry(data);
if (checksumEntry == null)
return null;
file.ChecksumEntries[i] = checksumEntry;
}
@@ -260,30 +277,17 @@ namespace SabreTools.Serialization.Deserializers
/// <returns>Filled Half-Life No Cache header on success, null on error</returns>
private static Header? ParseHeader(Stream data)
{
// TODO: Use marshalling here instead of building
Header header = new Header();
var header = data.ReadType<Header>();
header.Dummy0 = data.ReadUInt32();
if (header == null)
return null;
if (header.Dummy0 != 0x00000001)
return null;
header.MajorVersion = data.ReadUInt32();
if (header.MajorVersion != 0x00000002)
return null;
header.MinorVersion = data.ReadUInt32();
if (header.MinorVersion != 1)
return null;
header.CacheID = data.ReadUInt32();
header.LastVersionPlayed = data.ReadUInt32();
header.Dummy1 = data.ReadUInt32();
header.Dummy2 = data.ReadUInt32();
header.FileSize = data.ReadUInt32();
header.BlockSize = data.ReadUInt32();
header.BlockCount = data.ReadUInt32();
header.Dummy3 = data.ReadUInt32();
return header;
}
@@ -294,27 +298,13 @@ namespace SabreTools.Serialization.Deserializers
/// <returns>Filled Half-Life No Cache directory header on success, null on error</returns>
private static DirectoryHeader? ParseDirectoryHeader(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryHeader directoryHeader = new DirectoryHeader();
var directoryHeader = data.ReadType<DirectoryHeader>();
directoryHeader.Dummy0 = data.ReadUInt32();
if (directoryHeader == null)
return null;
if (directoryHeader.Dummy0 != 0x00000004)
return null;
directoryHeader.CacheID = data.ReadUInt32();
directoryHeader.LastVersionPlayed = data.ReadUInt32();
directoryHeader.ItemCount = data.ReadUInt32();
directoryHeader.FileCount = data.ReadUInt32();
directoryHeader.ChecksumDataLength = data.ReadUInt32();
directoryHeader.DirectorySize = data.ReadUInt32();
directoryHeader.NameSize = data.ReadUInt32();
directoryHeader.Info1Count = data.ReadUInt32();
directoryHeader.CopyCount = data.ReadUInt32();
directoryHeader.LocalCount = data.ReadUInt32();
directoryHeader.Dummy1 = data.ReadUInt32();
directoryHeader.Dummy2 = data.ReadUInt32();
directoryHeader.Checksum = data.ReadUInt32();
return directoryHeader;
}
@@ -323,20 +313,9 @@ namespace SabreTools.Serialization.Deserializers
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life No Cache directory entry on success, null on error</returns>
private static DirectoryEntry ParseDirectoryEntry(Stream data)
private static DirectoryEntry? ParseDirectoryEntry(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryEntry directoryEntry = new DirectoryEntry();
directoryEntry.NameOffset = data.ReadUInt32();
directoryEntry.ItemSize = data.ReadUInt32();
directoryEntry.ChecksumIndex = data.ReadUInt32();
directoryEntry.DirectoryFlags = (HL_NCF_FLAG)data.ReadUInt32();
directoryEntry.ParentIndex = data.ReadUInt32();
directoryEntry.NextIndex = data.ReadUInt32();
directoryEntry.FirstIndex = data.ReadUInt32();
return directoryEntry;
return data.ReadType<DirectoryEntry>();
}
/// <summary>
@@ -344,14 +323,9 @@ namespace SabreTools.Serialization.Deserializers
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life No Cache directory info 1 entry on success, null on error</returns>
private static DirectoryInfo1Entry ParseDirectoryInfo1Entry(Stream data)
private static DirectoryInfo1Entry? ParseDirectoryInfo1Entry(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryInfo1Entry directoryInfo1Entry = new DirectoryInfo1Entry();
directoryInfo1Entry.Dummy0 = data.ReadUInt32();
return directoryInfo1Entry;
return data.ReadType<DirectoryInfo1Entry>();
}
/// <summary>
@@ -359,14 +333,9 @@ namespace SabreTools.Serialization.Deserializers
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life No Cache directory info 2 entry on success, null on error</returns>
private static DirectoryInfo2Entry ParseDirectoryInfo2Entry(Stream data)
private static DirectoryInfo2Entry? ParseDirectoryInfo2Entry(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryInfo2Entry directoryInfo2Entry = new DirectoryInfo2Entry();
directoryInfo2Entry.Dummy0 = data.ReadUInt32();
return directoryInfo2Entry;
return data.ReadType<DirectoryInfo2Entry>();
}
/// <summary>
@@ -374,14 +343,9 @@ namespace SabreTools.Serialization.Deserializers
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life No Cache directory copy entry on success, null on error</returns>
private static DirectoryCopyEntry ParseDirectoryCopyEntry(Stream data)
private static DirectoryCopyEntry? ParseDirectoryCopyEntry(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryCopyEntry directoryCopyEntry = new DirectoryCopyEntry();
directoryCopyEntry.DirectoryIndex = data.ReadUInt32();
return directoryCopyEntry;
return data.ReadType<DirectoryCopyEntry>();
}
/// <summary>
@@ -389,14 +353,9 @@ namespace SabreTools.Serialization.Deserializers
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life No Cache directory local entry on success, null on error</returns>
private static DirectoryLocalEntry ParseDirectoryLocalEntry(Stream data)
private static DirectoryLocalEntry? ParseDirectoryLocalEntry(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryLocalEntry directoryLocalEntry = new DirectoryLocalEntry();
directoryLocalEntry.DirectoryIndex = data.ReadUInt32();
return directoryLocalEntry;
return data.ReadType<DirectoryLocalEntry>();
}
/// <summary>
@@ -406,14 +365,12 @@ namespace SabreTools.Serialization.Deserializers
/// <returns>Filled Half-Life No Cache unknown header on success, null on error</returns>
private static UnknownHeader? ParseUnknownHeader(Stream data)
{
// TODO: Use marshalling here instead of building
UnknownHeader unknownHeader = new UnknownHeader();
var unknownHeader = data.ReadType<UnknownHeader>();
unknownHeader.Dummy0 = data.ReadUInt32();
if (unknownHeader == null)
return null;
if (unknownHeader.Dummy0 != 0x00000001)
return null;
unknownHeader.Dummy1 = data.ReadUInt32();
if (unknownHeader.Dummy1 != 0x00000000)
return null;
@@ -425,14 +382,9 @@ namespace SabreTools.Serialization.Deserializers
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life No Cache unknown entry on success, null on error</returns>
private static UnknownEntry ParseUnknownEntry(Stream data)
private static UnknownEntry? ParseUnknownEntry(Stream data)
{
// TODO: Use marshalling here instead of building
UnknownEntry unknownEntry = new UnknownEntry();
unknownEntry.Dummy0 = data.ReadUInt32();
return unknownEntry;
return data.ReadType<UnknownEntry>();
}
/// <summary>
@@ -442,15 +394,13 @@ namespace SabreTools.Serialization.Deserializers
/// <returns>Filled Half-Life No Cache checksum header on success, null on error</returns>
private static ChecksumHeader? ParseChecksumHeader(Stream data)
{
// TODO: Use marshalling here instead of building
ChecksumHeader checksumHeader = new ChecksumHeader();
var checksumHeader = data.ReadType<ChecksumHeader>();
checksumHeader.Dummy0 = data.ReadUInt32();
if (checksumHeader == null)
return null;
if (checksumHeader.Dummy0 != 0x00000001)
return null;
checksumHeader.ChecksumSize = data.ReadUInt32();
return checksumHeader;
}
@@ -461,20 +411,15 @@ namespace SabreTools.Serialization.Deserializers
/// <returns>Filled Half-Life No Cache checksum map header on success, null on error</returns>
private static ChecksumMapHeader? ParseChecksumMapHeader(Stream data)
{
// TODO: Use marshalling here instead of building
ChecksumMapHeader checksumMapHeader = new ChecksumMapHeader();
var checksumMapHeader = data.ReadType<ChecksumMapHeader>();
checksumMapHeader.Dummy0 = data.ReadUInt32();
if (checksumMapHeader == null)
return null;
if (checksumMapHeader.Dummy0 != 0x14893721)
return null;
checksumMapHeader.Dummy1 = data.ReadUInt32();
if (checksumMapHeader.Dummy1 != 0x00000001)
return null;
checksumMapHeader.ItemCount = data.ReadUInt32();
checksumMapHeader.ChecksumCount = data.ReadUInt32();
return checksumMapHeader;
}
@@ -483,15 +428,9 @@ namespace SabreTools.Serialization.Deserializers
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life No Cache checksum map entry on success, null on error</returns>
private static ChecksumMapEntry ParseChecksumMapEntry(Stream data)
private static ChecksumMapEntry? ParseChecksumMapEntry(Stream data)
{
// TODO: Use marshalling here instead of building
ChecksumMapEntry checksumMapEntry = new ChecksumMapEntry();
checksumMapEntry.ChecksumCount = data.ReadUInt32();
checksumMapEntry.FirstChecksumIndex = data.ReadUInt32();
return checksumMapEntry;
return data.ReadType<ChecksumMapEntry>();
}
/// <summary>
@@ -499,14 +438,9 @@ namespace SabreTools.Serialization.Deserializers
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life No Cache checksum entry on success, null on error</returns>
private static ChecksumEntry ParseChecksumEntry(Stream data)
private static ChecksumEntry? ParseChecksumEntry(Stream data)
{
// TODO: Use marshalling here instead of building
ChecksumEntry checksumEntry = new ChecksumEntry();
checksumEntry.Checksum = data.ReadUInt32();
return checksumEntry;
return data.ReadType<ChecksumEntry>();
}
}
}
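The header parsers in this file keep their sanity checks but now run them after one marshalled read, so both a truncated stream and a header with the wrong constants surface as null. Pieced together from the hunk above, the post-change ParseHeader presumably reads roughly like this (the exact surviving checks are an inference; Header and ReadType come from the surrounding file):

private static Header? ParseHeader(Stream data)
{
    var header = data.ReadType<Header>();
    if (header == null)
        return null;

    if (header.Dummy0 != 0x00000001)
        return null;
    if (header.MajorVersion != 0x00000002)
        return null;
    if (header.MinorVersion != 1)
        return null;

    return header;
}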

View File

@@ -1,7 +1,6 @@
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using SabreTools.IO.Extensions;
using SabreTools.Models.NewExecutable;
using static SabreTools.Models.NewExecutable.Constants;
@@ -214,48 +213,13 @@ namespace SabreTools.Serialization.Deserializers
/// <returns>Filled executable header on success, null on error</returns>
public static ExecutableHeader? ParseExecutableHeader(Stream data)
{
// TODO: Use marshalling here instead of building
var header = new ExecutableHeader();
var header = data.ReadType<ExecutableHeader>();
byte[]? magic = data.ReadBytes(2);
if (magic == null)
if (header == null)
return null;
header.Magic = Encoding.ASCII.GetString(magic);
if (header.Magic != SignatureString)
return null;
header.LinkerVersion = data.ReadByteValue();
header.LinkerRevision = data.ReadByteValue();
header.EntryTableOffset = data.ReadUInt16();
header.EntryTableSize = data.ReadUInt16();
header.CrcChecksum = data.ReadUInt32();
header.FlagWord = (HeaderFlag)data.ReadUInt16();
header.AutomaticDataSegmentNumber = data.ReadUInt16();
header.InitialHeapAlloc = data.ReadUInt16();
header.InitialStackAlloc = data.ReadUInt16();
header.InitialCSIPSetting = data.ReadUInt32();
header.InitialSSSPSetting = data.ReadUInt32();
header.FileSegmentCount = data.ReadUInt16();
header.ModuleReferenceTableSize = data.ReadUInt16();
header.NonResidentNameTableSize = data.ReadUInt16();
header.SegmentTableOffset = data.ReadUInt16();
header.ResourceTableOffset = data.ReadUInt16();
header.ResidentNameTableOffset = data.ReadUInt16();
header.ModuleReferenceTableOffset = data.ReadUInt16();
header.ImportedNamesTableOffset = data.ReadUInt16();
header.NonResidentNamesTableOffset = data.ReadUInt32();
header.MovableEntriesCount = data.ReadUInt16();
header.SegmentAlignmentShiftCount = data.ReadUInt16();
header.ResourceEntriesCount = data.ReadUInt16();
header.TargetOperatingSystem = (OperatingSystem)data.ReadByteValue();
header.AdditionalFlags = (OS2Flag)data.ReadByteValue();
header.ReturnThunkOffset = data.ReadUInt16();
header.SegmentReferenceThunkOffset = data.ReadUInt16();
header.MinCodeSwapAreaSize = data.ReadUInt16();
header.WindowsSDKRevision = data.ReadByteValue();
header.WindowsSDKVersion = data.ReadByteValue();
return header;
}
@@ -265,31 +229,40 @@ namespace SabreTools.Serialization.Deserializers
/// <param name="data">Stream to parse</param>
/// <param name="count">Number of segment table entries to read</param>
/// <returns>Filled segment table on success, null on error</returns>
public static SegmentTableEntry[] ParseSegmentTable(Stream data, int count)
public static SegmentTableEntry[]? ParseSegmentTable(Stream data, int count)
{
// TODO: Use marshalling here instead of building
var segmentTable = new SegmentTableEntry[count];
for (int i = 0; i < count; i++)
{
var entry = new SegmentTableEntry();
entry.Offset = data.ReadUInt16();
entry.Length = data.ReadUInt16();
entry.FlagWord = (SegmentTableEntryFlag)data.ReadUInt16();
entry.MinimumAllocationSize = data.ReadUInt16();
var entry = ParseSegmentTableEntry(data);
if (entry == null)
return null;
segmentTable[i] = entry;
}
return segmentTable;
}
/// <summary>
/// Parse a Stream into a segment table entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled segment table entry on success, null on error</returns>
public static SegmentTableEntry? ParseSegmentTableEntry(Stream data)
{
return data.ReadType<SegmentTableEntry>();
}
/// <summary>
/// Parse a Stream into a resource table
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="count">Number of resource table entries to read</param>
/// <returns>Filled resource table on success, null on error</returns>
public static ResourceTable ParseResourceTable(Stream data, int count)
public static ResourceTable? ParseResourceTable(Stream data, int count)
{
long initialOffset = data.Position;
@@ -308,12 +281,10 @@ namespace SabreTools.Serialization.Deserializers
for (int j = 0; j < entry.ResourceCount; j++)
{
// TODO: Should we read and store the resource data?
var resource = new ResourceTypeResourceEntry();
resource.Offset = data.ReadUInt16();
resource.Length = data.ReadUInt16();
resource.FlagWord = (ResourceTypeResourceFlag)data.ReadUInt16();
resource.ResourceID = data.ReadUInt16();
resource.Reserved = data.ReadUInt32();
var resource = ParseResourceTypeResourceEntry(data);
if (resource == null)
return null;
entry.Resources[j] = resource;
}
resourceTable.ResourceTypes[i] = entry;
@@ -339,82 +310,157 @@ namespace SabreTools.Serialization.Deserializers
{
int stringOffset = (int)(stringOffsets[i] + initialOffset);
data.Seek(stringOffset, SeekOrigin.Begin);
var str = new ResourceTypeAndNameString();
str.Length = data.ReadByteValue();
str.Text = data.ReadBytes(str.Length);
var str = ParseResourceTypeAndNameString(data);
if (str == null)
return null;
resourceTable.TypeAndNameStrings[stringOffsets[i]] = str;
}
return resourceTable;
}
/// <summary>
/// Parse a Stream into a resource entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled resource entry on success, null on error</returns>
public static ResourceTypeResourceEntry? ParseResourceTypeResourceEntry(Stream data)
{
// TODO: Should we read and store the resource data?
return data.ReadType<ResourceTypeResourceEntry>();
}
/// <summary>
/// Parse a Stream into a resource type and name string
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled resource type and name string on success, null on error</returns>
public static ResourceTypeAndNameString? ParseResourceTypeAndNameString(Stream data)
{
// TODO: Use marshalling here instead of building
var str = new ResourceTypeAndNameString();
str.Length = data.ReadByteValue();
str.Text = data.ReadBytes(str.Length);
return str;
}
/// <summary>
/// Parse a Stream into a resident-name table
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="endOffset">First address not part of the resident-name table</param>
/// <returns>Filled resident-name table on success, null on error</returns>
public static ResidentNameTableEntry[] ParseResidentNameTable(Stream data, int endOffset)
public static ResidentNameTableEntry[]? ParseResidentNameTable(Stream data, int endOffset)
{
// TODO: Use marshalling here instead of building
var residentNameTable = new List<ResidentNameTableEntry>();
while (data.Position < endOffset)
{
var entry = new ResidentNameTableEntry();
entry.Length = data.ReadByteValue();
entry.NameString = data.ReadBytes(entry.Length);
entry.OrdinalNumber = data.ReadUInt16();
var entry = ParseResidentNameTableEntry(data);
if (entry == null)
return null;
residentNameTable.Add(entry);
}
return [.. residentNameTable];
}
/// <summary>
/// Parse a Stream into a resident-name table entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled resident-name table entry on success, null on error</returns>
public static ResidentNameTableEntry? ParseResidentNameTableEntry(Stream data)
{
// TODO: Use marshalling here instead of building
var entry = new ResidentNameTableEntry();
entry.Length = data.ReadByteValue();
entry.NameString = data.ReadBytes(entry.Length);
entry.OrdinalNumber = data.ReadUInt16();
return entry;
}
/// <summary>
/// Parse a Stream into a module-reference table
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="count">Number of module-reference table entries to read</param>
/// <returns>Filled module-reference table on success, null on error</returns>
public static ModuleReferenceTableEntry[] ParseModuleReferenceTable(Stream data, int count)
public static ModuleReferenceTableEntry[]? ParseModuleReferenceTable(Stream data, int count)
{
// TODO: Use marshalling here instead of building
var moduleReferenceTable = new ModuleReferenceTableEntry[count];
for (int i = 0; i < count; i++)
{
var entry = new ModuleReferenceTableEntry();
entry.Offset = data.ReadUInt16();
var entry = ParseModuleReferenceTableEntry(data);
if (entry == null)
return null;
moduleReferenceTable[i] = entry;
}
return moduleReferenceTable;
}
/// <summary>
/// Parse a Stream into a module-reference table entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled module-reference table entry on success, null on error</returns>
public static ModuleReferenceTableEntry? ParseModuleReferenceTableEntry(Stream data)
{
return data.ReadType<ModuleReferenceTableEntry>();
}
/// <summary>
/// Parse a Stream into an imported-name table
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="endOffset">First address not part of the imported-name table</param>
/// <returns>Filled imported-name table on success, null on error</returns>
public static Dictionary<ushort, ImportedNameTableEntry?> ParseImportedNameTable(Stream data, int endOffset)
public static Dictionary<ushort, ImportedNameTableEntry>? ParseImportedNameTable(Stream data, int endOffset)
{
// TODO: Use marshalling here instead of building
var importedNameTable = new Dictionary<ushort, ImportedNameTableEntry?>();
var importedNameTable = new Dictionary<ushort, ImportedNameTableEntry>();
while (data.Position < endOffset)
{
ushort currentOffset = (ushort)data.Position;
var entry = new ImportedNameTableEntry();
entry.Length = data.ReadByteValue();
entry.NameString = data.ReadBytes(entry.Length);
var entry = ParseImportedNameTableEntry(data);
if (entry == null)
return null;
importedNameTable[currentOffset] = entry;
}
return importedNameTable;
}
/// <summary>
/// Parse a Stream into an imported-name table entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled imported-name table entry on success, null on error</returns>
public static ImportedNameTableEntry? ParseImportedNameTableEntry(Stream data)
{
// TODO: Use marshalling here instead of building
var entry = new ImportedNameTableEntry();
entry.Length = data.ReadByteValue();
entry.NameString = data.ReadBytes(entry.Length);
return entry;
}
/// <summary>
/// Parse a Stream into an entry table
/// </summary>
@@ -460,21 +506,38 @@ namespace SabreTools.Serialization.Deserializers
/// <param name="data">Stream to parse</param>
/// <param name="endOffset">First address not part of the nonresident-name table</param>
/// <returns>Filled nonresident-name table on success, null on error</returns>
public static NonResidentNameTableEntry[] ParseNonResidentNameTable(Stream data, int endOffset)
public static NonResidentNameTableEntry[]? ParseNonResidentNameTable(Stream data, int endOffset)
{
// TODO: Use marshalling here instead of building
var residentNameTable = new List<NonResidentNameTableEntry>();
while (data.Position < endOffset)
{
var entry = new NonResidentNameTableEntry();
entry.Length = data.ReadByteValue();
entry.NameString = data.ReadBytes(entry.Length);
entry.OrdinalNumber = data.ReadUInt16();
var entry = ParseNonResidentNameTableEntry(data);
if (entry == null)
return null;
residentNameTable.Add(entry);
}
return [.. residentNameTable];
}
/// <summary>
/// Parse a Stream into a nonresident-name table entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled nonresident-name table entry on success, null on error</returns>
public static NonResidentNameTableEntry? ParseNonResidentNameTableEntry(Stream data)
{
// TODO: Use marshalling here instead of building
var entry = new NonResidentNameTableEntry();
entry.Length = data.ReadByteValue();
entry.NameString = data.ReadBytes(entry.Length);
entry.OrdinalNumber = data.ReadUInt16();
return entry;
}
}
}
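Not everything here moves to ReadType: the resident-name, imported-name, and type-and-name string entries are length-prefixed, so their size is only known after the first byte is read, which is why their hand-written builders and TODO markers survive. A standalone sketch of that record shape (names invented):

using System.IO;

// A length-prefixed record: one size byte, then that many payload bytes.
// Fixed-layout marshalling cannot express this, so it stays hand-parsed.
public class LengthPrefixedEntrySketch
{
    public byte Length;
    public byte[]? Payload;

    public static LengthPrefixedEntrySketch? Parse(Stream data)
    {
        int length = data.ReadByte();
        if (length < 0)
            return null;

        byte[] payload = new byte[length];
        int offset = 0;
        while (offset < length)
        {
            int read = data.Read(payload, offset, length - offset);
            if (read == 0)
                return null;
            offset += read;
        }

        return new LengthPrefixedEntrySketch
        {
            Length = (byte)length,
            Payload = payload,
        };
    }
}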

View File

@@ -103,6 +103,9 @@ namespace SabreTools.Serialization.Deserializers
while (data.Position - fileAllocationTableOffset < header.FileAllocationTableLength)
{
var entry = ParseFileAllocationTableEntry(data);
if (entry == null)
return null;
fileAllocationTable.Add(entry);
}
@@ -122,58 +125,9 @@ namespace SabreTools.Serialization.Deserializers
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled common header on success, null on error</returns>
private static CommonHeader ParseCommonHeader(Stream data)
private static CommonHeader? ParseCommonHeader(Stream data)
{
// TODO: Use marshalling here instead of building
CommonHeader commonHeader = new CommonHeader();
byte[]? gameTitle = data.ReadBytes(12);
if (gameTitle != null)
commonHeader.GameTitle = Encoding.ASCII.GetString(gameTitle).TrimEnd('\0');
commonHeader.GameCode = data.ReadUInt32();
byte[]? makerCode = data.ReadBytes(2);
if (makerCode != null)
commonHeader.MakerCode = Encoding.ASCII.GetString(bytes: makerCode).TrimEnd('\0');
commonHeader.UnitCode = (Unitcode)data.ReadByteValue();
commonHeader.EncryptionSeedSelect = data.ReadByteValue();
commonHeader.DeviceCapacity = data.ReadByteValue();
commonHeader.Reserved1 = data.ReadBytes(7);
commonHeader.GameRevision = data.ReadUInt16();
commonHeader.RomVersion = data.ReadByteValue();
commonHeader.InternalFlags = data.ReadByteValue();
commonHeader.ARM9RomOffset = data.ReadUInt32();
commonHeader.ARM9EntryAddress = data.ReadUInt32();
commonHeader.ARM9LoadAddress = data.ReadUInt32();
commonHeader.ARM9Size = data.ReadUInt32();
commonHeader.ARM7RomOffset = data.ReadUInt32();
commonHeader.ARM7EntryAddress = data.ReadUInt32();
commonHeader.ARM7LoadAddress = data.ReadUInt32();
commonHeader.ARM7Size = data.ReadUInt32();
commonHeader.FileNameTableOffset = data.ReadUInt32();
commonHeader.FileNameTableLength = data.ReadUInt32();
commonHeader.FileAllocationTableOffset = data.ReadUInt32();
commonHeader.FileAllocationTableLength = data.ReadUInt32();
commonHeader.ARM9OverlayOffset = data.ReadUInt32();
commonHeader.ARM9OverlayLength = data.ReadUInt32();
commonHeader.ARM7OverlayOffset = data.ReadUInt32();
commonHeader.ARM7OverlayLength = data.ReadUInt32();
commonHeader.NormalCardControlRegisterSettings = data.ReadUInt32();
commonHeader.SecureCardControlRegisterSettings = data.ReadUInt32();
commonHeader.IconBannerOffset = data.ReadUInt32();
commonHeader.SecureAreaCRC = data.ReadUInt16();
commonHeader.SecureTransferTimeout = data.ReadUInt16();
commonHeader.ARM9Autoload = data.ReadUInt32();
commonHeader.ARM7Autoload = data.ReadUInt32();
commonHeader.SecureDisable = data.ReadBytes(8);
commonHeader.NTRRegionRomSize = data.ReadUInt32();
commonHeader.HeaderSize = data.ReadUInt32();
commonHeader.Reserved2 = data.ReadBytes(56);
commonHeader.NintendoLogo = data.ReadBytes(156);
commonHeader.NintendoLogoCRC = data.ReadUInt16();
commonHeader.HeaderCRC = data.ReadUInt16();
commonHeader.DebuggerReserved = data.ReadBytes(0x20);
return commonHeader;
return data.ReadType<CommonHeader>();
}
/// <summary>
@@ -181,73 +135,9 @@ namespace SabreTools.Serialization.Deserializers
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled extended DSi header on success, null on error</returns>
private static ExtendedDSiHeader ParseExtendedDSiHeader(Stream data)
private static ExtendedDSiHeader? ParseExtendedDSiHeader(Stream data)
{
// TODO: Use marshalling here instead of building
ExtendedDSiHeader extendedDSiHeader = new ExtendedDSiHeader();
extendedDSiHeader.GlobalMBK15Settings = new uint[5];
for (int i = 0; i < 5; i++)
{
extendedDSiHeader.GlobalMBK15Settings[i] = data.ReadUInt32();
}
extendedDSiHeader.LocalMBK68SettingsARM9 = new uint[3];
for (int i = 0; i < 3; i++)
{
extendedDSiHeader.LocalMBK68SettingsARM9[i] = data.ReadUInt32();
}
extendedDSiHeader.LocalMBK68SettingsARM7 = new uint[3];
for (int i = 0; i < 3; i++)
{
extendedDSiHeader.LocalMBK68SettingsARM7[i] = data.ReadUInt32();
}
extendedDSiHeader.GlobalMBK9Setting = data.ReadUInt32();
extendedDSiHeader.RegionFlags = data.ReadUInt32();
extendedDSiHeader.AccessControl = data.ReadUInt32();
extendedDSiHeader.ARM7SCFGEXTMask = data.ReadUInt32();
extendedDSiHeader.ReservedFlags = data.ReadUInt32();
extendedDSiHeader.ARM9iRomOffset = data.ReadUInt32();
extendedDSiHeader.Reserved3 = data.ReadUInt32();
extendedDSiHeader.ARM9iLoadAddress = data.ReadUInt32();
extendedDSiHeader.ARM9iSize = data.ReadUInt32();
extendedDSiHeader.ARM7iRomOffset = data.ReadUInt32();
extendedDSiHeader.Reserved4 = data.ReadUInt32();
extendedDSiHeader.ARM7iLoadAddress = data.ReadUInt32();
extendedDSiHeader.ARM7iSize = data.ReadUInt32();
extendedDSiHeader.DigestNTRRegionOffset = data.ReadUInt32();
extendedDSiHeader.DigestNTRRegionLength = data.ReadUInt32();
extendedDSiHeader.DigestTWLRegionOffset = data.ReadUInt32();
extendedDSiHeader.DigestTWLRegionLength = data.ReadUInt32();
extendedDSiHeader.DigestSectorHashtableRegionOffset = data.ReadUInt32();
extendedDSiHeader.DigestSectorHashtableRegionLength = data.ReadUInt32();
extendedDSiHeader.DigestBlockHashtableRegionOffset = data.ReadUInt32();
extendedDSiHeader.DigestBlockHashtableRegionLength = data.ReadUInt32();
extendedDSiHeader.DigestSectorSize = data.ReadUInt32();
extendedDSiHeader.DigestBlockSectorCount = data.ReadUInt32();
extendedDSiHeader.IconBannerSize = data.ReadUInt32();
extendedDSiHeader.Unknown1 = data.ReadUInt32();
extendedDSiHeader.ModcryptArea1Offset = data.ReadUInt32();
extendedDSiHeader.ModcryptArea1Size = data.ReadUInt32();
extendedDSiHeader.ModcryptArea2Offset = data.ReadUInt32();
extendedDSiHeader.ModcryptArea2Size = data.ReadUInt32();
extendedDSiHeader.TitleID = data.ReadBytes(8);
extendedDSiHeader.DSiWarePublicSavSize = data.ReadUInt32();
extendedDSiHeader.DSiWarePrivateSavSize = data.ReadUInt32();
extendedDSiHeader.ReservedZero = data.ReadBytes(176);
extendedDSiHeader.Unknown2 = data.ReadBytes(0x10);
extendedDSiHeader.ARM9WithSecureAreaSHA1HMACHash = data.ReadBytes(20);
extendedDSiHeader.ARM7SHA1HMACHash = data.ReadBytes(20);
extendedDSiHeader.DigestMasterSHA1HMACHash = data.ReadBytes(20);
extendedDSiHeader.BannerSHA1HMACHash = data.ReadBytes(20);
extendedDSiHeader.ARM9iDecryptedSHA1HMACHash = data.ReadBytes(20);
extendedDSiHeader.ARM7iDecryptedSHA1HMACHash = data.ReadBytes(20);
extendedDSiHeader.Reserved5 = data.ReadBytes(40);
extendedDSiHeader.ARM9NoSecureAreaSHA1HMACHash = data.ReadBytes(20);
extendedDSiHeader.Reserved6 = data.ReadBytes(2636);
extendedDSiHeader.ReservedAndUnchecked = data.ReadBytes(0x180);
extendedDSiHeader.RSASignature = data.ReadBytes(0x80);
return extendedDSiHeader;
return data.ReadType<ExtendedDSiHeader>();
}
/// <summary>
@@ -255,10 +145,10 @@ namespace SabreTools.Serialization.Deserializers
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled name table on success, null on error</returns>
private static NameTable ParseNameTable(Stream data)
private static NameTable? ParseNameTable(Stream data)
{
// TODO: Use marshalling here instead of building
NameTable nameTable = new NameTable();
var nameTable = new NameTable();
// Create a variable-length table
var folderAllocationTable = new List<FolderAllocationTableEntry>();
@@ -266,6 +156,9 @@ namespace SabreTools.Serialization.Deserializers
while (entryCount > 0)
{
var entry = ParseFolderAllocationTableEntry(data);
if (entry == null)
return null;
folderAllocationTable.Add(entry);
// If we have the root entry
@@ -301,17 +194,9 @@ namespace SabreTools.Serialization.Deserializers
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled folder allocation table entry on success, null on error</returns>
private static FolderAllocationTableEntry ParseFolderAllocationTableEntry(Stream data)
private static FolderAllocationTableEntry? ParseFolderAllocationTableEntry(Stream data)
{
// TODO: Use marshalling here instead of building
FolderAllocationTableEntry entry = new FolderAllocationTableEntry();
entry.StartOffset = data.ReadUInt32();
entry.FirstFileIndex = data.ReadUInt16();
entry.ParentFolderIndex = data.ReadByteValue();
entry.Unknown = data.ReadByteValue();
return entry;
return data.ReadType<FolderAllocationTableEntry>();
}
/// <summary>
@@ -322,7 +207,7 @@ namespace SabreTools.Serialization.Deserializers
private static NameListEntry? ParseNameListEntry(Stream data)
{
// TODO: Use marshalling here instead of building
NameListEntry entry = new NameListEntry();
var entry = new NameListEntry();
byte flagAndSize = data.ReadByteValue();
if (flagAndSize == 0xFF)
@@ -349,15 +234,9 @@ namespace SabreTools.Serialization.Deserializers
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled file allocation table entry on success, null on error</returns>
private static FileAllocationTableEntry ParseFileAllocationTableEntry(Stream data)
private static FileAllocationTableEntry? ParseFileAllocationTableEntry(Stream data)
{
// TODO: Use marshalling here instead of building
FileAllocationTableEntry entry = new FileAllocationTableEntry();
entry.StartOffset = data.ReadUInt32();
entry.EndOffset = data.ReadUInt32();
return entry;
return data.ReadType<FileAllocationTableEntry>();
}
}
}

View File

@@ -1,5 +1,4 @@
using System.IO;
using System.Text;
using SabreTools.IO.Extensions;
using SabreTools.Models.PAK;
using static SabreTools.Models.PAK.Constants;
@@ -54,6 +53,9 @@ namespace SabreTools.Serialization.Deserializers
for (int i = 0; i < file.DirectoryItems.Length; i++)
{
var directoryItem = ParseDirectoryItem(data);
if (directoryItem == null)
return null;
file.DirectoryItems[i] = directoryItem;
}
@@ -69,20 +71,13 @@ namespace SabreTools.Serialization.Deserializers
/// <returns>Filled Half-Life Package header on success, null on error</returns>
private static Header? ParseHeader(Stream data)
{
// TODO: Use marshalling here instead of building
Header header = new Header();
var header = data.ReadType<Header>();
byte[]? signature = data.ReadBytes(4);
if (signature == null)
if (header == null)
return null;
header.Signature = Encoding.ASCII.GetString(signature);
if (header.Signature != SignatureString)
return null;
header.DirectoryOffset = data.ReadUInt32();
header.DirectoryLength = data.ReadUInt32();
return header;
}
@@ -91,18 +86,9 @@ namespace SabreTools.Serialization.Deserializers
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Package directory item on success, null on error</returns>
private static DirectoryItem ParseDirectoryItem(Stream data)
private static DirectoryItem? ParseDirectoryItem(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryItem directoryItem = new DirectoryItem();
byte[]? itemName = data.ReadBytes(56);
if (itemName != null)
directoryItem.ItemName = Encoding.ASCII.GetString(itemName).TrimEnd('\0');
directoryItem.ItemOffset = data.ReadUInt32();
directoryItem.ItemLength = data.ReadUInt32();
return directoryItem;
return data.ReadType<DirectoryItem>();
}
}
}
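The PAK DirectoryItem case shows why direct marshalling needs fixed-width string handling: the old code read 56 bytes and trimmed the trailing nulls. Below is a hedged sketch of how such a field could be declared so that ReadType<T>() produces the same result; the attribute usage is an assumption about the model's layout, not a quote of SabreTools.Models.PAK.DirectoryItem.

using System.Runtime.InteropServices;

// Illustrative declaration only; field names mirror the old parser above
[StructLayout(LayoutKind.Sequential, CharSet = CharSet.Ansi)]
public class DirectoryItemSketch
{
    // Fixed 56-byte, null-padded ANSI name, equivalent to the old
    // ReadBytes(56) + GetString + TrimEnd('\0') sequence
    [MarshalAs(UnmanagedType.ByValTStr, SizeConst = 56)]
    public string? ItemName;

    public uint ItemOffset;
    public uint ItemLength;
}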

View File

@@ -1,4 +1,5 @@
using System.IO;
using System.Runtime.InteropServices;
using System.Text;
using SabreTools.IO.Extensions;
using SabreTools.Models.PFF;
@@ -92,47 +93,27 @@ namespace SabreTools.Serialization.Deserializers
/// <returns>Filled header on success, null on error</returns>
private static Header? ParseHeader(Stream data)
{
// TODO: Use marshalling here instead of building
Header header = new Header();
header.HeaderSize = data.ReadUInt32();
byte[]? signature = data.ReadBytes(4);
if (signature == null)
var header = data.ReadType<Header>();
if (header == null)
return null;
header.Signature = Encoding.ASCII.GetString(signature);
header.NumberOfFiles = data.ReadUInt32();
header.FileSegmentSize = data.ReadUInt32();
switch (header.Signature)
return header.Signature switch
{
case Version0SignatureString:
if (header.FileSegmentSize != Version0HSegmentSize)
return null;
break;
Version0SignatureString when header.FileSegmentSize != Version0HSegmentSize => null,
Version0SignatureString => header,
case Version2SignatureString:
if (header.FileSegmentSize != Version2SegmentSize)
return null;
break;
Version2SignatureString when header.FileSegmentSize != Version2SegmentSize => null,
Version2SignatureString => header,
// Version 3 can sometimes have Version 2 segment sizes
case Version3SignatureString:
if (header.FileSegmentSize != Version2SegmentSize && header.FileSegmentSize != Version3SegmentSize)
return null;
break;
Version3SignatureString when header.FileSegmentSize != Version2SegmentSize
&& header.FileSegmentSize != Version3SegmentSize => null,
Version3SignatureString => header,
case Version4SignatureString:
if (header.FileSegmentSize != Version4SegmentSize)
return null;
break;
Version4SignatureString when header.FileSegmentSize != Version4SegmentSize => null,
Version4SignatureString => header,
default:
return null;
}
header.FileListOffset = data.ReadUInt32();
return header;
_ => null,
};
}
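ParseHeader above collapses the old switch statement into a switch expression in which each signature case is paired with a when guard that rejects a mismatched segment size before the plain case returns the header. The same shape in isolation, using placeholder signature strings and sizes rather than the real PFF constants:

// Placeholder values; the real constants are defined alongside SabreTools.Models.PFF
private static bool IsValidSegment(string signature, uint segmentSize)
{
    return signature switch
    {
        "PFF2" when segmentSize != 0x20 => false,
        "PFF2" => true,

        // Version 3 archives may carry either segment size
        "PFF3" when segmentSize != 0x20 && segmentSize != 0x24 => false,
        "PFF3" => true,

        "PFF4" when segmentSize != 0x28 => false,
        "PFF4" => true,

        _ => false,
    };
}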
/// <summary>
@@ -140,18 +121,9 @@ namespace SabreTools.Serialization.Deserializers
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled footer on success, null on error</returns>
private static Footer ParseFooter(Stream data)
private static Footer? ParseFooter(Stream data)
{
// TODO: Use marshalling here instead of building
Footer footer = new Footer();
footer.SystemIP = data.ReadUInt32();
footer.Reserved = data.ReadUInt32();
byte[]? kingTag = data.ReadBytes(4);
if (kingTag != null)
footer.KingTag = Encoding.ASCII.GetString(kingTag);
return footer;
return data.ReadType<Footer>();
}
/// <summary>
@@ -163,7 +135,7 @@ namespace SabreTools.Serialization.Deserializers
private static Segment ParseSegment(Stream data, uint segmentSize)
{
// TODO: Use marshalling here instead of building
Segment segment = new Segment();
var segment = new Segment();
segment.Deleted = data.ReadUInt32();
segment.FileLocation = data.ReadUInt32();

View File

@@ -106,25 +106,12 @@ namespace SabreTools.Serialization.Deserializers
/// <returns>Filled disc information unit header on success, null on error</returns>
private static DiscInformationUnitHeader? ParseDiscInformationUnitHeader(Stream data)
{
// TODO: Use marshalling here instead of building
var header = new DiscInformationUnitHeader();
var header = data.ReadType<DiscInformationUnitHeader>();
// We only accept Disc Information units, not Emergency Brake or other
byte[]? dic = data.ReadBytes(2);
if (dic == null)
if (header?.DiscInformationIdentifier != "DI")
return null;
header.DiscInformationIdentifier = Encoding.ASCII.GetString(dic);
if (header.DiscInformationIdentifier != "DI")
return null;
header.DiscInformationFormat = data.ReadByteValue();
header.NumberOfUnitsInBlock = data.ReadByteValue();
header.Reserved0 = data.ReadByteValue();
header.SequenceNumber = data.ReadByteValue();
header.BytesInUse = data.ReadByteValue();
header.Reserved1 = data.ReadByteValue();
return header;
}
@@ -165,17 +152,9 @@ namespace SabreTools.Serialization.Deserializers
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled disc information unit trailer on success, null on error</returns>
private static DiscInformationUnitTrailer ParseDiscInformationUnitTrailer(Stream data)
private static DiscInformationUnitTrailer? ParseDiscInformationUnitTrailer(Stream data)
{
// TODO: Use marshalling here instead of building
var trailer = new DiscInformationUnitTrailer();
trailer.DiscManufacturerID = data.ReadBytes(6);
trailer.MediaTypeID = data.ReadBytes(3);
trailer.TimeStamp = data.ReadUInt16();
trailer.ProductRevisionNumber = data.ReadByteValue();
return trailer;
return data.ReadType<DiscInformationUnitTrailer>();
}
#endregion

View File

@@ -203,7 +203,7 @@ namespace SabreTools.Serialization.Deserializers
// Try to parse the debug table
data.Seek(debugTableAddress, SeekOrigin.Begin);
int endOffset = (int)(debugTableAddress + optionalHeader.Debug.Size);
var debugTable = ParseDebugTable(data, endOffset, executable.SectionTable);
var debugTable = ParseDebugTable(data, endOffset);
if (debugTable == null)
return null;
@@ -291,20 +291,9 @@ namespace SabreTools.Serialization.Deserializers
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled executable header on success, null on error</returns>
public static COFFFileHeader ParseCOFFFileHeader(Stream data)
public static COFFFileHeader? ParseCOFFFileHeader(Stream data)
{
// TODO: Use marshalling here instead of building
var fileHeader = new COFFFileHeader();
fileHeader.Machine = (MachineType)data.ReadUInt16();
fileHeader.NumberOfSections = data.ReadUInt16();
fileHeader.TimeDateStamp = data.ReadUInt32();
fileHeader.PointerToSymbolTable = data.ReadUInt32();
fileHeader.NumberOfSymbols = data.ReadUInt32();
fileHeader.SizeOfOptionalHeader = data.ReadUInt16();
fileHeader.Characteristics = (Characteristics)data.ReadUInt16();
return fileHeader;
return data.ReadType<COFFFileHeader>();
}
/// <summary>
@@ -380,97 +369,52 @@ namespace SabreTools.Serialization.Deserializers
#region Data Directories
if (optionalHeader.NumberOfRvaAndSizes >= 1 && data.Position - initialOffset < optionalSize)
{
optionalHeader.ExportTable = new DataDirectory();
optionalHeader.ExportTable.VirtualAddress = data.ReadUInt32();
optionalHeader.ExportTable.Size = data.ReadUInt32();
}
optionalHeader.ExportTable = data.ReadType<DataDirectory>();
if (optionalHeader.NumberOfRvaAndSizes >= 2 && data.Position - initialOffset < optionalSize)
{
optionalHeader.ImportTable = new DataDirectory();
optionalHeader.ImportTable.VirtualAddress = data.ReadUInt32();
optionalHeader.ImportTable.Size = data.ReadUInt32();
}
optionalHeader.ImportTable = data.ReadType<DataDirectory>();
if (optionalHeader.NumberOfRvaAndSizes >= 3 && data.Position - initialOffset < optionalSize)
{
optionalHeader.ResourceTable = new DataDirectory();
optionalHeader.ResourceTable.VirtualAddress = data.ReadUInt32();
optionalHeader.ResourceTable.Size = data.ReadUInt32();
}
optionalHeader.ResourceTable = data.ReadType<DataDirectory>();
if (optionalHeader.NumberOfRvaAndSizes >= 4 && data.Position - initialOffset < optionalSize)
{
optionalHeader.ExceptionTable = new DataDirectory();
optionalHeader.ExceptionTable.VirtualAddress = data.ReadUInt32();
optionalHeader.ExceptionTable.Size = data.ReadUInt32();
}
optionalHeader.ExceptionTable = data.ReadType<DataDirectory>();
if (optionalHeader.NumberOfRvaAndSizes >= 5 && data.Position - initialOffset < optionalSize)
{
optionalHeader.CertificateTable = new DataDirectory();
optionalHeader.CertificateTable.VirtualAddress = data.ReadUInt32();
optionalHeader.CertificateTable.Size = data.ReadUInt32();
}
optionalHeader.CertificateTable = data.ReadType<DataDirectory>();
if (optionalHeader.NumberOfRvaAndSizes >= 6 && data.Position - initialOffset < optionalSize)
{
optionalHeader.BaseRelocationTable = new DataDirectory();
optionalHeader.BaseRelocationTable.VirtualAddress = data.ReadUInt32();
optionalHeader.BaseRelocationTable.Size = data.ReadUInt32();
}
optionalHeader.BaseRelocationTable = data.ReadType<DataDirectory>();
if (optionalHeader.NumberOfRvaAndSizes >= 7 && data.Position - initialOffset < optionalSize)
{
optionalHeader.Debug = new DataDirectory();
optionalHeader.Debug.VirtualAddress = data.ReadUInt32();
optionalHeader.Debug.Size = data.ReadUInt32();
}
optionalHeader.Debug = data.ReadType<DataDirectory>();
if (optionalHeader.NumberOfRvaAndSizes >= 8 && data.Position - initialOffset < optionalSize)
{
optionalHeader.Architecture = data.ReadUInt64();
}
if (optionalHeader.NumberOfRvaAndSizes >= 9 && data.Position - initialOffset < optionalSize)
{
optionalHeader.GlobalPtr = new DataDirectory();
optionalHeader.GlobalPtr.VirtualAddress = data.ReadUInt32();
optionalHeader.GlobalPtr.Size = data.ReadUInt32();
}
optionalHeader.GlobalPtr = data.ReadType<DataDirectory>();
if (optionalHeader.NumberOfRvaAndSizes >= 10 && data.Position - initialOffset < optionalSize)
{
optionalHeader.ThreadLocalStorageTable = new DataDirectory();
optionalHeader.ThreadLocalStorageTable.VirtualAddress = data.ReadUInt32();
optionalHeader.ThreadLocalStorageTable.Size = data.ReadUInt32();
}
optionalHeader.ThreadLocalStorageTable = data.ReadType<DataDirectory>();
if (optionalHeader.NumberOfRvaAndSizes >= 11 && data.Position - initialOffset < optionalSize)
{
optionalHeader.LoadConfigTable = new DataDirectory();
optionalHeader.LoadConfigTable.VirtualAddress = data.ReadUInt32();
optionalHeader.LoadConfigTable.Size = data.ReadUInt32();
}
optionalHeader.LoadConfigTable = data.ReadType<DataDirectory>();
if (optionalHeader.NumberOfRvaAndSizes >= 12 && data.Position - initialOffset < optionalSize)
{
optionalHeader.BoundImport = new DataDirectory();
optionalHeader.BoundImport.VirtualAddress = data.ReadUInt32();
optionalHeader.BoundImport.Size = data.ReadUInt32();
}
optionalHeader.BoundImport = data.ReadType<DataDirectory>();
if (optionalHeader.NumberOfRvaAndSizes >= 13 && data.Position - initialOffset < optionalSize)
{
optionalHeader.ImportAddressTable = new DataDirectory();
optionalHeader.ImportAddressTable.VirtualAddress = data.ReadUInt32();
optionalHeader.ImportAddressTable.Size = data.ReadUInt32();
}
optionalHeader.ImportAddressTable = data.ReadType<DataDirectory>();
if (optionalHeader.NumberOfRvaAndSizes >= 14 && data.Position - initialOffset < optionalSize)
{
optionalHeader.DelayImportDescriptor = new DataDirectory();
optionalHeader.DelayImportDescriptor.VirtualAddress = data.ReadUInt32();
optionalHeader.DelayImportDescriptor.Size = data.ReadUInt32();
}
optionalHeader.DelayImportDescriptor = data.ReadType<DataDirectory>();
if (optionalHeader.NumberOfRvaAndSizes >= 15 && data.Position - initialOffset < optionalSize)
{
optionalHeader.CLRRuntimeHeader = new DataDirectory();
optionalHeader.CLRRuntimeHeader.VirtualAddress = data.ReadUInt32();
optionalHeader.CLRRuntimeHeader.Size = data.ReadUInt32();
}
optionalHeader.CLRRuntimeHeader = data.ReadType<DataDirectory>();
if (optionalHeader.NumberOfRvaAndSizes >= 16 && data.Position - initialOffset < optionalSize)
{
optionalHeader.Reserved = data.ReadUInt64();
}
#endregion
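Each data directory above is now read with data.ReadType<DataDirectory>() behind the same two-part guard, varying only in the minimum NumberOfRvaAndSizes it requires. As a hedged sketch of how that repetition could be factored (not how the file is actually structured), a local helper inside the optional header parser might look like this; the Architecture and Reserved slots would keep their raw 64-bit reads.

// Sketch only: assumes it sits inside the parsing method where data,
// optionalHeader, initialOffset, and optionalSize are already in scope
DataDirectory? ReadDirectory(int minimumCount)
{
    if (optionalHeader.NumberOfRvaAndSizes < minimumCount)
        return null;
    if (data.Position - initialOffset >= optionalSize)
        return null;

    return data.ReadType<DataDirectory>();
}

// Example use, mirroring the unrolled assignments above:
// optionalHeader.ExportTable = ReadDirectory(1);
// optionalHeader.ImportTable = ReadDirectory(2);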
@@ -695,12 +639,12 @@ namespace SabreTools.Serialization.Deserializers
while (totalSize > 0 && data.Position < data.Length)
{
long initialPosition = data.Position;
string? str = data.ReadString();
string? str = data.ReadNullTerminatedAnsiString();
strings.Add(str ?? string.Empty);
totalSize -= (uint)(data.Position - initialPosition);
}
coffStringTable.Strings = strings.ToArray();
coffStringTable.Strings = [.. strings];
return coffStringTable;
}
@@ -742,21 +686,9 @@ namespace SabreTools.Serialization.Deserializers
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled delay-load directory table on success, null on error</returns>
public static DelayLoadDirectoryTable ParseDelayLoadDirectoryTable(Stream data)
public static DelayLoadDirectoryTable? ParseDelayLoadDirectoryTable(Stream data)
{
// TODO: Use marshalling here instead of building
var delayLoadDirectoryTable = new DelayLoadDirectoryTable();
delayLoadDirectoryTable.Attributes = data.ReadUInt32();
delayLoadDirectoryTable.Name = data.ReadUInt32();
delayLoadDirectoryTable.ModuleHandle = data.ReadUInt32();
delayLoadDirectoryTable.DelayImportAddressTable = data.ReadUInt32();
delayLoadDirectoryTable.DelayImportNameTable = data.ReadUInt32();
delayLoadDirectoryTable.BoundDelayImportTable = data.ReadUInt32();
delayLoadDirectoryTable.UnloadDelayImportTable = data.ReadUInt32();
delayLoadDirectoryTable.TimeStamp = data.ReadUInt32();
return delayLoadDirectoryTable;
return data.ReadType<DelayLoadDirectoryTable>();
}
/// <summary>
@@ -805,9 +737,8 @@ namespace SabreTools.Serialization.Deserializers
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="endOffset">First address not part of the debug table</param>
/// <param name="sections">Section table to use for virtual address translation</param>
/// <returns>Filled debug table on success, null on error</returns>
public static DebugTable ParseDebugTable(Stream data, int endOffset, SectionHeader?[] sections)
public static DebugTable? ParseDebugTable(Stream data, int endOffset)
{
// TODO: Use marshalling here instead of building
var debugTable = new DebugTable();
@@ -816,21 +747,14 @@ namespace SabreTools.Serialization.Deserializers
while (data.Position < endOffset)
{
var debugDirectoryEntry = new DebugDirectoryEntry();
debugDirectoryEntry.Characteristics = data.ReadUInt32();
debugDirectoryEntry.TimeDateStamp = data.ReadUInt32();
debugDirectoryEntry.MajorVersion = data.ReadUInt16();
debugDirectoryEntry.MinorVersion = data.ReadUInt16();
debugDirectoryEntry.DebugType = (DebugType)data.ReadUInt32();
debugDirectoryEntry.SizeOfData = data.ReadUInt32();
debugDirectoryEntry.AddressOfRawData = data.ReadUInt32();
debugDirectoryEntry.PointerToRawData = data.ReadUInt32();
var debugDirectoryEntry = data.ReadType<DebugDirectoryEntry>();
if (debugDirectoryEntry == null)
return null;
debugDirectoryTable.Add(debugDirectoryEntry);
}
debugTable.DebugDirectoryTable = debugDirectoryTable.ToArray();
debugTable.DebugDirectoryTable = [.. debugDirectoryTable];
// TODO: Should we read the debug data in? Most of it is unformatted or undocumented
// TODO: Implement .debug$F (Object Only) / IMAGE_DEBUG_TYPE_FPO
@@ -844,7 +768,7 @@ namespace SabreTools.Serialization.Deserializers
/// <param name="data">Stream to parse</param>
/// <param name="sections">Section table to use for virtual address translation</param>
/// <returns>Filled export table on success, null on error</returns>
public static ExportTable ParseExportTable(Stream data, SectionHeader?[] sections)
public static ExportTable? ParseExportTable(Stream data, SectionHeader?[] sections)
{
// TODO: Use marshalling here instead of building
var exportTable = new ExportTable();
@@ -871,7 +795,7 @@ namespace SabreTools.Serialization.Deserializers
uint nameAddress = exportDirectoryTable.NameRVA.ConvertVirtualAddress(sections);
data.Seek(nameAddress, SeekOrigin.Begin);
string? name = data.ReadString(Encoding.ASCII);
string? name = data.ReadNullTerminatedAnsiString();
exportDirectoryTable.Name = name;
}
@@ -885,11 +809,9 @@ namespace SabreTools.Serialization.Deserializers
for (int i = 0; i < exportDirectoryTable.AddressTableEntries; i++)
{
var addressTableEntry = new ExportAddressTableEntry();
// TODO: Use the optional header address and length to determine if export or forwarder
addressTableEntry.ExportRVA = data.ReadUInt32();
addressTableEntry.ForwarderRVA = addressTableEntry.ExportRVA;
var addressTableEntry = data.ReadType<ExportAddressTableEntry>();
if (addressTableEntry == null)
return null;
exportAddressTable[i] = addressTableEntry;
}
@@ -944,7 +866,7 @@ namespace SabreTools.Serialization.Deserializers
exportNameTable.Strings = new string[exportDirectoryTable.NumberOfNamePointers];
for (int i = 0; i < exportDirectoryTable.NumberOfNamePointers; i++)
{
string? str = data.ReadString(Encoding.ASCII);
string? str = data.ReadNullTerminatedAnsiString();
exportNameTable.Strings[i] = str ?? string.Empty;
}
@@ -961,7 +883,7 @@ namespace SabreTools.Serialization.Deserializers
/// <param name="magic">Optional header magic number indicating PE32 or PE32+</param>
/// <param name="sections">Section table to use for virtual address translation</param>
/// <returns>Filled import table on success, null on error</returns>
public static ImportTable ParseImportTable(Stream data, OptionalHeaderMagicNumber magic, SectionHeader?[] sections)
public static ImportTable? ParseImportTable(Stream data, OptionalHeaderMagicNumber magic, SectionHeader?[] sections)
{
// TODO: Use marshalling here instead of building
var importTable = new ImportTable();
@@ -1006,7 +928,7 @@ namespace SabreTools.Serialization.Deserializers
uint nameAddress = importDirectoryTableEntry.NameRVA.ConvertVirtualAddress(sections);
data.Seek(nameAddress, SeekOrigin.Begin);
string? name = data.ReadString(Encoding.ASCII);
string? name = data.ReadNullTerminatedAnsiString();
importDirectoryTableEntry.Name = name;
}
@@ -1163,21 +1085,30 @@ namespace SabreTools.Serialization.Deserializers
int hintNameTableEntryAddress = hintNameTableEntryAddresses[i];
data.Seek(hintNameTableEntryAddress, SeekOrigin.Begin);
var hintNameTableEntry = new HintNameTableEntry();
hintNameTableEntry.Hint = data.ReadUInt16();
hintNameTableEntry.Name = data.ReadString(Encoding.ASCII);
var hintNameTableEntry = ParseHintNameTableEntry(data);
if (hintNameTableEntry == null)
return null;
importHintNameTable.Add(hintNameTableEntry);
}
}
}
importTable.HintNameTable = importHintNameTable.ToArray();
importTable.HintNameTable = [.. importHintNameTable];
return importTable;
}
/// <summary>
/// Parse a Stream into a hint name table entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled hint name table entry on success, null on error</returns>
public static HintNameTableEntry? ParseHintNameTableEntry(Stream data)
{
return data.ReadType<HintNameTableEntry>();
}
/// <summary>
/// Parse a Stream into a resource directory table
/// </summary>

View File

@@ -73,23 +73,13 @@ namespace SabreTools.Serialization.Deserializers
/// <returns>Filled header on success, null on error</returns>
private static Header? ParseHeader(Stream data)
{
// TODO: Use marshalling here instead of building
Header header = new Header();
var header = data.ReadType<Header>();
byte[]? signature = data.ReadBytes(2);
if (signature == null)
if (header == null)
return null;
header.Signature = Encoding.ASCII.GetString(signature);
if (header.Signature != SignatureString)
return null;
header.MajorVersion = data.ReadByteValue();
header.MinorVersion = data.ReadByteValue();
header.FileCount = data.ReadUInt16();
header.TableSize = data.ReadByteValue();
header.CompressionFlags = data.ReadByteValue();
return header;
}
@@ -102,7 +92,7 @@ namespace SabreTools.Serialization.Deserializers
private static FileDescriptor ParseFileDescriptor(Stream data, byte minorVersion)
{
// TODO: Use marshalling here instead of building
FileDescriptor fileDescriptor = new FileDescriptor();
var fileDescriptor = new FileDescriptor();
fileDescriptor.FileNameSize = ReadVariableLength(data);
if (fileDescriptor.FileNameSize > 0)

View File

@@ -0,0 +1,35 @@
using System.IO;
using System.Text;
using SabreTools.IO.Extensions;
namespace SabreTools.Serialization.Deserializers
{
public class SFB : BaseBinaryDeserializer<Models.PlayStation3.SFB>
{
/// <inheritdoc/>
public override Models.PlayStation3.SFB? Deserialize(Stream? data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
return null;
// If the offset is out of bounds
if (data.Position < 0 || data.Position >= data.Length)
return null;
// Cache the current offset
int initialOffset = (int)data.Position;
// Deserialize the SFB
var sfb = data.ReadType<Models.PlayStation3.SFB>();
if (sfb == null)
return null;
string magic = Encoding.ASCII.GetString(sfb!.Magic!);
if (magic != ".SFB")
return null;
return sfb;
}
}
}
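A usage sketch for the new SFB deserializer, assuming only the Deserialize(Stream?) override shown above; the file name is illustrative. Because the magic is checked inside Deserialize, a non-null result has already been validated as starting with ".SFB".

using System;
using System.IO;
using SabreTools.Serialization.Deserializers;

using var stream = File.OpenRead("PS3_DISC.SFB");

// Returns null for unreadable streams or a missing ".SFB" magic
var sfb = new SFB().Deserialize(stream);
if (sfb != null)
    Console.WriteLine("Parsed SFB header");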

View File

@@ -0,0 +1,91 @@
using System.IO;
using System.Text;
using SabreTools.IO.Extensions;
namespace SabreTools.Serialization.Deserializers
{
public class SFO : BaseBinaryDeserializer<Models.PlayStation3.SFO>
{
/// <inheritdoc/>
public override Models.PlayStation3.SFO? Deserialize(Stream? data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
return null;
// If the offset is out of bounds
if (data.Position < 0 || data.Position >= data.Length)
return null;
// Cache the current offset
int initialOffset = (int)data.Position;
// Create a new SFO to fill
var sfo = new Models.PlayStation3.SFO();
#region Header
// Try to parse the header
var header = ParseHeader(data);
if (header == null)
return null;
// Assign the header
sfo.Header = header;
#endregion
#region Index Table
// TODO: Determine how many entries are in the index table
#endregion
#region Key Table
// TODO: Finish implementation
#endregion
// Padding
// TODO: Finish implementation
#region Data Table
// TODO: Finish implementation
#endregion
return sfo;
}
/// <summary>
/// Parse a Stream into an SFO header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled SFO header on success, null on error</returns>
public Models.PlayStation3.SFOHeader? ParseHeader(Stream data)
{
var sfoHeader = data.ReadType<Models.PlayStation3.SFOHeader>();
if (sfoHeader == null)
return null;
string magic = Encoding.ASCII.GetString(sfoHeader!.Magic!);
if (magic != "\0PSF")
return null;
return sfoHeader;
}
/// <summary>
/// Parse a Stream into an SFO index table entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled SFO index table entry on success, null on error</returns>
public Models.PlayStation3.SFOIndexTableEntry? ParseIndexTableEntry(Stream data)
{
return data.ReadType<Models.PlayStation3.SFOIndexTableEntry>();
}
}
}

View File

@@ -419,7 +419,7 @@ namespace SabreTools.Serialization.Deserializers
for (int i = 0; i < stringCount; i++)
{
long currentPosition = data.Position - stringTableStart;
strings[currentPosition] = data.ReadString(Encoding.ASCII);
strings[currentPosition] = data.ReadNullTerminatedAnsiString();
}
// Assign the files

View File

@@ -48,7 +48,7 @@ namespace SabreTools.Serialization.Deserializers
private static Header? ParseHeader(Stream data)
{
// TODO: Use marshalling here instead of building
Header header = new Header();
var header = new Header();
byte[]? signature = data.ReadBytes(4);
if (signature == null)
@@ -65,7 +65,11 @@ namespace SabreTools.Serialization.Deserializers
header.Lumps = new Lump[HL_VBSP_LUMP_COUNT];
for (int i = 0; i < HL_VBSP_LUMP_COUNT; i++)
{
header.Lumps[i] = ParseLump(data, header.Version);
var lump = ParseLump(data, header.Version);
if (lump == null)
return null;
header.Lumps[i] = lump;
}
header.MapRevision = data.ReadInt32();
@@ -79,19 +83,9 @@ namespace SabreTools.Serialization.Deserializers
/// <param name="data">Stream to parse</param>
/// <param name="version">VBSP version</param>
/// <returns>Filled Half-Life 2 Level lump on success, null on error</returns>
private static Lump ParseLump(Stream data, int version)
private static Lump? ParseLump(Stream data, int version)
{
// TODO: Use marshalling here instead of building
Lump lump = new Lump();
lump.Offset = data.ReadUInt32();
lump.Length = data.ReadUInt32();
lump.Version = data.ReadUInt32();
lump.FourCC = new char[4];
for (int i = 0; i < 4; i++)
{
lump.FourCC[i] = (char)data.ReadByte();
}
return data.ReadType<Lump>();
// This block was commented out because test VBSPs with header
// version 21 had the values in the "right" order already and
@@ -104,8 +98,8 @@ namespace SabreTools.Serialization.Deserializers
// lump.Offset = lump.Length;
// lump.Length = temp;
//}
return lump;
//
//return lump
}
}
}

View File

@@ -1,6 +1,5 @@
using System.Collections.Generic;
using System.IO;
using System.Text;
using SabreTools.IO.Extensions;
using SabreTools.Models.VPK;
using static SabreTools.Models.VPK.Constants;
@@ -31,6 +30,8 @@ namespace SabreTools.Serialization.Deserializers
// Try to parse the header
// The original version had no signature.
var header = ParseHeader(data);
if (header == null)
return null;
// Set the package header
file.Header = header;
@@ -56,6 +57,8 @@ namespace SabreTools.Serialization.Deserializers
// Create the directory items tree
var directoryItems = ParseDirectoryItemTree(data);
if (directoryItems == null)
return null;
// Set the directory items
file.DirectoryItems = directoryItems;
@@ -76,10 +79,13 @@ namespace SabreTools.Serialization.Deserializers
while (data.Position < initialOffset + file.ExtendedHeader.ArchiveHashLength)
{
var archiveHash = ParseArchiveHash(data);
if (archiveHash == null)
return null;
archiveHashes.Add(archiveHash);
}
file.ArchiveHashes = archiveHashes.ToArray();
file.ArchiveHashes = [.. archiveHashes];
}
#endregion
@@ -94,19 +100,15 @@ namespace SabreTools.Serialization.Deserializers
/// <returns>Filled Valve Package header on success, null on error</returns>
private static Header? ParseHeader(Stream data)
{
// TODO: Use marshalling here instead of building
Header header = new Header();
var header = data.ReadType<Header>();
header.Signature = data.ReadUInt32();
if (header == null)
return null;
if (header.Signature != SignatureUInt32)
return null;
header.Version = data.ReadUInt32();
if (header.Version > 2)
return null;
header.DirectoryLength = data.ReadUInt32();
return header;
}
@@ -115,17 +117,9 @@ namespace SabreTools.Serialization.Deserializers
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Valve Package extended header on success, null on error</returns>
private static ExtendedHeader ParseExtendedHeader(Stream data)
private static ExtendedHeader? ParseExtendedHeader(Stream data)
{
// TODO: Use marshalling here instead of building
ExtendedHeader extendedHeader = new ExtendedHeader();
extendedHeader.Dummy0 = data.ReadUInt32();
extendedHeader.ArchiveHashLength = data.ReadUInt32();
extendedHeader.ExtraLength = data.ReadUInt32();
extendedHeader.Dummy1 = data.ReadUInt32();
return extendedHeader;
return data.ReadType<ExtendedHeader>();
}
/// <summary>
@@ -133,17 +127,9 @@ namespace SabreTools.Serialization.Deserializers
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Valve Package archive hash on success, null on error</returns>
private static ArchiveHash ParseArchiveHash(Stream data)
private static ArchiveHash? ParseArchiveHash(Stream data)
{
// TODO: Use marshalling here instead of building
ArchiveHash archiveHash = new ArchiveHash();
archiveHash.ArchiveIndex = data.ReadUInt32();
archiveHash.ArchiveOffset = data.ReadUInt32();
archiveHash.Length = data.ReadUInt32();
archiveHash.Hash = data.ReadBytes(0x10);
return archiveHash;
return data.ReadType<ArchiveHash>();
}
/// <summary>
@@ -151,7 +137,7 @@ namespace SabreTools.Serialization.Deserializers
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Valve Package directory item tree on success, null on error</returns>
private static DirectoryItem[] ParseDirectoryItemTree(Stream data)
private static DirectoryItem[]? ParseDirectoryItemTree(Stream data)
{
// Create the directory items list
var directoryItems = new List<DirectoryItem>();
@@ -159,7 +145,7 @@ namespace SabreTools.Serialization.Deserializers
while (true)
{
// Get the extension
string? extensionString = data.ReadString(Encoding.ASCII);
string? extensionString = data.ReadNullTerminatedAnsiString();
if (string.IsNullOrEmpty(extensionString))
break;
@@ -172,7 +158,7 @@ namespace SabreTools.Serialization.Deserializers
while (true)
{
// Get the path
string? pathString = data.ReadString(Encoding.ASCII);
string? pathString = data.ReadNullTerminatedAnsiString();
if (string.IsNullOrEmpty(pathString))
break;
@@ -185,7 +171,7 @@ namespace SabreTools.Serialization.Deserializers
while (true)
{
// Get the name
string? nameString = data.ReadString(Encoding.ASCII);
string? nameString = data.ReadNullTerminatedAnsiString();
if (string.IsNullOrEmpty(nameString))
break;
@@ -197,6 +183,8 @@ namespace SabreTools.Serialization.Deserializers
// Get the directory item
var directoryItem = ParseDirectoryItem(data, extensionString!, pathString!, nameString!);
if (directoryItem == null)
return null;
// Add the directory item
directoryItems.Add(directoryItem);
@@ -212,9 +200,9 @@ namespace SabreTools.Serialization.Deserializers
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Valve Package directory item on success, null on error</returns>
private static DirectoryItem ParseDirectoryItem(Stream data, string extension, string path, string name)
private static DirectoryItem? ParseDirectoryItem(Stream data, string extension, string path, string name)
{
DirectoryItem directoryItem = new DirectoryItem();
var directoryItem = new DirectoryItem();
directoryItem.Extension = extension;
directoryItem.Path = path;
@@ -222,6 +210,8 @@ namespace SabreTools.Serialization.Deserializers
// Get the directory entry
var directoryEntry = ParseDirectoryEntry(data);
if (directoryEntry == null)
return null;
// Set the directory entry
directoryItem.DirectoryEntry = directoryEntry;
@@ -267,19 +257,9 @@ namespace SabreTools.Serialization.Deserializers
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Valve Package directory entry on success, null on error</returns>
private static DirectoryEntry ParseDirectoryEntry(Stream data)
private static DirectoryEntry? ParseDirectoryEntry(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryEntry directoryEntry = new DirectoryEntry();
directoryEntry.CRC = data.ReadUInt32();
directoryEntry.PreloadBytes = data.ReadUInt16();
directoryEntry.ArchiveIndex = data.ReadUInt16();
directoryEntry.EntryOffset = data.ReadUInt32();
directoryEntry.EntryLength = data.ReadUInt32();
directoryEntry.Dummy0 = data.ReadUInt16();
return directoryEntry;
return data.ReadType<DirectoryEntry>();
}
}
}
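The VPK directory tree above relies on a simple terminator convention: each level is a run of null-terminated ANSI strings ended by an empty string, now read with ReadNullTerminatedAnsiString instead of ReadString(Encoding.ASCII). A small sketch of that convention in isolation; the helper name is illustrative.

using System.Collections.Generic;
using System.IO;
using SabreTools.IO.Extensions;

public static class StringRunReaderSketch
{
    // Reads strings until an empty string (a lone null byte) is found
    public static List<string> ReadStringRun(Stream data)
    {
        var values = new List<string>();
        while (true)
        {
            string? value = data.ReadNullTerminatedAnsiString();
            if (string.IsNullOrEmpty(value))
                break;

            values.Add(value!);
        }

        return values;
    }
}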

View File

@@ -52,6 +52,9 @@ namespace SabreTools.Serialization.Deserializers
for (int i = 0; i < header.LumpCount; i++)
{
var lump = ParseLump(data);
if (lump == null)
return null;
file.Lumps[i] = lump;
}
@@ -104,20 +107,13 @@ namespace SabreTools.Serialization.Deserializers
/// <returns>Filled Half-Life Texture Package header on success, null on error</returns>
private static Header? ParseHeader(Stream data)
{
// TODO: Use marshalling here instead of building
Header header = new Header();
var header = data.ReadType<Header>();
byte[]? signature = data.ReadBytes(4);
if (signature == null)
if (header == null)
return null;
header.Signature = Encoding.ASCII.GetString(signature);
if (header.Signature != SignatureString)
return null;
header.LumpCount = data.ReadUInt32();
header.LumpOffset = data.ReadUInt32();
return header;
}
@@ -126,23 +122,9 @@ namespace SabreTools.Serialization.Deserializers
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Texture Package lump on success, null on error</returns>
private static Lump ParseLump(Stream data)
private static Lump? ParseLump(Stream data)
{
// TODO: Use marshalling here instead of building
Lump lump = new Lump();
lump.Offset = data.ReadUInt32();
lump.DiskLength = data.ReadUInt32();
lump.Length = data.ReadUInt32();
lump.Type = data.ReadByteValue();
lump.Compression = data.ReadByteValue();
lump.Padding0 = data.ReadByteValue();
lump.Padding1 = data.ReadByteValue();
byte[]? name = data.ReadBytes(16);
if (name != null)
lump.Name = Encoding.ASCII.GetString(name).TrimEnd('\0');
return lump;
return data.ReadType<Lump>();
}
/// <summary>
@@ -182,7 +164,7 @@ namespace SabreTools.Serialization.Deserializers
lumpInfo.Width = data.ReadUInt32();
lumpInfo.Height = data.ReadUInt32();
lumpInfo.PixelOffset = data.ReadUInt32();
_ = data.ReadBytes(12); // Unknown data
lumpInfo.UnknownData = data.ReadBytes(12);
// Cache the current offset
long currentOffset = data.Position;

View File

@@ -46,6 +46,9 @@ namespace SabreTools.Serialization.Deserializers
for (int i = 0; i < header.DirectoryEntryCount; i++)
{
var directoryEntry = ParseDirectoryEntry(data);
if (directoryEntry == null)
return null;
file.DirectoryEntries[i] = directoryEntry;
}
@@ -62,6 +65,9 @@ namespace SabreTools.Serialization.Deserializers
for (int i = 0; i < header.PreloadDirectoryEntryCount; i++)
{
var directoryEntry = ParseDirectoryEntry(data);
if (directoryEntry == null)
return null;
file.PreloadDirectoryEntries[i] = directoryEntry;
}
}
@@ -79,6 +85,9 @@ namespace SabreTools.Serialization.Deserializers
for (int i = 0; i < header.PreloadDirectoryEntryCount; i++)
{
var directoryMapping = ParseDirectoryMapping(data);
if (directoryMapping == null)
return null;
file.PreloadDirectoryMappings[i] = directoryMapping;
}
}
@@ -135,29 +144,15 @@ namespace SabreTools.Serialization.Deserializers
/// <returns>Filled XBox Package File header on success, null on error</returns>
private static Header? ParseHeader(Stream data)
{
// TODO: Use marshalling here instead of building
Header header = new Header();
var header = data.ReadType<Header>();
byte[]? signature = data.ReadBytes(4);
if (signature == null)
if (header == null)
return null;
header.Signature = Encoding.ASCII.GetString(signature);
if (header.Signature != HeaderSignatureString)
return null;
header.Version = data.ReadUInt32();
if (header.Version != 6)
return null;
header.PreloadDirectoryEntryCount = data.ReadUInt32();
header.DirectoryEntryCount = data.ReadUInt32();
header.PreloadBytes = data.ReadUInt32();
header.HeaderLength = data.ReadUInt32();
header.DirectoryItemCount = data.ReadUInt32();
header.DirectoryItemOffset = data.ReadUInt32();
header.DirectoryItemLength = data.ReadUInt32();
return header;
}
@@ -166,16 +161,9 @@ namespace SabreTools.Serialization.Deserializers
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled XBox Package File directory entry on success, null on error</returns>
private static DirectoryEntry ParseDirectoryEntry(Stream data)
private static DirectoryEntry? ParseDirectoryEntry(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryEntry directoryEntry = new DirectoryEntry();
directoryEntry.FileNameCRC = data.ReadUInt32();
directoryEntry.EntryLength = data.ReadUInt32();
directoryEntry.EntryOffset = data.ReadUInt32();
return directoryEntry;
return data.ReadType<DirectoryEntry>();
}
/// <summary>
@@ -183,14 +171,9 @@ namespace SabreTools.Serialization.Deserializers
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled XBox Package File directory mapping on success, null on error</returns>
private static DirectoryMapping ParseDirectoryMapping(Stream data)
private static DirectoryMapping? ParseDirectoryMapping(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryMapping directoryMapping = new DirectoryMapping();
directoryMapping.PreloadDirectoryEntryIndex = data.ReadUInt16();
return directoryMapping;
return data.ReadType<DirectoryMapping>();
}
/// <summary>
@@ -214,7 +197,7 @@ namespace SabreTools.Serialization.Deserializers
data.Seek(directoryItem.NameOffset, SeekOrigin.Begin);
// Read the name
directoryItem.Name = data.ReadString(Encoding.ASCII);
directoryItem.Name = data.ReadNullTerminatedAnsiString();
// Seek back to the right position
data.Seek(currentPosition, SeekOrigin.Begin);
@@ -229,15 +212,10 @@ namespace SabreTools.Serialization.Deserializers
/// <returns>Filled XBox Package File footer on success, null on error</returns>
private static Footer? ParseFooter(Stream data)
{
// TODO: Use marshalling here instead of building
Footer footer = new Footer();
var footer = data.ReadType<Footer>();
footer.FileLength = data.ReadUInt32();
byte[]? signature = data.ReadBytes(4);
if (signature == null)
if (footer == null)
return null;
footer.Signature = Encoding.ASCII.GetString(signature);
if (footer.Signature != FooterSignatureString)
return null;

View File

@@ -102,21 +102,13 @@ namespace SabreTools.Serialization
/// <returns>A filled NB10 Program Database on success, null on error</returns>
public static NB10ProgramDatabase? AsNB10ProgramDatabase(this byte[] data, ref int offset)
{
// If we have data that's invalid, we can't do anything
if (data == null)
var nb10ProgramDatabase = data.ReadType<NB10ProgramDatabase>(ref offset);
if (nb10ProgramDatabase == null)
return null;
var nb10ProgramDatabase = new NB10ProgramDatabase();
nb10ProgramDatabase.Signature = data.ReadUInt32(ref offset);
if (nb10ProgramDatabase.Signature != 0x3031424E)
return null;
nb10ProgramDatabase.Offset = data.ReadUInt32(ref offset);
nb10ProgramDatabase.Timestamp = data.ReadUInt32(ref offset);
nb10ProgramDatabase.Age = data.ReadUInt32(ref offset);
nb10ProgramDatabase.PdbFileName = data.ReadString(ref offset, Encoding.ASCII); // TODO: Actually null-terminated UTF-8?
return nb10ProgramDatabase;
}
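These byte-array helpers use the ReadType<T>(ref offset) overload: the offset is advanced past whatever the read consumed, so later reads against the same buffer continue from that position. A short sketch, with an illustrative buffer and the NB10 signature value taken from the check above:

// debugData is an illustrative byte[] slice of a debug data record
int offset = 0;
var nb10 = debugData.ReadType<NB10ProgramDatabase>(ref offset);
if (nb10 != null && nb10.Signature == 0x3031424E)
{
    // offset has advanced past whatever ReadType consumed, so any
    // follow-up reads against debugData continue from here
}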
@@ -126,24 +118,15 @@ namespace SabreTools.Serialization
/// <param name="data">Data to parse into a database</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>A filled RSDS Program Database on success, null on error</returns>
public static RSDSProgramDatabase? AsRSDSProgramDatabase(this byte[]? data, ref int offset)
public static RSDSProgramDatabase? AsRSDSProgramDatabase(this byte[] data, ref int offset)
{
// If we have data that's invalid, we can't do anything
if (data == null)
var rsdsProgramDatabase = data.ReadType<RSDSProgramDatabase>(ref offset);
if (rsdsProgramDatabase == null)
return null;
var rsdsProgramDatabase = new RSDSProgramDatabase();
rsdsProgramDatabase.Signature = data.ReadUInt32(ref offset);
if (rsdsProgramDatabase.Signature != 0x53445352)
return null;
var guid = data.ReadBytes(ref offset, 0x10);
if (guid != null)
rsdsProgramDatabase.GUID = new Guid(guid);
rsdsProgramDatabase.Age = data.ReadUInt32(ref offset);
rsdsProgramDatabase.PathAndFileName = data.ReadString(ref offset, Encoding.ASCII); // TODO: Actually null-terminated UTF-8
return rsdsProgramDatabase;
}
@@ -157,12 +140,8 @@ namespace SabreTools.Serialization
/// <param name="data">Data to parse into overlay data</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>A filled SecuROM AddD overlay data on success, null on error</returns>
public static SecuROMAddD? AsSecuROMAddD(this byte[]? data, ref int offset)
public static SecuROMAddD? AsSecuROMAddD(this byte[] data, ref int offset)
{
// If we have data that's invalid, we can't do anything
if (data == null)
return null;
// Read in the table
var addD = new SecuROMAddD();
@@ -173,7 +152,7 @@ namespace SabreTools.Serialization
int originalOffset = offset;
addD.EntryCount = data.ReadUInt32(ref offset);
addD.Version = data.ReadString(ref offset, Encoding.ASCII);
addD.Version = data.ReadNullTerminatedAnsiString(ref offset);
if (string.IsNullOrEmpty(addD.Version))
offset = originalOffset + 0x10;
@@ -193,18 +172,9 @@ namespace SabreTools.Serialization
addD.Entries = new SecuROMAddDEntry[addD.EntryCount];
for (int i = 0; i < addD.EntryCount; i++)
{
var addDEntry = new SecuROMAddDEntry();
addDEntry.PhysicalOffset = data.ReadUInt32(ref offset);
addDEntry.Length = data.ReadUInt32(ref offset);
addDEntry.Unknown08h = data.ReadUInt32(ref offset);
addDEntry.Unknown0Ch = data.ReadUInt32(ref offset);
addDEntry.Unknown10h = data.ReadUInt32(ref offset);
addDEntry.Unknown14h = data.ReadUInt32(ref offset);
addDEntry.Unknown18h = data.ReadUInt32(ref offset);
addDEntry.Unknown1Ch = data.ReadUInt32(ref offset);
addDEntry.FileName = data.ReadString(ref offset, Encoding.ASCII);
addDEntry.Unknown2Ch = data.ReadUInt32(ref offset);
var addDEntry = data.ReadType<SecuROMAddDEntry>(ref offset);
if (addDEntry == null)
return null;
addD.Entries[i] = addDEntry;
}
@@ -223,7 +193,7 @@ namespace SabreTools.Serialization
/// <param name="data">Data to parse into a resource header</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>A filled resource header on success, null on error</returns>
public static ResourceHeader? AsResourceHeader(this byte[]? data, ref int offset)
public static ResourceHeader? AsResourceHeader(this byte[] data, ref int offset)
{
// If we have data that's invalid, we can't do anything
if (data == null)
@@ -267,12 +237,9 @@ namespace SabreTools.Serialization
// Read in the table
for (int i = 0; i < count; i++)
{
var acceleratorTableEntry = new AcceleratorTableEntry();
acceleratorTableEntry.Flags = (AcceleratorTableFlags)entry.Data.ReadUInt16(ref offset);
acceleratorTableEntry.Ansi = entry.Data.ReadUInt16(ref offset);
acceleratorTableEntry.Id = entry.Data.ReadUInt16(ref offset);
acceleratorTableEntry.Padding = entry.Data.ReadUInt16(ref offset);
var acceleratorTableEntry = entry.Data.ReadType<AcceleratorTableEntry>(ref offset);
if (acceleratorTableEntry == null)
return null;
table[i] = acceleratorTableEntry;
}
@@ -359,7 +326,7 @@ namespace SabreTools.Serialization
offset += sizeof(ushort);
// Read the menu resource as a string
dialogTemplateExtended.MenuResource = entry.Data.ReadString(ref offset, Encoding.Unicode);
dialogTemplateExtended.MenuResource = entry.Data.ReadNullTerminatedUnicodeString(ref offset);
// Align to the WORD boundary if we're not at the end
if (offset < entry.Data.Length)
@@ -395,7 +362,7 @@ namespace SabreTools.Serialization
offset += sizeof(ushort);
// Read the class resource as a string
dialogTemplateExtended.ClassResource = entry.Data.ReadString(ref offset, Encoding.Unicode);
dialogTemplateExtended.ClassResource = entry.Data.ReadNullTerminatedUnicodeString(ref offset);
// Align to the WORD boundary if we're not at the end
if (offset < entry.Data.Length)
@@ -426,7 +393,7 @@ namespace SabreTools.Serialization
else
{
// Read the title resource as a string
dialogTemplateExtended.TitleResource = entry.Data.ReadString(ref offset, Encoding.Unicode);
dialogTemplateExtended.TitleResource = entry.Data.ReadNullTerminatedUnicodeString(ref offset);
// Align to the WORD boundary if we're not at the end
if (offset < entry.Data.Length)
@@ -451,7 +418,7 @@ namespace SabreTools.Serialization
dialogTemplateExtended.Weight = entry.Data.ReadUInt16(ref offset);
dialogTemplateExtended.Italic = entry.Data.ReadByte(ref offset);
dialogTemplateExtended.CharSet = entry.Data.ReadByte(ref offset);
dialogTemplateExtended.Typeface = entry.Data.ReadString(ref offset, Encoding.Unicode);
dialogTemplateExtended.Typeface = entry.Data.ReadNullTerminatedUnicodeString(ref offset);
}
// Align to the DWORD boundary if we're not at the end
@@ -507,7 +474,7 @@ namespace SabreTools.Serialization
offset += sizeof(ushort);
// Read the class resource as a string
dialogItemTemplate.ClassResource = entry.Data.ReadString(ref offset, Encoding.Unicode);
dialogItemTemplate.ClassResource = entry.Data.ReadNullTerminatedUnicodeString(ref offset);
// Align to the WORD boundary if we're not at the end
if (offset < entry.Data.Length)
@@ -537,7 +504,7 @@ namespace SabreTools.Serialization
else
{
// Read the title resource as a string
dialogItemTemplate.TitleResource = entry.Data.ReadString(ref offset, Encoding.Unicode);
dialogItemTemplate.TitleResource = entry.Data.ReadNullTerminatedUnicodeString(ref offset);
// Align to the WORD boundary if we're not at the end
if (offset < entry.Data.Length)
@@ -609,7 +576,7 @@ namespace SabreTools.Serialization
offset += sizeof(ushort);
// Read the menu resource as a string
dialogTemplate.MenuResource = entry.Data.ReadString(ref offset, Encoding.Unicode);
dialogTemplate.MenuResource = entry.Data.ReadNullTerminatedUnicodeString(ref offset);
// Align to the WORD boundary if we're not at the end
if (offset < entry.Data.Length)
@@ -645,7 +612,7 @@ namespace SabreTools.Serialization
offset += sizeof(ushort);
// Read the class resource as a string
dialogTemplate.ClassResource = entry.Data.ReadString(ref offset, Encoding.Unicode);
dialogTemplate.ClassResource = entry.Data.ReadNullTerminatedUnicodeString(ref offset);
// Align to the WORD boundary if we're not at the end
if (offset < entry.Data.Length)
@@ -676,7 +643,7 @@ namespace SabreTools.Serialization
else
{
// Read the title resource as a string
dialogTemplate.TitleResource = entry.Data.ReadString(ref offset, Encoding.Unicode);
dialogTemplate.TitleResource = entry.Data.ReadNullTerminatedUnicodeString(ref offset);
// Align to the WORD boundary if we're not at the end
if (offset < entry.Data.Length)
@@ -700,7 +667,7 @@ namespace SabreTools.Serialization
dialogTemplate.PointSizeValue = entry.Data.ReadUInt16(ref offset);
// Read the font name as a string
dialogTemplate.Typeface = entry.Data.ReadString(ref offset, Encoding.Unicode);
dialogTemplate.Typeface = entry.Data.ReadNullTerminatedUnicodeString(ref offset);
}
// Align to the DWORD boundary if we're not at the end
@@ -755,7 +722,7 @@ namespace SabreTools.Serialization
offset += sizeof(ushort);
// Read the class resource as a string
dialogItemTemplate.ClassResource = entry.Data.ReadString(ref offset, Encoding.Unicode);
dialogItemTemplate.ClassResource = entry.Data.ReadNullTerminatedUnicodeString(ref offset);
// Align to the WORD boundary if we're not at the end
if (offset < entry.Data.Length)
@@ -785,7 +752,7 @@ namespace SabreTools.Serialization
else
{
// Read the title resource as a string
dialogItemTemplate.TitleResource = entry.Data.ReadString(ref offset, Encoding.Unicode);
dialogItemTemplate.TitleResource = entry.Data.ReadNullTerminatedUnicodeString(ref offset);
// Align to the WORD boundary if we're not at the end
if (offset < entry.Data.Length)
@@ -885,8 +852,8 @@ namespace SabreTools.Serialization
dirEntry.Entry.Reserved = entry.Data.ReadUInt32(ref offset);
// TODO: Determine how to read these two? Immediately after?
dirEntry.Entry.DeviceName = entry.Data.ReadString(ref offset);
dirEntry.Entry.FaceName = entry.Data.ReadString(ref offset);
dirEntry.Entry.DeviceName = entry.Data.ReadNullTerminatedAnsiString(ref offset);
dirEntry.Entry.FaceName = entry.Data.ReadNullTerminatedAnsiString(ref offset);
fontGroupHeader.DE[i] = dirEntry;
}
@@ -920,11 +887,9 @@ namespace SabreTools.Serialization
{
#region Extended menu header
var menuHeaderExtended = new MenuHeaderExtended();
menuHeaderExtended.Version = entry.Data.ReadUInt16(ref offset);
menuHeaderExtended.Offset = entry.Data.ReadUInt16(ref offset);
menuHeaderExtended.HelpID = entry.Data.ReadUInt32(ref offset);
var menuHeaderExtended = entry.Data.ReadType<MenuHeaderExtended>(ref offset);
if (menuHeaderExtended == null)
return null;
menuResource.ExtendedMenuHeader = menuHeaderExtended;
@@ -940,13 +905,9 @@ namespace SabreTools.Serialization
while (offset < entry.Data.Length)
{
var extendedMenuItem = new MenuItemExtended();
extendedMenuItem.ItemType = (MenuFlags)entry.Data.ReadUInt32(ref offset);
extendedMenuItem.State = (MenuFlags)entry.Data.ReadUInt32(ref offset);
extendedMenuItem.ID = entry.Data.ReadUInt32(ref offset);
extendedMenuItem.Flags = (MenuFlags)entry.Data.ReadUInt32(ref offset);
extendedMenuItem.MenuText = entry.Data.ReadString(ref offset, Encoding.Unicode);
var extendedMenuItem = entry.Data.ReadType<MenuItemExtended>(ref offset);
if (extendedMenuItem == null)
return null;
// Align to the DWORD boundary if we're not at the end
if (offset < entry.Data.Length)
@@ -967,10 +928,9 @@ namespace SabreTools.Serialization
{
#region Menu header
var menuHeader = new MenuHeader();
menuHeader.Version = entry.Data.ReadUInt16(ref offset);
menuHeader.HeaderSize = entry.Data.ReadUInt16(ref offset);
var menuHeader = entry.Data.ReadType<MenuHeader>(ref offset);
if (menuHeader == null)
return null;
menuResource.MenuHeader = menuHeader;
@@ -982,28 +942,19 @@ namespace SabreTools.Serialization
while (offset < entry.Data.Length)
{
var menuItem = new MenuItem();
// Determine if this is a popup
int flagsOffset = offset;
var initialFlags = (MenuFlags)entry.Data.ReadUInt16(ref flagsOffset);
MenuItem? menuItem;
#if NET20 || NET35
if ((initialFlags & MenuFlags.MF_POPUP) != 0)
#else
if (initialFlags.HasFlag(MenuFlags.MF_POPUP))
#endif
{
menuItem.PopupItemType = (MenuFlags)entry.Data.ReadUInt32(ref offset);
menuItem.PopupState = (MenuFlags)entry.Data.ReadUInt32(ref offset);
menuItem.PopupID = entry.Data.ReadUInt32(ref offset);
menuItem.PopupResInfo = (MenuFlags)entry.Data.ReadUInt32(ref offset);
menuItem.PopupMenuText = entry.Data.ReadString(ref offset, Encoding.Unicode);
}
menuItem = entry.Data.ReadType<PopupMenuItem>(ref offset);
else
{
menuItem.NormalResInfo = (MenuFlags)entry.Data.ReadUInt16(ref offset);
menuItem.NormalMenuText = entry.Data.ReadString(ref offset, Encoding.Unicode);
}
menuItem = entry.Data.ReadType<NormalMenuItem>(ref offset);
// Align to the DWORD boundary if we're not at the end
if (offset < entry.Data.Length)
@@ -1012,6 +963,9 @@ namespace SabreTools.Serialization
_ = entry.Data.ReadByte(ref offset);
}
if (menuItem == null)
return null;
menuItems.Add(menuItem);
}
@@ -1048,11 +1002,9 @@ namespace SabreTools.Serialization
for (int i = 0; i < messageResourceData.NumberOfBlocks; i++)
{
var messageResourceBlock = new MessageResourceBlock();
messageResourceBlock.LowId = entry.Data.ReadUInt32(ref offset);
messageResourceBlock.HighId = entry.Data.ReadUInt32(ref offset);
messageResourceBlock.OffsetToEntries = entry.Data.ReadUInt32(ref offset);
var messageResourceBlock = entry.Data.ReadType<MessageResourceBlock>(ref offset);
if (messageResourceBlock == null)
return null;
messageResourceBlocks.Add(messageResourceBlock);
}
@@ -1158,7 +1110,7 @@ namespace SabreTools.Serialization
versionInfo.Length = entry.Data.ReadUInt16(ref offset);
versionInfo.ValueLength = entry.Data.ReadUInt16(ref offset);
versionInfo.ResourceType = (VersionResourceType)entry.Data.ReadUInt16(ref offset);
versionInfo.Key = entry.Data.ReadString(ref offset, Encoding.Unicode);
versionInfo.Key = entry.Data.ReadNullTerminatedUnicodeString(ref offset);
if (versionInfo.Key != "VS_VERSION_INFO")
return null;
@@ -1168,23 +1120,13 @@ namespace SabreTools.Serialization
// Read fixed file info
if (versionInfo.ValueLength > 0)
{
var fixedFileInfo = new FixedFileInfo();
fixedFileInfo.Signature = entry.Data.ReadUInt32(ref offset);
var fixedFileInfo = entry.Data.ReadType<FixedFileInfo>(ref offset);
if (fixedFileInfo == null)
return null;
if (fixedFileInfo.Signature != 0xFEEF04BD)
return null;
fixedFileInfo.StrucVersion = entry.Data.ReadUInt32(ref offset);
fixedFileInfo.FileVersionMS = entry.Data.ReadUInt32(ref offset);
fixedFileInfo.FileVersionLS = entry.Data.ReadUInt32(ref offset);
fixedFileInfo.ProductVersionMS = entry.Data.ReadUInt32(ref offset);
fixedFileInfo.ProductVersionLS = entry.Data.ReadUInt32(ref offset);
fixedFileInfo.FileFlagsMask = entry.Data.ReadUInt32(ref offset);
fixedFileInfo.FileFlags = (FixedFileInfoFlags)(entry.Data.ReadUInt32(ref offset) & fixedFileInfo.FileFlagsMask);
fixedFileInfo.FileOS = (FixedFileInfoOS)entry.Data.ReadUInt32(ref offset);
fixedFileInfo.FileType = (FixedFileInfoFileType)entry.Data.ReadUInt32(ref offset);
fixedFileInfo.FileSubtype = (FixedFileInfoFileSubtype)entry.Data.ReadUInt32(ref offset);
fixedFileInfo.FileDateMS = entry.Data.ReadUInt32(ref offset);
fixedFileInfo.FileDateLS = entry.Data.ReadUInt32(ref offset);
versionInfo.Value = fixedFileInfo;
}
@@ -1200,7 +1142,7 @@ namespace SabreTools.Serialization
int currentOffset = offset;
offset += 6;
string? nextKey = entry.Data.ReadString(ref offset, Encoding.Unicode);
string? nextKey = entry.Data.ReadNullTerminatedUnicodeString(ref offset);
offset = currentOffset;
if (nextKey == "StringFileInfo")
@@ -1222,7 +1164,7 @@ namespace SabreTools.Serialization
int currentOffset = offset;
offset += 6;
string? nextKey = entry.Data.ReadString(ref offset, Encoding.Unicode);
string? nextKey = entry.Data.ReadNullTerminatedUnicodeString(ref offset);
offset = currentOffset;
if (nextKey == "StringFileInfo")
@@ -1246,7 +1188,7 @@ namespace SabreTools.Serialization
/// <param name="data">Data to parse into a string file info</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>A filled string file info resource on success, null on error</returns>
private static StringFileInfo? AsStringFileInfo(byte[] data, ref int offset)
public static StringFileInfo? AsStringFileInfo(byte[] data, ref int offset)
{
var stringFileInfo = new StringFileInfo();
@@ -1256,7 +1198,7 @@ namespace SabreTools.Serialization
stringFileInfo.Length = data.ReadUInt16(ref offset);
stringFileInfo.ValueLength = data.ReadUInt16(ref offset);
stringFileInfo.ResourceType = (VersionResourceType)data.ReadUInt16(ref offset);
stringFileInfo.Key = data.ReadString(ref offset, Encoding.Unicode);
stringFileInfo.Key = data.ReadNullTerminatedUnicodeString(ref offset);
if (stringFileInfo.Key != "StringFileInfo")
{
offset -= 6 + ((stringFileInfo.Key?.Length ?? 0 + 1) * 2);
@@ -1278,7 +1220,7 @@ namespace SabreTools.Serialization
stringTable.Length = data.ReadUInt16(ref offset);
stringTable.ValueLength = data.ReadUInt16(ref offset);
stringTable.ResourceType = (VersionResourceType)data.ReadUInt16(ref offset);
stringTable.Key = data.ReadString(ref offset, Encoding.Unicode);
stringTable.Key = data.ReadNullTerminatedUnicodeString(ref offset);
// Align to the DWORD boundary if we're not at the end
if (offset < data.Length)
@@ -1296,7 +1238,7 @@ namespace SabreTools.Serialization
stringData.Length = data.ReadUInt16(ref offset);
stringData.ValueLength = data.ReadUInt16(ref offset);
stringData.ResourceType = (VersionResourceType)data.ReadUInt16(ref offset);
stringData.Key = data.ReadString(ref offset, Encoding.Unicode);
stringData.Key = data.ReadNullTerminatedUnicodeString(ref offset);
// Align to the DWORD boundary if we're not at the end
if (offset < data.Length)
@@ -1341,7 +1283,7 @@ namespace SabreTools.Serialization
/// <param name="data">Data to parse into a var file info</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>A filled var file info resource on success, null on error</returns>
private static VarFileInfo? AsVarFileInfo(byte[] data, ref int offset)
public static VarFileInfo? AsVarFileInfo(byte[] data, ref int offset)
{
var varFileInfo = new VarFileInfo();
@@ -1351,7 +1293,7 @@ namespace SabreTools.Serialization
varFileInfo.Length = data.ReadUInt16(ref offset);
varFileInfo.ValueLength = data.ReadUInt16(ref offset);
varFileInfo.ResourceType = (VersionResourceType)data.ReadUInt16(ref offset);
varFileInfo.Key = data.ReadString(ref offset, Encoding.Unicode);
varFileInfo.Key = data.ReadNullTerminatedUnicodeString(ref offset);
if (varFileInfo.Key != "VarFileInfo")
return null;
@@ -1370,7 +1312,7 @@ namespace SabreTools.Serialization
varData.Length = data.ReadUInt16(ref offset);
varData.ValueLength = data.ReadUInt16(ref offset);
varData.ResourceType = (VersionResourceType)data.ReadUInt16(ref offset);
varData.Key = data.ReadString(ref offset, Encoding.Unicode);
varData.Key = data.ReadNullTerminatedUnicodeString(ref offset);
if (varData.Key != "Translation")
{
offset -= 6 + ((varData.Key?.Length ?? 0 + 1) * 2);

View File

@@ -3,19 +3,13 @@ namespace SabreTools.Serialization.Interfaces
/// <summary>
/// Represents a wrapper around a top-level model
/// </summary>
/// <typeparam name="TModel">Top-level model for the wrapper</typeparam>
public interface IWrapper<TModel>
public interface IWrapper
{
/// <summary>
/// Get a human-readable description of the wrapper
/// </summary>
string Description();
/// <summary>
/// Get the backing model
/// </summary>
TModel GetModel();
#if !NETFRAMEWORK
/// <summary>
/// Export the item information as JSON

View File

@@ -0,0 +1,14 @@
namespace SabreTools.Serialization.Interfaces
{
/// <summary>
/// Represents a wrapper around a top-level model
/// </summary>
/// <typeparam name="TModel">Top-level model for the wrapper</typeparam>
public interface IWrapper<TModel> : IWrapper
{
/// <summary>
/// Get the backing model
/// </summary>
TModel GetModel();
}
}
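With the non-typed IWrapper split out, a collection can hold wrappers around different model types and still expose Description(), while model access stays on the generic IWrapper<TModel>. A sketch of that usage, assuming the existing wrappers implement IWrapper<TModel> (and therefore IWrapper) as before:

using System;
using System.Collections.Generic;
using SabreTools.Serialization.Interfaces;

public static class WrapperUsageSketch
{
    public static void Describe(IEnumerable<IWrapper> wrappers)
    {
        foreach (var wrapper in wrappers)
        {
            // Non-typed access: no generic parameter needed to describe the item
            Console.WriteLine(wrapper.Description());
        }
    }

    // Typed access still goes through the generic interface, e.g.:
    // if (wrapper is IWrapper<Cabinet> typed) { var model = typed.GetModel(); }
}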

View File

@@ -8,7 +8,7 @@
<LangVersion>latest</LangVersion>
<Nullable>enable</Nullable>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<Version>1.5.1</Version>
<Version>1.5.4</Version>
<!-- Package Properties -->
<Authors>Matt Nadareski</Authors>
@@ -29,8 +29,8 @@
<ItemGroup>
<PackageReference Include="Newtonsoft.Json" Version="13.0.3" />
<PackageReference Include="SabreTools.Hashing" Version="1.2.0" />
<PackageReference Include="SabreTools.IO" Version="1.3.6" />
<PackageReference Include="SabreTools.Models" Version="1.4.2" />
<PackageReference Include="SabreTools.IO" Version="1.4.0" />
<PackageReference Include="SabreTools.Models" Version="1.4.4" />
</ItemGroup>
</Project>

View File

@@ -57,7 +57,7 @@ namespace SabreTools.Serialization.Wrappers
Encrypted = directoryEntry.DirectoryFlags.HasFlag(Models.GCF.HL_GCF_FLAG.HL_GCF_FLAG_ENCRYPTED),
#endif
};
var pathParts = new List<string> { directoryEntry.Name ?? string.Empty };
var pathParts = new List<string> { this.Model.DirectoryNames![directoryEntry.NameOffset] ?? string.Empty };
var blockEntries = new List<Models.GCF.BlockEntry?>();
// Traverse the parent tree
@@ -68,7 +68,7 @@ namespace SabreTools.Serialization.Wrappers
if (parentDirectoryEntry == null)
break;
pathParts.Add(parentDirectoryEntry.Name ?? string.Empty);
pathParts.Add(this.Model.DirectoryNames![parentDirectoryEntry.NameOffset] ?? string.Empty);
index = parentDirectoryEntry.ParentIndex;
}

View File

@@ -1,8 +1,10 @@
using System.IO;
using System.Linq;
using SabreTools.Models.InstallShieldCabinet;
namespace SabreTools.Serialization.Wrappers
{
public partial class InstallShieldCabinet : WrapperBase<Models.InstallShieldCabinet.Cabinet>
public partial class InstallShieldCabinet : WrapperBase<Cabinet>
{
#region Descriptive Properties
@@ -41,14 +43,14 @@ namespace SabreTools.Serialization.Wrappers
#region Constructors
/// <inheritdoc/>
public InstallShieldCabinet(Models.InstallShieldCabinet.Cabinet? model, byte[]? data, int offset)
public InstallShieldCabinet(Cabinet? model, byte[]? data, int offset)
: base(model, data, offset)
{
// All logic is handled by the base class
}
/// <inheritdoc/>
public InstallShieldCabinet(Models.InstallShieldCabinet.Cabinet? model, Stream? data)
public InstallShieldCabinet(Cabinet? model, Stream? data)
: base(model, data)
{
// All logic is handled by the base class
@@ -101,5 +103,182 @@ namespace SabreTools.Serialization.Wrappers
}
#endregion
#region Component
/// <summary>
/// Get the component name at a given index, if possible
/// </summary>
public string? GetComponentName(int index)
{
if (Model.Components == null)
return null;
if (index < 0 || index >= Model.Components.Length)
return null;
var component = Model.Components[index];
if (component?.Identifier == null)
return null;
return component.Identifier.Replace('\\', '/');
}
#endregion
#region Directory
/// <summary>
/// Get the directory name at a given index, if possible
/// </summary>
public string? GetDirectoryName(int index)
{
if (Model.DirectoryNames == null)
return null;
if (index < 0 || index >= Model.DirectoryNames.Length)
return null;
return Model.DirectoryNames[index];
}
/// <summary>
/// Get the directory index for the given file index
/// </summary>
/// <returns>Directory index if found, UInt32.MaxValue on error</returns>
public uint GetFileDirectoryIndex(int index)
{
FileDescriptor? descriptor = GetFileDescriptor(index);
if (descriptor != null)
return descriptor.DirectoryIndex;
else
return uint.MaxValue;
}
#endregion
#region File
/// <summary>
/// Returns whether the file at a given index is marked as valid
/// </summary>
public bool FileIsValid(int index)
{
if (Model.Descriptor == null)
return false;
if (index < 0 || index > Model.Descriptor.FileCount)
return false;
FileDescriptor? descriptor = GetFileDescriptor(index);
if (descriptor == null)
return false;
#if NET20 || NET35
if ((descriptor.Flags & FileFlags.FILE_INVALID) != 0)
#else
if (descriptor.Flags.HasFlag(FileFlags.FILE_INVALID))
#endif
return false;
if (descriptor.NameOffset == default)
return false;
if (descriptor.DataOffset == default)
return false;
return true;
}
/// <summary>
/// Get the reported expanded file size for a given index
/// </summary>
public ulong GetExpandedFileSize(int index)
{
FileDescriptor? descriptor = GetFileDescriptor(index);
if (descriptor != null)
return descriptor.ExpandedSize;
else
return 0;
}
/// <summary>
/// Get the file descriptor at a given index, if possible
/// </summary>
public FileDescriptor? GetFileDescriptor(int index)
{
if (Model.FileDescriptors == null)
return null;
if (index < 0 || index >= Model.FileDescriptors.Length)
return null;
return Model.FileDescriptors[index];
}
/// <summary>
/// Get the file name at a given index, if possible
/// </summary>
public string? GetFileName(int index)
{
var descriptor = GetFileDescriptor(index);
#if NET20 || NET35
if (descriptor == null || (descriptor.Flags & FileFlags.FILE_INVALID) != 0)
#else
if (descriptor == null || descriptor.Flags.HasFlag(FileFlags.FILE_INVALID))
#endif
return null;
return descriptor.Name;
}
#endregion
#region File Group
/// <summary>
/// Get the file group at a given index, if possible
/// </summary>
public FileGroup? GetFileGroup(int index)
{
if (Model.FileGroups == null)
return null;
if (index < 0 || index >= Model.FileGroups.Length)
return null;
return Model.FileGroups[index];
}
/// <summary>
/// Get the file group with a given name, if possible
/// </summary>
public FileGroup? GetFileGroup(string name)
{
if (Model.FileGroups == null)
return null;
return Model.FileGroups.FirstOrDefault(fg => fg != null && string.Equals(fg.Name, name));
}
/// <summary>
/// Get the file group name at a given index, if possible
/// </summary>
public string? GetFileGroupName(int index)
{
if (Model.FileGroups == null)
return null;
if (index < 0 || index >= Model.FileGroups.Length)
return null;
var fileGroup = Model.FileGroups[index];
if (fileGroup == null)
return null;
return fileGroup.Name;
}
#endregion
}
}
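A minimal usage sketch for the new accessors, assuming a cabinet wrapper has already been created from parsed data and that the caller knows the file count (for example, from the descriptor); ListFiles is an illustrative helper, not part of the library:

using SabreTools.Serialization.Wrappers;

internal static class CabinetUsage
{
    // Print every valid file with its directory, name, and expanded size
    public static void ListFiles(InstallShieldCabinet cabinet, int fileCount)
    {
        for (int i = 0; i < fileCount; i++)
        {
            if (!cabinet.FileIsValid(i))
                continue;

            string? name = cabinet.GetFileName(i);
            uint dirIndex = cabinet.GetFileDirectoryIndex(i);
            string? dir = dirIndex == uint.MaxValue
                ? null
                : cabinet.GetDirectoryName((int)dirIndex);
            ulong size = cabinet.GetExpandedFileSize(i);

            System.Console.WriteLine($"{dir}/{name} ({size} bytes)");
        }
    }
}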

View File

@@ -1,14 +1,8 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using SabreTools.IO.Extensions;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Wrappers
{
public abstract class WrapperBase<T> : IWrapper<T>
public abstract class WrapperBase : IWrapper
{
#region Descriptive Properties
@@ -22,339 +16,13 @@ namespace SabreTools.Serialization.Wrappers
#endregion
#region Properties
/// <inheritdoc/>
public T GetModel() => Model;
/// <summary>
/// Internal model
/// </summary>
public T Model { get; private set; }
#endregion
#region Instance Variables
/// <summary>
/// Source of the original data
/// </summary>
protected DataSource _dataSource = DataSource.UNKNOWN;
/// <summary>
/// Source byte array data
/// </summary>
/// <remarks>This is only populated if <see cref="_dataSource"/> is <see cref="DataSource.ByteArray"/></remarks>
protected byte[]? _byteArrayData = null;
/// <summary>
/// Source byte array data offset
/// </summary>
/// <remarks>This is only populated if <see cref="_dataSource"/> is <see cref="DataSource.ByteArray"/></remarks>
protected int _byteArrayOffset = -1;
/// <summary>
/// Source Stream data
/// </summary>
/// <remarks>This is only populated if <see cref="_dataSource"/> is <see cref="DataSource.Stream"/></remarks>
protected Stream? _streamData = null;
#if !NETFRAMEWORK
/// <summary>
/// JSON serializer options for output printing
/// </summary>
protected System.Text.Json.JsonSerializerOptions _jsonSerializerOptions
{
get
{
#if NETCOREAPP3_1
var serializer = new System.Text.Json.JsonSerializerOptions { WriteIndented = true };
#else
var serializer = new System.Text.Json.JsonSerializerOptions { IncludeFields = true, WriteIndented = true };
#endif
serializer.Converters.Add(new ConcreteAbstractSerializer());
serializer.Converters.Add(new ConcreteInterfaceSerializer());
serializer.Converters.Add(new System.Text.Json.Serialization.JsonStringEnumConverter());
return serializer;
}
}
#endif
#endregion
#region Constructors
/// <summary>
/// Construct a new instance of the wrapper from a byte array
/// </summary>
protected WrapperBase(T? model, byte[]? data, int offset)
{
if (model == null)
throw new ArgumentNullException(nameof(model));
if (data == null)
throw new ArgumentNullException(nameof(data));
if (offset < 0 || offset >= data.Length)
throw new ArgumentOutOfRangeException(nameof(offset));
this.Model = model;
_dataSource = DataSource.ByteArray;
_byteArrayData = data;
_byteArrayOffset = offset;
}
/// <summary>
/// Construct a new instance of the wrapper from a Stream
/// </summary>
protected WrapperBase(T? model, Stream? data)
{
if (model == null)
throw new ArgumentNullException(nameof(model));
if (data == null)
throw new ArgumentNullException(nameof(data));
if (data.Length == 0 || !data.CanSeek || !data.CanRead)
throw new ArgumentOutOfRangeException(nameof(data));
this.Model = model;
_dataSource = DataSource.Stream;
_streamData = data;
}
#endregion
#region Data
/// <summary>
/// Validate the backing data source
/// </summary>
/// <returns>True if the data source is valid, false otherwise</returns>
public bool DataSourceIsValid()
{
switch (_dataSource)
{
// Byte array data requires both a valid array and offset
case DataSource.ByteArray:
return _byteArrayData != null && _byteArrayOffset >= 0;
// Stream data requires a readable, seekable stream
case DataSource.Stream:
return _streamData != null && _streamData.CanRead && _streamData.CanSeek;
// Everything else is invalid
case DataSource.UNKNOWN:
default:
return false;
}
}
/// <summary>
/// Check if a data segment is valid in the data source
/// </summary>
/// <param name="position">Position in the source</param>
/// <param name="length">Length of the data to check</param>
/// <returns>True if the positional data is valid, false otherwise</returns>
public bool SegmentValid(int position, int length)
{
// Validate the data source
if (!DataSourceIsValid())
return false;
// If we have an invalid position
if (position < 0 || position >= GetEndOfFile())
return false;
switch (_dataSource)
{
case DataSource.ByteArray:
return _byteArrayOffset + position + length <= _byteArrayData!.Length;
case DataSource.Stream:
return position + length <= _streamData!.Length;
// Everything else is invalid
case DataSource.UNKNOWN:
default:
return false;
}
}
/// <summary>
/// Read data from the source
/// </summary>
/// <param name="position">Position in the source to read from</param>
/// <param name="length">Length of the requested data</param>
/// <returns>Byte array containing the requested data, null on error</returns>
public byte[]? ReadFromDataSource(int position, int length)
{
// Validate the data source
if (!DataSourceIsValid())
return null;
// Validate the requested segment
if (!SegmentValid(position, length))
return null;
// Read and return the data
byte[]? sectionData = null;
switch (_dataSource)
{
case DataSource.ByteArray:
sectionData = new byte[length];
Array.Copy(_byteArrayData!, _byteArrayOffset + position, sectionData, 0, length);
break;
case DataSource.Stream:
long currentLocation = _streamData!.Position;
_streamData.Seek(position, SeekOrigin.Begin);
sectionData = _streamData.ReadBytes(length);
_streamData.Seek(currentLocation, SeekOrigin.Begin);
break;
}
return sectionData;
}
/// <summary>
/// Read string data from the source
/// </summary>
/// <param name="position">Position in the source to read from</param>
/// <param name="length">Length of the requested data</param>
/// <param name="charLimit">Number of characters needed to be a valid string</param>
/// <returns>String list containing the requested data, null on error</returns>
public List<string>? ReadStringsFromDataSource(int position, int length, int charLimit = 5)
{
// Read the data as a byte array first
byte[]? sourceData = ReadFromDataSource(position, length);
if (sourceData == null)
return null;
// If we have an invalid character limit, default to 5
if (charLimit <= 0)
charLimit = 5;
// Create the string list to return
var sourceStrings = new List<string>();
// Setup cached data
int sourceDataIndex = 0;
string cachedString = string.Empty;
// Check for ASCII strings
while (sourceDataIndex < sourceData.Length)
{
// If we have a control character or an invalid byte
if (sourceData[sourceDataIndex] < 0x20 || sourceData[sourceDataIndex] > 0x7F)
{
// If we have no cached string
if (cachedString.Length == 0)
{
sourceDataIndex++;
continue;
}
// If we have a cached string greater than the limit
if (cachedString.Length >= charLimit)
sourceStrings.Add(cachedString);
cachedString = string.Empty;
sourceDataIndex++;
continue;
}
// All other characters get read in
cachedString += Encoding.ASCII.GetString(sourceData, sourceDataIndex, 1);
sourceDataIndex++;
}
// If we have a cached string greater than the limit
if (cachedString.Length >= charLimit)
sourceStrings.Add(cachedString);
// Reset cached data
sourceDataIndex = 0;
cachedString = string.Empty;
// For now, only Unicode characters whose second byte is 0x00 are considered
// Check for Unicode strings
while (sourceDataIndex < sourceData.Length)
{
// UTF-16 code units are always 2 bytes
if (sourceDataIndex == sourceData.Length - 1)
break;
ushort ch = BitConverter.ToUInt16(sourceData, sourceDataIndex);
// If we have a null terminator or "invalid" character
if (ch == 0x0000 || (ch & 0xFF00) != 0)
{
// If we have no cached string
if (cachedString.Length == 0)
{
sourceDataIndex += 2;
continue;
}
// If we have a cached string greater than the limit
if (cachedString.Length >= charLimit)
sourceStrings.Add(cachedString);
cachedString = string.Empty;
sourceDataIndex += 2;
continue;
}
// All other characters get read in
cachedString += Encoding.Unicode.GetString(sourceData, sourceDataIndex, 2);
sourceDataIndex += 2;
}
// If we have a cached string greater than the limit
if (cachedString.Length >= charLimit)
sourceStrings.Add(cachedString);
// Deduplicate the string list for storage
sourceStrings = sourceStrings.Distinct().OrderBy(s => s).ToList();
// TODO: Complete implementation of string finding
return sourceStrings;
}
/// <summary>
/// Get the ending offset of the source
/// </summary>
/// <returns>Value greater than 0 for a valid end of file, -1 on error</returns>
public int GetEndOfFile()
{
// Validate the data source
if (!DataSourceIsValid())
return -1;
// Return the effective endpoint
switch (_dataSource)
{
case DataSource.ByteArray:
return _byteArrayData!.Length - _byteArrayOffset;
case DataSource.Stream:
return (int)_streamData!.Length;
case DataSource.UNKNOWN:
default:
return -1;
}
}
#endregion
#region JSON Export
#if !NETFRAMEWORK
/// <summary>
/// Export the item information as JSON
/// </summary>
public string ExportJSON() => System.Text.Json.JsonSerializer.Serialize(Model, _jsonSerializerOptions);
public abstract string ExportJSON();
#endif
#endregion

View File

@@ -0,0 +1,350 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using SabreTools.IO.Extensions;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Wrappers
{
public abstract class WrapperBase<T> : WrapperBase, IWrapper<T>
{
#region Properties
/// <inheritdoc/>
public T GetModel() => Model;
/// <summary>
/// Internal model
/// </summary>
public T Model { get; private set; }
#endregion
#region Instance Variables
/// <summary>
/// Source of the original data
/// </summary>
protected DataSource _dataSource = DataSource.UNKNOWN;
/// <summary>
/// Source byte array data
/// </summary>
/// <remarks>This is only populated if <see cref="_dataSource"/> is <see cref="DataSource.ByteArray"/></remarks>
protected byte[]? _byteArrayData = null;
/// <summary>
/// Source byte array data offset
/// </summary>
/// <remarks>This is only populated if <see cref="_dataSource"/> is <see cref="DataSource.ByteArray"/></remarks>
protected int _byteArrayOffset = -1;
/// <summary>
/// Source Stream data
/// </summary>
/// <remarks>This is only populated if <see cref="_dataSource"/> is <see cref="DataSource.Stream"/></remarks>
protected Stream? _streamData = null;
#if !NETFRAMEWORK
/// <summary>
/// JSON serializer options for output printing
/// </summary>
protected System.Text.Json.JsonSerializerOptions _jsonSerializerOptions
{
get
{
#if NETCOREAPP3_1
var serializer = new System.Text.Json.JsonSerializerOptions { WriteIndented = true };
#else
var serializer = new System.Text.Json.JsonSerializerOptions { IncludeFields = true, WriteIndented = true };
#endif
serializer.Converters.Add(new ConcreteAbstractSerializer());
serializer.Converters.Add(new ConcreteInterfaceSerializer());
serializer.Converters.Add(new System.Text.Json.Serialization.JsonStringEnumConverter());
return serializer;
}
}
#endif
#endregion
#region Constructors
/// <summary>
/// Construct a new instance of the wrapper from a byte array
/// </summary>
protected WrapperBase(T? model, byte[]? data, int offset)
{
if (model == null)
throw new ArgumentNullException(nameof(model));
if (data == null)
throw new ArgumentNullException(nameof(data));
if (offset < 0 || offset >= data.Length)
throw new ArgumentOutOfRangeException(nameof(offset));
this.Model = model;
_dataSource = DataSource.ByteArray;
_byteArrayData = data;
_byteArrayOffset = offset;
}
/// <summary>
/// Construct a new instance of the wrapper from a Stream
/// </summary>
protected WrapperBase(T? model, Stream? data)
{
if (model == null)
throw new ArgumentNullException(nameof(model));
if (data == null)
throw new ArgumentNullException(nameof(data));
if (data.Length == 0 || !data.CanSeek || !data.CanRead)
throw new ArgumentOutOfRangeException(nameof(data));
this.Model = model;
_dataSource = DataSource.Stream;
_streamData = data;
}
#endregion
#region Data
/// <summary>
/// Validate the backing data source
/// </summary>
/// <returns>True if the data source is valid, false otherwise</returns>
public bool DataSourceIsValid()
{
switch (_dataSource)
{
// Byte array data requires both a valid array and offset
case DataSource.ByteArray:
return _byteArrayData != null && _byteArrayOffset >= 0;
// Stream data requires a readable, seekable stream
case DataSource.Stream:
return _streamData != null && _streamData.CanRead && _streamData.CanSeek;
// Everything else is invalid
case DataSource.UNKNOWN:
default:
return false;
}
}
/// <summary>
/// Check if a data segment is valid in the data source
/// </summary>
/// <param name="position">Position in the source</param>
/// <param name="length">Length of the data to check</param>
/// <returns>True if the positional data is valid, false otherwise</returns>
public bool SegmentValid(int position, int length)
{
// Validate the data source
if (!DataSourceIsValid())
return false;
// If we have an invalid position
if (position < 0 || position >= GetEndOfFile())
return false;
switch (_dataSource)
{
case DataSource.ByteArray:
return _byteArrayOffset + position + length <= _byteArrayData!.Length;
case DataSource.Stream:
return position + length <= _streamData!.Length;
// Everything else is invalid
case DataSource.UNKNOWN:
default:
return false;
}
}
/// <summary>
/// Read data from the source
/// </summary>
/// <param name="position">Position in the source to read from</param>
/// <param name="length">Length of the requested data</param>
/// <returns>Byte array containing the requested data, null on error</returns>
public byte[]? ReadFromDataSource(int position, int length)
{
// Validate the data source
if (!DataSourceIsValid())
return null;
// Validate the requested segment
if (!SegmentValid(position, length))
return null;
// Read and return the data
byte[]? sectionData = null;
switch (_dataSource)
{
case DataSource.ByteArray:
sectionData = new byte[length];
Array.Copy(_byteArrayData!, _byteArrayOffset + position, sectionData, 0, length);
break;
case DataSource.Stream:
long currentLocation = _streamData!.Position;
_streamData.Seek(position, SeekOrigin.Begin);
sectionData = _streamData.ReadBytes(length);
_streamData.Seek(currentLocation, SeekOrigin.Begin);
break;
}
return sectionData;
}
/// <summary>
/// Read string data from the source
/// </summary>
/// <param name="position">Position in the source to read from</param>
/// <param name="length">Length of the requested data</param>
/// <param name="charLimit">Number of characters needed to be a valid string</param>
/// <returns>String list containing the requested data, null on error</returns>
public List<string>? ReadStringsFromDataSource(int position, int length, int charLimit = 5)
{
// Read the data as a byte array first
byte[]? sourceData = ReadFromDataSource(position, length);
if (sourceData == null)
return null;
// If we have an invalid character limit, default to 5
if (charLimit <= 0)
charLimit = 5;
// Create the string list to return
var sourceStrings = new List<string>();
// Setup cached data
int sourceDataIndex = 0;
string cachedString = string.Empty;
// Check for ASCII strings
while (sourceDataIndex < sourceData.Length)
{
// If we have a control character or an invalid byte
if (sourceData[sourceDataIndex] < 0x20 || sourceData[sourceDataIndex] > 0x7F)
{
// If we have no cached string
if (cachedString.Length == 0)
{
sourceDataIndex++;
continue;
}
// If we have a cached string greater than the limit
if (cachedString.Length >= charLimit)
sourceStrings.Add(cachedString);
cachedString = string.Empty;
sourceDataIndex++;
continue;
}
// All other characters get read in
cachedString += Encoding.ASCII.GetString(sourceData, sourceDataIndex, 1);
sourceDataIndex++;
}
// If we have a cached string greater than the limit
if (cachedString.Length >= charLimit)
sourceStrings.Add(cachedString);
// Reset cached data
sourceDataIndex = 0;
cachedString = string.Empty;
// For now, only Unicode characters whose second byte is 0x00 are considered
// Check for Unicode strings
while (sourceDataIndex < sourceData.Length)
{
// UTF-16 code units are always 2 bytes
if (sourceDataIndex == sourceData.Length - 1)
break;
ushort ch = BitConverter.ToUInt16(sourceData, sourceDataIndex);
// If we have a null terminator or "invalid" character
if (ch == 0x0000 || (ch & 0xFF00) != 0)
{
// If we have no cached string
if (cachedString.Length == 0)
{
sourceDataIndex += 2;
continue;
}
// If we have a cached string greater than the limit
if (cachedString.Length >= charLimit)
sourceStrings.Add(cachedString);
cachedString = string.Empty;
sourceDataIndex += 2;
continue;
}
// All other characters get read in
cachedString += Encoding.Unicode.GetString(sourceData, sourceDataIndex, 2);
sourceDataIndex += 2;
}
// If we have a cached string greater than the limit
if (cachedString.Length >= charLimit)
sourceStrings.Add(cachedString);
// Deduplicate the string list for storage
sourceStrings = sourceStrings.Distinct().OrderBy(s => s).ToList();
// TODO: Complete implementation of string finding
return sourceStrings;
}
/// <summary>
/// Get the ending offset of the source
/// </summary>
/// <returns>Value greater than 0 for a valid end of file, -1 on error</returns>
public int GetEndOfFile()
{
// Validate the data source
if (!DataSourceIsValid())
return -1;
// Return the effective endpoint
switch (_dataSource)
{
case DataSource.ByteArray:
return _byteArrayData!.Length - _byteArrayOffset;
case DataSource.Stream:
return (int)_streamData!.Length;
case DataSource.UNKNOWN:
default:
return -1;
}
}
#endregion
#region JSON Export
#if !NETFRAMEWORK
/// <summary>
/// Export the item information as JSON
/// </summary>
public override string ExportJSON() => System.Text.Json.JsonSerializer.Serialize(Model, _jsonSerializerOptions);
#endif
#endregion
}
}
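A minimal sketch of the data-source helpers in use, assuming any concrete wrapper derived from WrapperBase<T>; DumpStrings is an illustrative helper, not part of the library:

using SabreTools.Serialization.Wrappers;

internal static class DataSourceUsage
{
    // Print printable ASCII/Unicode strings of at least eight characters
    // found anywhere in the wrapper's backing data.
    public static void DumpStrings<T>(WrapperBase<T> wrapper)
    {
        int end = wrapper.GetEndOfFile();
        if (end <= 0)
            return;

        var strings = wrapper.ReadStringsFromDataSource(0, end, charLimit: 8);
        if (strings == null)
            return;

        foreach (string s in strings)
            System.Console.WriteLine(s);
    }
}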