From 8466edf80fc278a2adff1bf19844caa37f2dc350 Mon Sep 17 00:00:00 2001
From: Matt Nadareski
Date: Sun, 10 Sep 2023 23:51:38 -0400
Subject: [PATCH] Migrate to Serialization package

---
 BinaryObjectScanner.Builders/AACS.cs | 470 ------
 BinaryObjectScanner.Builders/BDPlus.cs | 95 --
 BinaryObjectScanner.Builders/BFPK.cs | 150 --
 BinaryObjectScanner.Builders/BSP.cs | 250 ---
 .../BinaryObjectScanner.Builders.csproj | 32 -
 BinaryObjectScanner.Builders/CFB.cs | 419 -----
 BinaryObjectScanner.Builders/Extensions.cs | 1435 -----------------
 BinaryObjectScanner.Builders/GCF.cs | 775 ---------
 .../InstallShieldCabinet.cs | 808 ----------
 .../LinearExecutable.cs | 943 -----------
 BinaryObjectScanner.Builders/MSDOS.cs | 175 --
 .../MicrosoftCabinet.cs | 258 ---
 BinaryObjectScanner.Builders/MoPaQ.cs | 651 --------
 BinaryObjectScanner.Builders/N3DS.cs | 1224 --------------
 BinaryObjectScanner.Builders/NCF.cs | 544 -------
 BinaryObjectScanner.Builders/NewExecutable.cs | 508 ------
 BinaryObjectScanner.Builders/Nitro.cs | 393 -----
 BinaryObjectScanner.Builders/PAK.cs | 137 --
 BinaryObjectScanner.Builders/PFF.cs | 211 ---
 BinaryObjectScanner.Builders/PlayJ.cs | 463 ------
 .../PortableExecutable.cs | 1344 ---------------
 BinaryObjectScanner.Builders/Quantum.cs | 184 ---
 BinaryObjectScanner.Builders/SGA.cs | 732 ---------
 BinaryObjectScanner.Builders/VBSP.cs | 141 --
 BinaryObjectScanner.Builders/VPK.cs | 318 ----
 BinaryObjectScanner.Builders/WAD.cs | 266 ---
 BinaryObjectScanner.Builders/XZP.cs | 274 ----
 .../AACSMediaKeyBlock.cs | 2 +-
 BinaryObjectScanner.Wrappers/BDPlusSVM.cs | 2 +-
 BinaryObjectScanner.Wrappers/BFPK.cs | 2 +-
 BinaryObjectScanner.Wrappers/BSP.cs | 2 +-
 .../BinaryObjectScanner.Wrappers.csproj | 2 +-
 BinaryObjectScanner.Wrappers/CFB.cs | 2 +-
 BinaryObjectScanner.Wrappers/CIA.cs | 2 +-
 BinaryObjectScanner.Wrappers/GCF.cs | 2 +-
 .../InstallShieldCabinet.cs | 2 +-
 .../LinearExecutable.cs | 2 +-
 BinaryObjectScanner.Wrappers/MSDOS.cs | 2 +-
 .../MicrosoftCabinet.cs | 2 +-
 BinaryObjectScanner.Wrappers/N3DS.cs | 2 +-
 BinaryObjectScanner.Wrappers/NCF.cs | 2 +-
 BinaryObjectScanner.Wrappers/NewExecutable.cs | 4 +-
 BinaryObjectScanner.Wrappers/Nitro.cs | 2 +-
 BinaryObjectScanner.Wrappers/PAK.cs | 2 +-
 BinaryObjectScanner.Wrappers/PFF.cs | 2 +-
 .../PlayJAudioFile.cs | 2 +-
 .../PortableExecutable.cs | 4 +-
 BinaryObjectScanner.Wrappers/Quantum.cs | 2 +-
 BinaryObjectScanner.Wrappers/SGA.cs | 2 +-
 BinaryObjectScanner.Wrappers/VBSP.cs | 2 +-
 BinaryObjectScanner.Wrappers/VPK.cs | 2 +-
 BinaryObjectScanner.Wrappers/WAD.cs | 2 +-
 BinaryObjectScanner.Wrappers/XZP.cs | 2 +-
 BurnOutSharp.sln | 6 -
 BurnOutSharp/BurnOutSharp.csproj | 4 -
 Test/Test.csproj | 2 +-
 56 files changed, 29 insertions(+), 13239 deletions(-)
 delete mode 100644 BinaryObjectScanner.Builders/AACS.cs
 delete mode 100644 BinaryObjectScanner.Builders/BDPlus.cs
 delete mode 100644 BinaryObjectScanner.Builders/BFPK.cs
 delete mode 100644 BinaryObjectScanner.Builders/BSP.cs
 delete mode 100644 BinaryObjectScanner.Builders/BinaryObjectScanner.Builders.csproj
 delete mode 100644 BinaryObjectScanner.Builders/CFB.cs
 delete mode 100644 BinaryObjectScanner.Builders/Extensions.cs
 delete mode 100644 BinaryObjectScanner.Builders/GCF.cs
 delete mode 100644 BinaryObjectScanner.Builders/InstallShieldCabinet.cs
 delete mode 100644 BinaryObjectScanner.Builders/LinearExecutable.cs
 delete mode 100644 BinaryObjectScanner.Builders/MSDOS.cs
 delete mode 100644 BinaryObjectScanner.Builders/MicrosoftCabinet.cs
 delete mode 100644 BinaryObjectScanner.Builders/MoPaQ.cs
 delete mode 100644 BinaryObjectScanner.Builders/N3DS.cs
 delete mode 100644 BinaryObjectScanner.Builders/NCF.cs
 delete mode 100644 BinaryObjectScanner.Builders/NewExecutable.cs
 delete mode 100644 BinaryObjectScanner.Builders/Nitro.cs
 delete mode 100644 BinaryObjectScanner.Builders/PAK.cs
 delete mode 100644 BinaryObjectScanner.Builders/PFF.cs
 delete mode 100644 BinaryObjectScanner.Builders/PlayJ.cs
 delete mode 100644 BinaryObjectScanner.Builders/PortableExecutable.cs
 delete mode 100644 BinaryObjectScanner.Builders/Quantum.cs
 delete mode 100644 BinaryObjectScanner.Builders/SGA.cs
 delete mode 100644 BinaryObjectScanner.Builders/VBSP.cs
 delete mode 100644 BinaryObjectScanner.Builders/VPK.cs
 delete mode 100644 BinaryObjectScanner.Builders/WAD.cs
 delete mode 100644 BinaryObjectScanner.Builders/XZP.cs

diff --git a/BinaryObjectScanner.Builders/AACS.cs b/BinaryObjectScanner.Builders/AACS.cs
deleted file mode 100644
index 5865287d..00000000
--- a/BinaryObjectScanner.Builders/AACS.cs
+++ /dev/null
@@ -1,470 +0,0 @@
-using System;
-using System.Collections.Generic;
-using System.IO;
-using System.Text;
-using SabreTools.IO;
-using SabreTools.Models.AACS;
-
-namespace BinaryObjectScanner.Builders
-{
-    public class AACS
-    {
-        #region Byte Data
-
-        /// <summary>
-        /// Parse a byte array into an AACS media key block
-        /// </summary>
-        /// <param name="data">Byte array to parse</param>
-        /// <param name="offset">Offset into the byte array</param>
-        /// <returns>Filled archive on success, null on error</returns>
-        public static MediaKeyBlock ParseMediaKeyBlock(byte[] data, int offset)
-        {
-            // If the data is invalid
-            if (data == null)
-                return null;
-
-            // If the offset is out of bounds
-            if (offset < 0 || offset >= data.Length)
-                return null;
-
-            // Create a memory stream and parse that
-            MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
-            return ParseMediaKeyBlock(dataStream);
-        }
-
-        #endregion
-
-        #region Stream Data
-
-        /// <summary>
-        /// Parse a Stream into an AACS media key block
-        /// </summary>
-        /// <param name="data">Stream to parse</param>
-        /// <returns>Filled media key block on success, null on error</returns>
-        public static MediaKeyBlock ParseMediaKeyBlock(Stream data)
-        {
-            // If the data is invalid
-            if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
-                return null;
-
-            // If the offset is out of bounds
-            if (data.Position < 0 || data.Position >= data.Length)
-                return null;
-
-            // Cache the current offset
-            int initialOffset = (int)data.Position;
-
-            // Create a new media key block to fill
-            var mediaKeyBlock = new MediaKeyBlock();
-
-            #region Records
-
-            // Create the records list
-            var records = new List<Record>();
-
-            // Try to parse the records
-            while (data.Position < data.Length)
-            {
-                // Try to parse the record
-                var record = ParseRecord(data);
-                if (record == null)
-                    return null;
-
-                // Add the record
-                records.Add(record);
-
-                // If we have an end of media key block record
-                if (record.RecordType == RecordType.EndOfMediaKeyBlock)
-                    break;
-
-                // Align to the 4-byte boundary if we're not at the end
-                if (data.Position != data.Length)
-                {
-                    while ((data.Position % 4) != 0)
-                        _ = data.ReadByteValue();
-                }
-                else
-                {
-                    break;
-                }
-            }
-
-            // Set the records
-            mediaKeyBlock.Records = records.ToArray();
-
-            #endregion
-
-            return mediaKeyBlock;
-        }
-
-        /// <summary>
-        /// Parse a Stream into a record
-        /// </summary>
-        /// <param name="data">Stream to parse</param>
-        /// <returns>Filled record on success, null on error</returns>
-        private static Record ParseRecord(Stream data)
-        {
-            // TODO: Use marshalling here instead of building
-
-            // The first 4 bytes make up the type and length
-            byte[] typeAndLength = data.ReadBytes(4);
-            RecordType type = (RecordType)typeAndLength[0];
-
-            // Remove
the first byte and parse as big-endian - typeAndLength[0] = 0x00; - Array.Reverse(typeAndLength); - uint length = BitConverter.ToUInt32(typeAndLength, 0); - - // Create a record based on the type - switch (type) - { - // Recognized record types - case RecordType.EndOfMediaKeyBlock: return ParseEndOfMediaKeyBlockRecord(data, type, length); - case RecordType.ExplicitSubsetDifference: return ParseExplicitSubsetDifferenceRecord(data, type, length); - case RecordType.MediaKeyData: return ParseMediaKeyDataRecord(data, type, length); - case RecordType.SubsetDifferenceIndex: return ParseSubsetDifferenceIndexRecord(data, type, length); - case RecordType.TypeAndVersion: return ParseTypeAndVersionRecord(data, type, length); - case RecordType.DriveRevocationList: return ParseDriveRevocationListRecord(data, type, length); - case RecordType.HostRevocationList: return ParseHostRevocationListRecord(data, type, length); - case RecordType.VerifyMediaKey: return ParseVerifyMediaKeyRecord(data, type, length); - case RecordType.Copyright: return ParseCopyrightRecord(data, type, length); - - // Unrecognized record type - default: - return null; - } - } - - /// - /// Parse a Stream into an end of media key block record - /// - /// Stream to parse - /// Filled end of media key block record on success, null on error - private static EndOfMediaKeyBlockRecord ParseEndOfMediaKeyBlockRecord(Stream data, RecordType type, uint length) - { - // Verify we're calling the right parser - if (type != RecordType.EndOfMediaKeyBlock) - return null; - - // TODO: Use marshalling here instead of building - var record = new EndOfMediaKeyBlockRecord(); - - record.RecordType = type; - record.RecordLength = length; - if (length > 4) - record.SignatureData = data.ReadBytes((int)(length - 4)); - - return record; - } - - /// - /// Parse a Stream into an explicit subset-difference record - /// - /// Stream to parse - /// Filled explicit subset-difference record on success, null on error - private static ExplicitSubsetDifferenceRecord ParseExplicitSubsetDifferenceRecord(Stream data, RecordType type, uint length) - { - // Verify we're calling the right parser - if (type != RecordType.ExplicitSubsetDifference) - return null; - - // TODO: Use marshalling here instead of building - var record = new ExplicitSubsetDifferenceRecord(); - - record.RecordType = type; - record.RecordLength = length; - - // Cache the current offset - long initialOffset = data.Position - 4; - - // Create the subset difference list - var subsetDifferences = new List(); - - // Try to parse the subset differences - while (data.Position < initialOffset + length - 5) - { - var subsetDifference = new SubsetDifference(); - - subsetDifference.Mask = data.ReadByteValue(); - subsetDifference.Number = data.ReadUInt32BigEndian(); - - subsetDifferences.Add(subsetDifference); - } - - // Set the subset differences - record.SubsetDifferences = subsetDifferences.ToArray(); - - // If there's any data left, discard it - if (data.Position < initialOffset + length) - _ = data.ReadBytes((int)(initialOffset + length - data.Position)); - - return record; - } - - /// - /// Parse a Stream into a media key data record - /// - /// Stream to parse - /// Filled media key data record on success, null on error - private static MediaKeyDataRecord ParseMediaKeyDataRecord(Stream data, RecordType type, uint length) - { - // Verify we're calling the right parser - if (type != RecordType.MediaKeyData) - return null; - - // TODO: Use marshalling here instead of building - var record = new 
MediaKeyDataRecord(); - - record.RecordType = type; - record.RecordLength = length; - - // Cache the current offset - long initialOffset = data.Position - 4; - - // Create the media key list - var mediaKeys = new List(); - - // Try to parse the media keys - while (data.Position < initialOffset + length) - { - byte[] mediaKey = data.ReadBytes(0x10); - mediaKeys.Add(mediaKey); - } - - // Set the media keys - record.MediaKeyData = mediaKeys.ToArray(); - - return record; - } - - /// - /// Parse a Stream into a subset-difference index record - /// - /// Stream to parse - /// Filled subset-difference index record on success, null on error - private static SubsetDifferenceIndexRecord ParseSubsetDifferenceIndexRecord(Stream data, RecordType type, uint length) - { - // Verify we're calling the right parser - if (type != RecordType.SubsetDifferenceIndex) - return null; - - // TODO: Use marshalling here instead of building - var record = new SubsetDifferenceIndexRecord(); - - record.RecordType = type; - record.RecordLength = length; - - // Cache the current offset - long initialOffset = data.Position - 4; - - record.Span = data.ReadUInt32BigEndian(); - - // Create the offset list - var offsets = new List(); - - // Try to parse the offsets - while (data.Position < initialOffset + length) - { - uint offset = data.ReadUInt32BigEndian(); - offsets.Add(offset); - } - - // Set the offsets - record.Offsets = offsets.ToArray(); - - return record; - } - - /// - /// Parse a Stream into a type and version record - /// - /// Stream to parse - /// Filled type and version record on success, null on error - private static TypeAndVersionRecord ParseTypeAndVersionRecord(Stream data, RecordType type, uint length) - { - // Verify we're calling the right parser - if (type != RecordType.TypeAndVersion) - return null; - - // TODO: Use marshalling here instead of building - var record = new TypeAndVersionRecord(); - - record.RecordType = type; - record.RecordLength = length; - record.MediaKeyBlockType = (MediaKeyBlockType)data.ReadUInt32BigEndian(); - record.VersionNumber = data.ReadUInt32BigEndian(); - - return record; - } - - /// - /// Parse a Stream into a drive revocation list record - /// - /// Stream to parse - /// Filled drive revocation list record on success, null on error - private static DriveRevocationListRecord ParseDriveRevocationListRecord(Stream data, RecordType type, uint length) - { - // Verify we're calling the right parser - if (type != RecordType.DriveRevocationList) - return null; - - // TODO: Use marshalling here instead of building - var record = new DriveRevocationListRecord(); - - record.RecordType = type; - record.RecordLength = length; - - // Cache the current offset - long initialOffset = data.Position - 4; - - record.TotalNumberOfEntries = data.ReadUInt32BigEndian(); - - // Create the signature blocks list - var blocks = new List(); - - // Try to parse the signature blocks - int entryCount = 0; - while (entryCount < record.TotalNumberOfEntries && data.Position < initialOffset + length) - { - var block = new DriveRevocationSignatureBlock(); - - block.NumberOfEntries = data.ReadUInt32BigEndian(); - block.EntryFields = new DriveRevocationListEntry[block.NumberOfEntries]; - for (int i = 0; i < block.EntryFields.Length; i++) - { - var entry = new DriveRevocationListEntry(); - - entry.Range = data.ReadUInt16BigEndian(); - entry.DriveID = data.ReadBytes(6); - - block.EntryFields[i] = entry; - entryCount++; - } - - blocks.Add(block); - - // If we have an empty block - if (block.NumberOfEntries == 0) - 
break; - } - - // Set the signature blocks - record.SignatureBlocks = blocks.ToArray(); - - // If there's any data left, discard it - if (data.Position < initialOffset + length) - _ = data.ReadBytes((int)(initialOffset + length - data.Position)); - - return record; - } - - /// - /// Parse a Stream into a host revocation list record - /// - /// Stream to parse - /// Filled host revocation list record on success, null on error - private static HostRevocationListRecord ParseHostRevocationListRecord(Stream data, RecordType type, uint length) - { - // Verify we're calling the right parser - if (type != RecordType.HostRevocationList) - return null; - - // TODO: Use marshalling here instead of building - var record = new HostRevocationListRecord(); - - record.RecordType = type; - record.RecordLength = length; - - // Cache the current offset - long initialOffset = data.Position - 4; - - record.TotalNumberOfEntries = data.ReadUInt32BigEndian(); - - // Create the signature blocks list - var blocks = new List(); - - // Try to parse the signature blocks - int entryCount = 0; - while (entryCount < record.TotalNumberOfEntries && data.Position < initialOffset + length) - { - var block = new HostRevocationSignatureBlock(); - - block.NumberOfEntries = data.ReadUInt32BigEndian(); - block.EntryFields = new HostRevocationListEntry[block.NumberOfEntries]; - for (int i = 0; i < block.EntryFields.Length; i++) - { - var entry = new HostRevocationListEntry(); - - entry.Range = data.ReadUInt16BigEndian(); - entry.HostID = data.ReadBytes(6); - - block.EntryFields[i] = entry; - entryCount++; - } - - blocks.Add(block); - - // If we have an empty block - if (block.NumberOfEntries == 0) - break; - } - - // Set the signature blocks - record.SignatureBlocks = blocks.ToArray(); - - // If there's any data left, discard it - if (data.Position < initialOffset + length) - _ = data.ReadBytes((int)(initialOffset + length - data.Position)); - - return record; - } - - /// - /// Parse a Stream into a verify media key record - /// - /// Stream to parse - /// Filled verify media key record on success, null on error - private static VerifyMediaKeyRecord ParseVerifyMediaKeyRecord(Stream data, RecordType type, uint length) - { - // Verify we're calling the right parser - if (type != RecordType.VerifyMediaKey) - return null; - - // TODO: Use marshalling here instead of building - var record = new VerifyMediaKeyRecord(); - - record.RecordType = type; - record.RecordLength = length; - record.CiphertextValue = data.ReadBytes(0x10); - - return record; - } - - /// - /// Parse a Stream into a copyright record - /// - /// Stream to parse - /// Filled copyright record on success, null on error - private static CopyrightRecord ParseCopyrightRecord(Stream data, RecordType type, uint length) - { - // Verify we're calling the right parser - if (type != RecordType.Copyright) - return null; - - // TODO: Use marshalling here instead of building - var record = new CopyrightRecord(); - - record.RecordType = type; - record.RecordLength = length; - if (length > 4) - { - byte[] copyright = data.ReadBytes((int)(length - 4)); - record.Copyright = Encoding.ASCII.GetString(copyright).TrimEnd('\0'); - } - - return record; - } - - #endregion - } -} diff --git a/BinaryObjectScanner.Builders/BDPlus.cs b/BinaryObjectScanner.Builders/BDPlus.cs deleted file mode 100644 index f5ef3799..00000000 --- a/BinaryObjectScanner.Builders/BDPlus.cs +++ /dev/null @@ -1,95 +0,0 @@ -using System.IO; -using System.Text; -using SabreTools.IO; -using SabreTools.Models.BDPlus; -using 
static SabreTools.Models.BDPlus.Constants; - -namespace BinaryObjectScanner.Builders -{ - public class BDPlus - { - #region Byte Data - - /// - /// Parse a byte array into a BD+ SVM - /// - /// Byte array to parse - /// Offset into the byte array - /// Filled BD+ SVM on success, null on error - public static SVM ParseSVM(byte[] data, int offset) - { - // If the data is invalid - if (data == null) - return null; - - // If the offset is out of bounds - if (offset < 0 || offset >= data.Length) - return null; - - // Create a memory stream and parse that - MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset); - return ParseSVM(dataStream); - } - - #endregion - - #region Stream Data - - /// - /// Parse a Stream into an BD+ SVM - /// - /// Stream to parse - /// Filled BD+ SVM on success, null on error - public static SVM ParseSVM(Stream data) - { - // If the data is invalid - if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead) - return null; - - // If the offset is out of bounds - if (data.Position < 0 || data.Position >= data.Length) - return null; - - // Cache the current offset - int initialOffset = (int)data.Position; - - // Try to parse the SVM - return ParseSVMData(data); - } - - /// - /// Parse a Stream into an SVM - /// - /// Stream to parse - /// Filled SVM on success, null on error - private static SVM ParseSVMData(Stream data) - { - // TODO: Use marshalling here instead of building - var svm = new SVM(); - - byte[] signature = data.ReadBytes(8); - svm.Signature = Encoding.ASCII.GetString(signature); - if (svm.Signature != SignatureString) - return null; - - svm.Unknown1 = data.ReadBytes(5); - svm.Year = data.ReadUInt16BigEndian(); - svm.Month = data.ReadByteValue(); - if (svm.Month < 1 || svm.Month > 12) - return null; - - svm.Day = data.ReadByteValue(); - if (svm.Day < 1 || svm.Day > 31) - return null; - - svm.Unknown2 = data.ReadBytes(4); - svm.Length = data.ReadUInt32(); - // if (svm.Length > 0) - // svm.Data = data.ReadBytes((int)svm.Length); - - return svm; - } - - #endregion - } -} diff --git a/BinaryObjectScanner.Builders/BFPK.cs b/BinaryObjectScanner.Builders/BFPK.cs deleted file mode 100644 index 5ca1563f..00000000 --- a/BinaryObjectScanner.Builders/BFPK.cs +++ /dev/null @@ -1,150 +0,0 @@ -using System.IO; -using System.Text; -using SabreTools.IO; -using SabreTools.Models.BFPK; -using static SabreTools.Models.BFPK.Constants; - -namespace BinaryObjectScanner.Builders -{ - public class BFPK - { - #region Byte Data - - /// - /// Parse a byte array into a BFPK archive - /// - /// Byte array to parse - /// Offset into the byte array - /// Filled archive on success, null on error - public static Archive ParseArchive(byte[] data, int offset) - { - // If the data is invalid - if (data == null) - return null; - - // If the offset is out of bounds - if (offset < 0 || offset >= data.Length) - return null; - - // Create a memory stream and parse that - MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset); - return ParseArchive(dataStream); - } - - #endregion - - #region Stream Data - - /// - /// Parse a Stream into a BFPK archive - /// - /// Stream to parse - /// Filled archive on success, null on error - public static Archive ParseArchive(Stream data) - { - // If the data is invalid - if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead) - return null; - - // If the offset is out of bounds - if (data.Position < 0 || data.Position >= data.Length) - return null; - - // Cache the current offset - int 
initialOffset = (int)data.Position; - - // Create a new archive to fill - var archive = new Archive(); - - #region Header - - // Try to parse the header - var header = ParseHeader(data); - if (header == null) - return null; - - // Set the archive header - archive.Header = header; - - #endregion - - #region Files - - // If we have any files - if (header.Files > 0) - { - var files = new FileEntry[header.Files]; - - // Read all entries in turn - for (int i = 0; i < header.Files; i++) - { - var file = ParseFileEntry(data); - if (file == null) - return null; - - files[i] = file; - } - - // Set the files - archive.Files = files; - } - - #endregion - - return archive; - } - - /// - /// Parse a Stream into a header - /// - /// Stream to parse - /// Filled header on success, null on error - private static Header ParseHeader(Stream data) - { - // TODO: Use marshalling here instead of building - Header header = new Header(); - - byte[] magic = data.ReadBytes(4); - header.Magic = Encoding.ASCII.GetString(magic); - if (header.Magic != SignatureString) - return null; - - header.Version = data.ReadInt32(); - header.Files = data.ReadInt32(); - - return header; - } - - /// - /// Parse a Stream into a file entry - /// - /// Stream to parse - /// Filled file entry on success, null on error - private static FileEntry ParseFileEntry(Stream data) - { - // TODO: Use marshalling here instead of building - FileEntry fileEntry = new FileEntry(); - - fileEntry.NameSize = data.ReadInt32(); - if (fileEntry.NameSize > 0) - { - byte[] name = data.ReadBytes(fileEntry.NameSize); - fileEntry.Name = Encoding.ASCII.GetString(name); - } - - fileEntry.UncompressedSize = data.ReadInt32(); - fileEntry.Offset = data.ReadInt32(); - if (fileEntry.Offset > 0) - { - long currentOffset = data.Position; - data.Seek(fileEntry.Offset, SeekOrigin.Begin); - fileEntry.CompressedSize = data.ReadInt32(); - data.Seek(currentOffset, SeekOrigin.Begin); - } - - return fileEntry; - } - - #endregion - } -} diff --git a/BinaryObjectScanner.Builders/BSP.cs b/BinaryObjectScanner.Builders/BSP.cs deleted file mode 100644 index f9982db3..00000000 --- a/BinaryObjectScanner.Builders/BSP.cs +++ /dev/null @@ -1,250 +0,0 @@ -using System.IO; -using System.Linq; -using System.Text; -using SabreTools.IO; -using SabreTools.Models.BSP; -using static SabreTools.Models.BSP.Constants; - -namespace BinaryObjectScanner.Builders -{ - public static class BSP - { - #region Byte Data - - /// - /// Parse a byte array into a Half-Life Level - /// - /// Byte array to parse - /// Offset into the byte array - /// Filled Half-Life Level on success, null on error - public static SabreTools.Models.BSP.File ParseFile(byte[] data, int offset) - { - // If the data is invalid - if (data == null) - return null; - - // If the offset is out of bounds - if (offset < 0 || offset >= data.Length) - return null; - - // Create a memory stream and parse that - MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset); - return ParseFile(dataStream); - } - - #endregion - - #region Stream Data - - /// - /// Parse a Stream into a Half-Life Level - /// - /// Stream to parse - /// Filled Half-Life Level on success, null on error - public static SabreTools.Models.BSP.File ParseFile(Stream data) - { - // If the data is invalid - if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead) - return null; - - // If the offset is out of bounds - if (data.Position < 0 || data.Position >= data.Length) - return null; - - // Cache the current offset - int initialOffset = 
(int)data.Position; - - // Create a new Half-Life Level to fill - var file = new SabreTools.Models.BSP.File(); - - #region Header - - // Try to parse the header - var header = ParseHeader(data); - if (header == null) - return null; - - // Set the level header - file.Header = header; - - #endregion - - #region Lumps - - // Create the lump array - file.Lumps = new Lump[HL_BSP_LUMP_COUNT]; - - // Try to parse the lumps - for (int i = 0; i < HL_BSP_LUMP_COUNT; i++) - { - var lump = ParseLump(data); - file.Lumps[i] = lump; - } - - #endregion - - #region Texture header - - // Try to get the texture header lump - var textureDataLump = file.Lumps[HL_BSP_LUMP_TEXTUREDATA]; - if (textureDataLump.Offset == 0 || textureDataLump.Length == 0) - return null; - - // Seek to the texture header - data.Seek(textureDataLump.Offset, SeekOrigin.Begin); - - // Try to parse the texture header - var textureHeader = ParseTextureHeader(data); - if (textureHeader == null) - return null; - - // Set the texture header - file.TextureHeader = textureHeader; - - #endregion - - #region Textures - - // Create the texture array - file.Textures = new Texture[textureHeader.TextureCount]; - - // Try to parse the textures - for (int i = 0; i < textureHeader.TextureCount; i++) - { - // Get the texture offset - int offset = (int)(textureHeader.Offsets[i] + file.Lumps[HL_BSP_LUMP_TEXTUREDATA].Offset); - if (offset < 0 || offset >= data.Length) - continue; - - // Seek to the texture - data.Seek(offset, SeekOrigin.Begin); - - var texture = ParseTexture(data); - file.Textures[i] = texture; - } - - #endregion - - return file; - } - - /// - /// Parse a Stream into a Half-Life Level header - /// - /// Stream to parse - /// Filled Half-Life Level header on success, null on error - private static Header ParseHeader(Stream data) - { - // TODO: Use marshalling here instead of building - Header header = new Header(); - - // Only recognized versions are 29 and 30 - header.Version = data.ReadUInt32(); - if (header.Version != 29 && header.Version != 30) - return null; - - return header; - } - - /// - /// Parse a Stream into a lump - /// - /// Stream to parse - /// Filled lump on success, null on error - private static Lump ParseLump(Stream data) - { - // TODO: Use marshalling here instead of building - Lump lump = new Lump(); - - lump.Offset = data.ReadUInt32(); - lump.Length = data.ReadUInt32(); - - return lump; - } - - /// - /// Parse a Stream into a Half-Life Level texture header - /// - /// Stream to parse - /// Filled Half-Life Level texture header on success, null on error - private static TextureHeader ParseTextureHeader(Stream data) - { - // TODO: Use marshalling here instead of building - TextureHeader textureHeader = new TextureHeader(); - - textureHeader.TextureCount = data.ReadUInt32(); - - var offsets = new uint[textureHeader.TextureCount]; - - for (int i = 0; i < textureHeader.TextureCount; i++) - { - offsets[i] = data.ReadUInt32(); - } - - textureHeader.Offsets = offsets; - - return textureHeader; - } - - /// - /// Parse a Stream into a texture - /// - /// Stream to parse - /// Mipmap level - /// Filled texture on success, null on error - private static Texture ParseTexture(Stream data, uint mipmap = 0) - { - // TODO: Use marshalling here instead of building - Texture texture = new Texture(); - - byte[] name = data.ReadBytes(16).TakeWhile(c => c != '\0').ToArray(); - texture.Name = Encoding.ASCII.GetString(name); - texture.Width = data.ReadUInt32(); - texture.Height = data.ReadUInt32(); - texture.Offsets = new uint[4]; - for (int 
i = 0; i < 4; i++) - { - texture.Offsets[i] = data.ReadUInt32(); - } - - // Get the size of the pixel data - uint pixelSize = 0; - for (int i = 0; i < HL_BSP_MIPMAP_COUNT; i++) - { - if (texture.Offsets[i] != 0) - { - pixelSize += (texture.Width >> i) * (texture.Height >> i); - } - } - - // If we have no pixel data - if (pixelSize == 0) - return texture; - - texture.TextureData = data.ReadBytes((int)pixelSize); - texture.PaletteSize = data.ReadUInt16(); - texture.PaletteData = data.ReadBytes((int)(texture.PaletteSize * 3)); - - // Adjust the dimensions based on mipmap level - switch (mipmap) - { - case 1: - texture.Width /= 2; - texture.Height /= 2; - break; - case 2: - texture.Width /= 4; - texture.Height /= 4; - break; - case 3: - texture.Width /= 8; - texture.Height /= 8; - break; - } - - return texture; - } - - #endregion - } -} diff --git a/BinaryObjectScanner.Builders/BinaryObjectScanner.Builders.csproj b/BinaryObjectScanner.Builders/BinaryObjectScanner.Builders.csproj deleted file mode 100644 index 33ca0ff6..00000000 --- a/BinaryObjectScanner.Builders/BinaryObjectScanner.Builders.csproj +++ /dev/null @@ -1,32 +0,0 @@ - - - - net48;net6.0;net7.0 - win-x86;win-x64;linux-x64;osx-x64 - BinaryObjectScanner.Builders - BinaryObjectScanner.Builders - Matt Nadareski - BurnOutSharp - Copyright (c)2022 Matt Nadareski - https://github.com/mnadareski/BurnOutSharp - 2.8 - 2.8 - 2.8 - true - true - - - - true - - - - - - - - - - - - diff --git a/BinaryObjectScanner.Builders/CFB.cs b/BinaryObjectScanner.Builders/CFB.cs deleted file mode 100644 index 1683c143..00000000 --- a/BinaryObjectScanner.Builders/CFB.cs +++ /dev/null @@ -1,419 +0,0 @@ -using System; -using System.Collections.Generic; -using System.IO; -using System.Text; -using SabreTools.IO; -using SabreTools.Models.CFB; -using static SabreTools.Models.CFB.Constants; - -namespace BinaryObjectScanner.Builders -{ - public class CFB - { - #region Byte Data - - /// - /// Parse a byte array into a Compound File Binary - /// - /// Byte array to parse - /// Offset into the byte array - /// Filled Compound File Binary on success, null on error - public static Binary ParseBinary(byte[] data, int offset) - { - // If the data is invalid - if (data == null) - return null; - - // If the offset is out of bounds - if (offset < 0 || offset >= data.Length) - return null; - - // Create a memory stream and parse that - MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset); - return ParseBinary(dataStream); - } - - #endregion - - #region Stream Data - - /// - /// Parse a Stream into a Compound File Binary - /// - /// Stream to parse - /// Filled Compound File Binary on success, null on error - public static Binary ParseBinary(Stream data) - { - // If the data is invalid - if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead) - return null; - - // If the offset is out of bounds - if (data.Position < 0 || data.Position >= data.Length) - return null; - - // Cache the current offset - int initialOffset = (int)data.Position; - - // Create a new binary to fill - var binary = new Binary(); - - #region Header - - // Try to parse the file header - var fileHeader = ParseFileHeader(data); - if (fileHeader == null) - return null; - - // Set the file header - binary.Header = fileHeader; - - #endregion - - #region DIFAT Sector Numbers - - // Create a DIFAT sector table -#if NET48 - var difatSectors = new List(); -#else - var difatSectors = new List(); -#endif - - // Add the sectors from the header - 
difatSectors.AddRange(fileHeader.DIFAT); - - // Loop through and add the DIFAT sectors -#if NET48 - var currentSector = (SectorNumber)fileHeader.FirstDIFATSectorLocation; -#else - var currentSector = (SectorNumber?)fileHeader.FirstDIFATSectorLocation; -#endif - for (int i = 0; i < fileHeader.NumberOfDIFATSectors; i++) - { - // If we have a readable sector - if (currentSector <= SectorNumber.MAXREGSECT) - { - // Get the new next sector information - long sectorOffset = (long)((long)(currentSector + 1) * Math.Pow(2, fileHeader.SectorShift)); - if (sectorOffset < 0 || sectorOffset >= data.Length) - return null; - - // Seek to the next sector - data.Seek(sectorOffset, SeekOrigin.Begin); - - // Try to parse the sectors - var sectorNumbers = ParseSectorNumbers(data, fileHeader.SectorShift); - if (sectorNumbers == null) - return null; - - // Add the sector shifts - difatSectors.AddRange(sectorNumbers); - } - - // Get the next sector from the DIFAT - currentSector = difatSectors[i]; - } - - // Assign the DIFAT sectors table - binary.DIFATSectorNumbers = difatSectors.ToArray(); - - #endregion - - #region FAT Sector Numbers - - // Create a FAT sector table -#if NET48 - var fatSectors = new List(); -#else - var fatSectors = new List(); -#endif - - // Loop through and add the FAT sectors - currentSector = binary.DIFATSectorNumbers[0]; - for (int i = 0; i < fileHeader.NumberOfFATSectors; i++) - { - // If we have a readable sector - if (currentSector <= SectorNumber.MAXREGSECT) - { - // Get the new next sector information - long sectorOffset = (long)((long)(currentSector + 1) * Math.Pow(2, fileHeader.SectorShift)); - if (sectorOffset < 0 || sectorOffset >= data.Length) - return null; - - // Seek to the next sector - data.Seek(sectorOffset, SeekOrigin.Begin); - - // Try to parse the sectors - var sectorNumbers = ParseSectorNumbers(data, fileHeader.SectorShift); - if (sectorNumbers == null) - return null; - - // Add the sector shifts - fatSectors.AddRange(sectorNumbers); - } - - // Get the next sector from the DIFAT - currentSector = binary.DIFATSectorNumbers[i]; - } - - // Assign the FAT sectors table - binary.FATSectorNumbers = fatSectors.ToArray(); - - #endregion - - #region Mini FAT Sector Numbers - - // Create a mini FAT sector table -#if NET48 - var miniFatSectors = new List(); -#else - var miniFatSectors = new List(); -#endif - - // Loop through and add the mini FAT sectors - currentSector = (SectorNumber)fileHeader.FirstMiniFATSectorLocation; - for (int i = 0; i < fileHeader.NumberOfMiniFATSectors; i++) - { - // If we have a readable sector - if (currentSector <= SectorNumber.MAXREGSECT) - { - // Get the new next sector information - long sectorOffset = (long)((long)(currentSector + 1) * Math.Pow(2, fileHeader.SectorShift)); - if (sectorOffset < 0 || sectorOffset >= data.Length) - return null; - - // Seek to the next sector - data.Seek(sectorOffset, SeekOrigin.Begin); - - // Try to parse the sectors - var sectorNumbers = ParseSectorNumbers(data, fileHeader.SectorShift); - if (sectorNumbers == null) - return null; - - // Add the sector shifts - miniFatSectors.AddRange(sectorNumbers); - } - - // Get the next sector from the DIFAT - currentSector = binary.DIFATSectorNumbers[i]; - } - - // Assign the mini FAT sectors table - binary.MiniFATSectorNumbers = miniFatSectors.ToArray(); - - #endregion - - #region Directory Entries - - // Get the offset of the first directory sector - long firstDirectoryOffset = (long)(fileHeader.FirstDirectorySectorLocation * Math.Pow(2, fileHeader.SectorShift)); - if 
(firstDirectoryOffset < 0 || firstDirectoryOffset >= data.Length) - return null; - - // Seek to the first directory sector - data.Seek(firstDirectoryOffset, SeekOrigin.Begin); - - // Create a directory sector table - var directorySectors = new List(); - - // Get the number of directory sectors - uint directorySectorCount = 0; - switch (fileHeader.MajorVersion) - { - case 3: - directorySectorCount = int.MaxValue; - break; - case 4: - directorySectorCount = fileHeader.NumberOfDirectorySectors; - break; - } - - // Loop through and add the directory sectors - currentSector = (SectorNumber)fileHeader.FirstDirectorySectorLocation; - for (int i = 0; i < directorySectorCount; i++) - { - // If we have an end of chain - if (currentSector == SectorNumber.ENDOFCHAIN) - break; - - // If we have a readable sector - if (currentSector <= SectorNumber.MAXREGSECT) - { - // Get the new next sector information - long sectorOffset = (long)((long)(currentSector + 1) * Math.Pow(2, fileHeader.SectorShift)); - if (sectorOffset < 0 || sectorOffset >= data.Length) - return null; - - // Seek to the next sector - data.Seek(sectorOffset, SeekOrigin.Begin); - - // Try to parse the sectors - var directoryEntries = ParseDirectoryEntries(data, fileHeader.SectorShift, fileHeader.MajorVersion); - if (directoryEntries == null) - return null; - - // Add the sector shifts - directorySectors.AddRange(directoryEntries); - } - - // Get the next sector from the DIFAT - currentSector = binary.DIFATSectorNumbers[i]; - } - - // Assign the Directory sectors table - binary.DirectoryEntries = directorySectors.ToArray(); - - #endregion - - return binary; - } - - /// - /// Parse a Stream into a file header - /// - /// Stream to parse - /// Filled file header on success, null on error - private static FileHeader ParseFileHeader(Stream data) - { - // TODO: Use marshalling here instead of building - FileHeader header = new FileHeader(); - - header.Signature = data.ReadUInt64(); - if (header.Signature != SignatureUInt64) - return null; - - header.CLSID = data.ReadGuid(); - header.MinorVersion = data.ReadUInt16(); - header.MajorVersion = data.ReadUInt16(); - header.ByteOrder = data.ReadUInt16(); - if (header.ByteOrder != 0xFFFE) - return null; - - header.SectorShift = data.ReadUInt16(); - if (header.MajorVersion == 3 && header.SectorShift != 0x0009) - return null; - else if (header.MajorVersion == 4 && header.SectorShift != 0x000C) - return null; - - header.MiniSectorShift = data.ReadUInt16(); - header.Reserved = data.ReadBytes(6); - header.NumberOfDirectorySectors = data.ReadUInt32(); - if (header.MajorVersion == 3 && header.NumberOfDirectorySectors != 0) - return null; - - header.NumberOfFATSectors = data.ReadUInt32(); - header.FirstDirectorySectorLocation = data.ReadUInt32(); - header.TransactionSignatureNumber = data.ReadUInt32(); - header.MiniStreamCutoffSize = data.ReadUInt32(); - if (header.MiniStreamCutoffSize != 0x00001000) - return null; - - header.FirstMiniFATSectorLocation = data.ReadUInt32(); - header.NumberOfMiniFATSectors = data.ReadUInt32(); - header.FirstDIFATSectorLocation = data.ReadUInt32(); - header.NumberOfDIFATSectors = data.ReadUInt32(); -#if NET48 - header.DIFAT = new SectorNumber[109]; -#else - header.DIFAT = new SectorNumber?[109]; -#endif - for (int i = 0; i < header.DIFAT.Length; i++) - { - header.DIFAT[i] = (SectorNumber)data.ReadUInt32(); - } - - // Skip rest of sector for version 4 - if (header.MajorVersion == 4) - _ = data.ReadBytes(3584); - - return header; - } - - /// - /// Parse a Stream into a sector full of 
sector numbers - /// - /// Stream to parse - /// Sector shift from the header - /// Filled sector full of sector numbers on success, null on error -#if NET48 - private static SectorNumber[] ParseSectorNumbers(Stream data, ushort sectorShift) -#else - private static SectorNumber?[] ParseSectorNumbers(Stream data, ushort sectorShift) -#endif - { - // TODO: Use marshalling here instead of building - int sectorCount = (int)(Math.Pow(2, sectorShift) / sizeof(uint)); -#if NET48 - var sectorNumbers = new SectorNumber[sectorCount]; -#else - var sectorNumbers = new SectorNumber?[sectorCount]; -#endif - - for (int i = 0; i < sectorNumbers.Length; i++) - { - sectorNumbers[i] = (SectorNumber)data.ReadUInt32(); - } - - return sectorNumbers; - } - - /// - /// Parse a Stream into a sector full of directory entries - /// - /// Stream to parse - /// Sector shift from the header - /// Major version from the header - /// Filled sector full of directory entries on success, null on error - private static DirectoryEntry[] ParseDirectoryEntries(Stream data, ushort sectorShift, ushort majorVersion) - { - // TODO: Use marshalling here instead of building - const int directoryEntrySize = 64 + 2 + 1 + 1 + 4 + 4 + 4 + 16 + 4 + 8 + 8 + 4 + 8; - int sectorCount = (int)(Math.Pow(2, sectorShift) / directoryEntrySize); - DirectoryEntry[] directoryEntries = new DirectoryEntry[sectorCount]; - - for (int i = 0; i < directoryEntries.Length; i++) - { - var directoryEntry = ParseDirectoryEntry(data, majorVersion); - if (directoryEntry == null) - return null; - - directoryEntries[i] = directoryEntry; - } - - return directoryEntries; - } - - /// - /// Parse a Stream into a directory entry - /// - /// Stream to parse - /// Major version from the header - /// Filled directory entry on success, null on error - private static DirectoryEntry ParseDirectoryEntry(Stream data, ushort majorVersion) - { - // TODO: Use marshalling here instead of building - DirectoryEntry directoryEntry = new DirectoryEntry(); - - byte[] name = data.ReadBytes(64); - directoryEntry.Name = Encoding.Unicode.GetString(name).TrimEnd('\0'); - directoryEntry.NameLength = data.ReadUInt16(); - directoryEntry.ObjectType = (ObjectType)data.ReadByteValue(); - directoryEntry.ColorFlag = (ColorFlag)data.ReadByteValue(); - directoryEntry.LeftSiblingID = (StreamID)data.ReadUInt32(); - directoryEntry.RightSiblingID = (StreamID)data.ReadUInt32(); - directoryEntry.ChildID = (StreamID)data.ReadUInt32(); - directoryEntry.CLSID = data.ReadGuid(); - directoryEntry.StateBits = data.ReadUInt32(); - directoryEntry.CreationTime = data.ReadUInt64(); - directoryEntry.ModifiedTime = data.ReadUInt64(); - directoryEntry.StartingSectorLocation = data.ReadUInt32(); - directoryEntry.StreamSize = data.ReadUInt64(); - if (majorVersion == 3) - directoryEntry.StreamSize &= 0x0000FFFF; - - return directoryEntry; - } - - #endregion - } -} diff --git a/BinaryObjectScanner.Builders/Extensions.cs b/BinaryObjectScanner.Builders/Extensions.cs deleted file mode 100644 index acaff1bb..00000000 --- a/BinaryObjectScanner.Builders/Extensions.cs +++ /dev/null @@ -1,1435 +0,0 @@ -using System; -using System.Collections.Generic; -using System.IO; -using System.Linq; -using System.Text; -using System.Xml.Serialization; -using SabreTools.IO; - -namespace BinaryObjectScanner.Builders -{ - public static class Extensions - { - #region New Executable - - /// - /// Determine if a resource type information entry is an integer or offset - /// - /// Resource type information entry to check - /// True if the entry is an 
integer type, false if an offset, null on error - public static bool? IsIntegerType(this SabreTools.Models.NewExecutable.ResourceTypeInformationEntry entry) - { - // We can't do anything with an invalid entry - if (entry == null) - return null; - - // If the highest order bit is set, it's an integer type - return (entry.TypeID & 0x8000) != 0; - } - - /// - /// Determine if a resource type resource entry is an integer or offset - /// - /// Resource type resource entry to check - /// True if the entry is an integer type, false if an offset, null on error - public static bool? IsIntegerType(this SabreTools.Models.NewExecutable.ResourceTypeResourceEntry entry) - { - // We can't do anything with an invalid entry - if (entry == null) - return null; - - // If the highest order bit is set, it's an integer type - return (entry.ResourceID & 0x8000) != 0; - } - - /// - /// Get the segment entry type for an entry table bundle - /// - /// Entry table bundle to check - /// SegmentEntryType corresponding to the type - public static SabreTools.Models.NewExecutable.SegmentEntryType GetEntryType(this SabreTools.Models.NewExecutable.EntryTableBundle entry) - { - // We can't do anything with an invalid entry - if (entry == null) - return SabreTools.Models.NewExecutable.SegmentEntryType.Unused; - - // Determine the entry type based on segment indicator - if (entry.SegmentIndicator == 0x00) - return SabreTools.Models.NewExecutable.SegmentEntryType.Unused; - else if (entry.SegmentIndicator >= 0x01 && entry.SegmentIndicator <= 0xFE) - return SabreTools.Models.NewExecutable.SegmentEntryType.FixedSegment; - else if (entry.SegmentIndicator == 0xFF) - return SabreTools.Models.NewExecutable.SegmentEntryType.MoveableSegment; - - // We should never get here - return SabreTools.Models.NewExecutable.SegmentEntryType.Unused; - } - - #endregion - - #region Portable Executable - - /// - /// Convert a relative virtual address to a physical one - /// - /// Relative virtual address to convert - /// Array of sections to check against - /// Physical address, 0 on error - public static uint ConvertVirtualAddress(this uint rva, SabreTools.Models.PortableExecutable.SectionHeader[] sections) - { - // If we have an invalid section table, we can't do anything - if (sections == null || sections.Length == 0) - return 0; - - // If the RVA is 0, we just return 0 because it's invalid - if (rva == 0) - return 0; - - // If the RVA matches a section start exactly, use that - var matchingSection = sections.FirstOrDefault(s => s.VirtualAddress == rva); - if (matchingSection != null) - return rva - matchingSection.VirtualAddress + matchingSection.PointerToRawData; - - // Loop through all of the sections - for (int i = 0; i < sections.Length; i++) - { - // If the section is invalid, just skip it - if (sections[i] == null) - continue; - - // If the section "starts" at 0, just skip it - if (sections[i].PointerToRawData == 0) - continue; - - // Attempt to derive the physical address from the current section - var section = sections[i]; - if (rva >= section.VirtualAddress && section.VirtualSize != 0 && rva <= section.VirtualAddress + section.VirtualSize) - return rva - section.VirtualAddress + section.PointerToRawData; - else if (rva >= section.VirtualAddress && section.SizeOfRawData != 0 && rva <= section.VirtualAddress + section.SizeOfRawData) - return rva - section.VirtualAddress + section.PointerToRawData; - } - - return 0; - } - - /// - /// Find the section a revlative virtual address lives in - /// - /// Relative virtual address to convert - /// 
Array of sections to check against - /// Section index, null on error - public static int ContainingSectionIndex(this uint rva, SabreTools.Models.PortableExecutable.SectionHeader[] sections) - { - // If we have an invalid section table, we can't do anything - if (sections == null || sections.Length == 0) - return -1; - - // If the RVA is 0, we just return -1 because it's invalid - if (rva == 0) - return -1; - - // Loop through all of the sections - for (int i = 0; i < sections.Length; i++) - { - // If the section is invalid, just skip it - if (sections[i] == null) - continue; - - // If the section "starts" at 0, just skip it - if (sections[i].PointerToRawData == 0) - continue; - - // Attempt to derive the physical address from the current section - var section = sections[i]; - if (rva >= section.VirtualAddress && section.VirtualSize != 0 && rva <= section.VirtualAddress + section.VirtualSize) - return i; - else if (rva >= section.VirtualAddress && section.SizeOfRawData != 0 && rva <= section.VirtualAddress + section.SizeOfRawData) - return i; - } - - return -1; - } - - /// - /// Read overlay data as a SecuROM AddD overlay data - /// - /// Data to parse into overlay data - /// Offset into the byte array - /// A filled SecuROM AddD overlay data on success, null on error - public static SabreTools.Models.PortableExecutable.SecuROMAddD AsSecuROMAddD(this byte[] data, ref int offset) - { - // If we have data that's invalid, we can't do anything - if (data == null) - return null; - - // Read in the table - var addD = new SabreTools.Models.PortableExecutable.SecuROMAddD(); - - addD.Signature = data.ReadUInt32(ref offset); - if (addD.Signature != 0x44646441) - return null; - - int originalOffset = offset; - - addD.EntryCount = data.ReadUInt32(ref offset); - addD.Version = data.ReadString(ref offset, Encoding.ASCII); - if (string.IsNullOrWhiteSpace(addD.Version)) - offset = originalOffset + 0x10; - - addD.Build = data.ReadBytes(ref offset, 4).Select(b => (char)b).ToArray(); - - // Distinguish between v1 and v2 - int bytesToRead = 112; // v2 - if (string.IsNullOrWhiteSpace(addD.Version) - || addD.Version.StartsWith("3") - || addD.Version.StartsWith("4.47")) - { - bytesToRead = 44; - } - - addD.Unknown14h = data.ReadBytes(ref offset, bytesToRead); - - addD.Entries = new SabreTools.Models.PortableExecutable.SecuROMAddDEntry[addD.EntryCount]; - for (int i = 0; i < addD.EntryCount; i++) - { - var addDEntry = new SabreTools.Models.PortableExecutable.SecuROMAddDEntry(); - - addDEntry.PhysicalOffset = data.ReadUInt32(ref offset); - addDEntry.Length = data.ReadUInt32(ref offset); - addDEntry.Unknown08h = data.ReadUInt32(ref offset); - addDEntry.Unknown0Ch = data.ReadUInt32(ref offset); - addDEntry.Unknown10h = data.ReadUInt32(ref offset); - addDEntry.Unknown14h = data.ReadUInt32(ref offset); - addDEntry.Unknown18h = data.ReadUInt32(ref offset); - addDEntry.Unknown1Ch = data.ReadUInt32(ref offset); - addDEntry.FileName = data.ReadString(ref offset, Encoding.ASCII); - addDEntry.Unknown2Ch = data.ReadUInt32(ref offset); - - addD.Entries[i] = addDEntry; - } - - return addD; - } - - #region Debug - - /// - /// Read debug data as an NB10 Program Database - /// - /// Data to parse into a database - /// Offset into the byte array - /// A filled NB10 Program Database on success, null on error - public static SabreTools.Models.PortableExecutable.NB10ProgramDatabase AsNB10ProgramDatabase(this byte[] data, ref int offset) - { - // If we have data that's invalid, we can't do anything - if (data == null) - return null; - 
- var nb10ProgramDatabase = new SabreTools.Models.PortableExecutable.NB10ProgramDatabase(); - - nb10ProgramDatabase.Signature = data.ReadUInt32(ref offset); - if (nb10ProgramDatabase.Signature != 0x3031424E) - return null; - - nb10ProgramDatabase.Offset = data.ReadUInt32(ref offset); - nb10ProgramDatabase.Timestamp = data.ReadUInt32(ref offset); - nb10ProgramDatabase.Age = data.ReadUInt32(ref offset); - nb10ProgramDatabase.PdbFileName = data.ReadString(ref offset, Encoding.ASCII); // TODO: Actually null-terminated UTF-8? - - return nb10ProgramDatabase; - } - - /// - /// Read debug data as an RSDS Program Database - /// - /// Data to parse into a database - /// Offset into the byte array - /// A filled RSDS Program Database on success, null on error - public static SabreTools.Models.PortableExecutable.RSDSProgramDatabase AsRSDSProgramDatabase(this byte[] data, ref int offset) - { - // If we have data that's invalid, we can't do anything - if (data == null) - return null; - - var rsdsProgramDatabase = new SabreTools.Models.PortableExecutable.RSDSProgramDatabase(); - - rsdsProgramDatabase.Signature = data.ReadUInt32(ref offset); - if (rsdsProgramDatabase.Signature != 0x53445352) - return null; - - rsdsProgramDatabase.GUID = new Guid(data.ReadBytes(ref offset, 0x10)); - rsdsProgramDatabase.Age = data.ReadUInt32(ref offset); - rsdsProgramDatabase.PathAndFileName = data.ReadString(ref offset, Encoding.ASCII); // TODO: Actually null-terminated UTF-8 - - return rsdsProgramDatabase; - } - - #endregion - - // TODO: Implement other resource types from https://learn.microsoft.com/en-us/windows/win32/menurc/resource-file-formats - #region Resources - - /// - /// Read resource data as a resource header - /// - /// Data to parse into a resource header - /// Offset into the byte array - /// A filled resource header on success, null on error - public static SabreTools.Models.PortableExecutable.ResourceHeader AsResourceHeader(this byte[] data, ref int offset) - { - // If we have data that's invalid, we can't do anything - if (data == null) - return null; - - // Read in the table - var header = new SabreTools.Models.PortableExecutable.ResourceHeader(); - header.DataSize = data.ReadUInt32(ref offset); - header.HeaderSize = data.ReadUInt32(ref offset); - header.ResourceType = (SabreTools.Models.PortableExecutable.ResourceType)data.ReadUInt32(ref offset); // TODO: Could be a string too - header.Name = data.ReadUInt32(ref offset); // TODO: Could be a string too - header.DataVersion = data.ReadUInt32(ref offset); - header.MemoryFlags = (SabreTools.Models.PortableExecutable.MemoryFlags)data.ReadUInt16(ref offset); - header.LanguageId = data.ReadUInt16(ref offset); - header.Version = data.ReadUInt32(ref offset); - header.Characteristics = data.ReadUInt32(ref offset); - - return header; - } - - /// - /// Read resource data as an accelerator table resource - /// - /// Resource data entry to parse into an accelerator table resource - /// A filled accelerator table resource on success, null on error - public static SabreTools.Models.PortableExecutable.AcceleratorTableEntry[] AsAcceleratorTableResource(this SabreTools.Models.PortableExecutable.ResourceDataEntry entry) - { - // If we have data that's invalid for this resource type, we can't do anything - if (entry?.Data == null || entry.Data.Length % 8 != 0) - return null; - - // Get the number of entries - int count = entry.Data.Length / 8; - - // Initialize the iterator - int offset = 0; - - // Create the output object - var table = new 
SabreTools.Models.PortableExecutable.AcceleratorTableEntry[count]; - - // Read in the table - for (int i = 0; i < count; i++) - { - var acceleratorTableEntry = new SabreTools.Models.PortableExecutable.AcceleratorTableEntry(); - - acceleratorTableEntry.Flags = (SabreTools.Models.PortableExecutable.AcceleratorTableFlags)entry.Data.ReadUInt16(ref offset); - acceleratorTableEntry.Ansi = entry.Data.ReadUInt16(ref offset); - acceleratorTableEntry.Id = entry.Data.ReadUInt16(ref offset); - acceleratorTableEntry.Padding = entry.Data.ReadUInt16(ref offset); - - table[i] = acceleratorTableEntry; - } - - return table; - } - - /// - /// Read resource data as a side-by-side assembly manifest - /// - /// Resource data entry to parse into a side-by-side assembly manifest - /// A filled side-by-side assembly manifest on success, null on error - public static SabreTools.Models.PortableExecutable.AssemblyManifest AsAssemblyManifest(this SabreTools.Models.PortableExecutable.ResourceDataEntry entry) - { - // If we have an invalid entry, just skip - if (entry?.Data == null) - return null; - - try - { - XmlSerializer serializer = new XmlSerializer(typeof(SabreTools.Models.PortableExecutable.AssemblyManifest)); - return serializer.Deserialize(new MemoryStream(entry.Data)) as SabreTools.Models.PortableExecutable.AssemblyManifest; - } - catch - { - return null; - } - } - - /// - /// Read resource data as a dialog box - /// - /// Resource data entry to parse into a dialog box - /// A filled dialog box on success, null on error - public static SabreTools.Models.PortableExecutable.DialogBoxResource AsDialogBox(this SabreTools.Models.PortableExecutable.ResourceDataEntry entry) - { - // If we have an invalid entry, just skip - if (entry?.Data == null) - return null; - - // Initialize the iterator - int offset = 0; - - // Create the output object - var dialogBoxResource = new SabreTools.Models.PortableExecutable.DialogBoxResource(); - - // Try to read the signature for an extended dialog box template - int signatureOffset = sizeof(ushort); - int possibleSignature = entry.Data.ReadUInt16(ref signatureOffset); - if (possibleSignature == 0xFFFF) - { - #region Extended dialog template - - var dialogTemplateExtended = new SabreTools.Models.PortableExecutable.DialogTemplateExtended(); - - dialogTemplateExtended.Version = entry.Data.ReadUInt16(ref offset); - dialogTemplateExtended.Signature = entry.Data.ReadUInt16(ref offset); - dialogTemplateExtended.HelpID = entry.Data.ReadUInt32(ref offset); - dialogTemplateExtended.ExtendedStyle = (SabreTools.Models.PortableExecutable.ExtendedWindowStyles)entry.Data.ReadUInt32(ref offset); - dialogTemplateExtended.Style = (SabreTools.Models.PortableExecutable.WindowStyles)entry.Data.ReadUInt32(ref offset); - dialogTemplateExtended.DialogItems = entry.Data.ReadUInt16(ref offset); - dialogTemplateExtended.PositionX = entry.Data.ReadInt16(ref offset); - dialogTemplateExtended.PositionY = entry.Data.ReadInt16(ref offset); - dialogTemplateExtended.WidthX = entry.Data.ReadInt16(ref offset); - dialogTemplateExtended.HeightY = entry.Data.ReadInt16(ref offset); - - #region Menu resource - - int currentOffset = offset; - ushort menuResourceIdentifier = entry.Data.ReadUInt16(ref offset); - offset = currentOffset; - - // 0x0000 means no elements - if (menuResourceIdentifier == 0x0000) - { - // Increment the pointer if it was empty - offset += sizeof(ushort); - } - else - { - // Flag if there's an ordinal at the end - bool menuResourceHasOrdinal = menuResourceIdentifier == 0xFFFF; - if 
(menuResourceHasOrdinal) - offset += sizeof(ushort); - - // Read the menu resource as a string - dialogTemplateExtended.MenuResource = entry.Data.ReadString(ref offset, Encoding.Unicode); - - // Align to the WORD boundary if we're not at the end - if (offset != entry.Data.Length) - { - while ((offset % 2) != 0) - _ = entry.Data.ReadByte(ref offset); - } - - // Read the ordinal if we have the flag set - if (menuResourceHasOrdinal) - dialogTemplateExtended.MenuResourceOrdinal = entry.Data.ReadUInt16(ref offset); - } - - #endregion - - #region Class resource - - currentOffset = offset; - ushort classResourceIdentifier = entry.Data.ReadUInt16(ref offset); - offset = currentOffset; - - // 0x0000 means no elements - if (classResourceIdentifier == 0x0000) - { - // Increment the pointer if it was empty - offset += sizeof(ushort); - } - else - { - // Flag if there's an ordinal at the end - bool classResourcehasOrdinal = classResourceIdentifier == 0xFFFF; - if (classResourcehasOrdinal) - offset += sizeof(ushort); - - // Read the class resource as a string - dialogTemplateExtended.ClassResource = entry.Data.ReadString(ref offset, Encoding.Unicode); - - // Align to the WORD boundary if we're not at the end - if (offset != entry.Data.Length) - { - while ((offset % 2) != 0) - _ = entry.Data.ReadByte(ref offset); - } - - // Read the ordinal if we have the flag set - if (classResourcehasOrdinal) - dialogTemplateExtended.ClassResourceOrdinal = entry.Data.ReadUInt16(ref offset); - } - - #endregion - - #region Title resource - - currentOffset = offset; - ushort titleResourceIdentifier = entry.Data.ReadUInt16(ref offset); - offset = currentOffset; - - // 0x0000 means no elements - if (titleResourceIdentifier == 0x0000) - { - // Increment the pointer if it was empty - offset += sizeof(ushort); - } - else - { - // Read the title resource as a string - dialogTemplateExtended.TitleResource = entry.Data.ReadString(ref offset, Encoding.Unicode); - - // Align to the WORD boundary if we're not at the end - if (offset != entry.Data.Length) - { - while ((offset % 2) != 0) - _ = entry.Data.ReadByte(ref offset); - } - } - - #endregion - - #region Point size and typeface - - // Only if DS_SETFONT is set are the values here used - if (dialogTemplateExtended.Style.HasFlag(SabreTools.Models.PortableExecutable.WindowStyles.DS_SETFONT)) - { - dialogTemplateExtended.PointSize = entry.Data.ReadUInt16(ref offset); - dialogTemplateExtended.Weight = entry.Data.ReadUInt16(ref offset); - dialogTemplateExtended.Italic = entry.Data.ReadByte(ref offset); - dialogTemplateExtended.CharSet = entry.Data.ReadByte(ref offset); - dialogTemplateExtended.Typeface = entry.Data.ReadString(ref offset, Encoding.Unicode); - } - - // Align to the DWORD boundary if we're not at the end - if (offset != entry.Data.Length) - { - while ((offset % 4) != 0) - _ = entry.Data.ReadByte(ref offset); - } - - #endregion - - dialogBoxResource.ExtendedDialogTemplate = dialogTemplateExtended; - - #endregion - - #region Extended dialog item templates - - var dialogItemExtendedTemplates = new List(); - - for (int i = 0; i < dialogTemplateExtended.DialogItems; i++) - { - var dialogItemTemplate = new SabreTools.Models.PortableExecutable.DialogItemTemplateExtended(); - - dialogItemTemplate.HelpID = entry.Data.ReadUInt32(ref offset); - dialogItemTemplate.ExtendedStyle = (SabreTools.Models.PortableExecutable.ExtendedWindowStyles)entry.Data.ReadUInt32(ref offset); - dialogItemTemplate.Style = (SabreTools.Models.PortableExecutable.WindowStyles)entry.Data.ReadUInt32(ref 
offset); - dialogItemTemplate.PositionX = entry.Data.ReadInt16(ref offset); - dialogItemTemplate.PositionY = entry.Data.ReadInt16(ref offset); - dialogItemTemplate.WidthX = entry.Data.ReadInt16(ref offset); - dialogItemTemplate.HeightY = entry.Data.ReadInt16(ref offset); - dialogItemTemplate.ID = entry.Data.ReadUInt32(ref offset); - - #region Class resource - - currentOffset = offset; - ushort itemClassResourceIdentifier = entry.Data.ReadUInt16(ref offset); - offset = currentOffset; - - // 0xFFFF means ordinal only - if (itemClassResourceIdentifier == 0xFFFF) - { - // Increment the pointer - _ = entry.Data.ReadUInt16(ref offset); - - // Read the ordinal - dialogItemTemplate.ClassResourceOrdinal = (SabreTools.Models.PortableExecutable.DialogItemTemplateOrdinal)entry.Data.ReadUInt16(ref offset); - } - else - { - // Flag if there's an ordinal at the end - bool classResourcehasOrdinal = itemClassResourceIdentifier == 0xFFFF; - if (classResourcehasOrdinal) - offset += sizeof(ushort); - - // Read the class resource as a string - dialogItemTemplate.ClassResource = entry.Data.ReadString(ref offset, Encoding.Unicode); - - // Align to the WORD boundary if we're not at the end - if (offset != entry.Data.Length) - { - while ((offset % 2) != 0) - _ = entry.Data.ReadByte(ref offset); - } - } - - #endregion - - #region Title resource - - currentOffset = offset; - ushort itemTitleResourceIdentifier = entry.Data.ReadUInt16(ref offset); - offset = currentOffset; - - // 0xFFFF means ordinal only - if (itemTitleResourceIdentifier == 0xFFFF) - { - // Increment the pointer - _ = entry.Data.ReadUInt16(ref offset); - - // Read the ordinal - dialogItemTemplate.TitleResourceOrdinal = entry.Data.ReadUInt16(ref offset); - } - else - { - // Read the title resource as a string - dialogItemTemplate.TitleResource = entry.Data.ReadString(ref offset, Encoding.Unicode); - - // Align to the WORD boundary if we're not at the end - if (offset != entry.Data.Length) - { - while ((offset % 2) != 0) - _ = entry.Data.ReadByte(ref offset); - } - } - - #endregion - - #region Creation data - - dialogItemTemplate.CreationDataSize = entry.Data.ReadUInt16(ref offset); - if (dialogItemTemplate.CreationDataSize != 0) - dialogItemTemplate.CreationData = entry.Data.ReadBytes(ref offset, dialogItemTemplate.CreationDataSize); - - #endregion - - // Align to the DWORD boundary if we're not at the end - if (offset != entry.Data.Length) - { - while ((offset % 4) != 0) - _ = entry.Data.ReadByte(ref offset); - } - - dialogItemExtendedTemplates.Add(dialogItemTemplate); - } - - dialogBoxResource.ExtendedDialogItemTemplates = dialogItemExtendedTemplates.ToArray(); - - #endregion - } - else - { - #region Dialog template - - var dialogTemplate = new SabreTools.Models.PortableExecutable.DialogTemplate(); - - dialogTemplate.Style = (SabreTools.Models.PortableExecutable.WindowStyles)entry.Data.ReadUInt32(ref offset); - dialogTemplate.ExtendedStyle = (SabreTools.Models.PortableExecutable.ExtendedWindowStyles)entry.Data.ReadUInt32(ref offset); - dialogTemplate.ItemCount = entry.Data.ReadUInt16(ref offset); - dialogTemplate.PositionX = entry.Data.ReadInt16(ref offset); - dialogTemplate.PositionY = entry.Data.ReadInt16(ref offset); - dialogTemplate.WidthX = entry.Data.ReadInt16(ref offset); - dialogTemplate.HeightY = entry.Data.ReadInt16(ref offset); - - #region Menu resource - - int currentOffset = offset; - ushort menuResourceIdentifier = entry.Data.ReadUInt16(ref offset); - offset = currentOffset; - - // 0x0000 means no elements - if (menuResourceIdentifier 
== 0x0000) - { - // Increment the pointer if it was empty - offset += sizeof(ushort); - } - else - { - // Flag if there's an ordinal at the end - bool menuResourceHasOrdinal = menuResourceIdentifier == 0xFFFF; - if (menuResourceHasOrdinal) - offset += sizeof(ushort); - - // Read the menu resource as a string - dialogTemplate.MenuResource = entry.Data.ReadString(ref offset, Encoding.Unicode); - - // Align to the WORD boundary if we're not at the end - if (offset != entry.Data.Length) - { - while ((offset % 2) != 0) - _ = entry.Data.ReadByte(ref offset); - } - - // Read the ordinal if we have the flag set - if (menuResourceHasOrdinal) - dialogTemplate.MenuResourceOrdinal = entry.Data.ReadUInt16(ref offset); - } - - #endregion - - #region Class resource - - currentOffset = offset; - ushort classResourceIdentifier = entry.Data.ReadUInt16(ref offset); - offset = currentOffset; - - // 0x0000 means no elements - if (classResourceIdentifier == 0x0000) - { - // Increment the pointer if it was empty - offset += sizeof(ushort); - } - else - { - // Flag if there's an ordinal at the end - bool classResourcehasOrdinal = classResourceIdentifier == 0xFFFF; - if (classResourcehasOrdinal) - offset += sizeof(ushort); - - // Read the class resource as a string - dialogTemplate.ClassResource = entry.Data.ReadString(ref offset, Encoding.Unicode); - - // Align to the WORD boundary if we're not at the end - if (offset != entry.Data.Length) - { - while ((offset % 2) != 0) - _ = entry.Data.ReadByte(ref offset); - } - - // Read the ordinal if we have the flag set - if (classResourcehasOrdinal) - dialogTemplate.ClassResourceOrdinal = entry.Data.ReadUInt16(ref offset); - } - - #endregion - - #region Title resource - - currentOffset = offset; - ushort titleResourceIdentifier = entry.Data.ReadUInt16(ref offset); - offset = currentOffset; - - // 0x0000 means no elements - if (titleResourceIdentifier == 0x0000) - { - // Increment the pointer if it was empty - offset += sizeof(ushort); - } - else - { - // Read the title resource as a string - dialogTemplate.TitleResource = entry.Data.ReadString(ref offset, Encoding.Unicode); - - // Align to the WORD boundary if we're not at the end - if (offset != entry.Data.Length) - { - while ((offset % 2) != 0) - _ = entry.Data.ReadByte(ref offset); - } - } - - #endregion - - #region Point size and typeface - - // Only if DS_SETFONT is set are the values here used - if (dialogTemplate.Style.HasFlag(SabreTools.Models.PortableExecutable.WindowStyles.DS_SETFONT)) - { - dialogTemplate.PointSizeValue = entry.Data.ReadUInt16(ref offset); - - // Read the font name as a string - dialogTemplate.Typeface = entry.Data.ReadString(ref offset, Encoding.Unicode); - } - - // Align to the DWORD boundary if we're not at the end - if (offset != entry.Data.Length) - { - while ((offset % 4) != 0) - _ = entry.Data.ReadByte(ref offset); - } - - #endregion - - dialogBoxResource.DialogTemplate = dialogTemplate; - - #endregion - - #region Dialog item templates - - var dialogItemTemplates = new List(); - - for (int i = 0; i < dialogTemplate.ItemCount; i++) - { - var dialogItemTemplate = new SabreTools.Models.PortableExecutable.DialogItemTemplate(); - - dialogItemTemplate.Style = (SabreTools.Models.PortableExecutable.WindowStyles)entry.Data.ReadUInt32(ref offset); - dialogItemTemplate.ExtendedStyle = (SabreTools.Models.PortableExecutable.ExtendedWindowStyles)entry.Data.ReadUInt32(ref offset); - dialogItemTemplate.PositionX = entry.Data.ReadInt16(ref offset); - dialogItemTemplate.PositionY = 
entry.Data.ReadInt16(ref offset); - dialogItemTemplate.WidthX = entry.Data.ReadInt16(ref offset); - dialogItemTemplate.HeightY = entry.Data.ReadInt16(ref offset); - dialogItemTemplate.ID = entry.Data.ReadUInt16(ref offset); - - #region Class resource - - currentOffset = offset; - ushort itemClassResourceIdentifier = entry.Data.ReadUInt16(ref offset); - offset = currentOffset; - - // 0xFFFF means ordinal only - if (itemClassResourceIdentifier == 0xFFFF) - { - // Increment the pointer - _ = entry.Data.ReadUInt16(ref offset); - - // Read the ordinal - dialogItemTemplate.ClassResourceOrdinal = (SabreTools.Models.PortableExecutable.DialogItemTemplateOrdinal)entry.Data.ReadUInt16(ref offset); - } - else - { - // Flag if there's an ordinal at the end - bool classResourcehasOrdinal = itemClassResourceIdentifier == 0xFFFF; - if (classResourcehasOrdinal) - offset += sizeof(ushort); - - // Read the class resource as a string - dialogItemTemplate.ClassResource = entry.Data.ReadString(ref offset, Encoding.Unicode); - - // Align to the WORD boundary if we're not at the end - if (offset != entry.Data.Length) - { - while ((offset % 2) != 0) - _ = entry.Data.ReadByte(ref offset); - } - } - - #endregion - - #region Title resource - - currentOffset = offset; - ushort itemTitleResourceIdentifier = entry.Data.ReadUInt16(ref offset); - offset = currentOffset; - - // 0xFFFF means ordinal only - if (itemTitleResourceIdentifier == 0xFFFF) - { - // Increment the pointer - _ = entry.Data.ReadUInt16(ref offset); - - // Read the ordinal - dialogItemTemplate.TitleResourceOrdinal = entry.Data.ReadUInt16(ref offset); - } - else - { - // Read the title resource as a string - dialogItemTemplate.TitleResource = entry.Data.ReadString(ref offset, Encoding.Unicode); - - // Align to the WORD boundary if we're not at the end - if (offset != entry.Data.Length) - { - while ((offset % 2) != 0) - _ = entry.Data.ReadByte(ref offset); - } - } - - #endregion - - #region Creation data - - dialogItemTemplate.CreationDataSize = entry.Data.ReadUInt16(ref offset); - if (dialogItemTemplate.CreationDataSize != 0) - dialogItemTemplate.CreationData = entry.Data.ReadBytes(ref offset, dialogItemTemplate.CreationDataSize); - - #endregion - - // Align to the DWORD boundary if we're not at the end - if (offset != entry.Data.Length) - { - while ((offset % 4) != 0) - _ = entry.Data.ReadByte(ref offset); - } - - dialogItemTemplates.Add(dialogItemTemplate); - } - - dialogBoxResource.DialogItemTemplates = dialogItemTemplates.ToArray(); - - #endregion - } - - return dialogBoxResource; - } - - /// - /// Read resource data as a font group - /// - /// Resource data entry to parse into a font group - /// A filled font group on success, null on error - public static SabreTools.Models.PortableExecutable.FontGroupHeader AsFontGroup(this SabreTools.Models.PortableExecutable.ResourceDataEntry entry) - { - // If we have an invalid entry, just skip - if (entry?.Data == null) - return null; - - // Initialize the iterator - int offset = 0; - - // Create the output object - var fontGroupHeader = new SabreTools.Models.PortableExecutable.FontGroupHeader(); - - fontGroupHeader.NumberOfFonts = entry.Data.ReadUInt16(ref offset); - if (fontGroupHeader.NumberOfFonts > 0) - { - fontGroupHeader.DE = new SabreTools.Models.PortableExecutable.DirEntry[fontGroupHeader.NumberOfFonts]; - for (int i = 0; i < fontGroupHeader.NumberOfFonts; i++) - { - var dirEntry = new SabreTools.Models.PortableExecutable.DirEntry(); - - dirEntry.FontOrdinal = entry.Data.ReadUInt16(ref offset); - - 
dirEntry.Entry = new SabreTools.Models.PortableExecutable.FontDirEntry(); - dirEntry.Entry.Version = entry.Data.ReadUInt16(ref offset); - dirEntry.Entry.Size = entry.Data.ReadUInt32(ref offset); - dirEntry.Entry.Copyright = entry.Data.ReadBytes(ref offset, 60); - dirEntry.Entry.Type = entry.Data.ReadUInt16(ref offset); - dirEntry.Entry.Points = entry.Data.ReadUInt16(ref offset); - dirEntry.Entry.VertRes = entry.Data.ReadUInt16(ref offset); - dirEntry.Entry.HorizRes = entry.Data.ReadUInt16(ref offset); - dirEntry.Entry.Ascent = entry.Data.ReadUInt16(ref offset); - dirEntry.Entry.InternalLeading = entry.Data.ReadUInt16(ref offset); - dirEntry.Entry.ExternalLeading = entry.Data.ReadUInt16(ref offset); - dirEntry.Entry.Italic = entry.Data.ReadByte(ref offset); - dirEntry.Entry.Underline = entry.Data.ReadByte(ref offset); - dirEntry.Entry.StrikeOut = entry.Data.ReadByte(ref offset); - dirEntry.Entry.Weight = entry.Data.ReadUInt16(ref offset); - dirEntry.Entry.CharSet = entry.Data.ReadByte(ref offset); - dirEntry.Entry.PixWidth = entry.Data.ReadUInt16(ref offset); - dirEntry.Entry.PixHeight = entry.Data.ReadUInt16(ref offset); - dirEntry.Entry.PitchAndFamily = entry.Data.ReadByte(ref offset); - dirEntry.Entry.AvgWidth = entry.Data.ReadUInt16(ref offset); - dirEntry.Entry.MaxWidth = entry.Data.ReadUInt16(ref offset); - dirEntry.Entry.FirstChar = entry.Data.ReadByte(ref offset); - dirEntry.Entry.LastChar = entry.Data.ReadByte(ref offset); - dirEntry.Entry.DefaultChar = entry.Data.ReadByte(ref offset); - dirEntry.Entry.BreakChar = entry.Data.ReadByte(ref offset); - dirEntry.Entry.WidthBytes = entry.Data.ReadUInt16(ref offset); - dirEntry.Entry.Device = entry.Data.ReadUInt32(ref offset); - dirEntry.Entry.Face = entry.Data.ReadUInt32(ref offset); - dirEntry.Entry.Reserved = entry.Data.ReadUInt32(ref offset); - - // TODO: Determine how to read these two? Immediately after? 
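// Note on the TODO above: in the documented FONTDIRENTRY layout the device name and
// face name appear to be stored as two consecutive null-terminated strings immediately
// after the fixed-size fields, which is what the two ReadString calls below assume.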
- dirEntry.Entry.DeviceName = entry.Data.ReadString(ref offset); - dirEntry.Entry.FaceName = entry.Data.ReadString(ref offset); - - fontGroupHeader.DE[i] = dirEntry; - } - } - - // TODO: Implement entry parsing - return fontGroupHeader; - } - - /// - /// Read resource data as a menu - /// - /// Resource data entry to parse into a menu - /// A filled menu on success, null on error - public static SabreTools.Models.PortableExecutable.MenuResource AsMenu(this SabreTools.Models.PortableExecutable.ResourceDataEntry entry) - { - // If we have an invalid entry, just skip - if (entry?.Data == null) - return null; - - // Initialize the iterator - int offset = 0; - - // Create the output object - var menuResource = new SabreTools.Models.PortableExecutable.MenuResource(); - - // Try to read the version for an extended header - int versionOffset = 0; - int possibleVersion = entry.Data.ReadUInt16(ref versionOffset); - if (possibleVersion == 0x0001) - { - #region Extended menu header - - var menuHeaderExtended = new SabreTools.Models.PortableExecutable.MenuHeaderExtended(); - - menuHeaderExtended.Version = entry.Data.ReadUInt16(ref offset); - menuHeaderExtended.Offset = entry.Data.ReadUInt16(ref offset); - menuHeaderExtended.HelpID = entry.Data.ReadUInt32(ref offset); - - menuResource.ExtendedMenuHeader = menuHeaderExtended; - - #endregion - - #region Extended dialog item templates - - var extendedMenuItems = new List(); - - if (offset != 0) - { - offset = menuHeaderExtended.Offset; - - while (offset < entry.Data.Length) - { - var extendedMenuItem = new SabreTools.Models.PortableExecutable.MenuItemExtended(); - - extendedMenuItem.ItemType = (SabreTools.Models.PortableExecutable.MenuFlags)entry.Data.ReadUInt32(ref offset); - extendedMenuItem.State = (SabreTools.Models.PortableExecutable.MenuFlags)entry.Data.ReadUInt32(ref offset); - extendedMenuItem.ID = entry.Data.ReadUInt32(ref offset); - extendedMenuItem.Flags = (SabreTools.Models.PortableExecutable.MenuFlags)entry.Data.ReadUInt32(ref offset); - extendedMenuItem.MenuText = entry.Data.ReadString(ref offset, Encoding.Unicode); - - // Align to the DWORD boundary if we're not at the end - if (offset != entry.Data.Length) - { - while ((offset % 4) != 0) - _ = entry.Data.ReadByte(ref offset); - } - - extendedMenuItems.Add(extendedMenuItem); - } - } - - menuResource.ExtendedMenuItems = extendedMenuItems.ToArray(); - - #endregion - } - else - { - #region Menu header - - var menuHeader = new SabreTools.Models.PortableExecutable.MenuHeader(); - - menuHeader.Version = entry.Data.ReadUInt16(ref offset); - menuHeader.HeaderSize = entry.Data.ReadUInt16(ref offset); - - menuResource.MenuHeader = menuHeader; - - #endregion - - #region Menu items - - var menuItems = new List(); - - while (offset < entry.Data.Length) - { - var menuItem = new SabreTools.Models.PortableExecutable.MenuItem(); - - // Determine if this is a popup - int flagsOffset = offset; - var initialFlags = (SabreTools.Models.PortableExecutable.MenuFlags)entry.Data.ReadUInt16(ref flagsOffset); - if (initialFlags.HasFlag(SabreTools.Models.PortableExecutable.MenuFlags.MF_POPUP)) - { - menuItem.PopupItemType = (SabreTools.Models.PortableExecutable.MenuFlags)entry.Data.ReadUInt32(ref offset); - menuItem.PopupState = (SabreTools.Models.PortableExecutable.MenuFlags)entry.Data.ReadUInt32(ref offset); - menuItem.PopupID = entry.Data.ReadUInt32(ref offset); - menuItem.PopupResInfo = (SabreTools.Models.PortableExecutable.MenuFlags)entry.Data.ReadUInt32(ref offset); - menuItem.PopupMenuText = 
entry.Data.ReadString(ref offset, Encoding.Unicode); - } - else - { - menuItem.NormalResInfo = (SabreTools.Models.PortableExecutable.MenuFlags)entry.Data.ReadUInt16(ref offset); - menuItem.NormalMenuText = entry.Data.ReadString(ref offset, Encoding.Unicode); - } - - // Align to the DWORD boundary if we're not at the end - if (offset != entry.Data.Length) - { - while ((offset % 4) != 0) - _ = entry.Data.ReadByte(ref offset); - } - - menuItems.Add(menuItem); - } - - menuResource.MenuItems = menuItems.ToArray(); - - #endregion - } - - return menuResource; - } - - /// - /// Read resource data as a message table resource - /// - /// Resource data entry to parse into a message table resource - /// A filled message table resource on success, null on error - public static SabreTools.Models.PortableExecutable.MessageResourceData AsMessageResourceData(this SabreTools.Models.PortableExecutable.ResourceDataEntry entry) - { - // If we have an invalid entry, just skip - if (entry?.Data == null) - return null; - - // Initialize the iterator - int offset = 0; - - // Create the output object - var messageResourceData = new SabreTools.Models.PortableExecutable.MessageResourceData(); - - // Message resource blocks - messageResourceData.NumberOfBlocks = entry.Data.ReadUInt32(ref offset); - if (messageResourceData.NumberOfBlocks > 0) - { - var messageResourceBlocks = new List(); - - for (int i = 0; i < messageResourceData.NumberOfBlocks; i++) - { - var messageResourceBlock = new SabreTools.Models.PortableExecutable.MessageResourceBlock(); - - messageResourceBlock.LowId = entry.Data.ReadUInt32(ref offset); - messageResourceBlock.HighId = entry.Data.ReadUInt32(ref offset); - messageResourceBlock.OffsetToEntries = entry.Data.ReadUInt32(ref offset); - - messageResourceBlocks.Add(messageResourceBlock); - } - - messageResourceData.Blocks = messageResourceBlocks.ToArray(); - } - - // Message resource entries - if (messageResourceData.Blocks != null && messageResourceData.Blocks.Length != 0) - { - var messageResourceEntries = new Dictionary(); - - for (int i = 0; i < messageResourceData.Blocks.Length; i++) - { - var messageResourceBlock = messageResourceData.Blocks[i]; - offset = (int)messageResourceBlock.OffsetToEntries; - - for (uint j = messageResourceBlock.LowId; j <= messageResourceBlock.HighId; j++) - { - var messageResourceEntry = new SabreTools.Models.PortableExecutable.MessageResourceEntry(); - - messageResourceEntry.Length = entry.Data.ReadUInt16(ref offset); - messageResourceEntry.Flags = entry.Data.ReadUInt16(ref offset); - - Encoding textEncoding = messageResourceEntry.Flags == 0x0001 ? 
Encoding.Unicode : Encoding.ASCII; - byte[] textArray = entry.Data.ReadBytes(ref offset, messageResourceEntry.Length - 4); - messageResourceEntry.Text = textEncoding.GetString(textArray); - - messageResourceEntries[j] = messageResourceEntry; - } - } - - messageResourceData.Entries = messageResourceEntries; - } - - return messageResourceData; - } - - /// - /// Read resource data as a string table resource - /// - /// Resource data entry to parse into a string table resource - /// A filled string table resource on success, null on error - public static Dictionary AsStringTable(this SabreTools.Models.PortableExecutable.ResourceDataEntry entry) - { - // If we have an invalid entry, just skip - if (entry?.Data == null) - return null; - - // Initialize the iterators - int offset = 0, stringIndex = 0; - - // Create the output table - var stringTable = new Dictionary(); - - // Loop through and add - while (offset < entry.Data.Length) - { - ushort stringLength = entry.Data.ReadUInt16(ref offset); - if (stringLength == 0) - { - stringTable[stringIndex++] = "[EMPTY]"; - } - else - { - if (stringLength * 2 > entry.Data.Length - offset) - { - Console.WriteLine($"{stringLength * 2} requested but {entry.Data.Length - offset} remains"); - stringLength = (ushort)((entry.Data.Length - offset) / 2); - } - - string stringValue = Encoding.Unicode.GetString(entry.Data, offset, stringLength * 2); - offset += stringLength * 2; - stringValue = stringValue.Replace("\n", "\\n").Replace("\r", newValue: "\\r"); - stringTable[stringIndex++] = stringValue; - } - } - - return stringTable; - } - - /// - /// Read resource data as a version info resource - /// - /// Resource data entry to parse into a version info resource - /// A filled version info resource on success, null on error - public static SabreTools.Models.PortableExecutable.VersionInfo AsVersionInfo(this SabreTools.Models.PortableExecutable.ResourceDataEntry entry) - { - // If we have an invalid entry, just skip - if (entry?.Data == null) - return null; - - // Initialize the iterator - int offset = 0; - - // Create the output object - var versionInfo = new SabreTools.Models.PortableExecutable.VersionInfo(); - - versionInfo.Length = entry.Data.ReadUInt16(ref offset); - versionInfo.ValueLength = entry.Data.ReadUInt16(ref offset); - versionInfo.ResourceType = (SabreTools.Models.PortableExecutable.VersionResourceType)entry.Data.ReadUInt16(ref offset); - versionInfo.Key = entry.Data.ReadString(ref offset, Encoding.Unicode); - if (versionInfo.Key != "VS_VERSION_INFO") - return null; - - while ((offset % 4) != 0) - versionInfo.Padding1 = entry.Data.ReadUInt16(ref offset); - - // Read fixed file info - if (versionInfo.ValueLength > 0) - { - var fixedFileInfo = new SabreTools.Models.PortableExecutable.FixedFileInfo(); - fixedFileInfo.Signature = entry.Data.ReadUInt32(ref offset); - if (fixedFileInfo.Signature != 0xFEEF04BD) - return null; - - fixedFileInfo.StrucVersion = entry.Data.ReadUInt32(ref offset); - fixedFileInfo.FileVersionMS = entry.Data.ReadUInt32(ref offset); - fixedFileInfo.FileVersionLS = entry.Data.ReadUInt32(ref offset); - fixedFileInfo.ProductVersionMS = entry.Data.ReadUInt32(ref offset); - fixedFileInfo.ProductVersionLS = entry.Data.ReadUInt32(ref offset); - fixedFileInfo.FileFlagsMask = entry.Data.ReadUInt32(ref offset); - fixedFileInfo.FileFlags = (SabreTools.Models.PortableExecutable.FixedFileInfoFlags)(entry.Data.ReadUInt32(ref offset) & fixedFileInfo.FileFlagsMask); - fixedFileInfo.FileOS = 
(SabreTools.Models.PortableExecutable.FixedFileInfoOS)entry.Data.ReadUInt32(ref offset); - fixedFileInfo.FileType = (SabreTools.Models.PortableExecutable.FixedFileInfoFileType)entry.Data.ReadUInt32(ref offset); - fixedFileInfo.FileSubtype = (SabreTools.Models.PortableExecutable.FixedFileInfoFileSubtype)entry.Data.ReadUInt32(ref offset); - fixedFileInfo.FileDateMS = entry.Data.ReadUInt32(ref offset); - fixedFileInfo.FileDateLS = entry.Data.ReadUInt32(ref offset); - versionInfo.Value = fixedFileInfo; - } - - while ((offset % 4) != 0) - versionInfo.Padding2 = entry.Data.ReadUInt16(ref offset); - - // TODO: Make the following block a private helper method - - // Determine if we have a StringFileInfo or VarFileInfo next - if (offset < versionInfo.Length) - { - // Cache the current offset for reading - int currentOffset = offset; - - offset += 6; - - string nextKey = entry.Data.ReadString(ref offset, Encoding.Unicode); - offset = currentOffset; - - if (nextKey == "StringFileInfo") - { - var stringFileInfo = AsStringFileInfo(entry.Data, ref offset); - versionInfo.StringFileInfo = stringFileInfo; - } - else if (nextKey == "VarFileInfo") - { - var varFileInfo = AsVarFileInfo(entry.Data, ref offset); - versionInfo.VarFileInfo = varFileInfo; - } - } - - // And again - if (offset < versionInfo.Length) - { - // Cache the current offset for reading - int currentOffset = offset; - - offset += 6; - string nextKey = entry.Data.ReadString(ref offset, Encoding.Unicode); - offset = currentOffset; - - if (nextKey == "StringFileInfo") - { - var stringFileInfo = AsStringFileInfo(entry.Data, ref offset); - versionInfo.StringFileInfo = stringFileInfo; - } - else if (nextKey == "VarFileInfo") - { - var varFileInfo = AsVarFileInfo(entry.Data, ref offset); - versionInfo.VarFileInfo = varFileInfo; - } - } - - return versionInfo; - } - - /// - /// Read byte data as a string file info resource - /// - /// Data to parse into a string file info - /// Offset into the byte array - /// A filled string file info resource on success, null on error - private static SabreTools.Models.PortableExecutable.StringFileInfo AsStringFileInfo(byte[] data, ref int offset) - { - var stringFileInfo = new SabreTools.Models.PortableExecutable.StringFileInfo(); - - // Cache the initial offset - int currentOffset = offset; - - stringFileInfo.Length = data.ReadUInt16(ref offset); - stringFileInfo.ValueLength = data.ReadUInt16(ref offset); - stringFileInfo.ResourceType = (SabreTools.Models.PortableExecutable.VersionResourceType)data.ReadUInt16(ref offset); - stringFileInfo.Key = data.ReadString(ref offset, Encoding.Unicode); - if (stringFileInfo.Key != "StringFileInfo") - { - offset -= 6 + ((stringFileInfo.Key.Length + 1) * 2); - return null; - } - - // Align to the DWORD boundary if we're not at the end - if (offset != data.Length) - { - while ((offset % 4) != 0) - stringFileInfo.Padding = data.ReadByte(ref offset); - } - - var stringFileInfoChildren = new List(); - while ((offset - currentOffset) < stringFileInfo.Length) - { - var stringTable = new SabreTools.Models.PortableExecutable.StringTable(); - - stringTable.Length = data.ReadUInt16(ref offset); - stringTable.ValueLength = data.ReadUInt16(ref offset); - stringTable.ResourceType = (SabreTools.Models.PortableExecutable.VersionResourceType)data.ReadUInt16(ref offset); - stringTable.Key = data.ReadString(ref offset, Encoding.Unicode); - - // Align to the DWORD boundary if we're not at the end - if (offset != data.Length) - { - while ((offset % 4) != 0) - stringTable.Padding = 
data.ReadByte(ref offset); - } - - var stringTableChildren = new List(); - while ((offset - currentOffset) < stringTable.Length) - { - var stringData = new SabreTools.Models.PortableExecutable.StringData(); - - stringData.Length = data.ReadUInt16(ref offset); - stringData.ValueLength = data.ReadUInt16(ref offset); - stringData.ResourceType = (SabreTools.Models.PortableExecutable.VersionResourceType)data.ReadUInt16(ref offset); - stringData.Key = data.ReadString(ref offset, Encoding.Unicode); - - // Align to the DWORD boundary if we're not at the end - if (offset != data.Length) - { - while ((offset % 4) != 0) - stringData.Padding = data.ReadByte(ref offset); - } - - if (stringData.ValueLength > 0) - { - byte[] valueBytes = data.ReadBytes(ref offset, stringData.ValueLength * sizeof(ushort)); - stringData.Value = Encoding.Unicode.GetString(valueBytes); - } - - // Align to the DWORD boundary if we're not at the end - if (offset != data.Length) - { - while ((offset % 4) != 0) - _ = data.ReadByte(ref offset); - } - - stringTableChildren.Add(stringData); - } - - stringTable.Children = stringTableChildren.ToArray(); - - stringFileInfoChildren.Add(stringTable); - } - - stringFileInfo.Children = stringFileInfoChildren.ToArray(); - - return stringFileInfo; - } - - /// - /// Read byte data as a var file info resource - /// - /// Data to parse into a var file info - /// Offset into the byte array - /// A filled var file info resource on success, null on error - private static SabreTools.Models.PortableExecutable.VarFileInfo AsVarFileInfo(byte[] data, ref int offset) - { - var varFileInfo = new SabreTools.Models.PortableExecutable.VarFileInfo(); - - // Cache the initial offset - int initialOffset = offset; - - varFileInfo.Length = data.ReadUInt16(ref offset); - varFileInfo.ValueLength = data.ReadUInt16(ref offset); - varFileInfo.ResourceType = (SabreTools.Models.PortableExecutable.VersionResourceType)data.ReadUInt16(ref offset); - varFileInfo.Key = data.ReadString(ref offset, Encoding.Unicode); - if (varFileInfo.Key != "VarFileInfo") - return null; - - // Align to the DWORD boundary if we're not at the end - if (offset != data.Length) - { - while ((offset % 4) != 0) - varFileInfo.Padding = data.ReadByte(ref offset); - } - - var varFileInfoChildren = new List(); - while ((offset - initialOffset) < varFileInfo.Length) - { - var varData = new SabreTools.Models.PortableExecutable.VarData(); - - varData.Length = data.ReadUInt16(ref offset); - varData.ValueLength = data.ReadUInt16(ref offset); - varData.ResourceType = (SabreTools.Models.PortableExecutable.VersionResourceType)data.ReadUInt16(ref offset); - varData.Key = data.ReadString(ref offset, Encoding.Unicode); - if (varData.Key != "Translation") - { - offset -= 6 + ((varData.Key.Length + 1) * 2); - return null; - } - - // Align to the DWORD boundary if we're not at the end - if (offset != data.Length) - { - while ((offset % 4) != 0) - varData.Padding = data.ReadByte(ref offset); - } - - // Cache the current offset - int currentOffset = offset; - - var varDataValue = new List(); - while ((offset - currentOffset) < varData.ValueLength) - { - uint languageAndCodeIdentifierPair = data.ReadUInt32(ref offset); - varDataValue.Add(languageAndCodeIdentifierPair); - } - - varData.Value = varDataValue.ToArray(); - - varFileInfoChildren.Add(varData); - } - - varFileInfo.Children = varFileInfoChildren.ToArray(); - - return varFileInfo; - } - - #endregion - - #endregion - } -} \ No newline at end of file diff --git a/BinaryObjectScanner.Builders/GCF.cs 
b/BinaryObjectScanner.Builders/GCF.cs deleted file mode 100644 index 6df569cd..00000000 --- a/BinaryObjectScanner.Builders/GCF.cs +++ /dev/null @@ -1,775 +0,0 @@ -using System.Collections.Generic; -using System.IO; -using System.Text; -using SabreTools.IO; -using SabreTools.Models.GCF; - -namespace BinaryObjectScanner.Builders -{ - public static class GCF - { - #region Byte Data - - /// - /// Parse a byte array into a Half-Life Game Cache - /// - /// Byte array to parse - /// Offset into the byte array - /// Filled Half-Life Game Cache on success, null on error - public static SabreTools.Models.GCF.File ParseFile(byte[] data, int offset) - { - // If the data is invalid - if (data == null) - return null; - - // If the offset is out of bounds - if (offset < 0 || offset >= data.Length) - return null; - - // Create a memory stream and parse that - MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset); - return ParseFile(dataStream); - } - - #endregion - - #region Stream Data - - /// - /// Parse a Stream into a Half-Life Game Cache - /// - /// Stream to parse - /// Filled Half-Life Game Cache on success, null on error - public static SabreTools.Models.GCF.File ParseFile(Stream data) - { - // If the data is invalid - if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead) - return null; - - // If the offset is out of bounds - if (data.Position < 0 || data.Position >= data.Length) - return null; - - // Cache the current offset - long initialOffset = data.Position; - - // Create a new Half-Life Game Cache to fill - var file = new SabreTools.Models.GCF.File(); - - #region Header - - // Try to parse the header - var header = ParseHeader(data); - if (header == null) - return null; - - // Set the game cache header - file.Header = header; - - #endregion - - #region Block Entry Header - - // Try to parse the block entry header - var blockEntryHeader = ParseBlockEntryHeader(data); - if (blockEntryHeader == null) - return null; - - // Set the game cache block entry header - file.BlockEntryHeader = blockEntryHeader; - - #endregion - - #region Block Entries - - // Create the block entry array - file.BlockEntries = new BlockEntry[blockEntryHeader.BlockCount]; - - // Try to parse the block entries - for (int i = 0; i < blockEntryHeader.BlockCount; i++) - { - var blockEntry = ParseBlockEntry(data); - file.BlockEntries[i] = blockEntry; - } - - #endregion - - #region Fragmentation Map Header - - // Try to parse the fragmentation map header - var fragmentationMapHeader = ParseFragmentationMapHeader(data); - if (fragmentationMapHeader == null) - return null; - - // Set the game cache fragmentation map header - file.FragmentationMapHeader = fragmentationMapHeader; - - #endregion - - #region Fragmentation Maps - - // Create the fragmentation map array - file.FragmentationMaps = new FragmentationMap[fragmentationMapHeader.BlockCount]; - - // Try to parse the fragmentation maps - for (int i = 0; i < fragmentationMapHeader.BlockCount; i++) - { - var fragmentationMap = ParseFragmentationMap(data); - file.FragmentationMaps[i] = fragmentationMap; - } - - #endregion - - #region Block Entry Map Header - - if (header.MinorVersion < 6) - { - // Try to parse the block entry map header - var blockEntryMapHeader = ParseBlockEntryMapHeader(data); - if (blockEntryMapHeader == null) - return null; - - // Set the game cache block entry map header - file.BlockEntryMapHeader = blockEntryMapHeader; - } - - #endregion - - #region Block Entry Maps - - if (header.MinorVersion < 6) - { - // Create 
the block entry map array - file.BlockEntryMaps = new BlockEntryMap[file.BlockEntryMapHeader.BlockCount]; - - // Try to parse the block entry maps - for (int i = 0; i < file.BlockEntryMapHeader.BlockCount; i++) - { - var blockEntryMap = ParseBlockEntryMap(data); - file.BlockEntryMaps[i] = blockEntryMap; - } - } - - #endregion - - // Cache the current offset - initialOffset = data.Position; - - #region Directory Header - - // Try to parse the directory header - var directoryHeader = ParseDirectoryHeader(data); - if (directoryHeader == null) - return null; - - // Set the game cache directory header - file.DirectoryHeader = directoryHeader; - - #endregion - - #region Directory Entries - - // Create the directory entry array - file.DirectoryEntries = new DirectoryEntry[directoryHeader.ItemCount]; - - // Try to parse the directory entries - for (int i = 0; i < directoryHeader.ItemCount; i++) - { - var directoryEntry = ParseDirectoryEntry(data); - file.DirectoryEntries[i] = directoryEntry; - } - - #endregion - - #region Directory Names - - if (directoryHeader.NameSize > 0) - { - // Get the current offset for adjustment - long directoryNamesStart = data.Position; - - // Get the ending offset - long directoryNamesEnd = data.Position + directoryHeader.NameSize; - - // Create the string dictionary - file.DirectoryNames = new Dictionary(); - - // Loop and read the null-terminated strings - while (data.Position < directoryNamesEnd) - { - long nameOffset = data.Position - directoryNamesStart; - string directoryName = data.ReadString(Encoding.ASCII); - if (data.Position > directoryNamesEnd) - { - data.Seek(-directoryName.Length, SeekOrigin.Current); - byte[] endingData = data.ReadBytes((int)(directoryNamesEnd - data.Position)); - if (endingData != null) - directoryName = Encoding.ASCII.GetString(endingData); - else - directoryName = null; - } - - file.DirectoryNames[nameOffset] = directoryName; - } - - // Loop and assign to entries - foreach (var directoryEntry in file.DirectoryEntries) - { - directoryEntry.Name = file.DirectoryNames[directoryEntry.NameOffset]; - } - } - - #endregion - - #region Directory Info 1 Entries - - // Create the directory info 1 entry array - file.DirectoryInfo1Entries = new DirectoryInfo1Entry[directoryHeader.Info1Count]; - - // Try to parse the directory info 1 entries - for (int i = 0; i < directoryHeader.Info1Count; i++) - { - var directoryInfo1Entry = ParseDirectoryInfo1Entry(data); - file.DirectoryInfo1Entries[i] = directoryInfo1Entry; - } - - #endregion - - #region Directory Info 2 Entries - - // Create the directory info 2 entry array - file.DirectoryInfo2Entries = new DirectoryInfo2Entry[directoryHeader.ItemCount]; - - // Try to parse the directory info 2 entries - for (int i = 0; i < directoryHeader.ItemCount; i++) - { - var directoryInfo2Entry = ParseDirectoryInfo2Entry(data); - file.DirectoryInfo2Entries[i] = directoryInfo2Entry; - } - - #endregion - - #region Directory Copy Entries - - // Create the directory copy entry array - file.DirectoryCopyEntries = new DirectoryCopyEntry[directoryHeader.CopyCount]; - - // Try to parse the directory copy entries - for (int i = 0; i < directoryHeader.CopyCount; i++) - { - var directoryCopyEntry = ParseDirectoryCopyEntry(data); - file.DirectoryCopyEntries[i] = directoryCopyEntry; - } - - #endregion - - #region Directory Local Entries - - // Create the directory local entry array - file.DirectoryLocalEntries = new DirectoryLocalEntry[directoryHeader.LocalCount]; - - // Try to parse the directory local entries - for (int i = 0; i 
< directoryHeader.LocalCount; i++) - { - var directoryLocalEntry = ParseDirectoryLocalEntry(data); - file.DirectoryLocalEntries[i] = directoryLocalEntry; - } - - #endregion - - // Seek to end of directory section, just in case - data.Seek(initialOffset + directoryHeader.DirectorySize, SeekOrigin.Begin); - - #region Directory Map Header - - if (header.MinorVersion >= 5) - { - // Try to parse the directory map header - var directoryMapHeader = ParseDirectoryMapHeader(data); - if (directoryMapHeader == null) - return null; - - // Set the game cache directory map header - file.DirectoryMapHeader = directoryMapHeader; - } - - #endregion - - #region Directory Map Entries - - // Create the directory map entry array - file.DirectoryMapEntries = new DirectoryMapEntry[directoryHeader.ItemCount]; - - // Try to parse the directory map entries - for (int i = 0; i < directoryHeader.ItemCount; i++) - { - var directoryMapEntry = ParseDirectoryMapEntry(data); - file.DirectoryMapEntries[i] = directoryMapEntry; - } - - #endregion - - #region Checksum Header - - // Try to parse the checksum header - var checksumHeader = ParseChecksumHeader(data); - if (checksumHeader == null) - return null; - - // Set the game cache checksum header - file.ChecksumHeader = checksumHeader; - - #endregion - - // Cache the current offset - initialOffset = data.Position; - - #region Checksum Map Header - - // Try to parse the checksum map header - var checksumMapHeader = ParseChecksumMapHeader(data); - if (checksumMapHeader == null) - return null; - - // Set the game cache checksum map header - file.ChecksumMapHeader = checksumMapHeader; - - #endregion - - #region Checksum Map Entries - - // Create the checksum map entry array - file.ChecksumMapEntries = new ChecksumMapEntry[checksumMapHeader.ItemCount]; - - // Try to parse the checksum map entries - for (int i = 0; i < checksumMapHeader.ItemCount; i++) - { - var checksumMapEntry = ParseChecksumMapEntry(data); - file.ChecksumMapEntries[i] = checksumMapEntry; - } - - #endregion - - #region Checksum Entries - - // Create the checksum entry array - file.ChecksumEntries = new ChecksumEntry[checksumMapHeader.ChecksumCount]; - - // Try to parse the checksum entries - for (int i = 0; i < checksumMapHeader.ChecksumCount; i++) - { - var checksumEntry = ParseChecksumEntry(data); - file.ChecksumEntries[i] = checksumEntry; - } - - #endregion - - // Seek to end of checksum section, just in case - data.Seek(initialOffset + checksumHeader.ChecksumSize, SeekOrigin.Begin); - - #region Data Block Header - - // Try to parse the data block header - var dataBlockHeader = ParseDataBlockHeader(data, header.MinorVersion); - if (dataBlockHeader == null) - return null; - - // Set the game cache data block header - file.DataBlockHeader = dataBlockHeader; - - #endregion - - return file; - } - - /// - /// Parse a Stream into a Half-Life Game Cache header - /// - /// Stream to parse - /// Filled Half-Life Game Cache on success, null on error - private static Header ParseHeader(Stream data) - { - // TODO: Use marshalling here instead of building - Header header = new Header(); - - header.Dummy0 = data.ReadUInt32(); - if (header.Dummy0 != 0x00000001) - return null; - - header.MajorVersion = data.ReadUInt32(); - if (header.MajorVersion != 0x00000001) - return null; - - header.MinorVersion = data.ReadUInt32(); - if (header.MinorVersion != 3 && header.MinorVersion != 5 && header.MinorVersion != 6) - return null; - - header.CacheID = data.ReadUInt32(); - header.LastVersionPlayed = data.ReadUInt32(); - header.Dummy1 
= data.ReadUInt32(); - header.Dummy2 = data.ReadUInt32(); - header.FileSize = data.ReadUInt32(); - header.BlockSize = data.ReadUInt32(); - header.BlockCount = data.ReadUInt32(); - header.Dummy3 = data.ReadUInt32(); - - return header; - } - - /// - /// Parse a Stream into a Half-Life Game Cache block entry header - /// - /// Stream to parse - /// Filled Half-Life Game Cache block entry header on success, null on error - private static BlockEntryHeader ParseBlockEntryHeader(Stream data) - { - // TODO: Use marshalling here instead of building - BlockEntryHeader blockEntryHeader = new BlockEntryHeader(); - - blockEntryHeader.BlockCount = data.ReadUInt32(); - blockEntryHeader.BlocksUsed = data.ReadUInt32(); - blockEntryHeader.Dummy0 = data.ReadUInt32(); - blockEntryHeader.Dummy1 = data.ReadUInt32(); - blockEntryHeader.Dummy2 = data.ReadUInt32(); - blockEntryHeader.Dummy3 = data.ReadUInt32(); - blockEntryHeader.Dummy4 = data.ReadUInt32(); - blockEntryHeader.Checksum = data.ReadUInt32(); - - return blockEntryHeader; - } - - /// - /// Parse a Stream into a Half-Life Game Cache block entry - /// - /// Stream to parse - /// Filled Half-Life Game Cache block entry on success, null on error - private static BlockEntry ParseBlockEntry(Stream data) - { - // TODO: Use marshalling here instead of building - BlockEntry blockEntry = new BlockEntry(); - - blockEntry.EntryFlags = data.ReadUInt32(); - blockEntry.FileDataOffset = data.ReadUInt32(); - blockEntry.FileDataSize = data.ReadUInt32(); - blockEntry.FirstDataBlockIndex = data.ReadUInt32(); - blockEntry.NextBlockEntryIndex = data.ReadUInt32(); - blockEntry.PreviousBlockEntryIndex = data.ReadUInt32(); - blockEntry.DirectoryIndex = data.ReadUInt32(); - - return blockEntry; - } - - /// - /// Parse a Stream into a Half-Life Game Cache fragmentation map header - /// - /// Stream to parse - /// Filled Half-Life Game Cache fragmentation map header on success, null on error - private static FragmentationMapHeader ParseFragmentationMapHeader(Stream data) - { - // TODO: Use marshalling here instead of building - FragmentationMapHeader fragmentationMapHeader = new FragmentationMapHeader(); - - fragmentationMapHeader.BlockCount = data.ReadUInt32(); - fragmentationMapHeader.FirstUnusedEntry = data.ReadUInt32(); - fragmentationMapHeader.Terminator = data.ReadUInt32(); - fragmentationMapHeader.Checksum = data.ReadUInt32(); - - return fragmentationMapHeader; - } - - /// - /// Parse a Stream into a Half-Life Game Cache fragmentation map - /// - /// Stream to parse - /// Filled Half-Life Game Cache fragmentation map on success, null on error - private static FragmentationMap ParseFragmentationMap(Stream data) - { - // TODO: Use marshalling here instead of building - FragmentationMap fragmentationMap = new FragmentationMap(); - - fragmentationMap.NextDataBlockIndex = data.ReadUInt32(); - - return fragmentationMap; - } - - /// - /// Parse a Stream into a Half-Life Game Cache block entry map header - /// - /// Stream to parse - /// Filled Half-Life Game Cache block entry map header on success, null on error - private static BlockEntryMapHeader ParseBlockEntryMapHeader(Stream data) - { - // TODO: Use marshalling here instead of building - BlockEntryMapHeader blockEntryMapHeader = new BlockEntryMapHeader(); - - blockEntryMapHeader.BlockCount = data.ReadUInt32(); - blockEntryMapHeader.FirstBlockEntryIndex = data.ReadUInt32(); - blockEntryMapHeader.LastBlockEntryIndex = data.ReadUInt32(); - blockEntryMapHeader.Dummy0 = data.ReadUInt32(); - blockEntryMapHeader.Checksum = 
data.ReadUInt32(); - - return blockEntryMapHeader; - } - - /// - /// Parse a Stream into a Half-Life Game Cache block entry map - /// - /// Stream to parse - /// Filled Half-Life Game Cache block entry map on success, null on error - private static BlockEntryMap ParseBlockEntryMap(Stream data) - { - // TODO: Use marshalling here instead of building - BlockEntryMap blockEntryMap = new BlockEntryMap(); - - blockEntryMap.PreviousBlockEntryIndex = data.ReadUInt32(); - blockEntryMap.NextBlockEntryIndex = data.ReadUInt32(); - - return blockEntryMap; - } - - /// - /// Parse a Stream into a Half-Life Game Cache directory header - /// - /// Stream to parse - /// Filled Half-Life Game Cache directory header on success, null on error - private static DirectoryHeader ParseDirectoryHeader(Stream data) - { - // TODO: Use marshalling here instead of building - DirectoryHeader directoryHeader = new DirectoryHeader(); - - directoryHeader.Dummy0 = data.ReadUInt32(); - directoryHeader.CacheID = data.ReadUInt32(); - directoryHeader.LastVersionPlayed = data.ReadUInt32(); - directoryHeader.ItemCount = data.ReadUInt32(); - directoryHeader.FileCount = data.ReadUInt32(); - directoryHeader.Dummy1 = data.ReadUInt32(); - directoryHeader.DirectorySize = data.ReadUInt32(); - directoryHeader.NameSize = data.ReadUInt32(); - directoryHeader.Info1Count = data.ReadUInt32(); - directoryHeader.CopyCount = data.ReadUInt32(); - directoryHeader.LocalCount = data.ReadUInt32(); - directoryHeader.Dummy2 = data.ReadUInt32(); - directoryHeader.Dummy3 = data.ReadUInt32(); - directoryHeader.Checksum = data.ReadUInt32(); - - return directoryHeader; - } - - /// - /// Parse a Stream into a Half-Life Game Cache directory entry - /// - /// Stream to parse - /// Filled Half-Life Game Cache directory entry on success, null on error - private static DirectoryEntry ParseDirectoryEntry(Stream data) - { - // TODO: Use marshalling here instead of building - DirectoryEntry directoryEntry = new DirectoryEntry(); - - directoryEntry.NameOffset = data.ReadUInt32(); - directoryEntry.ItemSize = data.ReadUInt32(); - directoryEntry.ChecksumIndex = data.ReadUInt32(); - directoryEntry.DirectoryFlags = (HL_GCF_FLAG)data.ReadUInt32(); - directoryEntry.ParentIndex = data.ReadUInt32(); - directoryEntry.NextIndex = data.ReadUInt32(); - directoryEntry.FirstIndex = data.ReadUInt32(); - - return directoryEntry; - } - - /// - /// Parse a Stream into a Half-Life Game Cache directory info 1 entry - /// - /// Stream to parse - /// Filled Half-Life Game Cache directory info 1 entry on success, null on error - private static DirectoryInfo1Entry ParseDirectoryInfo1Entry(Stream data) - { - // TODO: Use marshalling here instead of building - DirectoryInfo1Entry directoryInfo1Entry = new DirectoryInfo1Entry(); - - directoryInfo1Entry.Dummy0 = data.ReadUInt32(); - - return directoryInfo1Entry; - } - - /// - /// Parse a Stream into a Half-Life Game Cache directory info 2 entry - /// - /// Stream to parse - /// Filled Half-Life Game Cache directory info 2 entry on success, null on error - private static DirectoryInfo2Entry ParseDirectoryInfo2Entry(Stream data) - { - // TODO: Use marshalling here instead of building - DirectoryInfo2Entry directoryInfo2Entry = new DirectoryInfo2Entry(); - - directoryInfo2Entry.Dummy0 = data.ReadUInt32(); - - return directoryInfo2Entry; - } - - /// - /// Parse a Stream into a Half-Life Game Cache directory copy entry - /// - /// Stream to parse - /// Filled Half-Life Game Cache directory copy entry on success, null on error - private static 
DirectoryCopyEntry ParseDirectoryCopyEntry(Stream data) - { - // TODO: Use marshalling here instead of building - DirectoryCopyEntry directoryCopyEntry = new DirectoryCopyEntry(); - - directoryCopyEntry.DirectoryIndex = data.ReadUInt32(); - - return directoryCopyEntry; - } - - /// - /// Parse a Stream into a Half-Life Game Cache directory local entry - /// - /// Stream to parse - /// Filled Half-Life Game Cache directory local entry on success, null on error - private static DirectoryLocalEntry ParseDirectoryLocalEntry(Stream data) - { - // TODO: Use marshalling here instead of building - DirectoryLocalEntry directoryLocalEntry = new DirectoryLocalEntry(); - - directoryLocalEntry.DirectoryIndex = data.ReadUInt32(); - - return directoryLocalEntry; - } - - /// - /// Parse a Stream into a Half-Life Game Cache directory map header - /// - /// Stream to parse - /// Filled Half-Life Game Cache directory map header on success, null on error - private static DirectoryMapHeader ParseDirectoryMapHeader(Stream data) - { - // TODO: Use marshalling here instead of building - DirectoryMapHeader directoryMapHeader = new DirectoryMapHeader(); - - directoryMapHeader.Dummy0 = data.ReadUInt32(); - if (directoryMapHeader.Dummy0 != 0x00000001) - return null; - - directoryMapHeader.Dummy1 = data.ReadUInt32(); - if (directoryMapHeader.Dummy1 != 0x00000000) - return null; - - return directoryMapHeader; - } - - /// - /// Parse a Stream into a Half-Life Game Cache directory map entry - /// - /// Stream to parse - /// Filled Half-Life Game Cache directory map entry on success, null on error - private static DirectoryMapEntry ParseDirectoryMapEntry(Stream data) - { - // TODO: Use marshalling here instead of building - DirectoryMapEntry directoryMapEntry = new DirectoryMapEntry(); - - directoryMapEntry.FirstBlockIndex = data.ReadUInt32(); - - return directoryMapEntry; - } - - /// - /// Parse a Stream into a Half-Life Game Cache checksum header - /// - /// Stream to parse - /// Filled Half-Life Game Cache checksum header on success, null on error - private static ChecksumHeader ParseChecksumHeader(Stream data) - { - // TODO: Use marshalling here instead of building - ChecksumHeader checksumHeader = new ChecksumHeader(); - - checksumHeader.Dummy0 = data.ReadUInt32(); - if (checksumHeader.Dummy0 != 0x00000001) - return null; - - checksumHeader.ChecksumSize = data.ReadUInt32(); - - return checksumHeader; - } - - /// - /// Parse a Stream into a Half-Life Game Cache checksum map header - /// - /// Stream to parse - /// Filled Half-Life Game Cache checksum map header on success, null on error - private static ChecksumMapHeader ParseChecksumMapHeader(Stream data) - { - // TODO: Use marshalling here instead of building - ChecksumMapHeader checksumMapHeader = new ChecksumMapHeader(); - - checksumMapHeader.Dummy0 = data.ReadUInt32(); - if (checksumMapHeader.Dummy0 != 0x14893721) - return null; - - checksumMapHeader.Dummy1 = data.ReadUInt32(); - if (checksumMapHeader.Dummy1 != 0x00000001) - return null; - - checksumMapHeader.ItemCount = data.ReadUInt32(); - checksumMapHeader.ChecksumCount = data.ReadUInt32(); - - return checksumMapHeader; - } - - /// - /// Parse a Stream into a Half-Life Game Cache checksum map entry - /// - /// Stream to parse - /// Filled Half-Life Game Cache checksum map entry on success, null on error - private static ChecksumMapEntry ParseChecksumMapEntry(Stream data) - { - // TODO: Use marshalling here instead of building - ChecksumMapEntry checksumMapEntry = new ChecksumMapEntry(); - - 
checksumMapEntry.ChecksumCount = data.ReadUInt32(); - checksumMapEntry.FirstChecksumIndex = data.ReadUInt32(); - - return checksumMapEntry; - } - - /// - /// Parse a Stream into a Half-Life Game Cache checksum entry - /// - /// Stream to parse - /// Filled Half-Life Game Cache checksum entry on success, null on error - private static ChecksumEntry ParseChecksumEntry(Stream data) - { - // TODO: Use marshalling here instead of building - ChecksumEntry checksumEntry = new ChecksumEntry(); - - checksumEntry.Checksum = data.ReadUInt32(); - - return checksumEntry; - } - - /// - /// Parse a Stream into a Half-Life Game Cache data block header - /// - /// Stream to parse - /// Minor version field from the header - /// Filled Half-Life Game Cache data block header on success, null on error - private static DataBlockHeader ParseDataBlockHeader(Stream data, uint minorVersion) - { - // TODO: Use marshalling here instead of building - DataBlockHeader dataBlockHeader = new DataBlockHeader(); - - // In version 3 the DataBlockHeader is missing the LastVersionPlayed field. - if (minorVersion >= 5) - dataBlockHeader.LastVersionPlayed = data.ReadUInt32(); - - dataBlockHeader.BlockCount = data.ReadUInt32(); - dataBlockHeader.BlockSize = data.ReadUInt32(); - dataBlockHeader.FirstBlockOffset = data.ReadUInt32(); - dataBlockHeader.BlocksUsed = data.ReadUInt32(); - dataBlockHeader.Checksum = data.ReadUInt32(); - - return dataBlockHeader; - } - - #endregion - } -} diff --git a/BinaryObjectScanner.Builders/InstallShieldCabinet.cs b/BinaryObjectScanner.Builders/InstallShieldCabinet.cs deleted file mode 100644 index f86c6201..00000000 --- a/BinaryObjectScanner.Builders/InstallShieldCabinet.cs +++ /dev/null @@ -1,808 +0,0 @@ -using System.Collections.Generic; -using System.IO; -using System.Text; -using SabreTools.IO; -using SabreTools.Models.InstallShieldCabinet; -using static SabreTools.Models.InstallShieldCabinet.Constants; - -namespace BinaryObjectScanner.Builders -{ - // TODO: Add multi-cabinet reading - public class InstallShieldCabinet - { - #region Byte Data - - /// - /// Parse a byte array into a InstallShield Cabinet file - /// - /// Byte array to parse - /// Offset into the byte array - /// Filled cabinet on success, null on error - public static Cabinet ParseCabinet(byte[] data, int offset) - { - // If the data is invalid - if (data == null) - return null; - - // If the offset is out of bounds - if (offset < 0 || offset >= data.Length) - return null; - - // Create a memory stream and parse that - MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset); - return ParseCabinet(dataStream); - } - - #endregion - - #region Stream Data - - /// - /// Parse a Stream into a InstallShield Cabinet file - /// - /// Stream to parse - /// Filled cabinet on success, null on error - public static Cabinet ParseCabinet(Stream data) - { - // If the data is invalid - if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead) - return null; - - // If the offset is out of bounds - if (data.Position < 0 || data.Position >= data.Length) - return null; - - // Cache the current offset - int initialOffset = (int)data.Position; - - // Create a new cabinet to fill - var cabinet = new Cabinet(); - - #region Common Header - - // Try to parse the cabinet header - var commonHeader = ParseCommonHeader(data); - if (commonHeader == null) - return null; - - // Set the cabinet header - cabinet.CommonHeader = commonHeader; - - #endregion - - #region Volume Header - - // Try to parse the volume header - var 
volumeHeader = ParseVolumeHeader(data, GetMajorVersion(commonHeader)); - if (volumeHeader == null) - return null; - - // Set the volume header - cabinet.VolumeHeader = volumeHeader; - - #endregion - - #region Descriptor - - // Get the descriptor offset - uint descriptorOffset = commonHeader.DescriptorOffset; - if (descriptorOffset < 0 || descriptorOffset >= data.Length) - return null; - - // Seek to the descriptor - data.Seek(descriptorOffset, SeekOrigin.Begin); - - // Try to parse the descriptor - var descriptor = ParseDescriptor(data); - if (descriptor == null) - return null; - - // Set the descriptor - cabinet.Descriptor = descriptor; - - #endregion - - #region File Descriptor Offsets - - // Get the file table offset - uint fileTableOffset = commonHeader.DescriptorOffset + descriptor.FileTableOffset; - if (fileTableOffset < 0 || fileTableOffset >= data.Length) - return null; - - // Seek to the file table - data.Seek(fileTableOffset, SeekOrigin.Begin); - - // Get the number of file table items - uint fileTableItems; - if (GetMajorVersion(commonHeader) <= 5) - fileTableItems = descriptor.DirectoryCount + descriptor.FileCount; - else - fileTableItems = descriptor.DirectoryCount; - - // Create and fill the file table - cabinet.FileDescriptorOffsets = new uint[fileTableItems]; - for (int i = 0; i < cabinet.FileDescriptorOffsets.Length; i++) - { - cabinet.FileDescriptorOffsets[i] = data.ReadUInt32(); - } - - #endregion - - #region Directory Descriptors - - // Create and fill the directory descriptors - cabinet.DirectoryNames = new string[descriptor.DirectoryCount]; - for (int i = 0; i < descriptor.DirectoryCount; i++) - { - // Get the directory descriptor offset - uint offset = descriptorOffset - + descriptor.FileTableOffset - + cabinet.FileDescriptorOffsets[i]; - - // If we have an invalid offset - if (offset < 0 || offset >= data.Length) - continue; - - // Seek to the file descriptor offset - data.Seek(offset, SeekOrigin.Begin); - - // Create and add the file descriptor - string directoryName = ParseDirectoryName(data, GetMajorVersion(commonHeader)); - cabinet.DirectoryNames[i] = directoryName; - } - - #endregion - - #region File Descriptors - - // Create and fill the file descriptors - cabinet.FileDescriptors = new FileDescriptor[descriptor.FileCount]; - for (int i = 0; i < descriptor.FileCount; i++) - { - // Get the file descriptor offset - uint offset; - if (GetMajorVersion(commonHeader) <= 5) - { - offset = descriptorOffset - + descriptor.FileTableOffset - + cabinet.FileDescriptorOffsets[descriptor.DirectoryCount + i]; - } - else - { - offset = descriptorOffset - + descriptor.FileTableOffset - + descriptor.FileTableOffset2 - + (uint)(i * 0x57); - } - - // If we have an invalid offset - if (offset < 0 || offset >= data.Length) - continue; - - // Seek to the file descriptor offset - data.Seek(offset, SeekOrigin.Begin); - - // Create and add the file descriptor - FileDescriptor fileDescriptor = ParseFileDescriptor(data, GetMajorVersion(commonHeader), descriptorOffset + descriptor.FileTableOffset); - cabinet.FileDescriptors[i] = fileDescriptor; - } - - #endregion - - #region File Group Offsets - - // Create and fill the file group offsets - cabinet.FileGroupOffsets = new Dictionary(); - for (int i = 0; i < descriptor.FileGroupOffsets.Length; i++) - { - // Get the file group offset - uint offset = descriptor.FileGroupOffsets[i]; - if (offset == 0) - continue; - - // Adjust the file group offset - offset += commonHeader.DescriptorOffset; - if (offset < 0 || offset >= data.Length) - continue; 
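// The file group offsets stored in the descriptor are relative to DescriptorOffset; each
// OffsetList read below acts as the head of a singly linked chain that the inner loop
// follows via NextOffset until a zero value terminates it.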
- - // Seek to the file group offset - data.Seek(offset, SeekOrigin.Begin); - - // Create and add the offset - OffsetList offsetList = ParseOffsetList(data, GetMajorVersion(commonHeader), descriptorOffset); - cabinet.FileGroupOffsets[descriptor.FileGroupOffsets[i]] = offsetList; - - // If we have a nonzero next offset - uint nextOffset = offsetList.NextOffset; - while (nextOffset != 0) - { - // Get the next offset to read - uint internalOffset = nextOffset + commonHeader.DescriptorOffset; - - // Seek to the file group offset - data.Seek(internalOffset, SeekOrigin.Begin); - - // Create and add the offset - offsetList = ParseOffsetList(data, GetMajorVersion(commonHeader), descriptorOffset); - cabinet.FileGroupOffsets[nextOffset] = offsetList; - - // Set the next offset - nextOffset = offsetList.NextOffset; - } - } - - #endregion - - #region File Groups - - // Create the file groups array - cabinet.FileGroups = new FileGroup[cabinet.FileGroupOffsets.Count]; - - // Create and fill the file groups - int fileGroupId = 0; - foreach (var kvp in cabinet.FileGroupOffsets) - { - // Get the offset - OffsetList list = kvp.Value; - if (list == null) - { - fileGroupId++; - continue; - } - - // If we have an invalid offset - if (list.DescriptorOffset <= 0) - { - fileGroupId++; - continue; - } - - /// Seek to the file group - data.Seek(list.DescriptorOffset + descriptorOffset, SeekOrigin.Begin); - - // Try to parse the file group - var fileGroup = ParseFileGroup(data, GetMajorVersion(commonHeader), descriptorOffset); - if (fileGroup == null) - return null; - - // Add the file group - cabinet.FileGroups[fileGroupId++] = fileGroup; - } - - #endregion - - #region Component Offsets - - // Create and fill the component offsets - cabinet.ComponentOffsets = new Dictionary(); - for (int i = 0; i < descriptor.ComponentOffsets.Length; i++) - { - // Get the component offset - uint offset = descriptor.ComponentOffsets[i]; - if (offset == 0) - continue; - - // Adjust the component offset - offset += commonHeader.DescriptorOffset; - if (offset < 0 || offset >= data.Length) - continue; - - // Seek to the component offset - data.Seek(offset, SeekOrigin.Begin); - - // Create and add the offset - OffsetList offsetList = ParseOffsetList(data, GetMajorVersion(commonHeader), descriptorOffset); - cabinet.ComponentOffsets[descriptor.ComponentOffsets[i]] = offsetList; - - // If we have a nonzero next offset - uint nextOffset = offsetList.NextOffset; - while (nextOffset != 0) - { - // Get the next offset to read - uint internalOffset = nextOffset + commonHeader.DescriptorOffset; - - // Seek to the file group offset - data.Seek(internalOffset, SeekOrigin.Begin); - - // Create and add the offset - offsetList = ParseOffsetList(data, GetMajorVersion(commonHeader), descriptorOffset); - cabinet.ComponentOffsets[nextOffset] = offsetList; - - // Set the next offset - nextOffset = offsetList.NextOffset; - } - } - - #endregion - - #region Components - - // Create the components array - cabinet.Components = new Component[cabinet.ComponentOffsets.Count]; - - // Create and fill the components - int componentId = 0; - foreach (KeyValuePair kvp in cabinet.ComponentOffsets) - { - // Get the offset - OffsetList list = kvp.Value; - if (list == null) - { - componentId++; - continue; - } - - // If we have an invalid offset - if (list.DescriptorOffset <= 0) - { - componentId++; - continue; - } - - // Seek to the component - data.Seek(list.DescriptorOffset + descriptorOffset, SeekOrigin.Begin); - - // Try to parse the component - var component = 
ParseComponent(data, GetMajorVersion(commonHeader), descriptorOffset); - if (component == null) - return null; - - // Add the component - cabinet.Components[componentId++] = component; - } - - #endregion - - // TODO: Parse setup types - - return cabinet; - } - - /// - /// Parse a Stream into a common header - /// - /// Stream to parse - /// Filled common header on success, null on error - private static CommonHeader ParseCommonHeader(Stream data) - { - CommonHeader commonHeader = new CommonHeader(); - - byte[] signature = data.ReadBytes(4); - commonHeader.Signature = Encoding.ASCII.GetString(signature); - if (commonHeader.Signature != SignatureString) - return null; - - commonHeader.Version = data.ReadUInt32(); - commonHeader.VolumeInfo = data.ReadUInt32(); - commonHeader.DescriptorOffset = data.ReadUInt32(); - commonHeader.DescriptorSize = data.ReadUInt32(); - - return commonHeader; - } - - /// - /// Parse a Stream into a volume header - /// - /// Stream to parse - /// Major version of the cabinet - /// Filled volume header on success, null on error - private static VolumeHeader ParseVolumeHeader(Stream data, int majorVersion) - { - VolumeHeader volumeHeader = new VolumeHeader(); - - // Read the descriptor based on version - if (majorVersion <= 5) - { - volumeHeader.DataOffset = data.ReadUInt32(); - _ = data.ReadBytes(0x04); // Skip 0x04 bytes, unknown data? - volumeHeader.FirstFileIndex = data.ReadUInt32(); - volumeHeader.LastFileIndex = data.ReadUInt32(); - volumeHeader.FirstFileOffset = data.ReadUInt32(); - volumeHeader.FirstFileSizeExpanded = data.ReadUInt32(); - volumeHeader.FirstFileSizeCompressed = data.ReadUInt32(); - volumeHeader.LastFileOffset = data.ReadUInt32(); - volumeHeader.LastFileSizeExpanded = data.ReadUInt32(); - volumeHeader.LastFileSizeCompressed = data.ReadUInt32(); - } - else - { - // TODO: Should standard and high values be combined? 
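// Editorial note (assumption, not from the original source): if the standard
// and high halves were ever combined, the natural reading would be a 64-bit
// value with the *High field as the upper 32 bits, e.g.:
//
//     ulong dataOffset64 = ((ulong)volumeHeader.DataOffsetHigh << 32)
//                        | volumeHeader.DataOffset;
//
// The original code keeps the halves separate, as read below.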
- volumeHeader.DataOffset = data.ReadUInt32(); - volumeHeader.DataOffsetHigh = data.ReadUInt32(); - volumeHeader.FirstFileIndex = data.ReadUInt32(); - volumeHeader.LastFileIndex = data.ReadUInt32(); - volumeHeader.FirstFileOffset = data.ReadUInt32(); - volumeHeader.FirstFileOffsetHigh = data.ReadUInt32(); - volumeHeader.FirstFileSizeExpanded = data.ReadUInt32(); - volumeHeader.FirstFileSizeExpandedHigh = data.ReadUInt32(); - volumeHeader.FirstFileSizeCompressed = data.ReadUInt32(); - volumeHeader.FirstFileSizeCompressedHigh = data.ReadUInt32(); - volumeHeader.LastFileOffset = data.ReadUInt32(); - volumeHeader.LastFileOffsetHigh = data.ReadUInt32(); - volumeHeader.LastFileSizeExpanded = data.ReadUInt32(); - volumeHeader.LastFileSizeExpandedHigh = data.ReadUInt32(); - volumeHeader.LastFileSizeCompressed = data.ReadUInt32(); - volumeHeader.LastFileSizeCompressedHigh = data.ReadUInt32(); - } - - return volumeHeader; - } - - /// - /// Parse a Stream into a descriptor - /// - /// Stream to parse - /// Filled descriptor on success, null on error - private static Descriptor ParseDescriptor(Stream data) - { - Descriptor descriptor = new Descriptor(); - - descriptor.StringsOffset = data.ReadUInt32(); - descriptor.Reserved0 = data.ReadBytes(4); - descriptor.ComponentListOffset = data.ReadUInt32(); - descriptor.FileTableOffset = data.ReadUInt32(); - descriptor.Reserved1 = data.ReadBytes(4); - descriptor.FileTableSize = data.ReadUInt32(); - descriptor.FileTableSize2 = data.ReadUInt32(); - descriptor.DirectoryCount = data.ReadUInt16(); - descriptor.Reserved2 = data.ReadBytes(4); - descriptor.Reserved3 = data.ReadBytes(2); - descriptor.Reserved4 = data.ReadBytes(4); - descriptor.FileCount = data.ReadUInt32(); - descriptor.FileTableOffset2 = data.ReadUInt32(); - descriptor.ComponentTableInfoCount = data.ReadUInt16(); - descriptor.ComponentTableOffset = data.ReadUInt32(); - descriptor.Reserved5 = data.ReadBytes(4); - descriptor.Reserved6 = data.ReadBytes(4); - - descriptor.FileGroupOffsets = new uint[MAX_FILE_GROUP_COUNT]; - for (int i = 0; i < descriptor.FileGroupOffsets.Length; i++) - { - descriptor.FileGroupOffsets[i] = data.ReadUInt32(); - } - - descriptor.ComponentOffsets = new uint[MAX_COMPONENT_COUNT]; - for (int i = 0; i < descriptor.ComponentOffsets.Length; i++) - { - descriptor.ComponentOffsets[i] = data.ReadUInt32(); - } - - descriptor.SetupTypesOffset = data.ReadUInt32(); - descriptor.SetupTableOffset = data.ReadUInt32(); - descriptor.Reserved7 = data.ReadBytes(4); - descriptor.Reserved8 = data.ReadBytes(4); - - return descriptor; - } - - /// - /// Parse a Stream into an offset list - /// - /// Stream to parse - /// Major version of the cabinet - /// Offset of the cabinet descriptor - /// Filled offset list on success, null on error - private static OffsetList ParseOffsetList(Stream data, int majorVersion, uint descriptorOffset) - { - OffsetList offsetList = new OffsetList(); - - offsetList.NameOffset = data.ReadUInt32(); - offsetList.DescriptorOffset = data.ReadUInt32(); - offsetList.NextOffset = data.ReadUInt32(); - - // Cache the current offset - long currentOffset = data.Position; - - // Seek to the name offset - data.Seek(offsetList.NameOffset + descriptorOffset, SeekOrigin.Begin); - - // Read the string - if (majorVersion >= 17) - offsetList.Name = data.ReadString(Encoding.Unicode); - else - offsetList.Name = data.ReadString(Encoding.ASCII); - - // Seek back to the correct offset - data.Seek(currentOffset, SeekOrigin.Begin); - - return offsetList; - } - - /// - /// Parse a Stream into a 
file group - /// - /// Stream to parse - /// Major version of the cabinet - /// Offset of the cabinet descriptor - /// Filled file group on success, null on error - private static FileGroup ParseFileGroup(Stream data, int majorVersion, uint descriptorOffset) - { - FileGroup fileGroup = new FileGroup(); - - fileGroup.NameOffset = data.ReadUInt32(); - - fileGroup.ExpandedSize = data.ReadUInt32(); - fileGroup.Reserved0 = data.ReadBytes(4); - fileGroup.CompressedSize = data.ReadUInt32(); - fileGroup.Reserved1 = data.ReadBytes(4); - fileGroup.Reserved2 = data.ReadBytes(2); - fileGroup.Attribute1 = data.ReadUInt16(); - fileGroup.Attribute2 = data.ReadUInt16(); - - // TODO: Figure out what data lives in this area for V5 and below - if (majorVersion <= 5) - data.Seek(0x36, SeekOrigin.Current); - - fileGroup.FirstFile = data.ReadUInt32(); - fileGroup.LastFile = data.ReadUInt32(); - fileGroup.UnknownOffset = data.ReadUInt32(); - fileGroup.Var4Offset = data.ReadUInt32(); - fileGroup.Var1Offset = data.ReadUInt32(); - fileGroup.HTTPLocationOffset = data.ReadUInt32(); - fileGroup.FTPLocationOffset = data.ReadUInt32(); - fileGroup.MiscOffset = data.ReadUInt32(); - fileGroup.Var2Offset = data.ReadUInt32(); - fileGroup.TargetDirectoryOffset = data.ReadUInt32(); - fileGroup.Reserved3 = data.ReadBytes(2); - fileGroup.Reserved4 = data.ReadBytes(2); - fileGroup.Reserved5 = data.ReadBytes(2); - fileGroup.Reserved6 = data.ReadBytes(2); - fileGroup.Reserved7 = data.ReadBytes(2); - - // Cache the current position - long currentPosition = data.Position; - - // Read the name, if possible - if (fileGroup.NameOffset != 0) - { - // Seek to the name - data.Seek(fileGroup.NameOffset + descriptorOffset, SeekOrigin.Begin); - - // Read the string - if (majorVersion >= 17) - fileGroup.Name = data.ReadString(Encoding.Unicode); - else - fileGroup.Name = data.ReadString(Encoding.ASCII); - } - - // Seek back to the correct offset - data.Seek(currentPosition, SeekOrigin.Begin); - - return fileGroup; - } - - /// - /// Parse a Stream into a component - /// - /// Stream to parse - /// Major version of the cabinet - /// Offset of the cabinet descriptor - /// Filled component on success, null on error - private static Component ParseComponent(Stream data, int majorVersion, uint descriptorOffset) - { - Component component = new Component(); - - component.IdentifierOffset = data.ReadUInt32(); - component.DescriptorOffset = data.ReadUInt32(); - component.DisplayNameOffset = data.ReadUInt32(); - component.Reserved0 = data.ReadBytes(2); - component.ReservedOffset0 = data.ReadUInt32(); - component.ReservedOffset1 = data.ReadUInt32(); - component.ComponentIndex = data.ReadUInt16(); - component.NameOffset = data.ReadUInt32(); - component.ReservedOffset2 = data.ReadUInt32(); - component.ReservedOffset3 = data.ReadUInt32(); - component.ReservedOffset4 = data.ReadUInt32(); - component.Reserved1 = data.ReadBytes(32); - component.CLSIDOffset = data.ReadUInt32(); - component.Reserved2 = data.ReadBytes(28); - component.Reserved3 = data.ReadBytes(majorVersion <= 5 ? 
2 : 1); - component.DependsCount = data.ReadUInt16(); - component.DependsOffset = data.ReadUInt32(); - component.FileGroupCount = data.ReadUInt16(); - component.FileGroupNamesOffset = data.ReadUInt32(); - component.X3Count = data.ReadUInt16(); - component.X3Offset = data.ReadUInt32(); - component.SubComponentsCount = data.ReadUInt16(); - component.SubComponentsOffset = data.ReadUInt32(); - component.NextComponentOffset = data.ReadUInt32(); - component.ReservedOffset5 = data.ReadUInt32(); - component.ReservedOffset6 = data.ReadUInt32(); - component.ReservedOffset7 = data.ReadUInt32(); - component.ReservedOffset8 = data.ReadUInt32(); - - // Cache the current position - long currentPosition = data.Position; - - // Read the identifier, if possible - if (component.IdentifierOffset != 0) - { - // Seek to the identifier - data.Seek(component.IdentifierOffset + descriptorOffset, SeekOrigin.Begin); - - // Read the string - if (majorVersion >= 17) - component.Identifier = data.ReadString(Encoding.Unicode); - else - component.Identifier = data.ReadString(Encoding.ASCII); - } - - // Read the display name, if possible - if (component.DisplayNameOffset != 0) - { - // Seek to the name - data.Seek(component.DisplayNameOffset + descriptorOffset, SeekOrigin.Begin); - - // Read the string - if (majorVersion >= 17) - component.DisplayName = data.ReadString(Encoding.Unicode); - else - component.DisplayName = data.ReadString(Encoding.ASCII); - } - - // Read the name, if possible - if (component.NameOffset != 0) - { - // Seek to the name - data.Seek(component.NameOffset + descriptorOffset, SeekOrigin.Begin); - - // Read the string - if (majorVersion >= 17) - component.Name = data.ReadString(Encoding.Unicode); - else - component.Name = data.ReadString(Encoding.ASCII); - } - - // Read the CLSID, if possible - if (component.CLSIDOffset != 0) - { - // Seek to the CLSID - data.Seek(component.CLSIDOffset + descriptorOffset, SeekOrigin.Begin); - - // Read the GUID - component.CLSID = data.ReadGuid(); - } - - // Read the file group names, if possible - if (component.FileGroupCount != 0 && component.FileGroupNamesOffset != 0) - { - // Seek to the file group table offset - data.Seek(component.FileGroupNamesOffset + descriptorOffset, SeekOrigin.Begin); - - // Read the file group names table - component.FileGroupNames = new string[component.FileGroupCount]; - for (int j = 0; j < component.FileGroupCount; j++) - { - // Get the name offset - uint nameOffset = data.ReadUInt32(); - - // Cache the current offset - long preNameOffset = data.Position; - - // Seek to the name offset - data.Seek(nameOffset + descriptorOffset, SeekOrigin.Begin); - - if (majorVersion >= 17) - component.FileGroupNames[j] = data.ReadString(Encoding.Unicode); - else - component.FileGroupNames[j] = data.ReadString(Encoding.ASCII); - - // Seek back to the original position - data.Seek(preNameOffset, SeekOrigin.Begin); - } - } - - // Seek back to the correct offset - data.Seek(currentPosition, SeekOrigin.Begin); - - return component; - } - - /// - /// Parse a Stream into a directory name - /// - /// Stream to parse - /// Major version of the cabinet - /// Filled directory name on success, null on error - private static string ParseDirectoryName(Stream data, int majorVersion) - { - // Read the string - if (majorVersion >= 17) - return data.ReadString(Encoding.Unicode); - else - return data.ReadString(Encoding.ASCII); - } - - /// - /// Parse a Stream into a file descriptor - /// - /// Stream to parse - /// Major version of the cabinet - /// Offset of the 
cabinet descriptor - /// Filled file descriptor on success, null on error - private static FileDescriptor ParseFileDescriptor(Stream data, int majorVersion, uint descriptorOffset) - { - FileDescriptor fileDescriptor = new FileDescriptor(); - - // Read the descriptor based on version - if (majorVersion <= 5) - { - fileDescriptor.Volume = 0xFFFF; // Set by the header index - fileDescriptor.NameOffset = data.ReadUInt32(); - fileDescriptor.DirectoryIndex = data.ReadUInt32(); - fileDescriptor.Flags = (FileFlags)data.ReadUInt16(); - fileDescriptor.ExpandedSize = data.ReadUInt32(); - fileDescriptor.CompressedSize = data.ReadUInt32(); - _ = data.ReadBytes(0x14); // Skip 0x14 bytes, unknown data? - fileDescriptor.DataOffset = data.ReadUInt32(); - - if (majorVersion == 5) - fileDescriptor.MD5 = data.ReadBytes(0x10); - } - else - { - fileDescriptor.Flags = (FileFlags)data.ReadUInt16(); - fileDescriptor.ExpandedSize = data.ReadUInt64(); - fileDescriptor.CompressedSize = data.ReadUInt64(); - fileDescriptor.DataOffset = data.ReadUInt64(); - fileDescriptor.MD5 = data.ReadBytes(0x10); - _ = data.ReadBytes(0x10); // Skip 0x10 bytes, unknown data? - fileDescriptor.NameOffset = data.ReadUInt32(); - fileDescriptor.DirectoryIndex = data.ReadUInt16(); - _ = data.ReadBytes(0x0C); // Skip 0x0C bytes, unknown data? - fileDescriptor.LinkPrevious = data.ReadUInt32(); - fileDescriptor.LinkNext = data.ReadUInt32(); - fileDescriptor.LinkFlags = (LinkFlags)data.ReadByteValue(); - fileDescriptor.Volume = data.ReadUInt16(); - } - - // Cache the current position - long currentPosition = data.Position; - - // Read the name, if possible - if (fileDescriptor.NameOffset != 0) - { - // Seek to the name - data.Seek(fileDescriptor.NameOffset + descriptorOffset, SeekOrigin.Begin); - - // Read the string - if (majorVersion >= 17) - fileDescriptor.Name = data.ReadString(Encoding.Unicode); - else - fileDescriptor.Name = data.ReadString(Encoding.ASCII); - } - - // Seek back to the correct offset - data.Seek(currentPosition, SeekOrigin.Begin); - - return fileDescriptor; - } - - #endregion - - #region Helpers - - /// - /// Get the major version of the cabinet - /// - /// This should live in the wrapper but is needed during parsing - private static int GetMajorVersion(CommonHeader commonHeader) - { - uint majorVersion = commonHeader.Version; - if (majorVersion >> 24 == 1) - { - majorVersion = (majorVersion >> 12) & 0x0F; - } - else if (majorVersion >> 24 == 2 || majorVersion >> 24 == 4) - { - majorVersion = majorVersion & 0xFFFF; - if (majorVersion != 0) - majorVersion /= 100; - } - - return (int)majorVersion; - } - - #endregion - } -} diff --git a/BinaryObjectScanner.Builders/LinearExecutable.cs b/BinaryObjectScanner.Builders/LinearExecutable.cs deleted file mode 100644 index 1d2f9f33..00000000 --- a/BinaryObjectScanner.Builders/LinearExecutable.cs +++ /dev/null @@ -1,943 +0,0 @@ -using System.Collections.Generic; -using System.IO; -using System.Text; -using SabreTools.IO; -using SabreTools.Models.LinearExecutable; -using static SabreTools.Models.LinearExecutable.Constants; - -namespace BinaryObjectScanner.Builders -{ - public static class LinearExecutable - { - #region Byte Data - - /// - /// Parse a byte array into a Linear Executable - /// - /// Byte array to parse - /// Offset into the byte array - /// Filled executable on success, null on error - public static Executable ParseExecutable(byte[] data, int offset) - { - // If the data is invalid - if (data == null) - return null; - - // If the offset is out of bounds - if (offset < 0 
|| offset >= data.Length) - return null; - - // Create a memory stream and parse that - MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset); - return ParseExecutable(dataStream); - } - - #endregion - - #region Stream Data - - /// - /// Parse a Stream into a Linear Executable - /// - /// Stream to parse - /// Filled executable on success, null on error - public static Executable ParseExecutable(Stream data) - { - // If the data is invalid - if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead) - return null; - - // If the offset is out of bounds - if (data.Position < 0 || data.Position >= data.Length) - return null; - - // Cache the current offset - int initialOffset = (int)data.Position; - - // Create a new executable to fill - var executable = new Executable(); - - #region MS-DOS Stub - - // Parse the MS-DOS stub - var stub = MSDOS.ParseExecutable(data); - if (stub?.Header == null || stub.Header.NewExeHeaderAddr == 0) - return null; - - // Set the MS-DOS stub - executable.Stub = stub; - - #endregion - - #region Information Block - - // Try to parse the executable header - data.Seek(initialOffset + stub.Header.NewExeHeaderAddr, SeekOrigin.Begin); - var informationBlock = ParseInformationBlock(data); - if (informationBlock == null) - return null; - - // Set the executable header - executable.InformationBlock = informationBlock; - - #endregion - - #region Object Table - - // Get the object table offset - long offset = informationBlock.ObjectTableOffset + stub.Header.NewExeHeaderAddr; - if (offset > stub.Header.NewExeHeaderAddr && offset < data.Length) - { - // Seek to the object table - data.Seek(offset, SeekOrigin.Begin); - - // Create the object table - executable.ObjectTable = new ObjectTableEntry[informationBlock.ObjectTableCount]; - - // Try to parse the object table - for (int i = 0; i < executable.ObjectTable.Length; i++) - { - var entry = ParseObjectTableEntry(data); - if (entry == null) - return null; - - executable.ObjectTable[i] = entry; - } - } - - #endregion - - #region Object Page Map - - // Get the object page map offset - offset = informationBlock.ObjectPageMapOffset + stub.Header.NewExeHeaderAddr; - if (offset > stub.Header.NewExeHeaderAddr && offset < data.Length) - { - // Seek to the object page map - data.Seek(offset, SeekOrigin.Begin); - - // Create the object page map - executable.ObjectPageMap = new ObjectPageMapEntry[informationBlock.ObjectTableCount]; - - // Try to parse the object page map - for (int i = 0; i < executable.ObjectPageMap.Length; i++) - { - var entry = ParseObjectPageMapEntry(data); - if (entry == null) - return null; - - executable.ObjectPageMap[i] = entry; - } - } - - #endregion - - #region Object Iterate Data Map - - offset = informationBlock.ObjectIterateDataMapOffset + stub.Header.NewExeHeaderAddr; - if (offset > stub.Header.NewExeHeaderAddr && offset < data.Length) - { - // Seek to the object page map - data.Seek(offset, SeekOrigin.Begin); - - // TODO: Implement when model found - // No model has been found in the documentation about what - // each of the entries looks like for this map. 
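// Editorial sketch (not part of the original file): every optional table in
// this method is guarded by the same check -- the computed absolute offset
// must fall after the start of the LE/LX header and inside the stream. A
// zero table offset resolves to the header address itself and is skipped.
private static bool IsValidTableOffset(long offset, long newExeHeaderAddr, long streamLength)
{
    return offset > newExeHeaderAddr && offset < streamLength;
}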
- } - - #endregion - - #region Resource Table - - // Get the resource table offset - offset = informationBlock.ResourceTableOffset + stub.Header.NewExeHeaderAddr; - if (offset > stub.Header.NewExeHeaderAddr && offset < data.Length) - { - // Seek to the resource table - data.Seek(offset, SeekOrigin.Begin); - - // Create the resource table - executable.ResourceTable = new ResourceTableEntry[informationBlock.ResourceTableCount]; - - // Try to parse the resource table - for (int i = 0; i < executable.ResourceTable.Length; i++) - { - var entry = ParseResourceTableEntry(data); - if (entry == null) - return null; - - executable.ResourceTable[i] = entry; - } - } - - #endregion - - #region Resident Names Table - - // Get the resident names table offset - offset = informationBlock.ResidentNamesTableOffset + stub.Header.NewExeHeaderAddr; - if (offset > stub.Header.NewExeHeaderAddr && offset < data.Length) - { - // Seek to the resident names table - data.Seek(offset, SeekOrigin.Begin); - - // Create the resident names table - var residentNamesTable = new List(); - - // Try to parse the resident names table - while (true) - { - var entry = ParseResidentNamesTableEntry(data); - residentNamesTable.Add(entry); - - // If we have a 0-length entry - if (entry.Length == 0) - break; - } - - // Assign the resident names table - executable.ResidentNamesTable = residentNamesTable.ToArray(); - } - - #endregion - - #region Entry Table - - // Get the entry table offset - offset = informationBlock.EntryTableOffset + stub.Header.NewExeHeaderAddr; - if (offset > stub.Header.NewExeHeaderAddr && offset < data.Length) - { - // Seek to the entry table - data.Seek(offset, SeekOrigin.Begin); - - // Create the entry table - var entryTable = new List(); - - // Try to parse the entry table - while (true) - { - var bundle = ParseEntryTableBundle(data); - entryTable.Add(bundle); - - // If we have a 0-length entry - if (bundle.Entries == 0) - break; - } - - // Assign the entry table - executable.EntryTable = entryTable.ToArray(); - } - - #endregion - - #region Module Format Directives Table - - // Get the module format directives table offset - offset = informationBlock.ModuleDirectivesTableOffset + stub.Header.NewExeHeaderAddr; - if (offset > stub.Header.NewExeHeaderAddr && offset < data.Length) - { - // Seek to the module format directives table - data.Seek(offset, SeekOrigin.Begin); - - // Create the module format directives table - executable.ModuleFormatDirectivesTable = new ModuleFormatDirectivesTableEntry[informationBlock.ModuleDirectivesCount]; - - // Try to parse the module format directives table - for (int i = 0; i < executable.ModuleFormatDirectivesTable.Length; i++) - { - var entry = ParseModuleFormatDirectivesTableEntry(data); - if (entry == null) - return null; - - executable.ModuleFormatDirectivesTable[i] = entry; - } - } - - #endregion - - #region Verify Record Directive Table - - // TODO: Figure out where the offset to this table is stored - // The documentation suggests it's either part of or immediately following - // the Module Format Directives Table - - #endregion - - #region Fix-up Page Table - - // Get the fix-up page table offset - offset = informationBlock.FixupPageTableOffset + stub.Header.NewExeHeaderAddr; - if (offset > stub.Header.NewExeHeaderAddr && offset < data.Length) - { - // Seek to the fix-up page table - data.Seek(offset, SeekOrigin.Begin); - - // Create the fix-up page table - executable.FixupPageTable = new FixupPageTableEntry[executable.ObjectPageMap.Length + 1]; - - // Try to parse the 
fix-up page table - for (int i = 0; i < executable.FixupPageTable.Length; i++) - { - var entry = ParseFixupPageTableEntry(data); - if (entry == null) - return null; - - executable.FixupPageTable[i] = entry; - } - } - - #endregion - - #region Fix-up Record Table - - // Get the fix-up record table offset - offset = informationBlock.FixupRecordTableOffset + stub.Header.NewExeHeaderAddr; - if (offset > stub.Header.NewExeHeaderAddr && offset < data.Length) - { - // Seek to the fix-up record table - data.Seek(offset, SeekOrigin.Begin); - - // Create the fix-up record table - executable.FixupRecordTable = new FixupRecordTableEntry[executable.ObjectPageMap.Length + 1]; - - // Try to parse the fix-up record table - for (int i = 0; i < executable.FixupRecordTable.Length; i++) - { - var entry = ParseFixupRecordTableEntry(data); - if (entry == null) - return null; - - executable.FixupRecordTable[i] = entry; - } - } - - #endregion - - #region Imported Module Name Table - - // Get the imported module name table offset - offset = informationBlock.ImportedModulesNameTableOffset + stub.Header.NewExeHeaderAddr; - if (offset > stub.Header.NewExeHeaderAddr && offset < data.Length) - { - // Seek to the imported module name table - data.Seek(offset, SeekOrigin.Begin); - - // Create the imported module name table - executable.ImportModuleNameTable = new ImportModuleNameTableEntry[informationBlock.ImportedModulesCount]; - - // Try to parse the imported module name table - for (int i = 0; i < executable.ImportModuleNameTable.Length; i++) - { - var entry = ParseImportModuleNameTableEntry(data); - if (entry == null) - return null; - - executable.ImportModuleNameTable[i] = entry; - } - } - - #endregion - - #region Imported Module Procedure Name Table - - // Get the imported module procedure name table offset - offset = informationBlock.ImportProcedureNameTableOffset + stub.Header.NewExeHeaderAddr; - if (offset > stub.Header.NewExeHeaderAddr && offset < data.Length) - { - // Seek to the imported module procedure name table - data.Seek(offset, SeekOrigin.Begin); - - // Get the size of the imported module procedure name table - long tableSize = informationBlock.FixupPageTableOffset - + informationBlock.FixupSectionSize - - informationBlock.ImportProcedureNameTableOffset; - - // Create the imported module procedure name table - var importModuleProcedureNameTable = new List(); - - // Try to parse the imported module procedure name table - while (data.Position < offset + tableSize) - { - var entry = ParseImportModuleProcedureNameTableEntry(data); - if (entry == null) - return null; - - importModuleProcedureNameTable.Add(entry); - } - - // Assign the resident names table - executable.ImportModuleProcedureNameTable = importModuleProcedureNameTable.ToArray(); - } - - #endregion - - #region Per-Page Checksum Table - - // Get the per-page checksum table offset - offset = informationBlock.PerPageChecksumTableOffset + stub.Header.NewExeHeaderAddr; - if (offset > stub.Header.NewExeHeaderAddr && offset < data.Length) - { - // Seek to the per-page checksum name table - data.Seek(offset, SeekOrigin.Begin); - - // Create the per-page checksum name table - executable.PerPageChecksumTable = new PerPageChecksumTableEntry[informationBlock.ModuleNumberPages]; - - // Try to parse the per-page checksum name table - for (int i = 0; i < executable.PerPageChecksumTable.Length; i++) - { - var entry = ParsePerPageChecksumTableEntry(data); - if (entry == null) - return null; - - executable.PerPageChecksumTable[i] = entry; - } - } - - #endregion - 
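// Editorial note (sketch): the import procedure name table size computed above
// relies on the fix-up section layout -- the section starts at
// FixupPageTableOffset and spans FixupSectionSize bytes, with the import
// procedure name table documented as the last table inside it, so:
//
//     end of fix-up section = FixupPageTableOffset + FixupSectionSize
//     table size            = end of fix-up section - ImportProcedureNameTableOffset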
- #region Non-Resident Names Table - - // Get the non-resident names table offset - offset = informationBlock.NonResidentNamesTableOffset + stub.Header.NewExeHeaderAddr; - if (offset > stub.Header.NewExeHeaderAddr && offset < data.Length) - { - // Seek to the non-resident names table - data.Seek(offset, SeekOrigin.Begin); - - // Create the non-resident names table - var nonResidentNamesTable = new List(); - - // Try to parse the non-resident names table - while (true) - { - var entry = ParseNonResidentNameTableEntry(data); - nonResidentNamesTable.Add(entry); - - // If we have a 0-length entry - if (entry.Length == 0) - break; - } - - // Assign the non-resident names table - executable.NonResidentNamesTable = nonResidentNamesTable.ToArray(); - } - - #endregion - - #region Debug Information - - // Get the debug information offset - offset = informationBlock.DebugInformationOffset + stub.Header.NewExeHeaderAddr; - if (offset > stub.Header.NewExeHeaderAddr && offset < data.Length) - { - // Seek to the debug information - data.Seek(offset, SeekOrigin.Begin); - - // Try to parse the debug information - var debugInformation = ParseDebugInformation(data, informationBlock.DebugInformationLength); - if (debugInformation == null) - return null; - - // Set the debug information - executable.DebugInformation = debugInformation; - } - - #endregion - - return executable; - } - - /// - /// Parse a Stream into an information block - /// - /// Stream to parse - /// Filled information block on success, null on error - private static InformationBlock ParseInformationBlock(Stream data) - { - // TODO: Use marshalling here instead of building - var informationBlock = new InformationBlock(); - - byte[] magic = data.ReadBytes(2); - informationBlock.Signature = Encoding.ASCII.GetString(magic); - if (informationBlock.Signature != LESignatureString && informationBlock.Signature != LXSignatureString) - return null; - - informationBlock.ByteOrder = (ByteOrder)data.ReadByteValue(); - informationBlock.WordOrder = (WordOrder)data.ReadByteValue(); - informationBlock.ExecutableFormatLevel = data.ReadUInt32(); - informationBlock.CPUType = (CPUType)data.ReadUInt16(); - informationBlock.ModuleOS = (OperatingSystem)data.ReadUInt16(); - informationBlock.ModuleVersion = data.ReadUInt32(); - informationBlock.ModuleTypeFlags = (ModuleFlags)data.ReadUInt32(); - informationBlock.ModuleNumberPages = data.ReadUInt32(); - informationBlock.InitialObjectCS = data.ReadUInt32(); - informationBlock.InitialEIP = data.ReadUInt32(); - informationBlock.InitialObjectSS = data.ReadUInt32(); - informationBlock.InitialESP = data.ReadUInt32(); - informationBlock.MemoryPageSize = data.ReadUInt32(); - informationBlock.BytesOnLastPage = data.ReadUInt32(); - informationBlock.FixupSectionSize = data.ReadUInt32(); - informationBlock.FixupSectionChecksum = data.ReadUInt32(); - informationBlock.LoaderSectionSize = data.ReadUInt32(); - informationBlock.LoaderSectionChecksum = data.ReadUInt32(); - informationBlock.ObjectTableOffset = data.ReadUInt32(); - informationBlock.ObjectTableCount = data.ReadUInt32(); - informationBlock.ObjectPageMapOffset = data.ReadUInt32(); - informationBlock.ObjectIterateDataMapOffset = data.ReadUInt32(); - informationBlock.ResourceTableOffset = data.ReadUInt32(); - informationBlock.ResourceTableCount = data.ReadUInt32(); - informationBlock.ResidentNamesTableOffset = data.ReadUInt32(); - informationBlock.EntryTableOffset = data.ReadUInt32(); - informationBlock.ModuleDirectivesTableOffset = data.ReadUInt32(); - 
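// Editorial note (sketch): the table offsets read in this block are treated by
// the calling code above as relative to the start of the LE/LX header, which
// is why each seek adds stub.Header.NewExeHeaderAddr, e.g.:
//
//     long absolute = informationBlock.ObjectTableOffset + stub.Header.NewExeHeaderAddr;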
informationBlock.ModuleDirectivesCount = data.ReadUInt32(); - informationBlock.FixupPageTableOffset = data.ReadUInt32(); - informationBlock.FixupRecordTableOffset = data.ReadUInt32(); - informationBlock.ImportedModulesNameTableOffset = data.ReadUInt32(); - informationBlock.ImportedModulesCount = data.ReadUInt32(); - informationBlock.ImportProcedureNameTableOffset = data.ReadUInt32(); - informationBlock.PerPageChecksumTableOffset = data.ReadUInt32(); - informationBlock.DataPagesOffset = data.ReadUInt32(); - informationBlock.PreloadPageCount = data.ReadUInt32(); - informationBlock.NonResidentNamesTableOffset = data.ReadUInt32(); - informationBlock.NonResidentNamesTableLength = data.ReadUInt32(); - informationBlock.NonResidentNamesTableChecksum = data.ReadUInt32(); - informationBlock.AutomaticDataObject = data.ReadUInt32(); - informationBlock.DebugInformationOffset = data.ReadUInt32(); - informationBlock.DebugInformationLength = data.ReadUInt32(); - informationBlock.PreloadInstancePagesNumber = data.ReadUInt32(); - informationBlock.DemandInstancePagesNumber = data.ReadUInt32(); - informationBlock.ExtraHeapAllocation = data.ReadUInt32(); - - return informationBlock; - } - - /// - /// Parse a Stream into an object table entry - /// - /// Stream to parse - /// Filled object table entry on success, null on error - private static ObjectTableEntry ParseObjectTableEntry(Stream data) - { - // TODO: Use marshalling here instead of building - var entry = new ObjectTableEntry(); - - entry.VirtualSegmentSize = data.ReadUInt32(); - entry.RelocationBaseAddress = data.ReadUInt32(); - entry.ObjectFlags = (ObjectFlags)data.ReadUInt16(); - entry.PageTableIndex = data.ReadUInt32(); - entry.PageTableEntries = data.ReadUInt32(); - entry.Reserved = data.ReadUInt32(); - - return entry; - } - - /// - /// Parse a Stream into an object page map entry - /// - /// Stream to parse - /// Filled object page map entry on success, null on error - private static ObjectPageMapEntry ParseObjectPageMapEntry(Stream data) - { - // TODO: Use marshalling here instead of building - var entry = new ObjectPageMapEntry(); - - entry.PageDataOffset = data.ReadUInt32(); - entry.DataSize = data.ReadUInt16(); - entry.Flags = (ObjectPageFlags)data.ReadUInt16(); - - return entry; - } - - /// - /// Parse a Stream into a resource table entry - /// - /// Stream to parse - /// Filled resource table entry on success, null on error - private static ResourceTableEntry ParseResourceTableEntry(Stream data) - { - // TODO: Use marshalling here instead of building - var entry = new ResourceTableEntry(); - - entry.TypeID = (ResourceTableEntryType)data.ReadUInt32(); - entry.NameID = data.ReadUInt16(); - entry.ResourceSize = data.ReadUInt32(); - entry.ObjectNumber = data.ReadUInt16(); - entry.Offset = data.ReadUInt32(); - - return entry; - } - - /// - /// Parse a Stream into a resident names table entry - /// - /// Stream to parse - /// Filled resident names table entry on success, null on error - private static ResidentNamesTableEntry ParseResidentNamesTableEntry(Stream data) - { - // TODO: Use marshalling here instead of building - var entry = new ResidentNamesTableEntry(); - - entry.Length = data.ReadByteValue(); - if (entry.Length > 0) - { - byte[] name = data.ReadBytes(entry.Length); - entry.Name = Encoding.ASCII.GetString(name).TrimEnd('\0'); - } - entry.OrdinalNumber = data.ReadUInt16(); - - return entry; - } - - /// - /// Parse a Stream into an entry table bundle - /// - /// Stream to parse - /// Filled entry table bundle on success, null on error - 
private static EntryTableBundle ParseEntryTableBundle(Stream data) - { - // TODO: Use marshalling here instead of building - var bundle = new EntryTableBundle(); - - bundle.Entries = data.ReadByteValue(); - if (bundle.Entries == 0) - return bundle; - - bundle.BundleType = (BundleType)data.ReadByteValue(); - bundle.TableEntries = new EntryTableEntry[bundle.Entries]; - for (int i = 0; i < bundle.Entries; i++) - { - var entry = new EntryTableEntry(); - - switch (bundle.BundleType & ~BundleType.ParameterTypingInformationPresent) - { - case BundleType.UnusedEntry: - // Empty entry with no information - break; - - case BundleType.SixteenBitEntry: - entry.SixteenBitObjectNumber = data.ReadUInt16(); - entry.SixteenBitEntryFlags = (EntryFlags)data.ReadByteValue(); - entry.SixteenBitOffset = data.ReadUInt16(); - break; - - case BundleType.TwoEightySixCallGateEntry: - entry.TwoEightySixObjectNumber = data.ReadUInt16(); - entry.TwoEightySixEntryFlags = (EntryFlags)data.ReadByteValue(); - entry.TwoEightySixOffset = data.ReadUInt16(); - entry.TwoEightySixCallgate = data.ReadUInt16(); - break; - - case BundleType.ThirtyTwoBitEntry: - entry.ThirtyTwoBitObjectNumber = data.ReadUInt16(); - entry.ThirtyTwoBitEntryFlags = (EntryFlags)data.ReadByteValue(); - entry.ThirtyTwoBitOffset = data.ReadUInt32(); - break; - - case BundleType.ForwarderEntry: - entry.ForwarderReserved = data.ReadUInt16(); - entry.ForwarderFlags = (ForwarderFlags)data.ReadByteValue(); - entry.ForwarderModuleOrdinalNumber = data.ReadUInt16(); - entry.ProcedureNameOffset = data.ReadUInt32(); - entry.ImportOrdinalNumber = data.ReadUInt32(); - break; - - default: - return null; - } - - bundle.TableEntries[i] = entry; - } - - return bundle; - } - - /// - /// Parse a Stream into a module format directives table entry - /// - /// Stream to parse - /// Filled module format directives table entry on success, null on error - private static ModuleFormatDirectivesTableEntry ParseModuleFormatDirectivesTableEntry(Stream data) - { - // TODO: Use marshalling here instead of building - var entry = new ModuleFormatDirectivesTableEntry(); - - entry.DirectiveNumber = (DirectiveNumber)data.ReadUInt16(); - entry.DirectiveDataLength = data.ReadUInt16(); - entry.DirectiveDataOffset = data.ReadUInt32(); - - return entry; - } - - /// - /// Parse a Stream into a verify record directive table entry - /// - /// Stream to parse - /// Filled verify record directive table entry on success, null on error - private static VerifyRecordDirectiveTableEntry ParseVerifyRecordDirectiveTableEntry(Stream data) - { - // TODO: Use marshalling here instead of building - var entry = new VerifyRecordDirectiveTableEntry(); - - entry.EntryCount = data.ReadUInt16(); - entry.OrdinalIndex = data.ReadUInt16(); - entry.Version = data.ReadUInt16(); - entry.ObjectEntriesCount = data.ReadUInt16(); - entry.ObjectNumberInModule = data.ReadUInt16(); - entry.ObjectLoadBaseAddress = data.ReadUInt16(); - entry.ObjectVirtualAddressSize = data.ReadUInt16(); - - return entry; - } - - /// - /// Parse a Stream into a fix-up page table entry - /// - /// Stream to parse - /// Filled fix-up page table entry on success, null on error - private static FixupPageTableEntry ParseFixupPageTableEntry(Stream data) - { - // TODO: Use marshalling here instead of building - var entry = new FixupPageTableEntry(); - - entry.Offset = data.ReadUInt32(); - - return entry; - } - - /// - /// Parse a Stream into a fix-up record table entry - /// - /// Stream to parse - /// Filled fix-up record table entry on success, null on 
error - private static FixupRecordTableEntry ParseFixupRecordTableEntry(Stream data) - { - // TODO: Use marshalling here instead of building - var entry = new FixupRecordTableEntry(); - - entry.SourceType = (FixupRecordSourceType)data.ReadByteValue(); - entry.TargetFlags = (FixupRecordTargetFlags)data.ReadByteValue(); - - // Source list flag - if (entry.SourceType.HasFlag(FixupRecordSourceType.SourceListFlag)) - entry.SourceOffsetListCount = data.ReadByteValue(); - else - entry.SourceOffset = data.ReadUInt16(); - - // OBJECT / TRGOFF - if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.InternalReference)) - { - // 16-bit Object Number/Module Ordinal Flag - if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.SixteenBitObjectNumberModuleOrdinalFlag)) - entry.TargetObjectNumberWORD = data.ReadUInt16(); - else - entry.TargetObjectNumberByte = data.ReadByteValue(); - - // 16-bit Selector fixup - if (!entry.SourceType.HasFlag(FixupRecordSourceType.SixteenBitSelectorFixup)) - { - // 32-bit Target Offset Flag - if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.ThirtyTwoBitTargetOffsetFlag)) - entry.TargetOffsetDWORD = data.ReadUInt32(); - else - entry.TargetOffsetWORD = data.ReadUInt16(); - } - } - - // MOD ORD# / IMPORT ORD / ADDITIVE - else if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.ImportedReferenceByOrdinal)) - { - // 16-bit Object Number/Module Ordinal Flag - if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.SixteenBitObjectNumberModuleOrdinalFlag)) - entry.OrdinalIndexImportModuleNameTableWORD = data.ReadUInt16(); - else - entry.OrdinalIndexImportModuleNameTableByte = data.ReadByteValue(); - - // 8-bit Ordinal Flag & 32-bit Target Offset Flag - if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.EightBitOrdinalFlag)) - entry.ImportedOrdinalNumberByte = data.ReadByteValue(); - else if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.ThirtyTwoBitTargetOffsetFlag)) - entry.ImportedOrdinalNumberDWORD = data.ReadUInt32(); - else - entry.ImportedOrdinalNumberWORD = data.ReadUInt16(); - - // Additive Fixup Flag - if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.AdditiveFixupFlag)) - { - // 32-bit Additive Flag - if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.ThirtyTwoBitAdditiveFixupFlag)) - entry.AdditiveFixupValueDWORD = data.ReadUInt32(); - else - entry.AdditiveFixupValueWORD = data.ReadUInt16(); - } - } - - // MOD ORD# / PROCEDURE NAME OFFSET / ADDITIVE - else if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.ImportedReferenceByName)) - { - // 16-bit Object Number/Module Ordinal Flag - if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.SixteenBitObjectNumberModuleOrdinalFlag)) - entry.OrdinalIndexImportModuleNameTableWORD = data.ReadUInt16(); - else - entry.OrdinalIndexImportModuleNameTableByte = data.ReadByteValue(); - - // 32-bit Target Offset Flag - if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.ThirtyTwoBitTargetOffsetFlag)) - entry.OffsetImportProcedureNameTableDWORD = data.ReadUInt32(); - else - entry.OffsetImportProcedureNameTableWORD = data.ReadUInt16(); - - // Additive Fixup Flag - if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.AdditiveFixupFlag)) - { - // 32-bit Additive Flag - if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.ThirtyTwoBitAdditiveFixupFlag)) - entry.AdditiveFixupValueDWORD = data.ReadUInt32(); - else - entry.AdditiveFixupValueWORD = data.ReadUInt16(); - } - } - - // ORD # / ADDITIVE - else if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.InternalReferenceViaEntryTable)) - { - // 16-bit Object Number/Module Ordinal 
Flag - if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.SixteenBitObjectNumberModuleOrdinalFlag)) - entry.OrdinalIndexImportModuleNameTableWORD = data.ReadUInt16(); - else - entry.OrdinalIndexImportModuleNameTableByte = data.ReadByteValue(); - - // Additive Fixup Flag - if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.AdditiveFixupFlag)) - { - // 32-bit Additive Flag - if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.ThirtyTwoBitAdditiveFixupFlag)) - entry.AdditiveFixupValueDWORD = data.ReadUInt32(); - else - entry.AdditiveFixupValueWORD = data.ReadUInt16(); - } - } - - // No other top-level flags recognized - else - { - return null; - } - - #region SCROFFn - - if (entry.SourceType.HasFlag(FixupRecordSourceType.SourceListFlag)) - { - entry.SourceOffsetList = new ushort[entry.SourceOffsetListCount]; - for (int i = 0; i < entry.SourceOffsetList.Length; i++) - { - entry.SourceOffsetList[i] = data.ReadUInt16(); - } - } - - #endregion - - return entry; - } - - /// - /// Parse a Stream into a import module name table entry - /// - /// Stream to parse - /// Filled import module name table entry on success, null on error - private static ImportModuleNameTableEntry ParseImportModuleNameTableEntry(Stream data) - { - // TODO: Use marshalling here instead of building - var entry = new ImportModuleNameTableEntry(); - - entry.Length = data.ReadByteValue(); - if (entry.Length > 0) - { - byte[] name = data.ReadBytes(entry.Length); - entry.Name = Encoding.ASCII.GetString(name).TrimEnd('\0'); - } - - return entry; - } - - /// - /// Parse a Stream into a import module name table entry - /// - /// Stream to parse - /// Filled import module name table entry on success, null on error - private static ImportModuleProcedureNameTableEntry ParseImportModuleProcedureNameTableEntry(Stream data) - { - // TODO: Use marshalling here instead of building - var entry = new ImportModuleProcedureNameTableEntry(); - - entry.Length = data.ReadByteValue(); - if (entry.Length > 0) - { - byte[] name = data.ReadBytes(entry.Length); - entry.Name = Encoding.ASCII.GetString(name).TrimEnd('\0'); - } - - return entry; - } - - /// - /// Parse a Stream into a per-page checksum table entry - /// - /// Stream to parse - /// Filled per-page checksum table entry on success, null on error - private static PerPageChecksumTableEntry ParsePerPageChecksumTableEntry(Stream data) - { - // TODO: Use marshalling here instead of building - var entry = new PerPageChecksumTableEntry(); - - entry.Checksum = data.ReadUInt32(); - - return entry; - } - - /// - /// Parse a Stream into a non-resident names table entry - /// - /// Stream to parse - /// Filled non-resident names table entry on success, null on error - private static NonResidentNamesTableEntry ParseNonResidentNameTableEntry(Stream data) - { - // TODO: Use marshalling here instead of building - var entry = new NonResidentNamesTableEntry(); - - entry.Length = data.ReadByteValue(); - if (entry.Length > 0) - { - byte[] name = data.ReadBytes(entry.Length); - entry.Name = Encoding.ASCII.GetString(name).TrimEnd('\0'); - } - entry.OrdinalNumber = data.ReadUInt16(); - - return entry; - } - - /// - /// Parse a Stream into a debug information - /// - /// Stream to parse - /// Total size of the debug information - /// Filled debug information on success, null on error - private static DebugInformation ParseDebugInformation(Stream data, long size) - { - // TODO: Use marshalling here instead of building - var debugInformation = new DebugInformation(); - - byte[] signature = data.ReadBytes(3); - 
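// Editorial note (sketch): the DebuggerData read below uses (size - 4) because
// the 3-byte signature just read plus the 1-byte format type account for the
// first four bytes of the debug information block:
//
//     int payloadLength = (int)(size - (3 /* signature */ + 1 /* format type */));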
debugInformation.Signature = Encoding.ASCII.GetString(signature); - if (debugInformation.Signature != DebugInformationSignatureString) - return null; - - debugInformation.FormatType = (DebugFormatType)data.ReadByteValue(); - debugInformation.DebuggerData = data.ReadBytes((int)(size - 4)); - - return debugInformation; - } - - #endregion - } -} \ No newline at end of file diff --git a/BinaryObjectScanner.Builders/MSDOS.cs b/BinaryObjectScanner.Builders/MSDOS.cs deleted file mode 100644 index c86ba05b..00000000 --- a/BinaryObjectScanner.Builders/MSDOS.cs +++ /dev/null @@ -1,175 +0,0 @@ -using System.IO; -using System.Text; -using SabreTools.IO; -using SabreTools.Models.MSDOS; -using static SabreTools.Models.MSDOS.Constants; - -namespace BinaryObjectScanner.Builders -{ - public static class MSDOS - { - #region Byte Data - - /// - /// Parse a byte array into an MS-DOS executable - /// - /// Byte array to parse - /// Offset into the byte array - /// Filled executable on success, null on error - public static Executable ParseExecutable(byte[] data, int offset) - { - // If the data is invalid - if (data == null) - return null; - - // If the offset is out of bounds - if (offset < 0 || offset >= data.Length) - return null; - - // Create a memory stream and parse that - MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset); - return ParseExecutable(dataStream); - } - - #endregion - - #region Stream Data - - /// - /// Parse a Stream into an MS-DOS executable - /// - /// Stream to parse - /// Filled executable on success, null on error - public static Executable ParseExecutable(Stream data) - { - // If the data is invalid - if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead) - return null; - - // If the offset is out of bounds - if (data.Position < 0 || data.Position >= data.Length) - return null; - - // Cache the current offset - int initialOffset = (int)data.Position; - - // Create a new executable to fill - var executable = new Executable(); - - #region Executable Header - - // Try to parse the executable header - var executableHeader = ParseExecutableHeader(data); - if (executableHeader == null) - return null; - - // Set the executable header - executable.Header = executableHeader; - - #endregion - - #region Relocation Table - - // If the offset for the relocation table doesn't exist - int tableAddress = initialOffset + executableHeader.RelocationTableAddr; - if (tableAddress >= data.Length) - return executable; - - // Try to parse the relocation table - data.Seek(tableAddress, SeekOrigin.Begin); - var relocationTable = ParseRelocationTable(data, executableHeader.RelocationItems); - if (relocationTable == null) - return null; - - // Set the relocation table - executable.RelocationTable = relocationTable; - - #endregion - - // Return the executable - return executable; - } - - /// - /// Parse a Stream into an MS-DOS executable header - /// - /// Stream to parse - /// Filled executable header on success, null on error - private static ExecutableHeader ParseExecutableHeader(Stream data) - { - // TODO: Use marshalling here instead of building - var header = new ExecutableHeader(); - - #region Standard Fields - - byte[] magic = data.ReadBytes(2); - header.Magic = Encoding.ASCII.GetString(magic); - if (header.Magic != SignatureString) - return null; - - header.LastPageBytes = data.ReadUInt16(); - header.Pages = data.ReadUInt16(); - header.RelocationItems = data.ReadUInt16(); - header.HeaderParagraphSize = data.ReadUInt16(); - header.MinimumExtraParagraphs = 
data.ReadUInt16(); - header.MaximumExtraParagraphs = data.ReadUInt16(); - header.InitialSSValue = data.ReadUInt16(); - header.InitialSPValue = data.ReadUInt16(); - header.Checksum = data.ReadUInt16(); - header.InitialIPValue = data.ReadUInt16(); - header.InitialCSValue = data.ReadUInt16(); - header.RelocationTableAddr = data.ReadUInt16(); - header.OverlayNumber = data.ReadUInt16(); - - #endregion - - // If we don't have enough data for PE extensions - if (data.Position >= data.Length || data.Length - data.Position < 36) - return header; - - #region PE Extensions - - header.Reserved1 = new ushort[4]; - for (int i = 0; i < header.Reserved1.Length; i++) - { - header.Reserved1[i] = data.ReadUInt16(); - } - header.OEMIdentifier = data.ReadUInt16(); - header.OEMInformation = data.ReadUInt16(); - header.Reserved2 = new ushort[10]; - for (int i = 0; i < header.Reserved2.Length; i++) - { - header.Reserved2[i] = data.ReadUInt16(); - } - header.NewExeHeaderAddr = data.ReadUInt32(); - - #endregion - - return header; - } - - /// - /// Parse a Stream into a relocation table - /// - /// Stream to parse - /// Number of relocation table entries to read - /// Filled relocation table on success, null on error - private static RelocationEntry[] ParseRelocationTable(Stream data, int count) - { - // TODO: Use marshalling here instead of building - var relocationTable = new RelocationEntry[count]; - - for (int i = 0; i < count; i++) - { - var entry = new RelocationEntry(); - entry.Offset = data.ReadUInt16(); - entry.Segment = data.ReadUInt16(); - relocationTable[i] = entry; - } - - return relocationTable; - } - - #endregion - } -} \ No newline at end of file diff --git a/BinaryObjectScanner.Builders/MicrosoftCabinet.cs b/BinaryObjectScanner.Builders/MicrosoftCabinet.cs deleted file mode 100644 index 5d08e952..00000000 --- a/BinaryObjectScanner.Builders/MicrosoftCabinet.cs +++ /dev/null @@ -1,258 +0,0 @@ -using System.IO; -using System.Text; -using SabreTools.IO; -using SabreTools.Models.MicrosoftCabinet; -using static SabreTools.Models.MicrosoftCabinet.Constants; - -namespace BinaryObjectScanner.Builders -{ - // TODO: Add multi-cabinet reading - public class MicrosoftCabinet - { - #region Byte Data - - /// - /// Parse a byte array into a Microsoft Cabinet file - /// - /// Byte array to parse - /// Offset into the byte array - /// Filled cabinet on success, null on error - public static Cabinet ParseCabinet(byte[] data, int offset) - { - // If the data is invalid - if (data == null) - return null; - - // If the offset is out of bounds - if (offset < 0 || offset >= data.Length) - return null; - - // Create a memory stream and parse that - MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset); - return ParseCabinet(dataStream); - } - - #endregion - - #region Stream Data - - /// - /// Parse a Stream into a Microsoft Cabinet file - /// - /// Stream to parse - /// Filled cabinet on success, null on error - public static Cabinet ParseCabinet(Stream data) - { - // If the data is invalid - if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead) - return null; - - // If the offset is out of bounds - if (data.Position < 0 || data.Position >= data.Length) - return null; - - // Cache the current offset - int initialOffset = (int)data.Position; - - // Create a new cabinet to fill - var cabinet = new Cabinet(); - - #region Cabinet Header - - // Try to parse the cabinet header - var cabinetHeader = ParseCabinetHeader(data); - if (cabinetHeader == null) - return null; - - // Set the 
cabinet header - cabinet.Header = cabinetHeader; - - #endregion - - #region Folders - - // Set the folder array - cabinet.Folders = new CFFOLDER[cabinetHeader.FolderCount]; - - // Try to parse each folder, if we have any - for (int i = 0; i < cabinetHeader.FolderCount; i++) - { - var folder = ParseFolder(data, cabinetHeader); - if (folder == null) - return null; - - // Set the folder - cabinet.Folders[i] = folder; - } - - #endregion - - #region Files - - // Get the files offset - int filesOffset = (int)cabinetHeader.FilesOffset + initialOffset; - if (filesOffset > data.Length) - return null; - - // Seek to the offset - data.Seek(filesOffset, SeekOrigin.Begin); - - // Set the file array - cabinet.Files = new CFFILE[cabinetHeader.FileCount]; - - // Try to parse each file, if we have any - for (int i = 0; i < cabinetHeader.FileCount; i++) - { - var file = ParseFile(data); - if (file == null) - return null; - - // Set the file - cabinet.Files[i] = file; - } - - #endregion - - return cabinet; - } - - /// - /// Parse a Stream into a cabinet header - /// - /// Stream to parse - /// Filled cabinet header on success, null on error - private static CFHEADER ParseCabinetHeader(Stream data) - { - CFHEADER header = new CFHEADER(); - - byte[] signature = data.ReadBytes(4); - header.Signature = Encoding.ASCII.GetString(signature); - if (header.Signature != SignatureString) - return null; - - header.Reserved1 = data.ReadUInt32(); - header.CabinetSize = data.ReadUInt32(); - header.Reserved2 = data.ReadUInt32(); - header.FilesOffset = data.ReadUInt32(); - header.Reserved3 = data.ReadUInt32(); - header.VersionMinor = data.ReadByteValue(); - header.VersionMajor = data.ReadByteValue(); - header.FolderCount = data.ReadUInt16(); - header.FileCount = data.ReadUInt16(); - header.Flags = (HeaderFlags)data.ReadUInt16(); - header.SetID = data.ReadUInt16(); - header.CabinetIndex = data.ReadUInt16(); - - if (header.Flags.HasFlag(HeaderFlags.RESERVE_PRESENT)) - { - header.HeaderReservedSize = data.ReadUInt16(); - if (header.HeaderReservedSize > 60_000) - return null; - - header.FolderReservedSize = data.ReadByteValue(); - header.DataReservedSize = data.ReadByteValue(); - - if (header.HeaderReservedSize > 0) - header.ReservedData = data.ReadBytes(header.HeaderReservedSize); - } - - if (header.Flags.HasFlag(HeaderFlags.PREV_CABINET)) - { - header.CabinetPrev = data.ReadString(Encoding.ASCII); - header.DiskPrev = data.ReadString(Encoding.ASCII); - } - - if (header.Flags.HasFlag(HeaderFlags.NEXT_CABINET)) - { - header.CabinetNext = data.ReadString(Encoding.ASCII); - header.DiskNext = data.ReadString(Encoding.ASCII); - } - - return header; - } - - /// - /// Parse a Stream into a folder - /// - /// Stream to parse - /// Cabinet header to get flags and sizes from - /// Filled folder on success, null on error - private static CFFOLDER ParseFolder(Stream data, CFHEADER header) - { - CFFOLDER folder = new CFFOLDER(); - - folder.CabStartOffset = data.ReadUInt32(); - folder.DataCount = data.ReadUInt16(); - folder.CompressionType = (CompressionType)data.ReadUInt16(); - - if (header.FolderReservedSize > 0) - folder.ReservedData = data.ReadBytes(header.FolderReservedSize); - - if (folder.CabStartOffset > 0) - { - long currentPosition = data.Position; - data.Seek(folder.CabStartOffset, SeekOrigin.Begin); - - folder.DataBlocks = new CFDATA[folder.DataCount]; - for (int i = 0; i < folder.DataCount; i++) - { - CFDATA dataBlock = ParseDataBlock(data, header.DataReservedSize); - folder.DataBlocks[i] = dataBlock; - } - - 
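// Editorial sketch (not part of the original file): a consumer could total the
// folder's data blocks once they have been read; only fields populated by
// ParseDataBlock below are used.
private static long GetFolderUncompressedSize(CFFOLDER folder)
{
    long total = 0;
    if (folder?.DataBlocks == null)
        return total;

    foreach (CFDATA block in folder.DataBlocks)
        total += block.UncompressedSize;

    return total;
}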
data.Seek(currentPosition, SeekOrigin.Begin); - } - - return folder; - } - - /// - /// Parse a Stream into a data block - /// - /// Stream to parse - /// Reserved byte size for data blocks - /// Filled folder on success, null on error - private static CFDATA ParseDataBlock(Stream data, byte dataReservedSize) - { - CFDATA dataBlock = new CFDATA(); - - dataBlock.Checksum = data.ReadUInt32(); - dataBlock.CompressedSize = data.ReadUInt16(); - dataBlock.UncompressedSize = data.ReadUInt16(); - - if (dataReservedSize > 0) - dataBlock.ReservedData = data.ReadBytes(dataReservedSize); - - if (dataBlock.CompressedSize > 0) - dataBlock.CompressedData = data.ReadBytes(dataBlock.CompressedSize); - - return dataBlock; - } - - /// - /// Parse a Stream into a file - /// - /// Stream to parse - /// Filled file on success, null on error - private static CFFILE ParseFile(Stream data) - { - CFFILE file = new CFFILE(); - - file.FileSize = data.ReadUInt32(); - file.FolderStartOffset = data.ReadUInt32(); - file.FolderIndex = (FolderIndex)data.ReadUInt16(); - file.Date = data.ReadUInt16(); - file.Time = data.ReadUInt16(); - file.Attributes = (SabreTools.Models.MicrosoftCabinet.FileAttributes)data.ReadUInt16(); - - if (file.Attributes.HasFlag(SabreTools.Models.MicrosoftCabinet.FileAttributes.NAME_IS_UTF)) - file.Name = data.ReadString(Encoding.Unicode); - else - file.Name = data.ReadString(Encoding.ASCII); - - return file; - } - - #endregion - } -} diff --git a/BinaryObjectScanner.Builders/MoPaQ.cs b/BinaryObjectScanner.Builders/MoPaQ.cs deleted file mode 100644 index 6ebb9f59..00000000 --- a/BinaryObjectScanner.Builders/MoPaQ.cs +++ /dev/null @@ -1,651 +0,0 @@ -using System; -using System.Collections.Generic; -using System.IO; -using System.Text; -using SabreTools.IO; -using SabreTools.Models.MoPaQ; -using static SabreTools.Models.MoPaQ.Constants; - -namespace BinaryObjectScanner.Builders -{ - public class MoPaQ - { - #region Byte Data - - /// - /// Parse a byte array into a MoPaQ archive - /// - /// Byte array to parse - /// Offset into the byte array - /// Filled archive on success, null on error - public static Archive ParseArchive(byte[] data, int offset) - { - // If the data is invalid - if (data == null) - return null; - - // If the offset is out of bounds - if (offset < 0 || offset >= data.Length) - return null; - - // Create a memory stream and parse that - MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset); - return ParseArchive(dataStream); - } - - #endregion - - #region Stream Data - - /// - /// Parse a Stream into a MoPaQ archive - /// - /// Stream to parse - /// Filled archive on success, null on error - public static Archive ParseArchive(Stream data) - { - // If the data is invalid - if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead) - return null; - - // If the offset is out of bounds - if (data.Position < 0 || data.Position >= data.Length) - return null; - - // Cache the current offset - int initialOffset = (int)data.Position; - - // Create a new archive to fill - var archive = new Archive(); - - #region User Data - - // Check for User Data - uint possibleSignature = data.ReadUInt32(); - data.Seek(-4, SeekOrigin.Current); - if (possibleSignature == 0x1B51504D) - { - // Save the current position for offset correction - long basePtr = data.Position; - - // Deserialize the user data, returning null if invalid - var userData = ParseUserData(data); - if (userData == null) - return null; - - // Set the user data - archive.UserData = userData; - - // Set 
the starting position according to the header offset - data.Seek(basePtr + (int)archive.UserData.HeaderOffset, SeekOrigin.Begin); - } - - #endregion - - #region Archive Header - - // Check for the Header - possibleSignature = data.ReadUInt32(); - data.Seek(-4, SeekOrigin.Current); - if (possibleSignature == 0x1A51504D) - { - // Try to parse the archive header - var archiveHeader = ParseArchiveHeader(data); - if (archiveHeader == null) - return null; - - // Set the archive header - archive.ArchiveHeader = archiveHeader; - } - else - { - return null; - } - - #endregion - - #region Hash Table - - // TODO: The hash table has to be decrypted before reading - - // Version 1 - if (archive.ArchiveHeader.FormatVersion == FormatVersion.Format1) - { - // If we have a hash table - long hashTableOffset = archive.ArchiveHeader.HashTablePosition; - if (hashTableOffset != 0) - { - // Seek to the offset - data.Seek(hashTableOffset, SeekOrigin.Begin); - - // Find the ending offset based on size - long hashTableEnd = hashTableOffset + archive.ArchiveHeader.HashTableSize; - - // Read in the hash table - var hashTable = new List<HashEntry>(); - - while (data.Position < hashTableEnd) - { - var hashEntry = ParseHashEntry(data); - if (hashEntry == null) - return null; - - hashTable.Add(hashEntry); - } - - archive.HashTable = hashTable.ToArray(); - } - } - - // Version 2 and 3 - else if (archive.ArchiveHeader.FormatVersion == FormatVersion.Format2 || archive.ArchiveHeader.FormatVersion == FormatVersion.Format3) - { - // If we have a hash table - long hashTableOffset = ((long)archive.ArchiveHeader.HashTablePositionHi << 32) | archive.ArchiveHeader.HashTablePosition; - if (hashTableOffset != 0) - { - // Seek to the offset - data.Seek(hashTableOffset, SeekOrigin.Begin); - - // Find the ending offset based on size - long hashTableEnd = hashTableOffset + archive.ArchiveHeader.HashTableSize; - - // Read in the hash table - var hashTable = new List<HashEntry>(); - - while (data.Position < hashTableEnd) - { - var hashEntry = ParseHashEntry(data); - if (hashEntry == null) - return null; - - hashTable.Add(hashEntry); - } - - archive.HashTable = hashTable.ToArray(); - } - } - - // Version 4 - else if (archive.ArchiveHeader.FormatVersion == FormatVersion.Format4) - { - // If we have a hash table - long hashTableOffset = ((long)archive.ArchiveHeader.HashTablePositionHi << 32) | archive.ArchiveHeader.HashTablePosition; - if (hashTableOffset != 0) - { - // Seek to the offset - data.Seek(hashTableOffset, SeekOrigin.Begin); - - // Find the ending offset based on size - long hashTableEnd = hashTableOffset + (long)archive.ArchiveHeader.HashTableSizeLong; - - // Read in the hash table - var hashTable = new List<HashEntry>(); - - while (data.Position < hashTableEnd) - { - var hashEntry = ParseHashEntry(data); - if (hashEntry == null) - return null; - - hashTable.Add(hashEntry); - } - - archive.HashTable = hashTable.ToArray(); - } - } - - #endregion - - #region Block Table - - // Version 1 - if (archive.ArchiveHeader.FormatVersion == FormatVersion.Format1) - { - // If we have a block table - long blockTableOffset = archive.ArchiveHeader.BlockTablePosition; - if (blockTableOffset != 0) - { - // Seek to the offset - data.Seek(blockTableOffset, SeekOrigin.Begin); - - // Find the ending offset based on size - long blockTableEnd = blockTableOffset + archive.ArchiveHeader.BlockTableSize; - - // Read in the block table - var blockTable = new List<BlockEntry>(); - - while (data.Position < blockTableEnd) - { - var blockEntry = ParseBlockEntry(data); - if (blockEntry == null) - return
null; - - blockTable.Add(blockEntry); - } - - archive.BlockTable = blockTable.ToArray(); - } - } - - // Version 2 and 3 - else if (archive.ArchiveHeader.FormatVersion == FormatVersion.Format2 || archive.ArchiveHeader.FormatVersion == FormatVersion.Format3) - { - // If we have a block table - long blockTableOffset = ((long)archive.ArchiveHeader.BlockTablePositionHi << 32) | archive.ArchiveHeader.BlockTablePosition; - if (blockTableOffset != 0) - { - // Seek to the offset - data.Seek(blockTableOffset, SeekOrigin.Begin); - - // Find the ending offset based on size - long blockTableEnd = blockTableOffset + archive.ArchiveHeader.BlockTableSize; - - // Read in the block table - var blockTable = new List<BlockEntry>(); - - while (data.Position < blockTableEnd) - { - var blockEntry = ParseBlockEntry(data); - if (blockEntry == null) - return null; - - blockTable.Add(blockEntry); - } - - archive.BlockTable = blockTable.ToArray(); - } - } - - // Version 4 - else if (archive.ArchiveHeader.FormatVersion == FormatVersion.Format4) - { - // If we have a block table - long blockTableOffset = ((long)archive.ArchiveHeader.BlockTablePositionHi << 32) | archive.ArchiveHeader.BlockTablePosition; - if (blockTableOffset != 0) - { - // Seek to the offset - data.Seek(blockTableOffset, SeekOrigin.Begin); - - // Find the ending offset based on size - long blockTableEnd = blockTableOffset + (long)archive.ArchiveHeader.BlockTableSizeLong; - - // Read in the block table - var blockTable = new List<BlockEntry>(); - - while (data.Position < blockTableEnd) - { - var blockEntry = ParseBlockEntry(data); - if (blockEntry == null) - return null; - - blockTable.Add(blockEntry); - } - - archive.BlockTable = blockTable.ToArray(); - } - } - - #endregion - - #region Hi-Block Table - - // Version 2, 3, and 4 - if (archive.ArchiveHeader.FormatVersion >= FormatVersion.Format2) - { - // If we have a hi-block table - long hiBlockTableOffset = (long)archive.ArchiveHeader.HiBlockTablePosition; - if (hiBlockTableOffset != 0) - { - // Seek to the offset - data.Seek(hiBlockTableOffset, SeekOrigin.Begin); - - // Read in the hi-block table - var hiBlockTable = new List<short>(); - - for (int i = 0; i < archive.BlockTable.Length; i++) - { - short hiBlockEntry = data.ReadInt16(); - hiBlockTable.Add(hiBlockEntry); - } - - archive.HiBlockTable = hiBlockTable.ToArray(); - } - } - - #endregion - - #region BET Table - - // Version 3 and 4 - if (archive.ArchiveHeader.FormatVersion >= FormatVersion.Format3) - { - // If we have a BET table - long betTableOffset = (long)archive.ArchiveHeader.BetTablePosition; - if (betTableOffset != 0) - { - // Seek to the offset - data.Seek(betTableOffset, SeekOrigin.Begin); - - // Read in the BET table - var betTable = ParseBetTable(data); - if (betTable == null) - return null; - - archive.BetTable = betTable; - } - } - - #endregion - - #region HET Table - - // Version 3 and 4 - if (archive.ArchiveHeader.FormatVersion >= FormatVersion.Format3) - { - // If we have a HET table - long hetTableOffset = (long)archive.ArchiveHeader.HetTablePosition; - if (hetTableOffset != 0) - { - // Seek to the offset - data.Seek(hetTableOffset, SeekOrigin.Begin); - - // Read in the HET table - var hetTable = ParseHetTable(data); - if (hetTable == null) - return null; - - archive.HetTable = hetTable; - } - } - - #endregion - - return archive; - } - - /// - /// Parse a Stream into an archive header - /// - /// Stream to parse - /// Filled archive header on success, null on error - private static ArchiveHeader ParseArchiveHeader(Stream data) - { - ArchiveHeader
archiveHeader = new ArchiveHeader(); - - // V1 - Common - byte[] signature = data.ReadBytes(4); - archiveHeader.Signature = Encoding.ASCII.GetString(signature); - if (archiveHeader.Signature != ArchiveHeaderSignatureString) - return null; - - archiveHeader.HeaderSize = data.ReadUInt32(); - archiveHeader.ArchiveSize = data.ReadUInt32(); - archiveHeader.FormatVersion = (FormatVersion)data.ReadUInt16(); - archiveHeader.BlockSize = data.ReadUInt16(); - archiveHeader.HashTablePosition = data.ReadUInt32(); - archiveHeader.BlockTablePosition = data.ReadUInt32(); - archiveHeader.HashTableSize = data.ReadUInt32(); - archiveHeader.BlockTableSize = data.ReadUInt32(); - - // V2 - if (archiveHeader.FormatVersion >= FormatVersion.Format2) - { - archiveHeader.HiBlockTablePosition = data.ReadUInt64(); - archiveHeader.HashTablePositionHi = data.ReadUInt16(); - archiveHeader.BlockTablePositionHi = data.ReadUInt16(); - } - - // V3 - if (archiveHeader.FormatVersion >= FormatVersion.Format3) - { - archiveHeader.ArchiveSizeLong = data.ReadUInt64(); - archiveHeader.BetTablePosition = data.ReadUInt64(); - archiveHeader.HetTablePosition = data.ReadUInt64(); - } - - // V4 - if (archiveHeader.FormatVersion >= FormatVersion.Format4) - { - archiveHeader.HashTableSizeLong = data.ReadUInt64(); - archiveHeader.BlockTableSizeLong = data.ReadUInt64(); - archiveHeader.HiBlockTableSize = data.ReadUInt64(); - archiveHeader.HetTableSize = data.ReadUInt64(); - archiveHeader.BetTablesize = data.ReadUInt64(); - archiveHeader.RawChunkSize = data.ReadUInt32(); - - archiveHeader.BlockTableMD5 = data.ReadBytes(0x10); - archiveHeader.HashTableMD5 = data.ReadBytes(0x10); - archiveHeader.HiBlockTableMD5 = data.ReadBytes(0x10); - archiveHeader.BetTableMD5 = data.ReadBytes(0x10); - archiveHeader.HetTableMD5 = data.ReadBytes(0x10); - archiveHeader.HetTableMD5 = data.ReadBytes(0x10); - } - - return archiveHeader; - } - - /// - /// Parse a Stream into a user data object - /// - /// Stream to parse - /// Filled user data on success, null on error - private static UserData ParseUserData(Stream data) - { - UserData userData = new UserData(); - - byte[] signature = data.ReadBytes(4); - userData.Signature = Encoding.ASCII.GetString(signature); - if (userData.Signature != UserDataSignatureString) - return null; - - userData.UserDataSize = data.ReadUInt32(); - userData.HeaderOffset = data.ReadUInt32(); - userData.UserDataHeaderSize = data.ReadUInt32(); - - return userData; - } - - /// - /// Parse a Stream into a HET table - /// - /// Stream to parse - /// Filled HET table on success, null on error - private static HetTable ParseHetTable(Stream data) - { - HetTable hetTable = new HetTable(); - - // Common Headers - byte[] signature = data.ReadBytes(4); - hetTable.Signature = Encoding.ASCII.GetString(signature); - if (hetTable.Signature != HetTableSignatureString) - return null; - - hetTable.Version = data.ReadUInt32(); - hetTable.DataSize = data.ReadUInt32(); - - // HET-Specific - hetTable.TableSize = data.ReadUInt32(); - hetTable.MaxFileCount = data.ReadUInt32(); - hetTable.HashTableSize = data.ReadUInt32(); - hetTable.TotalIndexSize = data.ReadUInt32(); - hetTable.IndexSizeExtra = data.ReadUInt32(); - hetTable.IndexSize = data.ReadUInt32(); - hetTable.BlockTableSize = data.ReadUInt32(); - hetTable.HashTable = data.ReadBytes((int)hetTable.HashTableSize); - - // TODO: Populate the file indexes array - hetTable.FileIndexes = new byte[(int)hetTable.HashTableSize][]; - - return hetTable; - } - - /// - /// Parse a Stream into a BET table - /// - /// 
Stream to parse - /// Filled BET table on success, null on error - private static BetTable ParseBetTable(Stream data) - { - BetTable betTable = new BetTable(); - - // Common Headers - byte[] signature = data.ReadBytes(4); - betTable.Signature = Encoding.ASCII.GetString(signature); - if (betTable.Signature != BetTableSignatureString) - return null; - - betTable.Version = data.ReadUInt32(); - betTable.DataSize = data.ReadUInt32(); - - // BET-Specific - betTable.TableSize = data.ReadUInt32(); - betTable.FileCount = data.ReadUInt32(); - betTable.Unknown = data.ReadUInt32(); - betTable.TableEntrySize = data.ReadUInt32(); - - betTable.FilePositionBitIndex = data.ReadUInt32(); - betTable.FileSizeBitIndex = data.ReadUInt32(); - betTable.CompressedSizeBitIndex = data.ReadUInt32(); - betTable.FlagIndexBitIndex = data.ReadUInt32(); - betTable.UnknownBitIndex = data.ReadUInt32(); - - betTable.FilePositionBitCount = data.ReadUInt32(); - betTable.FileSizeBitCount = data.ReadUInt32(); - betTable.CompressedSizeBitCount = data.ReadUInt32(); - betTable.FlagIndexBitCount = data.ReadUInt32(); - betTable.UnknownBitCount = data.ReadUInt32(); - - betTable.TotalBetHashSize = data.ReadUInt32(); - betTable.BetHashSizeExtra = data.ReadUInt32(); - betTable.BetHashSize = data.ReadUInt32(); - betTable.BetHashArraySize = data.ReadUInt32(); - betTable.FlagCount = data.ReadUInt32(); - - betTable.FlagsArray = new uint[betTable.FlagCount]; - byte[] flagsArray = data.ReadBytes((int)betTable.FlagCount * 4); - Buffer.BlockCopy(flagsArray, 0, betTable.FlagsArray, 0, (int)betTable.FlagCount * 4); - - // TODO: Populate the file table - // TODO: Populate the hash table - - return betTable; - } - - /// - /// Parse a Stream into a hash entry - /// - /// Stream to parse - /// Filled hash entry on success, null on error - private static HashEntry ParseHashEntry(Stream data) - { - // TODO: Use marshalling here instead of building - HashEntry hashEntry = new HashEntry(); - - hashEntry.NameHashPartA = data.ReadUInt32(); - hashEntry.NameHashPartB = data.ReadUInt32(); - hashEntry.Locale = (Locale)data.ReadUInt16(); - hashEntry.Platform = data.ReadUInt16(); - hashEntry.BlockIndex = data.ReadUInt32(); - - return hashEntry; - } - - /// - /// Parse a Stream into a block entry - /// - /// Stream to parse - /// Filled block entry on success, null on error - private static BlockEntry ParseBlockEntry(Stream data) - { - BlockEntry blockEntry = new BlockEntry(); - - blockEntry.FilePosition = data.ReadUInt32(); - blockEntry.CompressedSize = data.ReadUInt32(); - blockEntry.UncompressedSize = data.ReadUInt32(); - blockEntry.Flags = (FileFlags)data.ReadUInt32(); - - return blockEntry; - } - - /// - /// Parse a Stream into a patch info - /// - /// Stream to parse - /// Filled patch info on success, null on error - private static PatchInfo ParsePatchInfo(Stream data) - { - // TODO: Use marshalling here instead of building - PatchInfo patchInfo = new PatchInfo(); - - patchInfo.Length = data.ReadUInt32(); - patchInfo.Flags = data.ReadUInt32(); - patchInfo.DataSize = data.ReadUInt32(); - patchInfo.MD5 = data.ReadBytes(0x10); - - // TODO: Fill the sector offset table - - return patchInfo; - } - - #endregion - - #region Helpers - - /// - /// Buffer for encryption and decryption - /// - private uint[] _stormBuffer = new uint[STORM_BUFFER_SIZE]; - - /// - /// Prepare the encryption table - /// - private void PrepareCryptTable() - { - uint seed = 0x00100001; - for (uint index1 = 0; index1 < 0x100; index1++) - { - for (uint index2 = index1, i = 0; i < 5; i++, 
index2 += 0x100) - { - seed = (seed * 125 + 3) % 0x2AAAAB; - uint temp1 = (seed & 0xFFFF) << 0x10; - - seed = (seed * 125 + 3) % 0x2AAAAB; - uint temp2 = (seed & 0xFFFF); - - _stormBuffer[index2] = (temp1 | temp2); - } - } - } - - /// - /// Decrypt a single block of data - /// - private unsafe byte[] DecryptBlock(byte[] block, uint length, uint key) - { - uint seed = 0xEEEEEEEE; - - uint[] castBlock = new uint[length / 4]; - Buffer.BlockCopy(block, 0, castBlock, 0, (int)length); - int castBlockPtr = 0; - - // Round to uints - length >>= 2; - - while (length-- > 0) - { - seed += _stormBuffer[MPQ_HASH_KEY2_MIX + (key & 0xFF)]; - uint ch = castBlock[castBlockPtr] ^ (key + seed); - - key = ((~key << 0x15) + 0x11111111) | (key >> 0x0B); - seed = ch + seed + (seed << 5) + 3; - castBlock[castBlockPtr++] = ch; - } - - Buffer.BlockCopy(castBlock, 0, block, 0, (int)length); - return block; - } - - #endregion - } -} diff --git a/BinaryObjectScanner.Builders/N3DS.cs b/BinaryObjectScanner.Builders/N3DS.cs deleted file mode 100644 index 8e399d4e..00000000 --- a/BinaryObjectScanner.Builders/N3DS.cs +++ /dev/null @@ -1,1224 +0,0 @@ -using System; -using System.IO; -using System.Text; -using BinaryObjectScanner.Utilities; -using SabreTools.IO; -using SabreTools.Models.N3DS; -using static SabreTools.Models.N3DS.Constants; - -namespace BinaryObjectScanner.Builders -{ - public class N3DS - { - #region Byte Data - - /// - /// Parse a byte array into a 3DS cart image - /// - /// Byte array to parse - /// Offset into the byte array - /// Filled cart image on success, null on error - public static Cart ParseCart(byte[] data, int offset) - { - // If the data is invalid - if (data == null) - return null; - - // If the offset is out of bounds - if (offset < 0 || offset >= data.Length) - return null; - - // Create a memory stream and parse that - MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset); - return ParseCart(dataStream); - } - - /// - /// Parse a byte array into a CIA archive - /// - /// Byte array to parse - /// Offset into the byte array - /// Filled CIA archive on success, null on error - public static CIA ParseCIA(byte[] data, int offset) - { - // If the data is invalid - if (data == null) - return null; - - // If the offset is out of bounds - if (offset < 0 || offset >= data.Length) - return null; - - // Create a memory stream and parse that - MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset); - return ParseCIA(dataStream); - } - - #endregion - - #region Stream Data - - /// - /// Parse a Stream into a 3DS cart image - /// - /// Stream to parse - /// Filled cart image on success, null on error - public static Cart ParseCart(Stream data) - { - // If the data is invalid - if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead) - return null; - - // If the offset is out of bounds - if (data.Position < 0 || data.Position >= data.Length) - return null; - - // Cache the current offset - int initialOffset = (int)data.Position; - - // Create a new cart image to fill - var cart = new Cart(); - - #region NCSD Header - - // Try to parse the header - var header = ParseNCSDHeader(data); - if (header == null) - return null; - - // Set the cart image header - cart.Header = header; - - #endregion - - #region Card Info Header - - // Try to parse the card info header - var cardInfoHeader = ParseCardInfoHeader(data); - if (cardInfoHeader == null) - return null; - - // Set the card info header - cart.CardInfoHeader = cardInfoHeader; - - #endregion - - 
#region Development Card Info Header - - // Try to parse the development card info header - var developmentCardInfoHeader = ParseDevelopmentCardInfoHeader(data); - if (developmentCardInfoHeader == null) - return null; - - // Set the development card info header - cart.DevelopmentCardInfoHeader = developmentCardInfoHeader; - - #endregion - - #region Partitions - - // Create the partition table - cart.Partitions = new NCCHHeader[8]; - - // Iterate and build the partitions - for (int i = 0; i < 8; i++) - { - cart.Partitions[i] = ParseNCCHHeader(data); - } - - #endregion - - // Cache the media unit size for further use - long mediaUnitSize = (uint)(0x200 * Math.Pow(2, header.PartitionFlags[(int)NCSDFlags.MediaUnitSize])); - - #region Extended Headers - - // Create the extended header table - cart.ExtendedHeaders = new NCCHExtendedHeader[8]; - - // Iterate and build the extended headers - for (int i = 0; i < 8; i++) - { - // If we have an encrypted or invalid partition - if (cart.Partitions[i].MagicID != NCCHMagicNumber) - continue; - - // Get the extended header offset - long offset = (cart.Header.PartitionsTable[i].Offset * mediaUnitSize) + 0x200; - if (offset < 0 || offset >= data.Length) - continue; - - // Seek to the extended header - data.Seek(offset, SeekOrigin.Begin); - - // Parse the extended header - cart.ExtendedHeaders[i] = ParseNCCHExtendedHeader(data); - } - - #endregion - - #region ExeFS Headers - - // Create the ExeFS header table - cart.ExeFSHeaders = new ExeFSHeader[8]; - - // Iterate and build the ExeFS headers - for (int i = 0; i < 8; i++) - { - // If we have an encrypted or invalid partition - if (cart.Partitions[i].MagicID != NCCHMagicNumber) - continue; - - // Get the ExeFS header offset - long offset = (cart.Header.PartitionsTable[i].Offset + cart.Partitions[i].ExeFSOffsetInMediaUnits) * mediaUnitSize; - if (offset < 0 || offset >= data.Length) - continue; - - // Seek to the ExeFS header - data.Seek(offset, SeekOrigin.Begin); - - // Parse the ExeFS header - cart.ExeFSHeaders[i] = ParseExeFSHeader(data); - } - - #endregion - - #region RomFS Headers - - // Create the RomFS header table - cart.RomFSHeaders = new RomFSHeader[8]; - - // Iterate and build the RomFS headers - for (int i = 0; i < 8; i++) - { - // If we have an encrypted or invalid partition - if (cart.Partitions[i].MagicID != NCCHMagicNumber) - continue; - - // Get the RomFS header offset - long offset = (cart.Header.PartitionsTable[i].Offset + cart.Partitions[i].RomFSOffsetInMediaUnits) * mediaUnitSize; - if (offset < 0 || offset >= data.Length) - continue; - - // Seek to the RomFS header - data.Seek(offset, SeekOrigin.Begin); - - // Parse the RomFS header - cart.RomFSHeaders[i] = ParseRomFSHeader(data); - } - - #endregion - - return cart; - } - - /// - /// Parse a Stream into a CIA archive - /// - /// Stream to parse - /// Filled CIA archive on success, null on error - public static CIA ParseCIA(Stream data) - { - // If the data is invalid - if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead) - return null; - - // If the offset is out of bounds - if (data.Position < 0 || data.Position >= data.Length) - return null; - - // Cache the current offset - int initialOffset = (int)data.Position; - - // Create a new CIA archive to fill - var cia = new CIA(); - - #region CIA Header - - // Try to parse the header - var header = ParseCIAHeader(data); - if (header == null) - return null; - - // Set the CIA archive header - cia.Header = header; - - #endregion - - // Align to 64-byte boundary, if needed - 
while (data.Position < data.Length - 1 && data.Position % 64 != 0) - { - _ = data.ReadByteValue(); - } - - #region Certificate Chain - - // Create the certificate chain - cia.CertificateChain = new Certificate[3]; - - // Try to parse the certificates - for (int i = 0; i < 3; i++) - { - var certificate = ParseCertificate(data); - if (certificate == null) - return null; - - cia.CertificateChain[i] = certificate; - } - - #endregion - - // Align to 64-byte boundary, if needed - while (data.Position < data.Length - 1 && data.Position % 64 != 0) - { - _ = data.ReadByteValue(); - } - - #region Ticket - - // Try to parse the ticket - var ticket = ParseTicket(data); - if (ticket == null) - return null; - - // Set the ticket - cia.Ticket = ticket; - - #endregion - - // Align to 64-byte boundary, if needed - while (data.Position < data.Length - 1 && data.Position % 64 != 0) - { - _ = data.ReadByteValue(); - } - - #region Title Metadata - - // Try to parse the title metadata - var titleMetadata = ParseTitleMetadata(data); - if (titleMetadata == null) - return null; - - // Set the title metadata - cia.TMDFileData = titleMetadata; - - #endregion - - // Align to 64-byte boundary, if needed - while (data.Position < data.Length - 1 && data.Position % 64 != 0) - { - _ = data.ReadByteValue(); - } - - #region Content File Data - - // Create the partition table - cia.Partitions = new NCCHHeader[8]; - - // Iterate and build the partitions - for (int i = 0; i < 8; i++) - { - cia.Partitions[i] = ParseNCCHHeader(data); - } - - #endregion - - // Align to 64-byte boundary, if needed - while (data.Position < data.Length - 1 && data.Position % 64 != 0) - { - _ = data.ReadByteValue(); - } - - #region Meta Data - - // If we have a meta data - if (header.MetaSize > 0) - { - // Try to parse the meta - var meta = ParseMetaData(data); - if (meta == null) - return null; - - // Set the meta - cia.MetaData = meta; - } - - #endregion - - return cia; - } - - /// - /// Parse a Stream into an NCSD header - /// - /// Stream to parse - /// Filled NCSD header on success, null on error - private static NCSDHeader ParseNCSDHeader(Stream data) - { - // TODO: Use marshalling here instead of building - NCSDHeader header = new NCSDHeader(); - - header.RSA2048Signature = data.ReadBytes(0x100); - byte[] magicNumber = data.ReadBytes(4); - header.MagicNumber = Encoding.ASCII.GetString(magicNumber).TrimEnd('\0'); ; - if (header.MagicNumber != NCSDMagicNumber) - return null; - - header.ImageSizeInMediaUnits = data.ReadUInt32(); - header.MediaId = data.ReadBytes(8); - header.PartitionsFSType = (FilesystemType)data.ReadUInt64(); - header.PartitionsCryptType = data.ReadBytes(8); - - header.PartitionsTable = new PartitionTableEntry[8]; - for (int i = 0; i < 8; i++) - { - header.PartitionsTable[i] = ParsePartitionTableEntry(data); - } - - if (header.PartitionsFSType == FilesystemType.Normal || header.PartitionsFSType == FilesystemType.None) - { - header.ExheaderHash = data.ReadBytes(0x20); - header.AdditionalHeaderSize = data.ReadUInt32(); - header.SectorZeroOffset = data.ReadUInt32(); - header.PartitionFlags = data.ReadBytes(8); - - header.PartitionIdTable = new ulong[8]; - for (int i = 0; i < 8; i++) - { - header.PartitionIdTable[i] = data.ReadUInt64(); - } - - header.Reserved1 = data.ReadBytes(0x20); - header.Reserved2 = data.ReadBytes(0x0E); - header.FirmUpdateByte1 = data.ReadByteValue(); - header.FirmUpdateByte2 = data.ReadByteValue(); - } - else if (header.PartitionsFSType == FilesystemType.FIRM) - { - header.Unknown = data.ReadBytes(0x5E); - 
header.EncryptedMBR = data.ReadBytes(0x42); - } - - return header; - } - - /// - /// Parse a Stream into a partition table entry - /// - /// Stream to parse - /// Filled partition table entry on success, null on error - private static PartitionTableEntry ParsePartitionTableEntry(Stream data) - { - // TODO: Use marshalling here instead of building - PartitionTableEntry partitionTableEntry = new PartitionTableEntry(); - - partitionTableEntry.Offset = data.ReadUInt32(); - partitionTableEntry.Length = data.ReadUInt32(); - - return partitionTableEntry; - } - - /// - /// Parse a Stream into a card info header - /// - /// Stream to parse - /// Filled card info header on success, null on error - private static CardInfoHeader ParseCardInfoHeader(Stream data) - { - // TODO: Use marshalling here instead of building - CardInfoHeader cardInfoHeader = new CardInfoHeader(); - - cardInfoHeader.WritableAddressMediaUnits = data.ReadUInt32(); - cardInfoHeader.CardInfoBitmask = data.ReadUInt32(); - cardInfoHeader.Reserved1 = data.ReadBytes(0xF8); - cardInfoHeader.FilledSize = data.ReadUInt32(); - cardInfoHeader.Reserved2 = data.ReadBytes(0x0C); - cardInfoHeader.TitleVersion = data.ReadUInt16(); - cardInfoHeader.CardRevision = data.ReadUInt16(); - cardInfoHeader.Reserved3 = data.ReadBytes(0x0C); - cardInfoHeader.CVerTitleID = data.ReadBytes(8); - cardInfoHeader.CVerVersionNumber = data.ReadUInt16(); - cardInfoHeader.Reserved4 = data.ReadBytes(0xCD6); - - return cardInfoHeader; - } - - /// - /// Parse a Stream into a development card info header - /// - /// Stream to parse - /// Filled development card info header on success, null on error - private static DevelopmentCardInfoHeader ParseDevelopmentCardInfoHeader(Stream data) - { - // TODO: Use marshalling here instead of building - DevelopmentCardInfoHeader developmentCardInfoHeader = new DevelopmentCardInfoHeader(); - - developmentCardInfoHeader.InitialData = ParseInitialData(data); - if (developmentCardInfoHeader.InitialData == null) - return null; - - developmentCardInfoHeader.CardDeviceReserved1 = data.ReadBytes(0x200); - developmentCardInfoHeader.TitleKey = data.ReadBytes(0x10); - developmentCardInfoHeader.CardDeviceReserved2 = data.ReadBytes(0x1BF0); - - developmentCardInfoHeader.TestData = ParseTestData(data); - if (developmentCardInfoHeader.TestData == null) - return null; - - return developmentCardInfoHeader; - } - - /// - /// Parse a Stream into an initial data - /// - /// Stream to parse - /// Filled initial data on success, null on error - private static InitialData ParseInitialData(Stream data) - { - // TODO: Use marshalling here instead of building - InitialData initialData = new InitialData(); - - initialData.CardSeedKeyY = data.ReadBytes(0x10); - initialData.EncryptedCardSeed = data.ReadBytes(0x10); - initialData.CardSeedAESMAC = data.ReadBytes(0x10); - initialData.CardSeedNonce = data.ReadBytes(0xC); - initialData.Reserved = data.ReadBytes(0xC4); - initialData.BackupHeader = ParseNCCHHeader(data, true); - if (initialData.BackupHeader == null) - return null; - - return initialData; - } - - /// - /// Parse a Stream into an NCCH header - /// - /// Stream to parse - /// Indicates if the signature should be skipped - /// Filled NCCH header on success, null on error - private static NCCHHeader ParseNCCHHeader(Stream data, bool skipSignature = false) - { - // TODO: Use marshalling here instead of building - NCCHHeader header = new NCCHHeader(); - - if (!skipSignature) - header.RSA2048Signature = data.ReadBytes(0x100); - - byte[] magicId = 
data.ReadBytes(4); - header.MagicID = Encoding.ASCII.GetString(magicId).TrimEnd('\0'); - header.ContentSizeInMediaUnits = data.ReadUInt32(); - header.PartitionId = data.ReadUInt64(); - header.MakerCode = data.ReadUInt16(); - header.Version = data.ReadUInt16(); - header.VerificationHash = data.ReadUInt32(); - header.ProgramId = data.ReadBytes(8); - header.Reserved1 = data.ReadBytes(0x10); - header.LogoRegionHash = data.ReadBytes(0x20); - byte[] productCode = data.ReadBytes(0x10); - header.ProductCode = Encoding.ASCII.GetString(productCode).TrimEnd('\0'); - header.ExtendedHeaderHash = data.ReadBytes(0x20); - header.ExtendedHeaderSizeInBytes = data.ReadUInt32(); - header.Reserved2 = data.ReadBytes(4); - header.Flags = ParseNCCHHeaderFlags(data); - header.PlainRegionOffsetInMediaUnits = data.ReadUInt32(); - header.PlainRegionSizeInMediaUnits = data.ReadUInt32(); - header.LogoRegionOffsetInMediaUnits = data.ReadUInt32(); - header.LogoRegionSizeInMediaUnits = data.ReadUInt32(); - header.ExeFSOffsetInMediaUnits = data.ReadUInt32(); - header.ExeFSSizeInMediaUnits = data.ReadUInt32(); - header.ExeFSHashRegionSizeInMediaUnits = data.ReadUInt32(); - header.Reserved3 = data.ReadBytes(4); - header.RomFSOffsetInMediaUnits = data.ReadUInt32(); - header.RomFSSizeInMediaUnits = data.ReadUInt32(); - header.RomFSHashRegionSizeInMediaUnits = data.ReadUInt32(); - header.Reserved4 = data.ReadBytes(4); - header.ExeFSSuperblockHash = data.ReadBytes(0x20); - header.RomFSSuperblockHash = data.ReadBytes(0x20); - - return header; - } - - /// - /// Parse a Stream into an NCCH header flags - /// - /// Stream to parse - /// Filled NCCH header flags on success, null on error - private static NCCHHeaderFlags ParseNCCHHeaderFlags(Stream data) - { - // TODO: Use marshalling here instead of building - NCCHHeaderFlags headerFlags = new NCCHHeaderFlags(); - - headerFlags.Reserved0 = data.ReadByteValue(); - headerFlags.Reserved1 = data.ReadByteValue(); - headerFlags.Reserved2 = data.ReadByteValue(); - headerFlags.CryptoMethod = (CryptoMethod)data.ReadByteValue(); - headerFlags.ContentPlatform = (ContentPlatform)data.ReadByteValue(); - headerFlags.MediaPlatformIndex = (ContentType)data.ReadByteValue(); - headerFlags.ContentUnitSize = data.ReadByteValue(); - headerFlags.BitMasks = (BitMasks)data.ReadByteValue(); - - return headerFlags; - } - - /// - /// Parse a Stream into an initial data - /// - /// Stream to parse - /// Filled initial data on success, null on error - private static TestData ParseTestData(Stream data) - { - // TODO: Use marshalling here instead of building - TestData testData = new TestData(); - - // TODO: Validate some of the values - testData.Signature = data.ReadBytes(8); - testData.AscendingByteSequence = data.ReadBytes(0x1F8); - testData.DescendingByteSequence = data.ReadBytes(0x200); - testData.Filled00 = data.ReadBytes(0x200); - testData.FilledFF = data.ReadBytes(0x200); - testData.Filled0F = data.ReadBytes(0x200); - testData.FilledF0 = data.ReadBytes(0x200); - testData.Filled55 = data.ReadBytes(0x200); - testData.FilledAA = data.ReadBytes(0x1FF); - testData.FinalByte = data.ReadByteValue(); - - return testData; - } - - /// - /// Parse a Stream into an NCCH extended header - /// - /// Stream to parse - /// Filled NCCH extended header on success, null on error - private static NCCHExtendedHeader ParseNCCHExtendedHeader(Stream data) - { - // TODO: Use marshalling here instead of building - NCCHExtendedHeader extendedHeader = new NCCHExtendedHeader(); - - extendedHeader.SCI = ParseSystemControlInfo(data); - 
if (extendedHeader.SCI == null) - return null; - - extendedHeader.ACI = ParseAccessControlInfo(data); - if (extendedHeader.ACI == null) - return null; - - extendedHeader.AccessDescSignature = data.ReadBytes(0x100); - extendedHeader.NCCHHDRPublicKey = data.ReadBytes(0x100); - - extendedHeader.ACIForLimitations = ParseAccessControlInfo(data); - if (extendedHeader.ACIForLimitations == null) - return null; - - return extendedHeader; - } - - /// - /// Parse a Stream into a system control info - /// - /// Stream to parse - /// Filled system control info on success, null on error - private static SystemControlInfo ParseSystemControlInfo(Stream data) - { - // TODO: Use marshalling here instead of building - SystemControlInfo systemControlInfo = new SystemControlInfo(); - - byte[] applicationTitle = data.ReadBytes(8); - systemControlInfo.ApplicationTitle = Encoding.ASCII.GetString(applicationTitle).TrimEnd('\0'); - systemControlInfo.Reserved1 = data.ReadBytes(5); - systemControlInfo.Flag = data.ReadByteValue(); - systemControlInfo.RemasterVersion = data.ReadUInt16(); - systemControlInfo.TextCodeSetInfo = ParseCodeSetInfo(data); - systemControlInfo.StackSize = data.ReadUInt32(); - systemControlInfo.ReadOnlyCodeSetInfo = ParseCodeSetInfo(data); - systemControlInfo.Reserved2 = data.ReadBytes(4); - systemControlInfo.DataCodeSetInfo = ParseCodeSetInfo(data); - systemControlInfo.BSSSize = data.ReadUInt32(); - systemControlInfo.DependencyModuleList = new ulong[48]; - for (int i = 0; i < 48; i++) - { - systemControlInfo.DependencyModuleList[i] = data.ReadUInt64(); - } - systemControlInfo.SystemInfo = ParseSystemInfo(data); - - return systemControlInfo; - } - - /// - /// Parse a Stream into a code set info - /// - /// Stream to parse - /// Filled code set info on success, null on error - private static CodeSetInfo ParseCodeSetInfo(Stream data) - { - // TODO: Use marshalling here instead of building - CodeSetInfo codeSetInfo = new CodeSetInfo(); - - codeSetInfo.Address = data.ReadUInt32(); - codeSetInfo.PhysicalRegionSizeInPages = data.ReadUInt32(); - codeSetInfo.SizeInBytes = data.ReadUInt32(); - - return codeSetInfo; - } - - /// - /// Parse a Stream into a system info - /// - /// Stream to parse - /// Filled system info on success, null on error - private static SystemInfo ParseSystemInfo(Stream data) - { - // TODO: Use marshalling here instead of building - SystemInfo systemInfo = new SystemInfo(); - - systemInfo.SaveDataSize = data.ReadUInt64(); - systemInfo.JumpID = data.ReadUInt64(); - systemInfo.Reserved = data.ReadBytes(0x30); - - return systemInfo; - } - - /// - /// Parse a Stream into an access control info - /// - /// Stream to parse - /// Filled access control info on success, null on error - private static AccessControlInfo ParseAccessControlInfo(Stream data) - { - // TODO: Use marshalling here instead of building - AccessControlInfo accessControlInfo = new AccessControlInfo(); - - accessControlInfo.ARM11LocalSystemCapabilities = ParseARM11LocalSystemCapabilities(data); - accessControlInfo.ARM11KernelCapabilities = ParseARM11KernelCapabilities(data); - accessControlInfo.ARM9AccessControl = ParseARM9AccessControl(data); - - return accessControlInfo; - } - - /// - /// Parse a Stream into an ARM11 local system capabilities - /// - /// Stream to parse - /// Filled ARM11 local system capabilities on success, null on error - private static ARM11LocalSystemCapabilities ParseARM11LocalSystemCapabilities(Stream data) - { - // TODO: Use marshalling here instead of building - ARM11LocalSystemCapabilities
arm11LocalSystemCapabilities = new ARM11LocalSystemCapabilities(); - - arm11LocalSystemCapabilities.ProgramID = data.ReadUInt64(); - arm11LocalSystemCapabilities.CoreVersion = data.ReadUInt32(); - arm11LocalSystemCapabilities.Flag1 = (ARM11LSCFlag1)data.ReadByteValue(); - arm11LocalSystemCapabilities.Flag2 = (ARM11LSCFlag2)data.ReadByteValue(); - arm11LocalSystemCapabilities.Flag0 = (ARM11LSCFlag0)data.ReadByteValue(); - arm11LocalSystemCapabilities.Priority = data.ReadByteValue(); - arm11LocalSystemCapabilities.ResourceLimitDescriptors = new ushort[16]; - for (int i = 0; i < 16; i++) - { - arm11LocalSystemCapabilities.ResourceLimitDescriptors[i] = data.ReadUInt16(); - } - arm11LocalSystemCapabilities.StorageInfo = ParseStorageInfo(data); - arm11LocalSystemCapabilities.ServiceAccessControl = new ulong[32]; - for (int i = 0; i < 32; i++) - { - arm11LocalSystemCapabilities.ServiceAccessControl[i] = data.ReadUInt64(); - } - arm11LocalSystemCapabilities.ExtendedServiceAccessControl = new ulong[2]; - for (int i = 0; i < 2; i++) - { - arm11LocalSystemCapabilities.ExtendedServiceAccessControl[i] = data.ReadUInt64(); - } - arm11LocalSystemCapabilities.Reserved = data.ReadBytes(0x0F); - arm11LocalSystemCapabilities.ResourceLimitCategory = (ResourceLimitCategory)data.ReadByteValue(); - - return arm11LocalSystemCapabilities; - } - - /// - /// Parse a Stream into a storage info - /// - /// Stream to parse - /// Filled storage info on success, null on error - private static StorageInfo ParseStorageInfo(Stream data) - { - // TODO: Use marshalling here instead of building - StorageInfo storageInfo = new StorageInfo(); - - storageInfo.ExtdataID = data.ReadUInt64(); - storageInfo.SystemSavedataIDs = data.ReadBytes(8); - storageInfo.StorageAccessibleUniqueIDs = data.ReadBytes(8); - storageInfo.FileSystemAccessInfo = data.ReadBytes(7); - storageInfo.OtherAttributes = (StorageInfoOtherAttributes)data.ReadByteValue(); - - return storageInfo; - } - - /// - /// Parse a Stream into an ARM11 kernel capabilities - /// - /// Stream to parse - /// Filled ARM11 kernel capabilities on success, null on error - private static ARM11KernelCapabilities ParseARM11KernelCapabilities(Stream data) - { - // TODO: Use marshalling here instead of building - ARM11KernelCapabilities arm11KernelCapabilities = new ARM11KernelCapabilities(); - - arm11KernelCapabilities.Descriptors = new uint[28]; - for (int i = 0; i < 28; i++) - { - arm11KernelCapabilities.Descriptors[i] = data.ReadUInt32(); - } - arm11KernelCapabilities.Reserved = data.ReadBytes(0x10); - - return arm11KernelCapabilities; - } - - /// - /// Parse a Stream into an ARM11 access control - /// - /// Stream to parse - /// Filled ARM11 access control on success, null on error - private static ARM9AccessControl ParseARM9AccessControl(Stream data) - { - // TODO: Use marshalling here instead of building - ARM9AccessControl arm9AccessControl = new ARM9AccessControl(); - - arm9AccessControl.Descriptors = data.ReadBytes(15); - arm9AccessControl.DescriptorVersion = data.ReadByteValue(); - - return arm9AccessControl; - } - - /// - /// Parse a Stream into an ExeFS header - /// - /// Stream to parse - /// Filled ExeFS header on success, null on error - private static ExeFSHeader ParseExeFSHeader(Stream data) - { - // TODO: Use marshalling here instead of building - ExeFSHeader exeFSHeader = new ExeFSHeader(); - - exeFSHeader.FileHeaders = new ExeFSFileHeader[10]; - for (int i = 0; i < 10; i++) - { - exeFSHeader.FileHeaders[i] = ParseExeFSFileHeader(data); - } - exeFSHeader.Reserved = 
data.ReadBytes(0x20); - exeFSHeader.FileHashes = new byte[10][]; - for (int i = 0; i < 10; i++) - { - exeFSHeader.FileHashes[i] = data.ReadBytes(0x20); - } - - return exeFSHeader; - } - - /// - /// Parse a Stream into an ExeFS file header - /// - /// Stream to parse - /// Filled ExeFS file header on success, null on error - private static ExeFSFileHeader ParseExeFSFileHeader(Stream data) - { - // TODO: Use marshalling here instead of building - ExeFSFileHeader exeFSFileHeader = new ExeFSFileHeader(); - - byte[] fileName = data.ReadBytes(8); - exeFSFileHeader.FileName = Encoding.ASCII.GetString(fileName).TrimEnd('\0'); - exeFSFileHeader.FileOffset = data.ReadUInt32(); - exeFSFileHeader.FileSize = data.ReadUInt32(); - - return exeFSFileHeader; - } - - /// - /// Parse a Stream into an RomFS header - /// - /// Stream to parse - /// Filled RomFS header on success, null on error - private static RomFSHeader ParseRomFSHeader(Stream data) - { - // TODO: Use marshalling here instead of building - RomFSHeader romFSHeader = new RomFSHeader(); - - byte[] magicString = data.ReadBytes(4); - romFSHeader.MagicString = Encoding.ASCII.GetString(magicString).TrimEnd('\0'); - if (romFSHeader.MagicString != RomFSMagicNumber) - return null; - - romFSHeader.MagicNumber = data.ReadUInt32(); - if (romFSHeader.MagicNumber != RomFSSecondMagicNumber) - return null; - - romFSHeader.MasterHashSize = data.ReadUInt32(); - romFSHeader.Level1LogicalOffset = data.ReadUInt64(); - romFSHeader.Level1HashdataSize = data.ReadUInt64(); - romFSHeader.Level1BlockSizeLog2 = data.ReadUInt32(); - romFSHeader.Reserved1 = data.ReadBytes(4); - romFSHeader.Level2LogicalOffset = data.ReadUInt64(); - romFSHeader.Level2HashdataSize = data.ReadUInt64(); - romFSHeader.Level2BlockSizeLog2 = data.ReadUInt32(); - romFSHeader.Reserved2 = data.ReadBytes(4); - romFSHeader.Level3LogicalOffset = data.ReadUInt64(); - romFSHeader.Level3HashdataSize = data.ReadUInt64(); - romFSHeader.Level3BlockSizeLog2 = data.ReadUInt32(); - romFSHeader.Reserved3 = data.ReadBytes(4); - romFSHeader.Reserved4 = data.ReadBytes(4); - romFSHeader.OptionalInfoSize = data.ReadUInt32(); - - return romFSHeader; - } - - /// - /// Parse a Stream into a CIA header - /// - /// Stream to parse - /// Filled CIA header on success, null on error - private static CIAHeader ParseCIAHeader(Stream data) - { - // TODO: Use marshalling here instead of building - CIAHeader ciaHeader = new CIAHeader(); - - ciaHeader.HeaderSize = data.ReadUInt32(); - ciaHeader.Type = data.ReadUInt16(); - ciaHeader.Version = data.ReadUInt16(); - ciaHeader.CertificateChainSize = data.ReadUInt32(); - ciaHeader.TicketSize = data.ReadUInt32(); - ciaHeader.TMDFileSize = data.ReadUInt32(); - ciaHeader.MetaSize = data.ReadUInt32(); - ciaHeader.ContentSize = data.ReadUInt64(); - ciaHeader.ContentIndex = data.ReadBytes(0x2000); - - return ciaHeader; - } - - /// - /// Parse a Stream into a certificate - /// - /// Stream to parse - /// Filled certificate on success, null on error - private static Certificate ParseCertificate(Stream data) - { - // TODO: Use marshalling here instead of building - Certificate certificate = new Certificate(); - - certificate.SignatureType = (SignatureType)data.ReadUInt32(); - switch (certificate.SignatureType) - { - case SignatureType.RSA_4096_SHA1: - certificate.SignatureSize = 0x200; - certificate.PaddingSize = 0x3C; - break; - case SignatureType.RSA_2048_SHA1: - certificate.SignatureSize = 0x100; - certificate.PaddingSize = 0x3C; - break; - case SignatureType.ECDSA_SHA1: - 
certificate.SignatureSize = 0x3C; - certificate.PaddingSize = 0x40; - break; - case SignatureType.RSA_4096_SHA256: - certificate.SignatureSize = 0x200; - certificate.PaddingSize = 0x3C; - break; - case SignatureType.RSA_2048_SHA256: - certificate.SignatureSize = 0x100; - certificate.PaddingSize = 0x3C; - break; - case SignatureType.ECDSA_SHA256: - certificate.SignatureSize = 0x3C; - certificate.PaddingSize = 0x40; - break; - default: - return null; - } - - certificate.Signature = data.ReadBytes(certificate.SignatureSize); - certificate.Padding = data.ReadBytes(certificate.PaddingSize); - byte[] issuer = data.ReadBytes(0x40); - certificate.Issuer = Encoding.ASCII.GetString(issuer).TrimEnd('\0'); - certificate.KeyType = (PublicKeyType)data.ReadUInt32(); - byte[] name = data.ReadBytes(0x40); - certificate.Name = Encoding.ASCII.GetString(name).TrimEnd('\0'); - certificate.ExpirationTime = data.ReadUInt32(); - - switch (certificate.KeyType) - { - case PublicKeyType.RSA_4096: - certificate.RSAModulus = data.ReadBytes(0x200); - certificate.RSAPublicExponent = data.ReadUInt32(); - certificate.RSAPadding = data.ReadBytes(0x34); - break; - case PublicKeyType.RSA_2048: - certificate.RSAModulus = data.ReadBytes(0x100); - certificate.RSAPublicExponent = data.ReadUInt32(); - certificate.RSAPadding = data.ReadBytes(0x34); - break; - case PublicKeyType.EllipticCurve: - certificate.ECCPublicKey = data.ReadBytes(0x3C); - certificate.ECCPadding = data.ReadBytes(0x3C); - break; - default: - return null; - } - - return certificate; - } - - /// - /// Parse a Stream into a ticket - /// - /// Stream to parse - /// Indicates if the ticket is from CDN - /// Filled ticket on success, null on error - private static Ticket ParseTicket(Stream data, bool fromCdn = false) - { - // TODO: Use marshalling here instead of building - Ticket ticket = new Ticket(); - - ticket.SignatureType = (SignatureType)data.ReadUInt32(); - switch (ticket.SignatureType) - { - case SignatureType.RSA_4096_SHA1: - ticket.SignatureSize = 0x200; - ticket.PaddingSize = 0x3C; - break; - case SignatureType.RSA_2048_SHA1: - ticket.SignatureSize = 0x100; - ticket.PaddingSize = 0x3C; - break; - case SignatureType.ECDSA_SHA1: - ticket.SignatureSize = 0x3C; - ticket.PaddingSize = 0x40; - break; - case SignatureType.RSA_4096_SHA256: - ticket.SignatureSize = 0x200; - ticket.PaddingSize = 0x3C; - break; - case SignatureType.RSA_2048_SHA256: - ticket.SignatureSize = 0x100; - ticket.PaddingSize = 0x3C; - break; - case SignatureType.ECDSA_SHA256: - ticket.SignatureSize = 0x3C; - ticket.PaddingSize = 0x40; - break; - default: - return null; - } - - ticket.Signature = data.ReadBytes(ticket.SignatureSize); - ticket.Padding = data.ReadBytes(ticket.PaddingSize); - byte[] issuer = data.ReadBytes(0x40); - ticket.Issuer = Encoding.ASCII.GetString(issuer).TrimEnd('\0'); - ticket.ECCPublicKey = data.ReadBytes(0x3C); - ticket.Version = data.ReadByteValue(); - ticket.CaCrlVersion = data.ReadByteValue(); - ticket.SignerCrlVersion = data.ReadByteValue(); - ticket.TitleKey = data.ReadBytes(0x10); - ticket.Reserved1 = data.ReadByteValue(); - ticket.TicketID = data.ReadUInt64(); - ticket.ConsoleID = data.ReadUInt32(); - ticket.TitleID = data.ReadUInt64(); - ticket.Reserved2 = data.ReadBytes(2); - ticket.TicketTitleVersion = data.ReadUInt16(); - ticket.Reserved3 = data.ReadBytes(8); - ticket.LicenseType = data.ReadByteValue(); - ticket.CommonKeyYIndex = data.ReadByteValue(); - ticket.Reserved4 = data.ReadBytes(0x2A); - ticket.eShopAccountID = data.ReadUInt32(); - 
ticket.Reserved5 = data.ReadByteValue(); - ticket.Audit = data.ReadByteValue(); - ticket.Reserved6 = data.ReadBytes(0x42); - ticket.Limits = new uint[0x10]; - for (int i = 0; i < ticket.Limits.Length; i++) - { - ticket.Limits[i] = data.ReadUInt32(); - } - - // Seek to the content index size - data.Seek(4, SeekOrigin.Current); - - // Read the size (big-endian) - byte[] contentIndexSize = data.ReadBytes(4); - Array.Reverse(contentIndexSize); - ticket.ContentIndexSize = BitConverter.ToUInt32(contentIndexSize, 0); - - // Seek back to the start of the content index - data.Seek(-8, SeekOrigin.Current); - - ticket.ContentIndex = data.ReadBytes((int)ticket.ContentIndexSize); - - // Certificates only exist in standalone CETK files - if (fromCdn) - { - ticket.CertificateChain = new Certificate[2]; - for (int i = 0; i < 2; i++) - { - var certificate = ParseCertificate(data); - if (certificate == null) - return null; - - ticket.CertificateChain[i] = certificate; - } - } - - return ticket; - } - - /// - /// Parse a Stream into a title metadata - /// - /// Stream to parse - /// Indicates if the ticket is from CDN - /// Filled title metadata on success, null on error - private static TitleMetadata ParseTitleMetadata(Stream data, bool fromCdn = false) - { - // TODO: Use marshalling here instead of building - TitleMetadata titleMetadata = new TitleMetadata(); - - titleMetadata.SignatureType = (SignatureType)data.ReadUInt32(); - switch (titleMetadata.SignatureType) - { - case SignatureType.RSA_4096_SHA1: - titleMetadata.SignatureSize = 0x200; - titleMetadata.PaddingSize = 0x3C; - break; - case SignatureType.RSA_2048_SHA1: - titleMetadata.SignatureSize = 0x100; - titleMetadata.PaddingSize = 0x3C; - break; - case SignatureType.ECDSA_SHA1: - titleMetadata.SignatureSize = 0x3C; - titleMetadata.PaddingSize = 0x40; - break; - case SignatureType.RSA_4096_SHA256: - titleMetadata.SignatureSize = 0x200; - titleMetadata.PaddingSize = 0x3C; - break; - case SignatureType.RSA_2048_SHA256: - titleMetadata.SignatureSize = 0x100; - titleMetadata.PaddingSize = 0x3C; - break; - case SignatureType.ECDSA_SHA256: - titleMetadata.SignatureSize = 0x3C; - titleMetadata.PaddingSize = 0x40; - break; - default: - return null; - } - - titleMetadata.Signature = data.ReadBytes(titleMetadata.SignatureSize); - titleMetadata.Padding1 = data.ReadBytes(titleMetadata.PaddingSize); - byte[] issuer = data.ReadBytes(0x40); - titleMetadata.Issuer = Encoding.ASCII.GetString(issuer).TrimEnd('\0'); - titleMetadata.Version = data.ReadByteValue(); - titleMetadata.CaCrlVersion = data.ReadByteValue(); - titleMetadata.SignerCrlVersion = data.ReadByteValue(); - titleMetadata.Reserved1 = data.ReadByteValue(); - titleMetadata.SystemVersion = data.ReadUInt64(); - titleMetadata.TitleID = data.ReadUInt64(); - titleMetadata.TitleType = data.ReadUInt32(); - titleMetadata.GroupID = data.ReadUInt16(); - titleMetadata.SaveDataSize = data.ReadUInt32(); - titleMetadata.SRLPrivateSaveDataSize = data.ReadUInt32(); - titleMetadata.Reserved2 = data.ReadBytes(4); - titleMetadata.SRLFlag = data.ReadByteValue(); - titleMetadata.Reserved3 = data.ReadBytes(0x31); - titleMetadata.AccessRights = data.ReadUInt32(); - titleMetadata.TitleVersion = data.ReadUInt16(); - - // Read the content count (big-endian) - byte[] contentCount = data.ReadBytes(2); - Array.Reverse(contentCount); - titleMetadata.ContentCount = BitConverter.ToUInt16(contentCount, 0); - - titleMetadata.BootContent = data.ReadUInt16(); - titleMetadata.Padding2 = data.ReadBytes(2); - 
titleMetadata.SHA256HashContentInfoRecords = data.ReadBytes(0x20); - titleMetadata.ContentInfoRecords = new ContentInfoRecord[64]; - for (int i = 0; i < 64; i++) - { - titleMetadata.ContentInfoRecords[i] = ParseContentInfoRecord(data); - } - titleMetadata.ContentChunkRecords = new ContentChunkRecord[titleMetadata.ContentCount]; - for (int i = 0; i < titleMetadata.ContentCount; i++) - { - titleMetadata.ContentChunkRecords[i] = ParseContentChunkRecord(data); - } - - // Certificates only exist in standalone TMD files - if (fromCdn) - { - titleMetadata.CertificateChain = new Certificate[2]; - for (int i = 0; i < 2; i++) - { - var certificate = ParseCertificate(data); - if (certificate == null) - return null; - - titleMetadata.CertificateChain[i] = certificate; - } - } - - return titleMetadata; - } - - /// - /// Parse a Stream into a content info record - /// - /// Stream to parse - /// Filled content info record on success, null on error - private static ContentInfoRecord ParseContentInfoRecord(Stream data) - { - // TODO: Use marshalling here instead of building - ContentInfoRecord contentInfoRecord = new ContentInfoRecord(); - - contentInfoRecord.ContentIndexOffset = data.ReadUInt16(); - contentInfoRecord.ContentCommandCount = data.ReadUInt16(); - contentInfoRecord.UnhashedContentRecordsSHA256Hash = data.ReadBytes(0x20); - - return contentInfoRecord; - } - - /// - /// Parse a Stream into a content chunk record - /// - /// Stream to parse - /// Filled content chunk record on success, null on error - private static ContentChunkRecord ParseContentChunkRecord(Stream data) - { - // TODO: Use marshalling here instead of building - ContentChunkRecord contentChunkRecord = new ContentChunkRecord(); - - contentChunkRecord.ContentId = data.ReadUInt32(); - contentChunkRecord.ContentIndex = (ContentIndex)data.ReadUInt16(); - contentChunkRecord.ContentType = (TMDContentType)data.ReadUInt16(); - contentChunkRecord.ContentSize = data.ReadUInt64(); - contentChunkRecord.SHA256Hash = data.ReadBytes(0x20); - - return contentChunkRecord; - } - - /// - /// Parse a Stream into a meta data - /// - /// Stream to parse - /// Filled meta data on success, null on error - private static MetaData ParseMetaData(Stream data) - { - // TODO: Use marshalling here instead of building - MetaData metaData = new MetaData(); - - metaData.TitleIDDependencyList = data.ReadBytes(0x180); - metaData.Reserved1 = data.ReadBytes(0x180); - metaData.CoreVersion = data.ReadUInt32(); - metaData.Reserved2 = data.ReadBytes(0xFC); - metaData.IconData = data.ReadBytes(0x36C0); - - return metaData; - } - - #endregion - } -} diff --git a/BinaryObjectScanner.Builders/NCF.cs b/BinaryObjectScanner.Builders/NCF.cs deleted file mode 100644 index 8a8aeec3..00000000 --- a/BinaryObjectScanner.Builders/NCF.cs +++ /dev/null @@ -1,544 +0,0 @@ -using System.Collections.Generic; -using System.IO; -using System.Text; -using SabreTools.IO; -using SabreTools.Models.NCF; - -namespace BinaryObjectScanner.Builders -{ - public static class NCF - { - #region Byte Data - - /// - /// Parse a byte array into a Half-Life No Cache - /// - /// Byte array to parse - /// Offset into the byte array - /// Filled Half-Life No Cache on success, null on error - public static SabreTools.Models.NCF.File ParseFile(byte[] data, int offset) - { - // If the data is invalid - if (data == null) - return null; - - // If the offset is out of bounds - if (offset < 0 || offset >= data.Length) - return null; - - // Create a memory stream and parse that - MemoryStream dataStream = new 
MemoryStream(data, offset, data.Length - offset); - return ParseFile(dataStream); - } - - #endregion - - #region Stream Data - - /// - /// Parse a Stream into a Half-Life No Cache - /// - /// Stream to parse - /// Filled Half-Life No Cache on success, null on error - public static SabreTools.Models.NCF.File ParseFile(Stream data) - { - // If the data is invalid - if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead) - return null; - - // If the offset is out of bounds - if (data.Position < 0 || data.Position >= data.Length) - return null; - - // Cache the current offset - long initialOffset = data.Position; - - // Create a new Half-Life No Cache to fill - var file = new SabreTools.Models.NCF.File(); - - #region Header - - // Try to parse the header - var header = ParseHeader(data); - if (header == null) - return null; - - // Set the no cache header - file.Header = header; - - #endregion - - // Cache the current offset - initialOffset = data.Position; - - #region Directory Header - - // Try to parse the directory header - var directoryHeader = ParseDirectoryHeader(data); - if (directoryHeader == null) - return null; - - // Set the game cache directory header - file.DirectoryHeader = directoryHeader; - - #endregion - - #region Directory Entries - - // Create the directory entry array - file.DirectoryEntries = new DirectoryEntry[directoryHeader.ItemCount]; - - // Try to parse the directory entries - for (int i = 0; i < directoryHeader.ItemCount; i++) - { - var directoryEntry = ParseDirectoryEntry(data); - file.DirectoryEntries[i] = directoryEntry; - } - - #endregion - - #region Directory Names - - if (directoryHeader.NameSize > 0) - { - // Get the current offset for adjustment - long directoryNamesStart = data.Position; - - // Get the ending offset - long directoryNamesEnd = data.Position + directoryHeader.NameSize; - - // Create the string dictionary - file.DirectoryNames = new Dictionary(); - - // Loop and read the null-terminated strings - while (data.Position < directoryNamesEnd) - { - long nameOffset = data.Position - directoryNamesStart; - string directoryName = data.ReadString(Encoding.ASCII); - if (data.Position > directoryNamesEnd) - { - data.Seek(-directoryName.Length, SeekOrigin.Current); - byte[] endingData = data.ReadBytes((int)(directoryNamesEnd - data.Position)); - if (endingData != null) - directoryName = Encoding.ASCII.GetString(endingData); - else - directoryName = null; - } - - file.DirectoryNames[nameOffset] = directoryName; - } - - // Loop and assign to entries - foreach (var directoryEntry in file.DirectoryEntries) - { - directoryEntry.Name = file.DirectoryNames[directoryEntry.NameOffset]; - } - } - - #endregion - - #region Directory Info 1 Entries - - // Create the directory info 1 entry array - file.DirectoryInfo1Entries = new DirectoryInfo1Entry[directoryHeader.Info1Count]; - - // Try to parse the directory info 1 entries - for (int i = 0; i < directoryHeader.Info1Count; i++) - { - var directoryInfo1Entry = ParseDirectoryInfo1Entry(data); - file.DirectoryInfo1Entries[i] = directoryInfo1Entry; - } - - #endregion - - #region Directory Info 2 Entries - - // Create the directory info 2 entry array - file.DirectoryInfo2Entries = new DirectoryInfo2Entry[directoryHeader.ItemCount]; - - // Try to parse the directory info 2 entries - for (int i = 0; i < directoryHeader.ItemCount; i++) - { - var directoryInfo2Entry = ParseDirectoryInfo2Entry(data); - file.DirectoryInfo2Entries[i] = directoryInfo2Entry; - } - - #endregion - - #region Directory Copy Entries - - 
// Create the directory copy entry array - file.DirectoryCopyEntries = new DirectoryCopyEntry[directoryHeader.CopyCount]; - - // Try to parse the directory copy entries - for (int i = 0; i < directoryHeader.CopyCount; i++) - { - var directoryCopyEntry = ParseDirectoryCopyEntry(data); - file.DirectoryCopyEntries[i] = directoryCopyEntry; - } - - #endregion - - #region Directory Local Entries - - // Create the directory local entry array - file.DirectoryLocalEntries = new DirectoryLocalEntry[directoryHeader.LocalCount]; - - // Try to parse the directory local entries - for (int i = 0; i < directoryHeader.LocalCount; i++) - { - var directoryLocalEntry = ParseDirectoryLocalEntry(data); - file.DirectoryLocalEntries[i] = directoryLocalEntry; - } - - #endregion - - // Seek to end of directory section, just in case - data.Seek(initialOffset + directoryHeader.DirectorySize, SeekOrigin.Begin); - - #region Unknown Header - - // Try to parse the unknown header - var unknownHeader = ParseUnknownHeader(data); - if (unknownHeader == null) - return null; - - // Set the game cache unknown header - file.UnknownHeader = unknownHeader; - - #endregion - - #region Unknown Entries - - // Create the unknown entry array - file.UnknownEntries = new UnknownEntry[directoryHeader.ItemCount]; - - // Try to parse the unknown entries - for (int i = 0; i < directoryHeader.ItemCount; i++) - { - var unknownEntry = ParseUnknownEntry(data); - file.UnknownEntries[i] = unknownEntry; - } - - #endregion - - #region Checksum Header - - // Try to parse the checksum header - var checksumHeader = ParseChecksumHeader(data); - if (checksumHeader == null) - return null; - - // Set the game cache checksum header - file.ChecksumHeader = checksumHeader; - - #endregion - - // Cache the current offset - initialOffset = data.Position; - - #region Checksum Map Header - - // Try to parse the checksum map header - var checksumMapHeader = ParseChecksumMapHeader(data); - if (checksumMapHeader == null) - return null; - - // Set the game cache checksum map header - file.ChecksumMapHeader = checksumMapHeader; - - #endregion - - #region Checksum Map Entries - - // Create the checksum map entry array - file.ChecksumMapEntries = new ChecksumMapEntry[checksumMapHeader.ItemCount]; - - // Try to parse the checksum map entries - for (int i = 0; i < checksumMapHeader.ItemCount; i++) - { - var checksumMapEntry = ParseChecksumMapEntry(data); - file.ChecksumMapEntries[i] = checksumMapEntry; - } - - #endregion - - #region Checksum Entries - - // Create the checksum entry array - file.ChecksumEntries = new ChecksumEntry[checksumMapHeader.ChecksumCount]; - - // Try to parse the checksum entries - for (int i = 0; i < checksumMapHeader.ChecksumCount; i++) - { - var checksumEntry = ParseChecksumEntry(data); - file.ChecksumEntries[i] = checksumEntry; - } - - #endregion - - // Seek to end of checksum section, just in case - data.Seek(initialOffset + checksumHeader.ChecksumSize, SeekOrigin.Begin); - - return file; - } - - /// - /// Parse a Stream into a Half-Life No Cache header - /// - /// Stream to parse - /// Filled Half-Life No Cache header on success, null on error - private static Header ParseHeader(Stream data) - { - // TODO: Use marshalling here instead of building - Header header = new Header(); - - header.Dummy0 = data.ReadUInt32(); - if (header.Dummy0 != 0x00000001) - return null; - - header.MajorVersion = data.ReadUInt32(); - if (header.MajorVersion != 0x00000002) - return null; - - header.MinorVersion = data.ReadUInt32(); - if (header.MinorVersion != 1) - 
return null; - - header.CacheID = data.ReadUInt32(); - header.LastVersionPlayed = data.ReadUInt32(); - header.Dummy1 = data.ReadUInt32(); - header.Dummy2 = data.ReadUInt32(); - header.FileSize = data.ReadUInt32(); - header.BlockSize = data.ReadUInt32(); - header.BlockCount = data.ReadUInt32(); - header.Dummy3 = data.ReadUInt32(); - - return header; - } - - /// - /// Parse a Stream into a Half-Life No Cache directory header - /// - /// Stream to parse - /// Filled Half-Life No Cache directory header on success, null on error - private static DirectoryHeader ParseDirectoryHeader(Stream data) - { - // TODO: Use marshalling here instead of building - DirectoryHeader directoryHeader = new DirectoryHeader(); - - directoryHeader.Dummy0 = data.ReadUInt32(); - if (directoryHeader.Dummy0 != 0x00000004) - return null; - - directoryHeader.CacheID = data.ReadUInt32(); - directoryHeader.LastVersionPlayed = data.ReadUInt32(); - directoryHeader.ItemCount = data.ReadUInt32(); - directoryHeader.FileCount = data.ReadUInt32(); - directoryHeader.ChecksumDataLength = data.ReadUInt32(); - directoryHeader.DirectorySize = data.ReadUInt32(); - directoryHeader.NameSize = data.ReadUInt32(); - directoryHeader.Info1Count = data.ReadUInt32(); - directoryHeader.CopyCount = data.ReadUInt32(); - directoryHeader.LocalCount = data.ReadUInt32(); - directoryHeader.Dummy1 = data.ReadUInt32(); - directoryHeader.Dummy2 = data.ReadUInt32(); - directoryHeader.Checksum = data.ReadUInt32(); - - return directoryHeader; - } - - /// - /// Parse a Stream into a Half-Life No Cache directory entry - /// - /// Stream to parse - /// Filled Half-Life No Cache directory entry on success, null on error - private static DirectoryEntry ParseDirectoryEntry(Stream data) - { - // TODO: Use marshalling here instead of building - DirectoryEntry directoryEntry = new DirectoryEntry(); - - directoryEntry.NameOffset = data.ReadUInt32(); - directoryEntry.ItemSize = data.ReadUInt32(); - directoryEntry.ChecksumIndex = data.ReadUInt32(); - directoryEntry.DirectoryFlags = (HL_NCF_FLAG)data.ReadUInt32(); - directoryEntry.ParentIndex = data.ReadUInt32(); - directoryEntry.NextIndex = data.ReadUInt32(); - directoryEntry.FirstIndex = data.ReadUInt32(); - - return directoryEntry; - } - - /// - /// Parse a Stream into a Half-Life No Cache directory info 1 entry - /// - /// Stream to parse - /// Filled Half-Life No Cache directory info 1 entry on success, null on error - private static DirectoryInfo1Entry ParseDirectoryInfo1Entry(Stream data) - { - // TODO: Use marshalling here instead of building - DirectoryInfo1Entry directoryInfo1Entry = new DirectoryInfo1Entry(); - - directoryInfo1Entry.Dummy0 = data.ReadUInt32(); - - return directoryInfo1Entry; - } - - /// - /// Parse a Stream into a Half-Life No Cache directory info 2 entry - /// - /// Stream to parse - /// Filled Half-Life No Cache directory info 2 entry on success, null on error - private static DirectoryInfo2Entry ParseDirectoryInfo2Entry(Stream data) - { - // TODO: Use marshalling here instead of building - DirectoryInfo2Entry directoryInfo2Entry = new DirectoryInfo2Entry(); - - directoryInfo2Entry.Dummy0 = data.ReadUInt32(); - - return directoryInfo2Entry; - } - - /// - /// Parse a Stream into a Half-Life No Cache directory copy entry - /// - /// Stream to parse - /// Filled Half-Life No Cache directory copy entry on success, null on error - private static DirectoryCopyEntry ParseDirectoryCopyEntry(Stream data) - { - // TODO: Use marshalling here instead of building - DirectoryCopyEntry directoryCopyEntry = 
new DirectoryCopyEntry(); - - directoryCopyEntry.DirectoryIndex = data.ReadUInt32(); - - return directoryCopyEntry; - } - - /// - /// Parse a Stream into a Half-Life No Cache directory local entry - /// - /// Stream to parse - /// Filled Half-Life No Cache directory local entry on success, null on error - private static DirectoryLocalEntry ParseDirectoryLocalEntry(Stream data) - { - // TODO: Use marshalling here instead of building - DirectoryLocalEntry directoryLocalEntry = new DirectoryLocalEntry(); - - directoryLocalEntry.DirectoryIndex = data.ReadUInt32(); - - return directoryLocalEntry; - } - - /// - /// Parse a Stream into a Half-Life No Cache unknown header - /// - /// Stream to parse - /// Filled Half-Life No Cache unknown header on success, null on error - private static UnknownHeader ParseUnknownHeader(Stream data) - { - // TODO: Use marshalling here instead of building - UnknownHeader unknownHeader = new UnknownHeader(); - - unknownHeader.Dummy0 = data.ReadUInt32(); - if (unknownHeader.Dummy0 != 0x00000001) - return null; - - unknownHeader.Dummy1 = data.ReadUInt32(); - if (unknownHeader.Dummy1 != 0x00000000) - return null; - - return unknownHeader; - } - - /// - /// Parse a Stream into a Half-Life No Cache unknown entry - /// - /// Stream to parse - /// Filled Half-Life No Cacheunknown entry on success, null on error - private static UnknownEntry ParseUnknownEntry(Stream data) - { - // TODO: Use marshalling here instead of building - UnknownEntry unknownEntry = new UnknownEntry(); - - unknownEntry.Dummy0 = data.ReadUInt32(); - - return unknownEntry; - } - - /// - /// Parse a Stream into a Half-Life No Cache checksum header - /// - /// Stream to parse - /// Filled Half-Life No Cache checksum header on success, null on error - private static ChecksumHeader ParseChecksumHeader(Stream data) - { - // TODO: Use marshalling here instead of building - ChecksumHeader checksumHeader = new ChecksumHeader(); - - checksumHeader.Dummy0 = data.ReadUInt32(); - if (checksumHeader.Dummy0 != 0x00000001) - return null; - - checksumHeader.ChecksumSize = data.ReadUInt32(); - - return checksumHeader; - } - - /// - /// Parse a Stream into a Half-Life No Cache checksum map header - /// - /// Stream to parse - /// Filled Half-Life No Cache checksum map header on success, null on error - private static ChecksumMapHeader ParseChecksumMapHeader(Stream data) - { - // TODO: Use marshalling here instead of building - ChecksumMapHeader checksumMapHeader = new ChecksumMapHeader(); - - checksumMapHeader.Dummy0 = data.ReadUInt32(); - if (checksumMapHeader.Dummy0 != 0x14893721) - return null; - - checksumMapHeader.Dummy1 = data.ReadUInt32(); - if (checksumMapHeader.Dummy1 != 0x00000001) - return null; - - checksumMapHeader.ItemCount = data.ReadUInt32(); - checksumMapHeader.ChecksumCount = data.ReadUInt32(); - - return checksumMapHeader; - } - - /// - /// Parse a Stream into a Half-Life No Cache checksum map entry - /// - /// Stream to parse - /// Filled Half-Life No Cache checksum map entry on success, null on error - private static ChecksumMapEntry ParseChecksumMapEntry(Stream data) - { - // TODO: Use marshalling here instead of building - ChecksumMapEntry checksumMapEntry = new ChecksumMapEntry(); - - checksumMapEntry.ChecksumCount = data.ReadUInt32(); - checksumMapEntry.FirstChecksumIndex = data.ReadUInt32(); - - return checksumMapEntry; - } - - /// - /// Parse a Stream into a Half-Life No Cache checksum entry - /// - /// Stream to parse - /// Filled Half-Life No Cache checksum entry on success, null on error - 
private static ChecksumEntry ParseChecksumEntry(Stream data) - { - // TODO: Use marshalling here instead of building - ChecksumEntry checksumEntry = new ChecksumEntry(); - - checksumEntry.Checksum = data.ReadUInt32(); - - return checksumEntry; - } - - #endregion - } -} diff --git a/BinaryObjectScanner.Builders/NewExecutable.cs b/BinaryObjectScanner.Builders/NewExecutable.cs deleted file mode 100644 index ce465c12..00000000 --- a/BinaryObjectScanner.Builders/NewExecutable.cs +++ /dev/null @@ -1,508 +0,0 @@ -using System.Collections.Generic; -using System.IO; -using System.Linq; -using System.Text; -using SabreTools.IO; -using SabreTools.Models.NewExecutable; -using static SabreTools.Models.NewExecutable.Constants; - -namespace BinaryObjectScanner.Builders -{ - public static class NewExecutable - { - #region Byte Data - - /// - /// Parse a byte array into a New Executable - /// - /// Byte array to parse - /// Offset into the byte array - /// Filled executable on success, null on error - public static Executable ParseExecutable(byte[] data, int offset) - { - // If the data is invalid - if (data == null) - return null; - - // If the offset is out of bounds - if (offset < 0 || offset >= data.Length) - return null; - - // Create a memory stream and parse that - MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset); - return ParseExecutable(dataStream); - } - - #endregion - - #region Stream Data - - /// - /// Parse a Stream into a New Executable - /// - /// Stream to parse - /// Filled executable on success, null on error - public static Executable ParseExecutable(Stream data) - { - // If the data is invalid - if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead) - return null; - - // If the offset is out of bounds - if (data.Position < 0 || data.Position >= data.Length) - return null; - - // Cache the current offset - int initialOffset = (int)data.Position; - - // Create a new executable to fill - var executable = new Executable(); - - #region MS-DOS Stub - - // Parse the MS-DOS stub - var stub = MSDOS.ParseExecutable(data); - if (stub?.Header == null || stub.Header.NewExeHeaderAddr == 0) - return null; - - // Set the MS-DOS stub - executable.Stub = stub; - - #endregion - - #region Executable Header - - // Try to parse the executable header - data.Seek(initialOffset + stub.Header.NewExeHeaderAddr, SeekOrigin.Begin); - var executableHeader = ParseExecutableHeader(data); - if (executableHeader == null) - return null; - - // Set the executable header - executable.Header = executableHeader; - - #endregion - - #region Segment Table - - // If the offset for the segment table doesn't exist - int tableAddress = initialOffset - + (int)stub.Header.NewExeHeaderAddr - + executableHeader.SegmentTableOffset; - if (tableAddress >= data.Length) - return executable; - - // Try to parse the segment table - data.Seek(tableAddress, SeekOrigin.Begin); - var segmentTable = ParseSegmentTable(data, executableHeader.FileSegmentCount); - if (segmentTable == null) - return null; - - // Set the segment table - executable.SegmentTable = segmentTable; - - #endregion - - #region Resource Table - - // If the offset for the segment table doesn't exist - tableAddress = initialOffset - + (int)stub.Header.NewExeHeaderAddr - + executableHeader.SegmentTableOffset; - if (tableAddress >= data.Length) - return executable; - - // Try to parse the resource table - data.Seek(tableAddress, SeekOrigin.Begin); - var resourceTable = ParseResourceTable(data, executableHeader.ResourceEntriesCount); - 
if (resourceTable == null) - return null; - - // Set the resource table - executable.ResourceTable = resourceTable; - - #endregion - - #region Resident-Name Table - - // If the offset for the resident-name table doesn't exist - tableAddress = initialOffset - + (int)stub.Header.NewExeHeaderAddr - + executableHeader.ResidentNameTableOffset; - int endOffset = initialOffset - + (int)stub.Header.NewExeHeaderAddr - + executableHeader.ModuleReferenceTableOffset; - if (tableAddress >= data.Length) - return executable; - - // Try to parse the resident-name table - data.Seek(tableAddress, SeekOrigin.Begin); - var residentNameTable = ParseResidentNameTable(data, endOffset); - if (residentNameTable == null) - return null; - - // Set the resident-name table - executable.ResidentNameTable = residentNameTable; - - #endregion - - #region Module-Reference Table - - // If the offset for the module-reference table doesn't exist - tableAddress = initialOffset - + (int)stub.Header.NewExeHeaderAddr - + executableHeader.ModuleReferenceTableOffset; - if (tableAddress >= data.Length) - return executable; - - // Try to parse the module-reference table - data.Seek(tableAddress, SeekOrigin.Begin); - var moduleReferenceTable = ParseModuleReferenceTable(data, executableHeader.ModuleReferenceTableSize); - if (moduleReferenceTable == null) - return null; - - // Set the module-reference table - executable.ModuleReferenceTable = moduleReferenceTable; - - #endregion - - #region Imported-Name Table - - // If the offset for the imported-name table doesn't exist - tableAddress = initialOffset - + (int)stub.Header.NewExeHeaderAddr - + executableHeader.ImportedNamesTableOffset; - endOffset = initialOffset - + (int)stub.Header.NewExeHeaderAddr - + executableHeader.EntryTableOffset; - if (tableAddress >= data.Length) - return executable; - - // Try to parse the imported-name table - data.Seek(tableAddress, SeekOrigin.Begin); - var importedNameTable = ParseImportedNameTable(data, endOffset); - if (importedNameTable == null) - return null; - - // Set the imported-name table - executable.ImportedNameTable = importedNameTable; - - #endregion - - #region Entry Table - - // If the offset for the imported-name table doesn't exist - tableAddress = initialOffset - + (int)stub.Header.NewExeHeaderAddr - + executableHeader.EntryTableOffset; - endOffset = initialOffset - + (int)stub.Header.NewExeHeaderAddr - + executableHeader.EntryTableOffset - + executableHeader.EntryTableSize; - if (tableAddress >= data.Length) - return executable; - - // Try to parse the imported-name table - data.Seek(tableAddress, SeekOrigin.Begin); - var entryTable = ParseEntryTable(data, endOffset); - if (entryTable == null) - return null; - - // Set the entry table - executable.EntryTable = entryTable; - - #endregion - - #region Nonresident-Name Table - - // If the offset for the nonresident-name table doesn't exist - tableAddress = initialOffset - + (int)executableHeader.NonResidentNamesTableOffset; - endOffset = initialOffset - + (int)executableHeader.NonResidentNamesTableOffset - + executableHeader.NonResidentNameTableSize; - if (tableAddress >= data.Length) - return executable; - - // Try to parse the nonresident-name table - data.Seek(tableAddress, SeekOrigin.Begin); - var nonResidentNameTable = ParseNonResidentNameTable(data, endOffset); - if (nonResidentNameTable == null) - return null; - - // Set the nonresident-name table - executable.NonResidentNameTable = nonResidentNameTable; - - #endregion - - return executable; - } - - /// - /// Parse a Stream into a New 
Executable header - /// - /// Stream to parse - /// Filled executable header on success, null on error - private static ExecutableHeader ParseExecutableHeader(Stream data) - { - // TODO: Use marshalling here instead of building - var header = new ExecutableHeader(); - - byte[] magic = data.ReadBytes(2); - header.Magic = Encoding.ASCII.GetString(magic); - if (header.Magic != SignatureString) - return null; - - header.LinkerVersion = data.ReadByteValue(); - header.LinkerRevision = data.ReadByteValue(); - header.EntryTableOffset = data.ReadUInt16(); - header.EntryTableSize = data.ReadUInt16(); - header.CrcChecksum = data.ReadUInt32(); - header.FlagWord = (HeaderFlag)data.ReadUInt16(); - header.AutomaticDataSegmentNumber = data.ReadUInt16(); - header.InitialHeapAlloc = data.ReadUInt16(); - header.InitialStackAlloc = data.ReadUInt16(); - header.InitialCSIPSetting = data.ReadUInt32(); - header.InitialSSSPSetting = data.ReadUInt32(); - header.FileSegmentCount = data.ReadUInt16(); - header.ModuleReferenceTableSize = data.ReadUInt16(); - header.NonResidentNameTableSize = data.ReadUInt16(); - header.SegmentTableOffset = data.ReadUInt16(); - header.ResourceTableOffset = data.ReadUInt16(); - header.ResidentNameTableOffset = data.ReadUInt16(); - header.ModuleReferenceTableOffset = data.ReadUInt16(); - header.ImportedNamesTableOffset = data.ReadUInt16(); - header.NonResidentNamesTableOffset = data.ReadUInt32(); - header.MovableEntriesCount = data.ReadUInt16(); - header.SegmentAlignmentShiftCount = data.ReadUInt16(); - header.ResourceEntriesCount = data.ReadUInt16(); - header.TargetOperatingSystem = (OperatingSystem)data.ReadByteValue(); - header.AdditionalFlags = (OS2Flag)data.ReadByteValue(); - header.ReturnThunkOffset = data.ReadUInt16(); - header.SegmentReferenceThunkOffset = data.ReadUInt16(); - header.MinCodeSwapAreaSize = data.ReadUInt16(); - header.WindowsSDKRevision = data.ReadByteValue(); - header.WindowsSDKVersion = data.ReadByteValue(); - - return header; - } - - /// - /// Parse a Stream into a segment table - /// - /// Stream to parse - /// Number of segment table entries to read - /// Filled segment table on success, null on error - private static SegmentTableEntry[] ParseSegmentTable(Stream data, int count) - { - // TODO: Use marshalling here instead of building - var segmentTable = new SegmentTableEntry[count]; - - for (int i = 0; i < count; i++) - { - var entry = new SegmentTableEntry(); - entry.Offset = data.ReadUInt16(); - entry.Length = data.ReadUInt16(); - entry.FlagWord = (SegmentTableEntryFlag)data.ReadUInt16(); - entry.MinimumAllocationSize = data.ReadUInt16(); - segmentTable[i] = entry; - } - - return segmentTable; - } - - /// - /// Parse a Stream into a resource table - /// - /// Stream to parse - /// Number of resource table entries to read - /// Filled resource table on success, null on error - private static ResourceTable ParseResourceTable(Stream data, int count) - { - long initialOffset = data.Position; - - // TODO: Use marshalling here instead of building - var resourceTable = new ResourceTable(); - - resourceTable.AlignmentShiftCount = data.ReadUInt16(); - resourceTable.ResourceTypes = new ResourceTypeInformationEntry[count]; - for (int i = 0; i < resourceTable.ResourceTypes.Length; i++) - { - var entry = new ResourceTypeInformationEntry(); - entry.TypeID = data.ReadUInt16(); - entry.ResourceCount = data.ReadUInt16(); - entry.Reserved = data.ReadUInt32(); - entry.Resources = new ResourceTypeResourceEntry[entry.ResourceCount]; - for (int j = 0; j < entry.ResourceCount; 
j++) - { - // TODO: Should we read and store the resource data? - var resource = new ResourceTypeResourceEntry(); - resource.Offset = data.ReadUInt16(); - resource.Length = data.ReadUInt16(); - resource.FlagWord = (ResourceTypeResourceFlag)data.ReadUInt16(); - resource.ResourceID = data.ReadUInt16(); - resource.Reserved = data.ReadUInt32(); - entry.Resources[j] = resource; - } - resourceTable.ResourceTypes[i] = entry; - } - - // Get the full list of unique string offsets - var stringOffsets = resourceTable.ResourceTypes - .Where(rt => rt.IsIntegerType() == false) - .Select(rt => rt.TypeID) - .Union(resourceTable.ResourceTypes - .SelectMany(rt => rt.Resources) - .Where(r => r.IsIntegerType() == false) - .Select(r => r.ResourceID)) - .Distinct() - .OrderBy(o => o) - .ToList(); - - // Populate the type and name string dictionary - resourceTable.TypeAndNameStrings = new Dictionary(); - for (int i = 0; i < stringOffsets.Count; i++) - { - int stringOffset = (int)(stringOffsets[i] + initialOffset); - data.Seek(stringOffset, SeekOrigin.Begin); - var str = new ResourceTypeAndNameString(); - str.Length = data.ReadByteValue(); - str.Text = data.ReadBytes(str.Length); - resourceTable.TypeAndNameStrings[stringOffsets[i]] = str; - } - - return resourceTable; - } - - /// - /// Parse a Stream into a resident-name table - /// - /// Stream to parse - /// First address not part of the resident-name table - /// Filled resident-name table on success, null on error - private static ResidentNameTableEntry[] ParseResidentNameTable(Stream data, int endOffset) - { - // TODO: Use marshalling here instead of building - var residentNameTable = new List(); - - while (data.Position < endOffset) - { - var entry = new ResidentNameTableEntry(); - entry.Length = data.ReadByteValue(); - entry.NameString = data.ReadBytes(entry.Length); - entry.OrdinalNumber = data.ReadUInt16(); - residentNameTable.Add(entry); - } - - return residentNameTable.ToArray(); - } - - /// - /// Parse a Stream into a module-reference table - /// - /// Stream to parse - /// Number of module-reference table entries to read - /// Filled module-reference table on success, null on error - private static ModuleReferenceTableEntry[] ParseModuleReferenceTable(Stream data, int count) - { - // TODO: Use marshalling here instead of building - var moduleReferenceTable = new ModuleReferenceTableEntry[count]; - - for (int i = 0; i < count; i++) - { - var entry = new ModuleReferenceTableEntry(); - entry.Offset = data.ReadUInt16(); - moduleReferenceTable[i] = entry; - } - - return moduleReferenceTable; - } - - /// - /// Parse a Stream into an imported-name table - /// - /// Stream to parse - /// First address not part of the imported-name table - /// Filled imported-name table on success, null on error - private static Dictionary ParseImportedNameTable(Stream data, int endOffset) - { - // TODO: Use marshalling here instead of building - var importedNameTable = new Dictionary(); - - while (data.Position < endOffset) - { - ushort currentOffset = (ushort)data.Position; - var entry = new ImportedNameTableEntry(); - entry.Length = data.ReadByteValue(); - entry.NameString = data.ReadBytes(entry.Length); - importedNameTable[currentOffset] = entry; - } - - return importedNameTable; - } - - /// - /// Parse a Stream into an entry table - /// - /// Stream to parse - /// First address not part of the entry table - /// Filled entry table on success, null on error - private static EntryTableBundle[] ParseEntryTable(Stream data, int endOffset) - { - // TODO: Use marshalling here 
instead of building - var entryTable = new List(); - - while (data.Position < endOffset) - { - var entry = new EntryTableBundle(); - entry.EntryCount = data.ReadByteValue(); - entry.SegmentIndicator = data.ReadByteValue(); - switch (entry.GetEntryType()) - { - case SegmentEntryType.Unused: - break; - - case SegmentEntryType.FixedSegment: - entry.FixedFlagWord = (FixedSegmentEntryFlag)data.ReadByteValue(); - entry.FixedOffset = data.ReadUInt16(); - break; - - case SegmentEntryType.MoveableSegment: - entry.MoveableFlagWord = (MoveableSegmentEntryFlag)data.ReadByteValue(); - entry.MoveableReserved = data.ReadUInt16(); - entry.MoveableSegmentNumber = data.ReadByteValue(); - entry.MoveableOffset = data.ReadUInt16(); - break; - } - entryTable.Add(entry); - } - - return entryTable.ToArray(); - } - - /// - /// Parse a Stream into a nonresident-name table - /// - /// Stream to parse - /// First address not part of the nonresident-name table - /// Filled nonresident-name table on success, null on error - private static NonResidentNameTableEntry[] ParseNonResidentNameTable(Stream data, int endOffset) - { - // TODO: Use marshalling here instead of building - var residentNameTable = new List(); - - while (data.Position < endOffset) - { - var entry = new NonResidentNameTableEntry(); - entry.Length = data.ReadByteValue(); - entry.NameString = data.ReadBytes(entry.Length); - entry.OrdinalNumber = data.ReadUInt16(); - residentNameTable.Add(entry); - } - - return residentNameTable.ToArray(); - } - - #endregion - } -} \ No newline at end of file diff --git a/BinaryObjectScanner.Builders/Nitro.cs b/BinaryObjectScanner.Builders/Nitro.cs deleted file mode 100644 index 9d6f7ec2..00000000 --- a/BinaryObjectScanner.Builders/Nitro.cs +++ /dev/null @@ -1,393 +0,0 @@ -using System.Collections.Generic; -using System.IO; -using System.Text; -using SabreTools.IO; -using SabreTools.Models.Nitro; - -namespace BinaryObjectScanner.Builders -{ - public class Nitro - { - #region Byte Data - - /// - /// Parse a byte array into a NDS cart image - /// - /// Byte array to parse - /// Offset into the byte array - /// Filled cart image on success, null on error - public static Cart ParseCart(byte[] data, int offset) - { - // If the data is invalid - if (data == null) - return null; - - // If the offset is out of bounds - if (offset < 0 || offset >= data.Length) - return null; - - // Create a memory stream and parse that - MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset); - return ParseCart(dataStream); - } - - #endregion - - #region Stream Data - - /// - /// Parse a Stream into a NDS cart image - /// - /// Stream to parse - /// Filled cart image on success, null on error - public static Cart ParseCart(Stream data) - { - // If the data is invalid - if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead) - return null; - - // If the offset is out of bounds - if (data.Position < 0 || data.Position >= data.Length) - return null; - - // Cache the current offset - int initialOffset = (int)data.Position; - - // Create a new cart image to fill - var cart = new Cart(); - - #region Header - - // Try to parse the header - var header = ParseCommonHeader(data); - if (header == null) - return null; - - // Set the cart image header - cart.CommonHeader = header; - - #endregion - - #region Extended DSi Header - - // If we have a DSi-compatible cartridge - if (header.UnitCode == Unitcode.NDSPlusDSi || header.UnitCode == Unitcode.DSi) - { - var extendedDSiHeader = ParseExtendedDSiHeader(data); - if 
(extendedDSiHeader == null) - return null; - - cart.ExtendedDSiHeader = extendedDSiHeader; - } - - #endregion - - #region Secure Area - - // Try to get the secure area offset - long secureAreaOffset = 0x4000; - if (secureAreaOffset > data.Length) - return null; - - // Seek to the secure area - data.Seek(secureAreaOffset, SeekOrigin.Begin); - - // Read the secure area without processing - cart.SecureArea = data.ReadBytes(0x800); - - #endregion - - #region Name Table - - // Try to get the name table offset - long nameTableOffset = header.FileNameTableOffset; - if (nameTableOffset < 0 || nameTableOffset > data.Length) - return null; - - // Seek to the name table - data.Seek(nameTableOffset, SeekOrigin.Begin); - - // Try to parse the name table - var nameTable = ParseNameTable(data); - if (nameTable == null) - return null; - - // Set the name table - cart.NameTable = nameTable; - - #endregion - - #region File Allocation Table - - // Try to get the file allocation table offset - long fileAllocationTableOffset = header.FileAllocationTableOffset; - if (fileAllocationTableOffset < 0 || fileAllocationTableOffset > data.Length) - return null; - - // Seek to the file allocation table - data.Seek(fileAllocationTableOffset, SeekOrigin.Begin); - - // Create the file allocation table - var fileAllocationTable = new List(); - - // Try to parse the file allocation table - while (data.Position - fileAllocationTableOffset < header.FileAllocationTableLength) - { - var entry = ParseFileAllocationTableEntry(data); - fileAllocationTable.Add(entry); - } - - // Set the file allocation table - cart.FileAllocationTable = fileAllocationTable.ToArray(); - - #endregion - - // TODO: Read and optionally parse out the other areas - // Look for offsets and lengths in the header pieces - - return cart; - } - - /// - /// Parse a Stream into a common header - /// - /// Stream to parse - /// Filled common header on success, null on error - private static CommonHeader ParseCommonHeader(Stream data) - { - // TODO: Use marshalling here instead of building - CommonHeader commonHeader = new CommonHeader(); - - byte[] gameTitle = data.ReadBytes(12); - commonHeader.GameTitle = Encoding.ASCII.GetString(gameTitle).TrimEnd('\0'); - commonHeader.GameCode = data.ReadUInt32(); - byte[] makerCode = data.ReadBytes(2); - commonHeader.MakerCode = Encoding.ASCII.GetString(bytes: makerCode).TrimEnd('\0'); - commonHeader.UnitCode = (Unitcode)data.ReadByteValue(); - commonHeader.EncryptionSeedSelect = data.ReadByteValue(); - commonHeader.DeviceCapacity = data.ReadByteValue(); - commonHeader.Reserved1 = data.ReadBytes(7); - commonHeader.GameRevision = data.ReadUInt16(); - commonHeader.RomVersion = data.ReadByteValue(); - commonHeader.InternalFlags = data.ReadByteValue(); - commonHeader.ARM9RomOffset = data.ReadUInt32(); - commonHeader.ARM9EntryAddress = data.ReadUInt32(); - commonHeader.ARM9LoadAddress = data.ReadUInt32(); - commonHeader.ARM9Size = data.ReadUInt32(); - commonHeader.ARM7RomOffset = data.ReadUInt32(); - commonHeader.ARM7EntryAddress = data.ReadUInt32(); - commonHeader.ARM7LoadAddress = data.ReadUInt32(); - commonHeader.ARM7Size = data.ReadUInt32(); - commonHeader.FileNameTableOffset = data.ReadUInt32(); - commonHeader.FileNameTableLength = data.ReadUInt32(); - commonHeader.FileAllocationTableOffset = data.ReadUInt32(); - commonHeader.FileAllocationTableLength = data.ReadUInt32(); - commonHeader.ARM9OverlayOffset = data.ReadUInt32(); - commonHeader.ARM9OverlayLength = data.ReadUInt32(); - commonHeader.ARM7OverlayOffset = 
data.ReadUInt32(); - commonHeader.ARM7OverlayLength = data.ReadUInt32(); - commonHeader.NormalCardControlRegisterSettings = data.ReadUInt32(); - commonHeader.SecureCardControlRegisterSettings = data.ReadUInt32(); - commonHeader.IconBannerOffset = data.ReadUInt32(); - commonHeader.SecureAreaCRC = data.ReadUInt16(); - commonHeader.SecureTransferTimeout = data.ReadUInt16(); - commonHeader.ARM9Autoload = data.ReadUInt32(); - commonHeader.ARM7Autoload = data.ReadUInt32(); - commonHeader.SecureDisable = data.ReadBytes(8); - commonHeader.NTRRegionRomSize = data.ReadUInt32(); - commonHeader.HeaderSize = data.ReadUInt32(); - commonHeader.Reserved2 = data.ReadBytes(56); - commonHeader.NintendoLogo = data.ReadBytes(156); - commonHeader.NintendoLogoCRC = data.ReadUInt16(); - commonHeader.HeaderCRC = data.ReadUInt16(); - commonHeader.DebuggerReserved = data.ReadBytes(0x20); - - return commonHeader; - } - - /// - /// Parse a Stream into an extended DSi header - /// - /// Stream to parse - /// Filled extended DSi header on success, null on error - private static ExtendedDSiHeader ParseExtendedDSiHeader(Stream data) - { - // TODO: Use marshalling here instead of building - ExtendedDSiHeader extendedDSiHeader = new ExtendedDSiHeader(); - - extendedDSiHeader.GlobalMBK15Settings = new uint[5]; - for (int i = 0; i < 5; i++) - { - extendedDSiHeader.GlobalMBK15Settings[i] = data.ReadUInt32(); - } - extendedDSiHeader.LocalMBK68SettingsARM9 = new uint[3]; - for (int i = 0; i < 3; i++) - { - extendedDSiHeader.LocalMBK68SettingsARM9[i] = data.ReadUInt32(); - } - extendedDSiHeader.LocalMBK68SettingsARM7 = new uint[3]; - for (int i = 0; i < 3; i++) - { - extendedDSiHeader.LocalMBK68SettingsARM7[i] = data.ReadUInt32(); - } - extendedDSiHeader.GlobalMBK9Setting = data.ReadUInt32(); - extendedDSiHeader.RegionFlags = data.ReadUInt32(); - extendedDSiHeader.AccessControl = data.ReadUInt32(); - extendedDSiHeader.ARM7SCFGEXTMask = data.ReadUInt32(); - extendedDSiHeader.ReservedFlags = data.ReadUInt32(); - extendedDSiHeader.ARM9iRomOffset = data.ReadUInt32(); - extendedDSiHeader.Reserved3 = data.ReadUInt32(); - extendedDSiHeader.ARM9iLoadAddress = data.ReadUInt32(); - extendedDSiHeader.ARM9iSize = data.ReadUInt32(); - extendedDSiHeader.ARM7iRomOffset = data.ReadUInt32(); - extendedDSiHeader.Reserved4 = data.ReadUInt32(); - extendedDSiHeader.ARM7iLoadAddress = data.ReadUInt32(); - extendedDSiHeader.ARM7iSize = data.ReadUInt32(); - extendedDSiHeader.DigestNTRRegionOffset = data.ReadUInt32(); - extendedDSiHeader.DigestNTRRegionLength = data.ReadUInt32(); - extendedDSiHeader.DigestTWLRegionOffset = data.ReadUInt32(); - extendedDSiHeader.DigestTWLRegionLength = data.ReadUInt32(); - extendedDSiHeader.DigestSectorHashtableRegionOffset = data.ReadUInt32(); - extendedDSiHeader.DigestSectorHashtableRegionLength = data.ReadUInt32(); - extendedDSiHeader.DigestBlockHashtableRegionOffset = data.ReadUInt32(); - extendedDSiHeader.DigestBlockHashtableRegionLength = data.ReadUInt32(); - extendedDSiHeader.DigestSectorSize = data.ReadUInt32(); - extendedDSiHeader.DigestBlockSectorCount = data.ReadUInt32(); - extendedDSiHeader.IconBannerSize = data.ReadUInt32(); - extendedDSiHeader.Unknown1 = data.ReadUInt32(); - extendedDSiHeader.ModcryptArea1Offset = data.ReadUInt32(); - extendedDSiHeader.ModcryptArea1Size = data.ReadUInt32(); - extendedDSiHeader.ModcryptArea2Offset = data.ReadUInt32(); - extendedDSiHeader.ModcryptArea2Size = data.ReadUInt32(); - extendedDSiHeader.TitleID = data.ReadBytes(8); - extendedDSiHeader.DSiWarePublicSavSize = 
data.ReadUInt32(); - extendedDSiHeader.DSiWarePrivateSavSize = data.ReadUInt32(); - extendedDSiHeader.ReservedZero = data.ReadBytes(176); - extendedDSiHeader.Unknown2 = data.ReadBytes(0x10); - extendedDSiHeader.ARM9WithSecureAreaSHA1HMACHash = data.ReadBytes(20); - extendedDSiHeader.ARM7SHA1HMACHash = data.ReadBytes(20); - extendedDSiHeader.DigestMasterSHA1HMACHash = data.ReadBytes(20); - extendedDSiHeader.BannerSHA1HMACHash = data.ReadBytes(20); - extendedDSiHeader.ARM9iDecryptedSHA1HMACHash = data.ReadBytes(20); - extendedDSiHeader.ARM7iDecryptedSHA1HMACHash = data.ReadBytes(20); - extendedDSiHeader.Reserved5 = data.ReadBytes(40); - extendedDSiHeader.ARM9NoSecureAreaSHA1HMACHash = data.ReadBytes(20); - extendedDSiHeader.Reserved6 = data.ReadBytes(2636); - extendedDSiHeader.ReservedAndUnchecked = data.ReadBytes(0x180); - extendedDSiHeader.RSASignature = data.ReadBytes(0x80); - - return extendedDSiHeader; - } - - /// - /// Parse a Stream into a name table - /// - /// Stream to parse - /// Filled name table on success, null on error - private static NameTable ParseNameTable(Stream data) - { - // TODO: Use marshalling here instead of building - NameTable nameTable = new NameTable(); - - // Create a variable-length table - var folderAllocationTable = new List(); - int entryCount = int.MaxValue; - while (entryCount > 0) - { - var entry = ParseFolderAllocationTableEntry(data); - folderAllocationTable.Add(entry); - - // If we have the root entry - if (entryCount == int.MaxValue) - entryCount = (entry.Unknown << 8) | entry.ParentFolderIndex; - - // Decrement the entry count - entryCount--; - } - - // Assign the folder allocation table - nameTable.FolderAllocationTable = folderAllocationTable.ToArray(); - - // Create a variable-length table - var nameList = new List(); - while (true) - { - var entry = ParseNameListEntry(data); - if (entry == null) - break; - - nameList.Add(entry); - } - - // Assign the name list - nameTable.NameList = nameList.ToArray(); - - return nameTable; - } - - /// - /// Parse a Stream into a folder allocation table entry - /// - /// Stream to parse - /// Filled folder allocation table entry on success, null on error - private static FolderAllocationTableEntry ParseFolderAllocationTableEntry(Stream data) - { - // TODO: Use marshalling here instead of building - FolderAllocationTableEntry entry = new FolderAllocationTableEntry(); - - entry.StartOffset = data.ReadUInt32(); - entry.FirstFileIndex = data.ReadUInt16(); - entry.ParentFolderIndex = data.ReadByteValue(); - entry.Unknown = data.ReadByteValue(); - - return entry; - } - - /// - /// Parse a Stream into a name list entry - /// - /// Stream to parse - /// Filled name list entry on success, null on error - private static NameListEntry ParseNameListEntry(Stream data) - { - // TODO: Use marshalling here instead of building - NameListEntry entry = new NameListEntry(); - - byte flagAndSize = data.ReadByteValue(); - if (flagAndSize == 0xFF) - return null; - - entry.Folder = (flagAndSize & 0x80) != 0; - - byte size = (byte)(flagAndSize & ~0x80); - if (size > 0) - { - byte[] name = data.ReadBytes(size); - entry.Name = Encoding.UTF8.GetString(name); - } - - if (entry.Folder) - entry.Index = data.ReadUInt16(); - - return entry; - } - - /// - /// Parse a Stream into a name list entry - /// - /// Stream to parse - /// Filled name list entry on success, null on error - private static FileAllocationTableEntry ParseFileAllocationTableEntry(Stream data) - { - // TODO: Use marshalling here instead of building - FileAllocationTableEntry 
entry = new FileAllocationTableEntry(); - - entry.StartOffset = data.ReadUInt32(); - entry.EndOffset = data.ReadUInt32(); - - return entry; - } - - #endregion - } -} diff --git a/BinaryObjectScanner.Builders/PAK.cs b/BinaryObjectScanner.Builders/PAK.cs deleted file mode 100644 index bf293d52..00000000 --- a/BinaryObjectScanner.Builders/PAK.cs +++ /dev/null @@ -1,137 +0,0 @@ -using System.IO; -using System.Text; -using SabreTools.IO; -using SabreTools.Models.PAK; -using static SabreTools.Models.PAK.Constants; - -namespace BinaryObjectScanner.Builders -{ - public static class PAK - { - #region Byte Data - - /// - /// Parse a byte array into a Half-Life Package - /// - /// Byte array to parse - /// Offset into the byte array - /// Filled Half-Life Package on success, null on error - public static SabreTools.Models.PAK.File ParseFile(byte[] data, int offset) - { - // If the data is invalid - if (data == null) - return null; - - // If the offset is out of bounds - if (offset < 0 || offset >= data.Length) - return null; - - // Create a memory stream and parse that - MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset); - return ParseFile(dataStream); - } - - #endregion - - #region Stream Data - - /// - /// Parse a Stream into a Half-Life Package - /// - /// Stream to parse - /// Filled Half-Life Package on success, null on error - public static SabreTools.Models.PAK.File ParseFile(Stream data) - { - // If the data is invalid - if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead) - return null; - - // If the offset is out of bounds - if (data.Position < 0 || data.Position >= data.Length) - return null; - - // Cache the current offset - long initialOffset = data.Position; - - // Create a new Half-Life Package to fill - var file = new SabreTools.Models.PAK.File(); - - #region Header - - // Try to parse the header - var header = ParseHeader(data); - if (header == null) - return null; - - // Set the package header - file.Header = header; - - #endregion - - #region Directory Items - - // Get the directory items offset - uint directoryItemsOffset = header.DirectoryOffset; - if (directoryItemsOffset < 0 || directoryItemsOffset >= data.Length) - return null; - - // Seek to the directory items - data.Seek(directoryItemsOffset, SeekOrigin.Begin); - - // Create the directory item array - file.DirectoryItems = new DirectoryItem[header.DirectoryLength / 64]; - - // Try to parse the directory items - for (int i = 0; i < file.DirectoryItems.Length; i++) - { - var directoryItem = ParseDirectoryItem(data); - file.DirectoryItems[i] = directoryItem; - } - - #endregion - - return file; - } - - /// - /// Parse a Stream into a Half-Life Package header - /// - /// Stream to parse - /// Filled Half-Life Package header on success, null on error - private static Header ParseHeader(Stream data) - { - // TODO: Use marshalling here instead of building - Header header = new Header(); - - byte[] signature = data.ReadBytes(4); - header.Signature = Encoding.ASCII.GetString(signature); - if (header.Signature != SignatureString) - return null; - - header.DirectoryOffset = data.ReadUInt32(); - header.DirectoryLength = data.ReadUInt32(); - - return header; - } - - /// - /// Parse a Stream into a Half-Life Package directory item - /// - /// Stream to parse - /// Filled Half-Life Package directory item on success, null on error - private static DirectoryItem ParseDirectoryItem(Stream data) - { - // TODO: Use marshalling here instead of building - DirectoryItem directoryItem = new 
DirectoryItem(); - - byte[] itemName = data.ReadBytes(56); - directoryItem.ItemName = Encoding.ASCII.GetString(itemName).TrimEnd('\0'); - directoryItem.ItemOffset = data.ReadUInt32(); - directoryItem.ItemLength = data.ReadUInt32(); - - return directoryItem; - } - - #endregion - } -} diff --git a/BinaryObjectScanner.Builders/PFF.cs b/BinaryObjectScanner.Builders/PFF.cs deleted file mode 100644 index bdad2b8b..00000000 --- a/BinaryObjectScanner.Builders/PFF.cs +++ /dev/null @@ -1,211 +0,0 @@ -using System.IO; -using System.Text; -using SabreTools.IO; -using SabreTools.Models.PFF; -using static SabreTools.Models.PFF.Constants; - -namespace BinaryObjectScanner.Builders -{ - public class PFF - { - #region Byte Data - - /// - /// Parse a byte array into a PFF archive - /// - /// Byte array to parse - /// Offset into the byte array - /// Filled archive on success, null on error - public static Archive ParseArchive(byte[] data, int offset) - { - // If the data is invalid - if (data == null) - return null; - - // If the offset is out of bounds - if (offset < 0 || offset >= data.Length) - return null; - - // Create a memory stream and parse that - MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset); - return ParseArchive(dataStream); - } - - #endregion - - #region Stream Data - - /// - /// Parse a Stream into a PFF archive - /// - /// Stream to parse - /// Filled archive on success, null on error - public static Archive ParseArchive(Stream data) - { - // If the data is invalid - if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead) - return null; - - // If the offset is out of bounds - if (data.Position < 0 || data.Position >= data.Length) - return null; - - // Cache the current offset - int initialOffset = (int)data.Position; - - // Create a new archive to fill - var archive = new Archive(); - - #region Header - - // Try to parse the header - var header = ParseHeader(data); - if (header == null) - return null; - - // Set the archive header - archive.Header = header; - - #endregion - - #region Segments - - // Get the segments - long offset = header.FileListOffset; - if (offset < 0 || offset >= data.Length) - return null; - - // Seek to the segments - data.Seek(offset, SeekOrigin.Begin); - - // Create the segments array - archive.Segments = new Segment[header.NumberOfFiles]; - - // Read all segments in turn - for (int i = 0; i < header.NumberOfFiles; i++) - { - var file = ParseSegment(data, header.FileSegmentSize); - if (file == null) - return null; - - archive.Segments[i] = file; - } - - #endregion - - #region Footer - - // Get the footer offset - offset = header.FileListOffset + (header.FileSegmentSize * header.NumberOfFiles); - if (offset < 0 || offset >= data.Length) - return null; - - // Seek to the footer - data.Seek(offset, SeekOrigin.Begin); - - // Try to parse the footer - var footer = ParseFooter(data); - if (footer == null) - return null; - - // Set the archive footer - archive.Footer = footer; - - #endregion - - return archive; - } - - /// - /// Parse a Stream into a header - /// - /// Stream to parse - /// Filled header on success, null on error - private static Header ParseHeader(Stream data) - { - // TODO: Use marshalling here instead of building - Header header = new Header(); - - header.HeaderSize = data.ReadUInt32(); - byte[] signature = data.ReadBytes(4); - header.Signature = Encoding.ASCII.GetString(signature); - header.NumberOfFiles = data.ReadUInt32(); - header.FileSegmentSize = data.ReadUInt32(); - switch (header.Signature) - { - 
case Version0SignatureString: - if (header.FileSegmentSize != Version0HSegmentSize) - return null; - break; - - case Version2SignatureString: - if (header.FileSegmentSize != Version2SegmentSize) - return null; - break; - - // Version 3 can sometimes have Version 2 segment sizes - case Version3SignatureString: - if (header.FileSegmentSize != Version2SegmentSize && header.FileSegmentSize != Version3SegmentSize) - return null; - break; - - case Version4SignatureString: - if (header.FileSegmentSize != Version4SegmentSize) - return null; - break; - - default: - return null; - } - - header.FileListOffset = data.ReadUInt32(); - - return header; - } - - /// - /// Parse a Stream into a footer - /// - /// Stream to parse - /// Filled footer on success, null on error - private static Footer ParseFooter(Stream data) - { - // TODO: Use marshalling here instead of building - Footer footer = new Footer(); - - footer.SystemIP = data.ReadUInt32(); - footer.Reserved = data.ReadUInt32(); - byte[] kingTag = data.ReadBytes(4); - footer.KingTag = Encoding.ASCII.GetString(kingTag); - - return footer; - } - - /// - /// Parse a Stream into a file entry - /// - /// Stream to parse - /// PFF segment size - /// Filled file entry on success, null on error - private static Segment ParseSegment(Stream data, uint segmentSize) - { - // TODO: Use marshalling here instead of building - Segment segment = new Segment(); - - segment.Deleted = data.ReadUInt32(); - segment.FileLocation = data.ReadUInt32(); - segment.FileSize = data.ReadUInt32(); - segment.PackedDate = data.ReadUInt32(); - byte[] fileName = data.ReadBytes(0x10); - segment.FileName = Encoding.ASCII.GetString(fileName).TrimEnd('\0'); - if (segmentSize > Version2SegmentSize) - segment.ModifiedDate = data.ReadUInt32(); - if (segmentSize > Version3SegmentSize) - segment.CompressionLevel = data.ReadUInt32(); - - return segment; - } - - #endregion - } -} diff --git a/BinaryObjectScanner.Builders/PlayJ.cs b/BinaryObjectScanner.Builders/PlayJ.cs deleted file mode 100644 index e4c17aaf..00000000 --- a/BinaryObjectScanner.Builders/PlayJ.cs +++ /dev/null @@ -1,463 +0,0 @@ -using System.IO; -using System.Text; -using SabreTools.IO; -using SabreTools.Models.PlayJ; -using static SabreTools.Models.PlayJ.Constants; - -namespace BinaryObjectScanner.Builders -{ - public class PlayJ - { - #region Byte Data - - /// - /// Parse a byte array into a PlayJ playlist - /// - /// Byte array to parse - /// Offset into the byte array - /// Filled playlist on success, null on error - public static Playlist ParsePlaylist(byte[] data, int offset) - { - // If the data is invalid - if (data == null) - return null; - - // If the offset is out of bounds - if (offset < 0 || offset >= data.Length) - return null; - - // Create a memory stream and parse that - MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset); - return ParsePlaylist(dataStream); - } - - /// - /// Parse a byte array into a PlayJ audio file - /// - /// Byte array to parse - /// Offset into the byte array - /// Filled audio file on success, null on error - public static AudioFile ParseAudioFile(byte[] data, int offset) - { - // If the data is invalid - if (data == null) - return null; - - // If the offset is out of bounds - if (offset < 0 || offset >= data.Length) - return null; - - // Create a memory stream and parse that - MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset); - return ParseAudioFile(dataStream); - } - - #endregion - - #region Stream Data - - /// - /// Parse a Stream 
into a PlayJ playlist - /// - /// Stream to parse - /// Filled playlist on success, null on error - public static Playlist ParsePlaylist(Stream data) - { - // If the data is invalid - if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead) - return null; - - // If the offset is out of bounds - if (data.Position < 0 || data.Position >= data.Length) - return null; - - // Cache the current offset - int initialOffset = (int)data.Position; - - // Create a new playlist to fill - var playlist = new Playlist(); - - #region Playlist Header - - // Try to parse the playlist header - var playlistHeader = ParsePlaylistHeader(data); - if (playlistHeader == null) - return null; - - // Set the playlist header - playlist.Header = playlistHeader; - - #endregion - - #region Audio Files - - // Create the audio files array - playlist.AudioFiles = new AudioFile[playlistHeader.TrackCount]; - - // Try to parse the audio files - for (int i = 0; i < playlist.AudioFiles.Length; i++) - { - long currentOffset = data.Position; - var entryHeader = ParseAudioFile(data, currentOffset); - if (entryHeader == null) - return null; - - playlist.AudioFiles[i] = entryHeader; - } - - #endregion - - return playlist; - } - - /// - /// Parse a Stream into a PlayJ audio file - /// - /// Stream to parse - /// Offset to adjust all seeking by - /// Filled audio file on success, null on error - public static AudioFile ParseAudioFile(Stream data, long adjust = 0) - { - // If the data is invalid - if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead) - return null; - - // If the offset is out of bounds - if (data.Position < 0 || data.Position >= data.Length) - return null; - - // Cache the current offset - int initialOffset = (int)data.Position; - - // Create a new audio file to fill - var audioFile = new AudioFile(); - - #region Audio Header - - // Try to parse the audio header - var audioHeader = ParseAudioHeader(data); - if (audioHeader == null) - return null; - - // Set the audio header - audioFile.Header = audioHeader; - - #endregion - - #region Unknown Block 1 - - uint unknownOffset1 = (audioHeader.Version == 0x00000000) - ? (audioHeader as AudioHeaderV1).UnknownOffset1 - : (audioHeader as AudioHeaderV2).UnknownOffset1 + 0x54; - - // If we have an unknown block 1 offset - if (unknownOffset1 > 0) - { - // Get the unknown block 1 offset - long offset = unknownOffset1 + adjust; - if (offset < 0 || offset >= data.Length) - return null; - - // Seek to the unknown block 1 - data.Seek(offset, SeekOrigin.Begin); - } - - // Try to parse the unknown block 1 - var unknownBlock1 = ParseUnknownBlock1(data); - if (unknownBlock1 == null) - return null; - - // Set the unknown block 1 - audioFile.UnknownBlock1 = unknownBlock1; - - #endregion - - #region V1 Only - - // If we have a V1 file - if (audioHeader.Version == 0x00000000) - { - #region Unknown Value 2 - - // Get the V1 unknown offset 2 - uint? unknownOffset2 = (audioHeader as AudioHeaderV1)?.UnknownOffset2; - - // If we have an unknown value 2 offset - if (unknownOffset2 != null && unknownOffset2 > 0) - { - // Get the unknown value 2 offset - long offset = unknownOffset2.Value + adjust; - if (offset < 0 || offset >= data.Length) - return null; - - // Seek to the unknown value 2 - data.Seek(offset, SeekOrigin.Begin); - } - - // Set the unknown value 2 - audioFile.UnknownValue2 = data.ReadUInt32(); - - #endregion - - #region Unknown Block 3 - - // Get the V1 unknown offset 3 - uint? 
unknownOffset3 = (audioHeader as AudioHeaderV1)?.UnknownOffset3; - - // If we have an unknown block 3 offset - if (unknownOffset3 != null && unknownOffset3 > 0) - { - // Get the unknown block 3 offset - long offset = unknownOffset3.Value + adjust; - if (offset < 0 || offset >= data.Length) - return null; - - // Seek to the unknown block 3 - data.Seek(offset, SeekOrigin.Begin); - } - - // Try to parse the unknown block 3 - var unknownBlock3 = ParseUnknownBlock3(data); - if (unknownBlock3 == null) - return null; - - // Set the unknown block 3 - audioFile.UnknownBlock3 = unknownBlock3; - - #endregion - } - - #endregion - - #region V2 Only - - // If we have a V2 file - if (audioHeader.Version == 0x0000000A) - { - #region Data Files Count - - // Set the data files count - audioFile.DataFilesCount = data.ReadUInt32(); - - #endregion - - #region Data Files - - // Create the data files array - audioFile.DataFiles = new DataFile[audioFile.DataFilesCount]; - - // Try to parse the data files - for (int i = 0; i < audioFile.DataFiles.Length; i++) - { - var dataFile = ParseDataFile(data); - if (dataFile == null) - return null; - - audioFile.DataFiles[i] = dataFile; - } - - - #endregion - } - - #endregion - - return audioFile; - } - - /// - /// Parse a Stream into a playlist header - /// - /// Stream to parse - /// Filled playlist header on success, null on error - private static PlaylistHeader ParsePlaylistHeader(Stream data) - { - // TODO: Use marshalling here instead of building - PlaylistHeader playlistHeader = new PlaylistHeader(); - - playlistHeader.TrackCount = data.ReadUInt32(); - playlistHeader.Data = data.ReadBytes(52); - - return playlistHeader; - } - - /// - /// Parse a Stream into an audio header - /// - /// Stream to parse - /// Filled audio header on success, null on error - private static AudioHeader ParseAudioHeader(Stream data) - { - // Cache the current offset - long initialOffset = data.Position; - - // TODO: Use marshalling here instead of building - AudioHeader audioHeader; - - // Get the common header pieces - uint signature = data.ReadUInt32(); - if (signature != SignatureUInt32) - return null; - - uint version = data.ReadUInt32(); - - // Build the header according to version - uint unknownOffset1; - switch (version) - { - // Version 1 - case 0x00000000: - AudioHeaderV1 v1 = new AudioHeaderV1(); - - v1.Signature = signature; - v1.Version = version; - v1.TrackID = data.ReadUInt32(); - v1.UnknownOffset1 = data.ReadUInt32(); - v1.UnknownOffset2 = data.ReadUInt32(); - v1.UnknownOffset3 = data.ReadUInt32(); - v1.Unknown1 = data.ReadUInt32(); - v1.Unknown2 = data.ReadUInt32(); - v1.Year = data.ReadUInt32(); - v1.TrackNumber = data.ReadByteValue(); - v1.Subgenre = (Subgenre)data.ReadByteValue(); - v1.Duration = data.ReadUInt32(); - - audioHeader = v1; - unknownOffset1 = v1.UnknownOffset1; - break; - - // Version 2 - case 0x0000000A: - AudioHeaderV2 v2 = new AudioHeaderV2(); - - v2.Signature = signature; - v2.Version = version; - v2.Unknown1 = data.ReadUInt32(); - v2.Unknown2 = data.ReadUInt32(); - v2.Unknown3 = data.ReadUInt32(); - v2.Unknown4 = data.ReadUInt32(); - v2.Unknown5 = data.ReadUInt32(); - v2.Unknown6 = data.ReadUInt32(); - v2.UnknownOffset1 = data.ReadUInt32(); - v2.Unknown7 = data.ReadUInt32(); - v2.Unknown8 = data.ReadUInt32(); - v2.Unknown9 = data.ReadUInt32(); - v2.UnknownOffset2 = data.ReadUInt32(); - v2.Unknown10 = data.ReadUInt32(); - v2.Unknown11 = data.ReadUInt32(); - v2.Unknown12 = data.ReadUInt32(); - v2.Unknown13 = data.ReadUInt32(); - v2.Unknown14 = 
data.ReadUInt32(); - v2.Unknown15 = data.ReadUInt32(); - v2.Unknown16 = data.ReadUInt32(); - v2.Unknown17 = data.ReadUInt32(); - v2.TrackID = data.ReadUInt32(); - v2.Year = data.ReadUInt32(); - v2.TrackNumber = data.ReadUInt32(); - v2.Unknown18 = data.ReadUInt32(); - - audioHeader = v2; - unknownOffset1 = v2.UnknownOffset1 + 0x54; - break; - - // No other version are recognized - default: - return null; - } - - audioHeader.TrackLength = data.ReadUInt16(); - byte[] track = data.ReadBytes(audioHeader.TrackLength); - if (track != null) - audioHeader.Track = Encoding.ASCII.GetString(track); - - audioHeader.ArtistLength = data.ReadUInt16(); - byte[] artist = data.ReadBytes(audioHeader.ArtistLength); - if (artist != null) - audioHeader.Artist = Encoding.ASCII.GetString(artist); - - audioHeader.AlbumLength = data.ReadUInt16(); - byte[] album = data.ReadBytes(audioHeader.AlbumLength); - if (album != null) - audioHeader.Album = Encoding.ASCII.GetString(album); - - audioHeader.WriterLength = data.ReadUInt16(); - byte[] writer = data.ReadBytes(audioHeader.WriterLength); - if (writer != null) - audioHeader.Writer = Encoding.ASCII.GetString(writer); - - audioHeader.PublisherLength = data.ReadUInt16(); - byte[] publisher = data.ReadBytes(audioHeader.PublisherLength); - if (publisher != null) - audioHeader.Publisher = Encoding.ASCII.GetString(publisher); - - audioHeader.LabelLength = data.ReadUInt16(); - byte[] label = data.ReadBytes(audioHeader.LabelLength); - if (label != null) - audioHeader.Label = Encoding.ASCII.GetString(label); - - if (data.Position - initialOffset < unknownOffset1) - { - audioHeader.CommentsLength = data.ReadUInt16(); - byte[] comments = data.ReadBytes(audioHeader.CommentsLength); - if (comments != null) - audioHeader.Comments = Encoding.ASCII.GetString(comments); - } - - return audioHeader; - } - - /// - /// Parse a Stream into an unknown block 1 - /// - /// Stream to parse - /// Filled unknown block 1 on success, null on error - private static UnknownBlock1 ParseUnknownBlock1(Stream data) - { - // TODO: Use marshalling here instead of building - UnknownBlock1 unknownBlock1 = new UnknownBlock1(); - - unknownBlock1.Length = data.ReadUInt32(); - unknownBlock1.Data = data.ReadBytes((int)unknownBlock1.Length); - - return unknownBlock1; - } - - /// - /// Parse a Stream into an unknown block 3 - /// - /// Stream to parse - /// Filled unknown block 3 on success, null on error - private static UnknownBlock3 ParseUnknownBlock3(Stream data) - { - // TODO: Use marshalling here instead of building - UnknownBlock3 unknownBlock3 = new UnknownBlock3(); - - // No-op because we don't even know the length - - return unknownBlock3; - } - - /// - /// Parse a Stream into a data file - /// - /// Stream to parse - /// Filled data file on success, null on error - private static DataFile ParseDataFile(Stream data) - { - // TODO: Use marshalling here instead of building - DataFile dataFile = new DataFile(); - - dataFile.FileNameLength = data.ReadUInt16(); - byte[] fileName = data.ReadBytes(dataFile.FileNameLength); - if (fileName != null) - dataFile.FileName = Encoding.ASCII.GetString(fileName); - - dataFile.DataLength = data.ReadUInt32(); - dataFile.Data = data.ReadBytes((int)dataFile.DataLength); - - return dataFile; - } - - #endregion - } -} diff --git a/BinaryObjectScanner.Builders/PortableExecutable.cs b/BinaryObjectScanner.Builders/PortableExecutable.cs deleted file mode 100644 index e87c5736..00000000 --- a/BinaryObjectScanner.Builders/PortableExecutable.cs +++ /dev/null @@ -1,1344 +0,0 @@ -using 
System; -using System.Collections.Generic; -using System.IO; -using System.Linq; -using System.Text; -using BinaryObjectScanner.Utilities; -using SabreTools.IO; -using SabreTools.Models.PortableExecutable; -using static SabreTools.Models.PortableExecutable.Constants; - -namespace BinaryObjectScanner.Builders -{ - public static class PortableExecutable - { - #region Byte Data - - /// - /// Parse a byte array into a Portable Executable - /// - /// Byte array to parse - /// Offset into the byte array - /// Filled executable on success, null on error - public static Executable ParseExecutable(byte[] data, int offset) - { - // If the data is invalid - if (data == null) - return null; - - // If the offset is out of bounds - if (offset < 0 || offset >= data.Length) - return null; - - // Create a memory stream and parse that - MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset); - return ParseExecutable(dataStream); - } - - #endregion - - #region Stream Data - - /// - /// Parse a Stream into a Portable Executable - /// - /// Stream to parse - /// Filled executable on success, null on error - public static Executable ParseExecutable(Stream data) - { - // If the data is invalid - if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead) - return null; - - // If the offset is out of bounds - if (data.Position < 0 || data.Position >= data.Length) - return null; - - // Cache the current offset - int initialOffset = (int)data.Position; - - // Create a new executable to fill - var executable = new Executable(); - - #region MS-DOS Stub - - // Parse the MS-DOS stub - var stub = MSDOS.ParseExecutable(data); - if (stub?.Header == null || stub.Header.NewExeHeaderAddr == 0) - return null; - - // Set the MS-DOS stub - executable.Stub = stub; - - #endregion - - #region Signature - - data.Seek(initialOffset + stub.Header.NewExeHeaderAddr, SeekOrigin.Begin); - byte[] signature = data.ReadBytes(4); - executable.Signature = Encoding.ASCII.GetString(signature); - if (executable.Signature != SignatureString) - return null; - - #endregion - - #region COFF File Header - - // Try to parse the COFF file header - var coffFileHeader = ParseCOFFFileHeader(data); - if (coffFileHeader == null) - return null; - - // Set the COFF file header - executable.COFFFileHeader = coffFileHeader; - - #endregion - - #region Optional Header - - // Try to parse the optional header - var optionalHeader = ParseOptionalHeader(data, coffFileHeader.SizeOfOptionalHeader); - if (optionalHeader == null) - return null; - - // Set the optional header - executable.OptionalHeader = optionalHeader; - - #endregion - - #region Section Table - - // Try to parse the section table - var sectionTable = ParseSectionTable(data, coffFileHeader.NumberOfSections); - if (sectionTable == null) - return null; - - // Set the section table - executable.SectionTable = sectionTable; - - #endregion - - #region COFF Symbol Table and COFF String Table - - // TODO: Validate that this is correct with an "old" PE - if (coffFileHeader.PointerToSymbolTable.ConvertVirtualAddress(executable.SectionTable) != 0) - { - // If the offset for the COFF symbol table doesn't exist - int symbolTableAddress = initialOffset - + (int)coffFileHeader.PointerToSymbolTable.ConvertVirtualAddress(executable.SectionTable); - if (symbolTableAddress >= data.Length) - return executable; - - // Try to parse the COFF symbol table - data.Seek(symbolTableAddress, SeekOrigin.Begin); - var coffSymbolTable = ParseCOFFSymbolTable(data, coffFileHeader.NumberOfSymbols); - 
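- // Note: NumberOfSymbols counts auxiliary symbol records as well as standard entries, so the call above consumes both kinds within the same total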
if (coffSymbolTable == null) - return null; - - // Set the COFF symbol table - executable.COFFSymbolTable = coffSymbolTable; - - // Try to parse the COFF string table - var coffStringTable = ParseCOFFStringTable(data); - if (coffStringTable == null) - return null; - - // Set the COFF string table - executable.COFFStringTable = coffStringTable; - } - - #endregion - - #region Attribute Certificate Table - - if (optionalHeader.CertificateTable != null && optionalHeader.CertificateTable.VirtualAddress != 0) - { - // If the offset for the attribute certificate table doesn't exist - int certificateTableAddress = initialOffset - + (int)optionalHeader.CertificateTable.VirtualAddress; - if (certificateTableAddress >= data.Length) - return executable; - - // Try to parse the attribute certificate table - data.Seek(certificateTableAddress, SeekOrigin.Begin); - int endOffset = (int)(certificateTableAddress + optionalHeader.CertificateTable.Size); - var attributeCertificateTable = ParseAttributeCertificateTable(data, endOffset); - if (attributeCertificateTable == null) - return null; - - // Set the attribute certificate table - executable.AttributeCertificateTable = attributeCertificateTable; - } - - #endregion - - #region Delay-Load Directory Table - - if (optionalHeader.DelayImportDescriptor != null && optionalHeader.DelayImportDescriptor.VirtualAddress.ConvertVirtualAddress(executable.SectionTable) != 0) - { - // If the offset for the delay-load directory table doesn't exist - int delayLoadDirectoryTableAddress = initialOffset - + (int)optionalHeader.DelayImportDescriptor.VirtualAddress.ConvertVirtualAddress(executable.SectionTable); - if (delayLoadDirectoryTableAddress >= data.Length) - return executable; - - // Try to parse the delay-load directory table - data.Seek(delayLoadDirectoryTableAddress, SeekOrigin.Begin); - var delayLoadDirectoryTable = ParseDelayLoadDirectoryTable(data); - if (delayLoadDirectoryTable == null) - return null; - - // Set the delay-load directory table - executable.DelayLoadDirectoryTable = delayLoadDirectoryTable; - } - - #endregion - - #region Base Relocation Table - - // Should also be in a '.reloc' section - if (optionalHeader.BaseRelocationTable != null && optionalHeader.BaseRelocationTable.VirtualAddress.ConvertVirtualAddress(executable.SectionTable) != 0) - { - // If the offset for the base relocation table doesn't exist - int baseRelocationTableAddress = initialOffset - + (int)optionalHeader.BaseRelocationTable.VirtualAddress.ConvertVirtualAddress(executable.SectionTable); - if (baseRelocationTableAddress >= data.Length) - return executable; - - // Try to parse the base relocation table - data.Seek(baseRelocationTableAddress, SeekOrigin.Begin); - int endOffset = (int)(baseRelocationTableAddress + optionalHeader.BaseRelocationTable.Size); - var baseRelocationTable = ParseBaseRelocationTable(data, endOffset, executable.SectionTable); - if (baseRelocationTable == null) - return null; - - // Set the base relocation table - executable.BaseRelocationTable = baseRelocationTable; - } - - #endregion - - #region Debug Table - - // Should also be in a '.debug' section - if (optionalHeader.Debug != null && optionalHeader.Debug.VirtualAddress.ConvertVirtualAddress(executable.SectionTable) != 0) - { - // If the offset for the debug table doesn't exist - int debugTableAddress = initialOffset - + (int)optionalHeader.Debug.VirtualAddress.ConvertVirtualAddress(executable.SectionTable); - if (debugTableAddress >= data.Length) - return executable; - - // Try to parse the debug table - 
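- // Each debug directory entry is a fixed 28-byte record, so the endOffset computed below bounds how many entries are read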
data.Seek(debugTableAddress, SeekOrigin.Begin); - int endOffset = (int)(debugTableAddress + optionalHeader.Debug.Size); - var debugTable = ParseDebugTable(data, endOffset, executable.SectionTable); - if (debugTable == null) - return null; - - // Set the debug table - executable.DebugTable = debugTable; - } - - #endregion - - #region Export Table - - // Should also be in a '.edata' section - if (optionalHeader.ExportTable != null && optionalHeader.ExportTable.VirtualAddress.ConvertVirtualAddress(executable.SectionTable) != 0) - { - // If the offset for the export table doesn't exist - int exportTableAddress = initialOffset - + (int)optionalHeader.ExportTable.VirtualAddress.ConvertVirtualAddress(executable.SectionTable); - if (exportTableAddress >= data.Length) - return executable; - - // Try to parse the export table - data.Seek(exportTableAddress, SeekOrigin.Begin); - var exportTable = ParseExportTable(data, executable.SectionTable); - if (exportTable == null) - return null; - - // Set the export table - executable.ExportTable = exportTable; - } - - #endregion - - #region Import Table - - // Should also be in a '.idata' section - if (optionalHeader.ImportTable != null && optionalHeader.ImportTable.VirtualAddress.ConvertVirtualAddress(executable.SectionTable) != 0) - { - // If the offset for the import table doesn't exist - int importTableAddress = initialOffset - + (int)optionalHeader.ImportTable.VirtualAddress.ConvertVirtualAddress(executable.SectionTable); - if (importTableAddress >= data.Length) - return executable; - - // Try to parse the import table - data.Seek(importTableAddress, SeekOrigin.Begin); - var importTable = ParseImportTable(data, optionalHeader.Magic, executable.SectionTable); - if (importTable == null) - return null; - - // Set the import table - executable.ImportTable = importTable; - } - - #endregion - - #region Resource Directory Table - - // Should also be in a '.rsrc' section - if (optionalHeader.ResourceTable != null && optionalHeader.ResourceTable.VirtualAddress.ConvertVirtualAddress(executable.SectionTable) != 0) - { - // If the offset for the resource directory table doesn't exist - int resourceTableAddress = initialOffset - + (int)optionalHeader.ResourceTable.VirtualAddress.ConvertVirtualAddress(executable.SectionTable); - if (resourceTableAddress >= data.Length) - return executable; - - // Try to parse the resource directory table - data.Seek(resourceTableAddress, SeekOrigin.Begin); - var resourceDirectoryTable = ParseResourceDirectoryTable(data, data.Position, executable.SectionTable, true); - if (resourceDirectoryTable == null) - return null; - - // Set the resource directory table - executable.ResourceDirectoryTable = resourceDirectoryTable; - } - - #endregion - - // TODO: Finish implementing PE parsing - return executable; - } - - /// - /// Parse a Stream into a Portable Executable COFF file header - /// - /// Stream to parse - /// Filled executable header on success, null on error - private static COFFFileHeader ParseCOFFFileHeader(Stream data) - { - // TODO: Use marshalling here instead of building - var fileHeader = new COFFFileHeader(); - - fileHeader.Machine = (MachineType)data.ReadUInt16(); - fileHeader.NumberOfSections = data.ReadUInt16(); - fileHeader.TimeDateStamp = data.ReadUInt32(); - fileHeader.PointerToSymbolTable = data.ReadUInt32(); - fileHeader.NumberOfSymbols = data.ReadUInt32(); - fileHeader.SizeOfOptionalHeader = data.ReadUInt16(); - fileHeader.Characteristics = (Characteristics)data.ReadUInt16(); - - return fileHeader; - } - - /// - /// 
Parse a Stream into an optional header - /// - /// Stream to parse - /// Size of the optional header - /// Filled optional header on success, null on error - private static OptionalHeader ParseOptionalHeader(Stream data, int optionalSize) - { - long initialOffset = data.Position; - - // TODO: Use marshalling here instead of building - var optionalHeader = new OptionalHeader(); - - #region Standard Fields - - optionalHeader.Magic = (OptionalHeaderMagicNumber)data.ReadUInt16(); - optionalHeader.MajorLinkerVersion = data.ReadByteValue(); - optionalHeader.MinorLinkerVersion = data.ReadByteValue(); - optionalHeader.SizeOfCode = data.ReadUInt32(); - optionalHeader.SizeOfInitializedData = data.ReadUInt32(); - optionalHeader.SizeOfUninitializedData = data.ReadUInt32(); - optionalHeader.AddressOfEntryPoint = data.ReadUInt32(); - optionalHeader.BaseOfCode = data.ReadUInt32(); - - if (optionalHeader.Magic == OptionalHeaderMagicNumber.PE32) - optionalHeader.BaseOfData = data.ReadUInt32(); - - #endregion - - #region Windows-Specific Fields - - if (optionalHeader.Magic == OptionalHeaderMagicNumber.PE32) - optionalHeader.ImageBase_PE32 = data.ReadUInt32(); - else if (optionalHeader.Magic == OptionalHeaderMagicNumber.PE32Plus) - optionalHeader.ImageBase_PE32Plus = data.ReadUInt64(); - optionalHeader.SectionAlignment = data.ReadUInt32(); - optionalHeader.FileAlignment = data.ReadUInt32(); - optionalHeader.MajorOperatingSystemVersion = data.ReadUInt16(); - optionalHeader.MinorOperatingSystemVersion = data.ReadUInt16(); - optionalHeader.MajorImageVersion = data.ReadUInt16(); - optionalHeader.MinorImageVersion = data.ReadUInt16(); - optionalHeader.MajorSubsystemVersion = data.ReadUInt16(); - optionalHeader.MinorSubsystemVersion = data.ReadUInt16(); - optionalHeader.Win32VersionValue = data.ReadUInt32(); - optionalHeader.SizeOfImage = data.ReadUInt32(); - optionalHeader.SizeOfHeaders = data.ReadUInt32(); - optionalHeader.CheckSum = data.ReadUInt32(); - optionalHeader.Subsystem = (WindowsSubsystem)data.ReadUInt16(); - optionalHeader.DllCharacteristics = (DllCharacteristics)data.ReadUInt16(); - if (optionalHeader.Magic == OptionalHeaderMagicNumber.PE32) - optionalHeader.SizeOfStackReserve_PE32 = data.ReadUInt32(); - else if (optionalHeader.Magic == OptionalHeaderMagicNumber.PE32Plus) - optionalHeader.SizeOfStackReserve_PE32Plus = data.ReadUInt64(); - if (optionalHeader.Magic == OptionalHeaderMagicNumber.PE32) - optionalHeader.SizeOfStackCommit_PE32 = data.ReadUInt32(); - else if (optionalHeader.Magic == OptionalHeaderMagicNumber.PE32Plus) - optionalHeader.SizeOfStackCommit_PE32Plus = data.ReadUInt64(); - if (optionalHeader.Magic == OptionalHeaderMagicNumber.PE32) - optionalHeader.SizeOfHeapReserve_PE32 = data.ReadUInt32(); - else if (optionalHeader.Magic == OptionalHeaderMagicNumber.PE32Plus) - optionalHeader.SizeOfHeapReserve_PE32Plus = data.ReadUInt64(); - if (optionalHeader.Magic == OptionalHeaderMagicNumber.PE32) - optionalHeader.SizeOfHeapCommit_PE32 = data.ReadUInt32(); - else if (optionalHeader.Magic == OptionalHeaderMagicNumber.PE32Plus) - optionalHeader.SizeOfHeapCommit_PE32Plus = data.ReadUInt64(); - optionalHeader.LoaderFlags = data.ReadUInt32(); - optionalHeader.NumberOfRvaAndSizes = data.ReadUInt32(); - - #endregion - - #region Data Directories - - if (optionalHeader.NumberOfRvaAndSizes >= 1 && data.Position - initialOffset < optionalSize) - { - optionalHeader.ExportTable = new DataDirectory(); - optionalHeader.ExportTable.VirtualAddress = data.ReadUInt32(); - optionalHeader.ExportTable.Size = 
data.ReadUInt32(); - } - if (optionalHeader.NumberOfRvaAndSizes >= 2 && data.Position - initialOffset < optionalSize) - { - optionalHeader.ImportTable = new DataDirectory(); - optionalHeader.ImportTable.VirtualAddress = data.ReadUInt32(); - optionalHeader.ImportTable.Size = data.ReadUInt32(); - } - if (optionalHeader.NumberOfRvaAndSizes >= 3 && data.Position - initialOffset < optionalSize) - { - optionalHeader.ResourceTable = new DataDirectory(); - optionalHeader.ResourceTable.VirtualAddress = data.ReadUInt32(); - optionalHeader.ResourceTable.Size = data.ReadUInt32(); - } - if (optionalHeader.NumberOfRvaAndSizes >= 4 && data.Position - initialOffset < optionalSize) - { - optionalHeader.ExceptionTable = new DataDirectory(); - optionalHeader.ExceptionTable.VirtualAddress = data.ReadUInt32(); - optionalHeader.ExceptionTable.Size = data.ReadUInt32(); - } - if (optionalHeader.NumberOfRvaAndSizes >= 5 && data.Position - initialOffset < optionalSize) - { - optionalHeader.CertificateTable = new DataDirectory(); - optionalHeader.CertificateTable.VirtualAddress = data.ReadUInt32(); - optionalHeader.CertificateTable.Size = data.ReadUInt32(); - } - if (optionalHeader.NumberOfRvaAndSizes >= 6 && data.Position - initialOffset < optionalSize) - { - optionalHeader.BaseRelocationTable = new DataDirectory(); - optionalHeader.BaseRelocationTable.VirtualAddress = data.ReadUInt32(); - optionalHeader.BaseRelocationTable.Size = data.ReadUInt32(); - } - if (optionalHeader.NumberOfRvaAndSizes >= 7 && data.Position - initialOffset < optionalSize) - { - optionalHeader.Debug = new DataDirectory(); - optionalHeader.Debug.VirtualAddress = data.ReadUInt32(); - optionalHeader.Debug.Size = data.ReadUInt32(); - } - if (optionalHeader.NumberOfRvaAndSizes >= 8 && data.Position - initialOffset < optionalSize) - { - optionalHeader.Architecture = data.ReadUInt64(); - } - if (optionalHeader.NumberOfRvaAndSizes >= 9 && data.Position - initialOffset < optionalSize) - { - optionalHeader.GlobalPtr = new DataDirectory(); - optionalHeader.GlobalPtr.VirtualAddress = data.ReadUInt32(); - optionalHeader.GlobalPtr.Size = data.ReadUInt32(); - } - if (optionalHeader.NumberOfRvaAndSizes >= 10 && data.Position - initialOffset < optionalSize) - { - optionalHeader.ThreadLocalStorageTable = new DataDirectory(); - optionalHeader.ThreadLocalStorageTable.VirtualAddress = data.ReadUInt32(); - optionalHeader.ThreadLocalStorageTable.Size = data.ReadUInt32(); - } - if (optionalHeader.NumberOfRvaAndSizes >= 11 && data.Position - initialOffset < optionalSize) - { - optionalHeader.LoadConfigTable = new DataDirectory(); - optionalHeader.LoadConfigTable.VirtualAddress = data.ReadUInt32(); - optionalHeader.LoadConfigTable.Size = data.ReadUInt32(); - } - if (optionalHeader.NumberOfRvaAndSizes >= 12 && data.Position - initialOffset < optionalSize) - { - optionalHeader.BoundImport = new DataDirectory(); - optionalHeader.BoundImport.VirtualAddress = data.ReadUInt32(); - optionalHeader.BoundImport.Size = data.ReadUInt32(); - } - if (optionalHeader.NumberOfRvaAndSizes >= 13 && data.Position - initialOffset < optionalSize) - { - optionalHeader.ImportAddressTable = new DataDirectory(); - optionalHeader.ImportAddressTable.VirtualAddress = data.ReadUInt32(); - optionalHeader.ImportAddressTable.Size = data.ReadUInt32(); - } - if (optionalHeader.NumberOfRvaAndSizes >= 14 && data.Position - initialOffset < optionalSize) - { - optionalHeader.DelayImportDescriptor = new DataDirectory(); - optionalHeader.DelayImportDescriptor.VirtualAddress = data.ReadUInt32(); - 
optionalHeader.DelayImportDescriptor.Size = data.ReadUInt32(); - } - if (optionalHeader.NumberOfRvaAndSizes >= 15 && data.Position - initialOffset < optionalSize) - { - optionalHeader.CLRRuntimeHeader = new DataDirectory(); - optionalHeader.CLRRuntimeHeader.VirtualAddress = data.ReadUInt32(); - optionalHeader.CLRRuntimeHeader.Size = data.ReadUInt32(); - } - if (optionalHeader.NumberOfRvaAndSizes >= 16 && data.Position - initialOffset < optionalSize) - { - optionalHeader.Reserved = data.ReadUInt64(); - } - - #endregion - - return optionalHeader; - } - - /// - /// Parse a Stream into a section table - /// - /// Stream to parse - /// Number of section table entries to read - /// Filled section table on success, null on error - private static SectionHeader[] ParseSectionTable(Stream data, int count) - { - // TODO: Use marshalling here instead of building - var sectionTable = new SectionHeader[count]; - - for (int i = 0; i < count; i++) - { - var entry = new SectionHeader(); - entry.Name = data.ReadBytes(8); - entry.VirtualSize = data.ReadUInt32(); - entry.VirtualAddress = data.ReadUInt32(); - entry.SizeOfRawData = data.ReadUInt32(); - entry.PointerToRawData = data.ReadUInt32(); - entry.PointerToRelocations = data.ReadUInt32(); - entry.PointerToLinenumbers = data.ReadUInt32(); - entry.NumberOfRelocations = data.ReadUInt16(); - entry.NumberOfLinenumbers = data.ReadUInt16(); - entry.Characteristics = (SectionFlags)data.ReadUInt32(); - entry.COFFRelocations = new COFFRelocation[entry.NumberOfRelocations]; - for (int j = 0; j < entry.NumberOfRelocations; j++) - { - // TODO: Seek to correct location and read data - } - entry.COFFLineNumbers = new COFFLineNumber[entry.NumberOfLinenumbers]; - for (int j = 0; j < entry.NumberOfLinenumbers; j++) - { - // TODO: Seek to correct location and read data - } - sectionTable[i] = entry; - } - - return sectionTable; - } - - /// - /// Parse a Stream into a COFF symbol table - /// - /// Stream to parse - /// Number of COFF symbol table entries to read - /// Filled COFF symbol table on success, null on error - private static COFFSymbolTableEntry[] ParseCOFFSymbolTable(Stream data, uint count) - { - // TODO: Use marshalling here instead of building - var coffSymbolTable = new COFFSymbolTableEntry[count]; - - int auxSymbolsRemaining = 0; - int currentSymbolType = 0; - - for (int i = 0; i < count; i++) - { - // Standard COFF Symbol Table Entry - if (currentSymbolType == 0) - { - var entry = new COFFSymbolTableEntry(); - entry.ShortName = data.ReadBytes(8); - entry.Zeroes = BitConverter.ToUInt32(entry.ShortName, 0); - if (entry.Zeroes == 0) - { - entry.Offset = BitConverter.ToUInt32(entry.ShortName, 4); - entry.ShortName = null; - } - entry.Value = data.ReadUInt32(); - entry.SectionNumber = data.ReadUInt16(); - entry.SymbolType = (SymbolType)data.ReadUInt16(); - entry.StorageClass = (StorageClass)data.ReadByte(); - entry.NumberOfAuxSymbols = data.ReadByteValue(); - coffSymbolTable[i] = entry; - - auxSymbolsRemaining = entry.NumberOfAuxSymbols; - if (auxSymbolsRemaining == 0) - continue; - - if (entry.StorageClass == StorageClass.IMAGE_SYM_CLASS_EXTERNAL - && entry.SymbolType == SymbolType.IMAGE_SYM_TYPE_FUNC - && entry.SectionNumber > 0) - { - currentSymbolType = 1; - } - else if (entry.StorageClass == StorageClass.IMAGE_SYM_CLASS_FUNCTION - && entry.ShortName != null - && ((entry.ShortName[0] == 0x2E && entry.ShortName[1] == 0x62 && entry.ShortName[2] == 0x66) // .bf - || (entry.ShortName[0] == 0x2E && entry.ShortName[1] == 0x65 && entry.ShortName[2] == 0x66))) // .ef 
- { - currentSymbolType = 2; - } - else if (entry.StorageClass == StorageClass.IMAGE_SYM_CLASS_EXTERNAL - && entry.SectionNumber == (ushort)SectionNumber.IMAGE_SYM_UNDEFINED - && entry.Value == 0) - { - currentSymbolType = 3; - } - else if (entry.StorageClass == StorageClass.IMAGE_SYM_CLASS_FILE) - { - // TODO: Symbol name should be ".file" - currentSymbolType = 4; - } - else if (entry.StorageClass == StorageClass.IMAGE_SYM_CLASS_STATIC) - { - // TODO: Should have the name of a section (like ".text") - currentSymbolType = 5; - } - else if (entry.StorageClass == StorageClass.IMAGE_SYM_CLASS_CLR_TOKEN) - { - currentSymbolType = 6; - } - } - - // Auxiliary Format 1: Function Definitions - else if (currentSymbolType == 1) - { - var entry = new COFFSymbolTableEntry(); - entry.AuxFormat1TagIndex = data.ReadUInt32(); - entry.AuxFormat1TotalSize = data.ReadUInt32(); - entry.AuxFormat1PointerToLinenumber = data.ReadUInt32(); - entry.AuxFormat1PointerToNextFunction = data.ReadUInt32(); - entry.AuxFormat1Unused = data.ReadUInt16(); - coffSymbolTable[i] = entry; - auxSymbolsRemaining--; - } - - // Auxiliary Format 2: .bf and .ef Symbols - else if (currentSymbolType == 2) - { - var entry = new COFFSymbolTableEntry(); - entry.AuxFormat2Unused1 = data.ReadUInt32(); - entry.AuxFormat2Linenumber = data.ReadUInt16(); - entry.AuxFormat2Unused2 = data.ReadBytes(6); - entry.AuxFormat2PointerToNextFunction = data.ReadUInt32(); - entry.AuxFormat2Unused3 = data.ReadUInt16(); - coffSymbolTable[i] = entry; - auxSymbolsRemaining--; - } - - // Auxiliary Format 3: Weak Externals - else if (currentSymbolType == 3) - { - var entry = new COFFSymbolTableEntry(); - entry.AuxFormat3TagIndex = data.ReadUInt32(); - entry.AuxFormat3Characteristics = data.ReadUInt32(); - entry.AuxFormat3Unused = data.ReadBytes(10); - coffSymbolTable[i] = entry; - auxSymbolsRemaining--; - } - - // Auxiliary Format 4: Files - else if (currentSymbolType == 4) - { - var entry = new COFFSymbolTableEntry(); - entry.AuxFormat4FileName = data.ReadBytes(18); - coffSymbolTable[i] = entry; - auxSymbolsRemaining--; - } - - // Auxiliary Format 5: Section Definitions - else if (currentSymbolType == 5) - { - var entry = new COFFSymbolTableEntry(); - entry.AuxFormat5Length = data.ReadUInt32(); - entry.AuxFormat5NumberOfRelocations = data.ReadUInt16(); - entry.AuxFormat5NumberOfLinenumbers = data.ReadUInt16(); - entry.AuxFormat5CheckSum = data.ReadUInt32(); - entry.AuxFormat5Number = data.ReadUInt16(); - entry.AuxFormat5Selection = data.ReadByteValue(); - entry.AuxFormat5Unused = data.ReadBytes(3); - coffSymbolTable[i] = entry; - auxSymbolsRemaining--; - } - - // Auxiliary Format 6: CLR Token Definition - else if (currentSymbolType == 6) - { - var entry = new COFFSymbolTableEntry(); - entry.AuxFormat6AuxType = data.ReadByteValue(); - entry.AuxFormat6Reserved1 = data.ReadByteValue(); - entry.AuxFormat6SymbolTableIndex = data.ReadUInt32(); - entry.AuxFormat6Reserved2 = data.ReadBytes(12); - coffSymbolTable[i] = entry; - auxSymbolsRemaining--; - } - - // If we hit the last aux symbol, go back to normal format - if (auxSymbolsRemaining == 0) - currentSymbolType = 0; - } - - return coffSymbolTable; - } - - /// - /// Parse a Stream into a COFF string table - /// - /// Stream to parse - /// Filled COFF string table on success, null on error - private static COFFStringTable ParseCOFFStringTable(Stream data) - { - // TODO: Use marshalling here instead of building - var coffStringTable = new COFFStringTable(); - - coffStringTable.TotalSize = data.ReadUInt32(); - if 
(coffStringTable.TotalSize <= 4) - return coffStringTable; - - var strings = new List(); - - uint totalSize = coffStringTable.TotalSize; - while (totalSize > 0 && data.Position < data.Length) - { - long initialPosition = data.Position; - string str = data.ReadString(); - strings.Add(str); - totalSize -= (uint)(data.Position - initialPosition); - } - - coffStringTable.Strings = strings.ToArray(); - - return coffStringTable; - } - - /// - /// Parse a Stream into an attribute certificate table - /// - /// Stream to parse - /// First address not part of the attribute certificate table - /// Filled attribute certificate on success, null on error - private static AttributeCertificateTableEntry[] ParseAttributeCertificateTable(Stream data, int endOffset) - { - var attributeCertificateTable = new List(); - - while (data.Position < endOffset && data.Position != data.Length) - { - var entry = new AttributeCertificateTableEntry(); - - entry.Length = data.ReadUInt32(); - entry.Revision = (WindowsCertificateRevision)data.ReadUInt16(); - entry.CertificateType = (WindowsCertificateType)data.ReadUInt16(); - - int certificateDataLength = (int)(entry.Length - 8); - if (certificateDataLength > 0) - entry.Certificate = data.ReadBytes(certificateDataLength); - - attributeCertificateTable.Add(entry); - - // Align to the 8-byte boundary - while ((data.Position % 8) != 0 && data.Position < endOffset && data.Position != data.Length) - _ = data.ReadByteValue(); - } - - return attributeCertificateTable.ToArray(); - } - - /// - /// Parse a byte array into a delay-load directory table - /// - /// Stream to parse - /// Filled delay-load directory table on success, null on error - private static DelayLoadDirectoryTable ParseDelayLoadDirectoryTable(Stream data) - { - // TODO: Use marshalling here instead of building - var delayLoadDirectoryTable = new DelayLoadDirectoryTable(); - - delayLoadDirectoryTable.Attributes = data.ReadUInt32(); - delayLoadDirectoryTable.Name = data.ReadUInt32(); - delayLoadDirectoryTable.ModuleHandle = data.ReadUInt32(); - delayLoadDirectoryTable.DelayImportAddressTable = data.ReadUInt32(); - delayLoadDirectoryTable.DelayImportNameTable = data.ReadUInt32(); - delayLoadDirectoryTable.BoundDelayImportTable = data.ReadUInt32(); - delayLoadDirectoryTable.UnloadDelayImportTable = data.ReadUInt32(); - delayLoadDirectoryTable.TimeStamp = data.ReadUInt32(); - - return delayLoadDirectoryTable; - } - - /// - /// Parse a Stream into a base relocation table - /// - /// Stream to parse - /// First address not part of the base relocation table - /// Section table to use for virtual address translation - /// Filled base relocation table on success, null on error - private static BaseRelocationBlock[] ParseBaseRelocationTable(Stream data, int endOffset, SectionHeader[] sections) - { - // TODO: Use marshalling here instead of building - var baseRelocationTable = new List(); - - while (data.Position < endOffset) - { - var baseRelocationBlock = new BaseRelocationBlock(); - - baseRelocationBlock.PageRVA = data.ReadUInt32(); - baseRelocationBlock.BlockSize = data.ReadUInt32(); - - var typeOffsetFieldEntries = new List(); - int totalSize = 8; - while (totalSize < baseRelocationBlock.BlockSize && data.Position < data.Length) - { - var baseRelocationTypeOffsetFieldEntry = new BaseRelocationTypeOffsetFieldEntry(); - - ushort typeAndOffsetField = data.ReadUInt16(); - baseRelocationTypeOffsetFieldEntry.BaseRelocationType = (BaseRelocationTypes)(typeAndOffsetField >> 12); - baseRelocationTypeOffsetFieldEntry.Offset = 
(ushort)(typeAndOffsetField & 0x0FFF); - - typeOffsetFieldEntries.Add(baseRelocationTypeOffsetFieldEntry); - totalSize += 2; - } - - baseRelocationBlock.TypeOffsetFieldEntries = typeOffsetFieldEntries.ToArray(); - - baseRelocationTable.Add(baseRelocationBlock); - } - - return baseRelocationTable.ToArray(); - } - - /// - /// Parse a Stream into a debug table - /// - /// Stream to parse - /// First address not part of the debug table - /// Section table to use for virtual address translation - /// Filled debug table on success, null on error - private static DebugTable ParseDebugTable(Stream data, int endOffset, SectionHeader[] sections) - { - // TODO: Use marshalling here instead of building - var debugTable = new DebugTable(); - - var debugDirectoryTable = new List(); - - while (data.Position < endOffset) - { - var debugDirectoryEntry = new DebugDirectoryEntry(); - - debugDirectoryEntry.Characteristics = data.ReadUInt32(); - debugDirectoryEntry.TimeDateStamp = data.ReadUInt32(); - debugDirectoryEntry.MajorVersion = data.ReadUInt16(); - debugDirectoryEntry.MinorVersion = data.ReadUInt16(); - debugDirectoryEntry.DebugType = (DebugType)data.ReadUInt32(); - debugDirectoryEntry.SizeOfData = data.ReadUInt32(); - debugDirectoryEntry.AddressOfRawData = data.ReadUInt32(); - debugDirectoryEntry.PointerToRawData = data.ReadUInt32(); - - debugDirectoryTable.Add(debugDirectoryEntry); - } - - debugTable.DebugDirectoryTable = debugDirectoryTable.ToArray(); - - // TODO: Should we read the debug data in? Most of it is unformatted or undocumented - // TODO: Implement .debug$F (Object Only) / IMAGE_DEBUG_TYPE_FPO - - return debugTable; - } - - /// - /// Parse a Stream into a export table - /// - /// Stream to parse - /// Section table to use for virtual address translation - /// Filled export table on success, null on error - private static ExportTable ParseExportTable(Stream data, SectionHeader[] sections) - { - // TODO: Use marshalling here instead of building - var exportTable = new ExportTable(); - - var exportDirectoryTable = new ExportDirectoryTable(); - - exportDirectoryTable.ExportFlags = data.ReadUInt32(); - exportDirectoryTable.TimeDateStamp = data.ReadUInt32(); - exportDirectoryTable.MajorVersion = data.ReadUInt16(); - exportDirectoryTable.MinorVersion = data.ReadUInt16(); - exportDirectoryTable.NameRVA = data.ReadUInt32(); - exportDirectoryTable.OrdinalBase = data.ReadUInt32(); - exportDirectoryTable.AddressTableEntries = data.ReadUInt32(); - exportDirectoryTable.NumberOfNamePointers = data.ReadUInt32(); - exportDirectoryTable.ExportAddressTableRVA = data.ReadUInt32(); - exportDirectoryTable.NamePointerRVA = data.ReadUInt32(); - exportDirectoryTable.OrdinalTableRVA = data.ReadUInt32(); - - exportTable.ExportDirectoryTable = exportDirectoryTable; - - // Name - if (exportDirectoryTable.NameRVA.ConvertVirtualAddress(sections) != 0) - { - uint nameAddress = exportDirectoryTable.NameRVA.ConvertVirtualAddress(sections); - data.Seek(nameAddress, SeekOrigin.Begin); - - string name = data.ReadString(Encoding.ASCII); - exportDirectoryTable.Name = name; - } - - // Address table - if (exportDirectoryTable.AddressTableEntries != 0 && exportDirectoryTable.ExportAddressTableRVA.ConvertVirtualAddress(sections) != 0) - { - uint exportAddressTableAddress = exportDirectoryTable.ExportAddressTableRVA.ConvertVirtualAddress(sections); - data.Seek(exportAddressTableAddress, SeekOrigin.Begin); - - var exportAddressTable = new ExportAddressTableEntry[exportDirectoryTable.AddressTableEntries]; - - for (int i = 0; i < 
exportDirectoryTable.AddressTableEntries; i++) - { - var addressTableEntry = new ExportAddressTableEntry(); - - // TODO: Use the optional header address and length to determine if export or forwarder - addressTableEntry.ExportRVA = data.ReadUInt32(); - addressTableEntry.ForwarderRVA = addressTableEntry.ExportRVA; - - exportAddressTable[i] = addressTableEntry; - } - - exportTable.ExportAddressTable = exportAddressTable; - } - - // Name pointer table - if (exportDirectoryTable.NumberOfNamePointers != 0 && exportDirectoryTable.NamePointerRVA.ConvertVirtualAddress(sections) != 0) - { - uint namePointerTableAddress = exportDirectoryTable.NamePointerRVA.ConvertVirtualAddress(sections); - data.Seek(namePointerTableAddress, SeekOrigin.Begin); - - var namePointerTable = new ExportNamePointerTable(); - - namePointerTable.Pointers = new uint[exportDirectoryTable.NumberOfNamePointers]; - for (int i = 0; i < exportDirectoryTable.NumberOfNamePointers; i++) - { - uint pointer = data.ReadUInt32(); - namePointerTable.Pointers[i] = pointer; - } - - exportTable.NamePointerTable = namePointerTable; - } - - // Ordinal table - if (exportDirectoryTable.NumberOfNamePointers != 0 && exportDirectoryTable.OrdinalTableRVA.ConvertVirtualAddress(sections) != 0) - { - uint ordinalTableAddress = exportDirectoryTable.OrdinalTableRVA.ConvertVirtualAddress(sections); - data.Seek(ordinalTableAddress, SeekOrigin.Begin); - - var exportOrdinalTable = new ExportOrdinalTable(); - - exportOrdinalTable.Indexes = new ushort[exportDirectoryTable.NumberOfNamePointers]; - for (int i = 0; i < exportDirectoryTable.NumberOfNamePointers; i++) - { - ushort pointer = data.ReadUInt16(); - exportOrdinalTable.Indexes[i] = pointer; - } - - exportTable.OrdinalTable = exportOrdinalTable; - } - - // Name table - if (exportDirectoryTable.NumberOfNamePointers != 0 && exportDirectoryTable.NameRVA.ConvertVirtualAddress(sections) != 0) - { - uint nameTableAddress = exportDirectoryTable.NameRVA.ConvertVirtualAddress(sections); - data.Seek(nameTableAddress, SeekOrigin.Begin); - - var exportNameTable = new ExportNameTable(); - - exportNameTable.Strings = new string[exportDirectoryTable.NumberOfNamePointers]; - for (int i = 0; i < exportDirectoryTable.NumberOfNamePointers; i++) - { - string str = data.ReadString(Encoding.ASCII); - exportNameTable.Strings[i] = str; - } - - exportTable.ExportNameTable = exportNameTable; - } - - return exportTable; - } - - /// - /// Parse a Stream into a import table - /// - /// Stream to parse - /// Optional header magic number indicating PE32 or PE32+ - /// Section table to use for virtual address translation - /// Filled import table on success, null on error - private static ImportTable ParseImportTable(Stream data, OptionalHeaderMagicNumber magic, SectionHeader[] sections) - { - // TODO: Use marshalling here instead of building - var importTable = new ImportTable(); - - // Import directory table - var importDirectoryTable = new List(); - - // Loop until the last item (all nulls) are found - while (true) - { - var importDirectoryTableEntry = new ImportDirectoryTableEntry(); - - importDirectoryTableEntry.ImportLookupTableRVA = data.ReadUInt32(); - importDirectoryTableEntry.TimeDateStamp = data.ReadUInt32(); - importDirectoryTableEntry.ForwarderChain = data.ReadUInt32(); - importDirectoryTableEntry.NameRVA = data.ReadUInt32(); - importDirectoryTableEntry.ImportAddressTableRVA = data.ReadUInt32(); - - importDirectoryTable.Add(importDirectoryTableEntry); - - // All zero values means the last entry - if 
(importDirectoryTableEntry.ImportLookupTableRVA == 0 - && importDirectoryTableEntry.TimeDateStamp == 0 - && importDirectoryTableEntry.ForwarderChain == 0 - && importDirectoryTableEntry.NameRVA == 0 - && importDirectoryTableEntry.ImportAddressTableRVA == 0) - break; - } - - importTable.ImportDirectoryTable = importDirectoryTable.ToArray(); - - // Names - for (int i = 0; i < importTable.ImportDirectoryTable.Length; i++) - { - var importDirectoryTableEntry = importTable.ImportDirectoryTable[i]; - if (importDirectoryTableEntry.NameRVA.ConvertVirtualAddress(sections) == 0) - continue; - - uint nameAddress = importDirectoryTableEntry.NameRVA.ConvertVirtualAddress(sections); - data.Seek(nameAddress, SeekOrigin.Begin); - - string name = data.ReadString(Encoding.ASCII); - importDirectoryTableEntry.Name = name; - } - - // Lookup tables - var importLookupTables = new Dictionary(); - - for (int i = 0; i < importTable.ImportDirectoryTable.Length; i++) - { - var importDirectoryTableEntry = importTable.ImportDirectoryTable[i]; - if (importDirectoryTableEntry.ImportLookupTableRVA.ConvertVirtualAddress(sections) == 0) - continue; - - uint tableAddress = importDirectoryTableEntry.ImportLookupTableRVA.ConvertVirtualAddress(sections); - data.Seek(tableAddress, SeekOrigin.Begin); - - var entryLookupTable = new List(); - - while (true) - { - var entryLookupTableEntry = new ImportLookupTableEntry(); - - if (magic == OptionalHeaderMagicNumber.PE32) - { - uint entryValue = data.ReadUInt32(); - entryLookupTableEntry.OrdinalNameFlag = (entryValue & 0x80000000) != 0; - if (entryLookupTableEntry.OrdinalNameFlag) - entryLookupTableEntry.OrdinalNumber = (ushort)(entryValue & ~0x80000000); - else - entryLookupTableEntry.HintNameTableRVA = (uint)(entryValue & ~0x80000000); - } - else if (magic == OptionalHeaderMagicNumber.PE32Plus) - { - ulong entryValue = data.ReadUInt64(); - entryLookupTableEntry.OrdinalNameFlag = (entryValue & 0x8000000000000000) != 0; - if (entryLookupTableEntry.OrdinalNameFlag) - entryLookupTableEntry.OrdinalNumber = (ushort)(entryValue & ~0x8000000000000000); - else - entryLookupTableEntry.HintNameTableRVA = (uint)(entryValue & ~0x8000000000000000); - } - - entryLookupTable.Add(entryLookupTableEntry); - - // All zero values means the last entry - if (entryLookupTableEntry.OrdinalNameFlag == false - && entryLookupTableEntry.OrdinalNumber == 0 - && entryLookupTableEntry.HintNameTableRVA == 0) - break; - } - - importLookupTables[i] = entryLookupTable.ToArray(); - } - - importTable.ImportLookupTables = importLookupTables; - - // Address tables - var importAddressTables = new Dictionary(); - - for (int i = 0; i < importTable.ImportDirectoryTable.Length; i++) - { - var importDirectoryTableEntry = importTable.ImportDirectoryTable[i]; - if (importDirectoryTableEntry.ImportAddressTableRVA.ConvertVirtualAddress(sections) == 0) - continue; - - uint tableAddress = importDirectoryTableEntry.ImportAddressTableRVA.ConvertVirtualAddress(sections); - data.Seek(tableAddress, SeekOrigin.Begin); - - var addressLookupTable = new List(); - - while (true) - { - var addressLookupTableEntry = new ImportAddressTableEntry(); - - if (magic == OptionalHeaderMagicNumber.PE32) - { - uint entryValue = data.ReadUInt32(); - addressLookupTableEntry.OrdinalNameFlag = (entryValue & 0x80000000) != 0; - if (addressLookupTableEntry.OrdinalNameFlag) - addressLookupTableEntry.OrdinalNumber = (ushort)(entryValue & ~0x80000000); - else - addressLookupTableEntry.HintNameTableRVA = (uint)(entryValue & ~0x80000000); - } - else if (magic == 
OptionalHeaderMagicNumber.PE32Plus) - { - ulong entryValue = data.ReadUInt64(); - addressLookupTableEntry.OrdinalNameFlag = (entryValue & 0x8000000000000000) != 0; - if (addressLookupTableEntry.OrdinalNameFlag) - addressLookupTableEntry.OrdinalNumber = (ushort)(entryValue & ~0x8000000000000000); - else - addressLookupTableEntry.HintNameTableRVA = (uint)(entryValue & ~0x8000000000000000); - } - - addressLookupTable.Add(addressLookupTableEntry); - - // All zero values means the last entry - if (addressLookupTableEntry.OrdinalNameFlag == false - && addressLookupTableEntry.OrdinalNumber == 0 - && addressLookupTableEntry.HintNameTableRVA == 0) - break; - } - - importAddressTables[i] = addressLookupTable.ToArray(); - } - - importTable.ImportAddressTables = importAddressTables; - - // Hint/Name table - var importHintNameTable = new List(); - - if ((importTable.ImportLookupTables != null && importTable.ImportLookupTables.Count > 0) - || importTable.ImportAddressTables != null && importTable.ImportAddressTables.Count > 0) - { - // Get the addresses of the hint/name table entries - List hintNameTableEntryAddresses = new List(); - - // If we have import lookup tables - if (importTable.ImportLookupTables != null && importLookupTables.Count > 0) - { - var addresses = importTable.ImportLookupTables - .SelectMany(kvp => kvp.Value) - .Select(ilte => (int)ilte.HintNameTableRVA.ConvertVirtualAddress(sections)); - hintNameTableEntryAddresses.AddRange(addresses); - } - - // If we have import address tables - if (importTable.ImportAddressTables != null && importTable.ImportAddressTables.Count > 0) - { - var addresses = importTable.ImportAddressTables - .SelectMany(kvp => kvp.Value) - .Select(iate => (int)iate.HintNameTableRVA.ConvertVirtualAddress(sections)); - hintNameTableEntryAddresses.AddRange(addresses); - } - - // Sanitize the addresses - hintNameTableEntryAddresses = hintNameTableEntryAddresses.Where(addr => addr != 0) - .Distinct() - .OrderBy(a => a) - .ToList(); - - // If we have any addresses, add them to the table - if (hintNameTableEntryAddresses.Any()) - { - for (int i = 0; i < hintNameTableEntryAddresses.Count; i++) - { - int hintNameTableEntryAddress = hintNameTableEntryAddresses[i]; - data.Seek(hintNameTableEntryAddress, SeekOrigin.Begin); - - var hintNameTableEntry = new HintNameTableEntry(); - - hintNameTableEntry.Hint = data.ReadUInt16(); - hintNameTableEntry.Name = data.ReadString(Encoding.ASCII); - - importHintNameTable.Add(hintNameTableEntry); - } - } - } - - importTable.HintNameTable = importHintNameTable.ToArray(); - - return importTable; - } - - /// - /// Parse a Stream into a resource directory table - /// - /// Stream to parse - /// Initial offset to use in address comparisons - /// Section table to use for virtual address translation - /// Indicates if this is the top level or not - /// Filled resource directory table on success, null on error - private static ResourceDirectoryTable ParseResourceDirectoryTable(Stream data, long initialOffset, SectionHeader[] sections, bool topLevel = false) - { - // TODO: Use marshalling here instead of building - var resourceDirectoryTable = new ResourceDirectoryTable(); - - resourceDirectoryTable.Characteristics = data.ReadUInt32(); - if (resourceDirectoryTable.Characteristics != 0) - return null; - - resourceDirectoryTable.TimeDateStamp = data.ReadUInt32(); - resourceDirectoryTable.MajorVersion = data.ReadUInt16(); - resourceDirectoryTable.MinorVersion = data.ReadUInt16(); - resourceDirectoryTable.NumberOfNameEntries = data.ReadUInt16(); - 
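- // Per the PE format, named entries are listed first in the directory, followed by entries identified by integer ID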
resourceDirectoryTable.NumberOfIDEntries = data.ReadUInt16(); - - // If we have no entries - int totalEntryCount = resourceDirectoryTable.NumberOfNameEntries + resourceDirectoryTable.NumberOfIDEntries; - if (totalEntryCount == 0) - return resourceDirectoryTable; - - // Perform top-level pass of data - resourceDirectoryTable.Entries = new ResourceDirectoryEntry[totalEntryCount]; - for (int i = 0; i < totalEntryCount; i++) - { - var entry = new ResourceDirectoryEntry(); - uint offset = data.ReadUInt32(); - if ((offset & 0x80000000) != 0) - entry.NameOffset = offset & ~0x80000000; - else - entry.IntegerID = offset; - - offset = data.ReadUInt32(); - if ((offset & 0x80000000) != 0) - entry.SubdirectoryOffset = offset & ~0x80000000; - else - entry.DataEntryOffset = offset; - - // Read the name from the offset, if needed - if (entry.NameOffset > 0) - { - long currentOffset = data.Position; - offset = entry.NameOffset + (uint)initialOffset; - data.Seek(offset, SeekOrigin.Begin); - - var resourceDirectoryString = new ResourceDirectoryString(); - - resourceDirectoryString.Length = data.ReadUInt16(); - if (resourceDirectoryString.Length > 0) - resourceDirectoryString.UnicodeString = data.ReadBytes(resourceDirectoryString.Length * 2); - - entry.Name = resourceDirectoryString; - - data.Seek(currentOffset, SeekOrigin.Begin); - } - - resourceDirectoryTable.Entries[i] = entry; - } - - // Loop through and process the entries - foreach (var entry in resourceDirectoryTable.Entries) - { - if (entry.DataEntryOffset > 0) - { - uint offset = entry.DataEntryOffset + (uint)initialOffset; - data.Seek(offset, SeekOrigin.Begin); - - var resourceDataEntry = new ResourceDataEntry(); - resourceDataEntry.DataRVA = data.ReadUInt32(); - resourceDataEntry.Size = data.ReadUInt32(); - resourceDataEntry.Codepage = data.ReadUInt32(); - resourceDataEntry.Reserved = data.ReadUInt32(); - - // Read the data from the offset - offset = resourceDataEntry.DataRVA.ConvertVirtualAddress(sections); - if (offset > 0 && resourceDataEntry.Size > 0) - { - data.Seek(offset, SeekOrigin.Begin); - resourceDataEntry.Data = data.ReadBytes((int)resourceDataEntry.Size); - } - - entry.DataEntry = resourceDataEntry; - } - else if (entry.SubdirectoryOffset > 0) - { - uint offset = entry.SubdirectoryOffset + (uint)initialOffset; - data.Seek(offset, SeekOrigin.Begin); - - entry.Subdirectory = ParseResourceDirectoryTable(data, initialOffset, sections); - } - } - - // If we are not at the top level - if (!topLevel) - return resourceDirectoryTable; - - // If we're not aligned to a section - if (!sections.Any(s => s.PointerToRawData == initialOffset)) - return resourceDirectoryTable; - - // Get the section size - int size = (int)sections.First(s => s.PointerToRawData == initialOffset).SizeOfRawData; - - // Align to the 512-byte boundary, we find the start of an MS-DOS header, or the end of the file - while (data.Position - initialOffset < size && data.Position % 0x200 != 0 && data.Position < data.Length - 1) - { - // If we find the start of an MS-DOS header - if (data.ReadUInt16() == SabreTools.Models.MSDOS.Constants.SignatureUInt16) - { - data.Seek(-2, origin: SeekOrigin.Current); - break; - } - - // Otherwise - data.Seek(-1, origin: SeekOrigin.Current); - } - - // If we have not used up the full size, parse the remaining chunk as a single resource - if (data.Position - initialOffset < size) - { - var localEntries = resourceDirectoryTable.Entries; - Array.Resize(ref localEntries, totalEntryCount + 1); - resourceDirectoryTable.Entries = localEntries; - int 
length = (int)(size - (data.Position - initialOffset)); - - resourceDirectoryTable.Entries[totalEntryCount] = new ResourceDirectoryEntry - { - Name = new ResourceDirectoryString { UnicodeString = Encoding.ASCII.GetBytes("HIDDEN RESOURCE") }, - IntegerID = uint.MaxValue, - DataEntryOffset = (uint)data.Position, - DataEntry = new ResourceDataEntry - { - Size = (uint)length, - Data = data.ReadBytes(length), - Codepage = (uint)Encoding.Unicode.CodePage, - }, - }; - } - - return resourceDirectoryTable; - } - - #endregion - } -} \ No newline at end of file diff --git a/BinaryObjectScanner.Builders/Quantum.cs b/BinaryObjectScanner.Builders/Quantum.cs deleted file mode 100644 index 0526c7d8..00000000 --- a/BinaryObjectScanner.Builders/Quantum.cs +++ /dev/null @@ -1,184 +0,0 @@ -using System.IO; -using System.Text; -using SabreTools.IO; -using SabreTools.Models.Quantum; -using static SabreTools.Models.Quantum.Constants; - -namespace BinaryObjectScanner.Builders -{ - public class Quantum - { - #region Byte Data - - /// - /// Parse a byte array into a Quantum archive - /// - /// Byte array to parse - /// Offset into the byte array - /// Filled archive on success, null on error - public static Archive ParseArchive(byte[] data, int offset) - { - // If the data is invalid - if (data == null) - return null; - - // If the offset is out of bounds - if (offset < 0 || offset >= data.Length) - return null; - - // Create a memory stream and parse that - MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset); - return ParseArchive(dataStream); - } - - #endregion - - #region Stream Data - - /// - /// Parse a Stream into a Quantum archive - /// - /// Stream to parse - /// Filled archive on success, null on error - public static Archive ParseArchive(Stream data) - { - // If the data is invalid - if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead) - return null; - - // If the offset is out of bounds - if (data.Position < 0 || data.Position >= data.Length) - return null; - - // Cache the current offset - int initialOffset = (int)data.Position; - - // Create a new archive to fill - var archive = new Archive(); - - #region Header - - // Try to parse the header - var header = ParseHeader(data); - if (header == null) - return null; - - // Set the archive header - archive.Header = header; - - #endregion - - #region File List - - // If we have any files - if (header.FileCount > 0) - { - var fileDescriptors = new FileDescriptor[header.FileCount]; - - // Read all entries in turn - for (int i = 0; i < header.FileCount; i++) - { - var file = ParseFileDescriptor(data, header.MinorVersion); - if (file == null) - return null; - - fileDescriptors[i] = file; - } - - // Set the file list - archive.FileList = fileDescriptors; - } - - #endregion - - // Cache the compressed data offset - archive.CompressedDataOffset = data.Position; - - return archive; - } - - /// - /// Parse a Stream into a header - /// - /// Stream to parse - /// Filled header on success, null on error - private static Header ParseHeader(Stream data) - { - // TODO: Use marshalling here instead of building - Header header = new Header(); - - byte[] signature = data.ReadBytes(2); - header.Signature = Encoding.ASCII.GetString(signature); - if (header.Signature != SignatureString) - return null; - - header.MajorVersion = data.ReadByteValue(); - header.MinorVersion = data.ReadByteValue(); - header.FileCount = data.ReadUInt16(); - header.TableSize = data.ReadByteValue(); - header.CompressionFlags = data.ReadByteValue(); - - 
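- // As read above, the fixed portion of the header is 8 bytes: a 2-byte signature, major/minor version bytes, a 2-byte file count, the table size, and the compression flags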
return header; - } - - /// - /// Parse a Stream into a file descriptor - /// - /// Stream to parse - /// Minor version of the archive - /// Filled file descriptor on success, null on error - private static FileDescriptor ParseFileDescriptor(Stream data, byte minorVersion) - { - // TODO: Use marshalling here instead of building - FileDescriptor fileDescriptor = new FileDescriptor(); - - fileDescriptor.FileNameSize = ReadVariableLength(data); - if (fileDescriptor.FileNameSize > 0) - { - byte[] fileName = data.ReadBytes(fileDescriptor.FileNameSize); - fileDescriptor.FileName = Encoding.ASCII.GetString(fileName); - } - - fileDescriptor.CommentFieldSize = ReadVariableLength(data); - if (fileDescriptor.CommentFieldSize > 0) - { - byte[] commentField = data.ReadBytes(fileDescriptor.CommentFieldSize); - fileDescriptor.CommentField = Encoding.ASCII.GetString(commentField); - } - - fileDescriptor.ExpandedFileSize = data.ReadUInt32(); - fileDescriptor.FileTime = data.ReadUInt16(); - fileDescriptor.FileDate = data.ReadUInt16(); - - // Hack for unknown format data - if (minorVersion == 22) - fileDescriptor.Unknown = data.ReadUInt16(); - - return fileDescriptor; - } - - /// - /// Parse a Stream into a variable-length size prefix - /// - /// Stream to parse - /// Variable-length size prefix - /// - /// Strings are prefixed with their length. If the length is less than 128 - /// then it is stored directly in one byte. If it is greater than 127 then - /// the high bit of the first byte is set to 1 and the remaining fifteen bits - /// contain the actual length in big-endian format. - /// - private static int ReadVariableLength(Stream data) - { - byte b0 = data.ReadByteValue(); - if (b0 < 0x7F) - return b0; - - b0 &= 0x7F; - byte b1 = data.ReadByteValue(); - return (b0 << 8) | b1; - } - - #endregion - } -} diff --git a/BinaryObjectScanner.Builders/SGA.cs b/BinaryObjectScanner.Builders/SGA.cs deleted file mode 100644 index 26d713cc..00000000 --- a/BinaryObjectScanner.Builders/SGA.cs +++ /dev/null @@ -1,732 +0,0 @@ -using System.Collections.Generic; -using System.IO; -using System.Text; -using SabreTools.IO; -using SabreTools.Models.SGA; -using static SabreTools.Models.SGA.Constants; - -namespace BinaryObjectScanner.Builders -{ - public static class SGA - { - #region Byte Data - - /// - /// Parse a byte array into an SGA - /// - /// Byte array to parse - /// Offset into the byte array - /// Filled SGA on success, null on error - public static SabreTools.Models.SGA.File ParseFile(byte[] data, int offset) - { - // If the data is invalid - if (data == null) - return null; - - // If the offset is out of bounds - if (offset < 0 || offset >= data.Length) - return null; - - // Create a memory stream and parse that - MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset); - return ParseFile(dataStream); - } - - #endregion - - #region Stream Data - - /// - /// Parse a Stream into an SGA - /// - /// Stream to parse - /// Filled SGA on success, null on error - public static SabreTools.Models.SGA.File ParseFile(Stream data) - { - // If the data is invalid - if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead) - return null; - - // If the offset is out of bounds - if (data.Position < 0 || data.Position >= data.Length) - return null; - - // Cache the current offset - long initialOffset = data.Position; - - // Create a new SGA to fill - var file = new SabreTools.Models.SGA.File(); - - #region Header - - // Try to parse the header - var header = ParseHeader(data); - if (header == 
null) - return null; - - // Set the SGA header - file.Header = header; - - #endregion - - #region Directory - - // Try to parse the directory - var directory = ParseDirectory(data, header.MajorVersion); - if (directory == null) - return null; - - // Set the SGA directory - file.Directory = directory; - #endregion - - return file; - } - - /// - /// Parse a Stream into an SGA header - /// - /// Stream to parse - /// Filled SGA header on success, null on error - private static Header ParseHeader(Stream data) - { - // TODO: Use marshalling here instead of building - byte[] signatureBytes = data.ReadBytes(8); - string signature = Encoding.ASCII.GetString(signatureBytes); - if (signature != SignatureString) - return null; - - ushort majorVersion = data.ReadUInt16(); - ushort minorVersion = data.ReadUInt16(); - if (minorVersion != 0) - return null; - - switch (majorVersion) - { - // Versions 4 and 5 share the same header - case 4: - case 5: - Header4 header4 = new Header4(); - - header4.Signature = signature; - header4.MajorVersion = majorVersion; - header4.MinorVersion = minorVersion; - header4.FileMD5 = data.ReadBytes(0x10); - byte[] header4Name = data.ReadBytes(count: 128); - header4.Name = Encoding.Unicode.GetString(header4Name).TrimEnd('\0'); - header4.HeaderMD5 = data.ReadBytes(0x10); - header4.HeaderLength = data.ReadUInt32(); - header4.FileDataOffset = data.ReadUInt32(); - header4.Dummy0 = data.ReadUInt32(); - - return header4; - - // Versions 6 and 7 share the same header - case 6: - case 7: - Header6 header6 = new Header6(); - - header6.Signature = signature; - header6.MajorVersion = majorVersion; - header6.MinorVersion = minorVersion; - byte[] header6Name = data.ReadBytes(count: 128); - header6.Name = Encoding.Unicode.GetString(header6Name).TrimEnd('\0'); - header6.HeaderLength = data.ReadUInt32(); - header6.FileDataOffset = data.ReadUInt32(); - header6.Dummy0 = data.ReadUInt32(); - - return header6; - - // No other major versions are recognized - default: - return null; - } - } - - /// - /// Parse a Stream into an SGA directory - /// - /// Stream to parse - /// SGA major version - /// Filled SGA directory on success, null on error - private static SabreTools.Models.SGA.Directory ParseDirectory(Stream data, ushort majorVersion) - { - #region Directory - - // Create the appropriate type of directory - SabreTools.Models.SGA.Directory directory; - switch (majorVersion) - { - case 4: directory = new Directory4(); break; - case 5: directory = new Directory5(); break; - case 6: directory = new Directory6(); break; - case 7: directory = new Directory7(); break; - default: return null; - } - - #endregion - - // Cache the current offset - long currentOffset = data.Position; - - #region Directory Header - - // Try to parse the directory header - var directoryHeader = ParseDirectoryHeader(data, majorVersion); - if (directoryHeader == null) - return null; - - // Set the directory header - switch (majorVersion) - { - case 4: (directory as Directory4).DirectoryHeader = directoryHeader as DirectoryHeader4; break; - case 5: (directory as Directory5).DirectoryHeader = directoryHeader as DirectoryHeader5; break; - case 6: (directory as Directory6).DirectoryHeader = directoryHeader as DirectoryHeader5; break; - case 7: (directory as Directory7).DirectoryHeader = directoryHeader as DirectoryHeader7; break; - default: return null; - } - - #endregion - - #region Sections - - // Get the sections offset - long sectionOffset; - switch (majorVersion) - { - case 4: sectionOffset = (directoryHeader as 
DirectoryHeader4).SectionOffset; break; - case 5: - case 6: sectionOffset = (directoryHeader as DirectoryHeader5).SectionOffset; break; - case 7: sectionOffset = (directoryHeader as DirectoryHeader7).SectionOffset; break; - default: return null; - } - - // Adjust the sections offset based on the directory - sectionOffset += currentOffset; - - // Validate the offset - if (sectionOffset < 0 || sectionOffset >= data.Length) - return null; - - // Seek to the sections - data.Seek(sectionOffset, SeekOrigin.Begin); - - // Get the section count - uint sectionCount; - switch (majorVersion) - { - case 4: sectionCount = (directoryHeader as DirectoryHeader4).SectionCount; break; - case 5: - case 6: sectionCount = (directoryHeader as DirectoryHeader5).SectionCount; break; - case 7: sectionCount = (directoryHeader as DirectoryHeader7).SectionCount; break; - default: return null; - } - - // Create the sections array - object[] sections; - switch (majorVersion) - { - case 4: sections = new Section4[sectionCount]; break; - case 5: - case 6: - case 7: sections = new Section5[sectionCount]; break; - default: return null; - } - - // Try to parse the sections - for (int i = 0; i < sections.Length; i++) - { - switch (majorVersion) - { - case 4: sections[i] = ParseSection4(data); break; - case 5: - case 6: - case 7: sections[i] = ParseSection5(data); break; - default: return null; - } - } - - // Assign the sections - switch (majorVersion) - { - case 4: (directory as Directory4).Sections = sections as Section4[]; break; - case 5: (directory as Directory5).Sections = sections as Section5[]; break; - case 6: (directory as Directory6).Sections = sections as Section5[]; break; - case 7: (directory as Directory7).Sections = sections as Section5[]; break; - default: return null; - } - - #endregion - - #region Folders - - // Get the folders offset - long folderOffset; - switch (majorVersion) - { - case 4: folderOffset = (directoryHeader as DirectoryHeader4).FolderOffset; break; - case 5: folderOffset = (directoryHeader as DirectoryHeader5).FolderOffset; break; - case 6: folderOffset = (directoryHeader as DirectoryHeader5).FolderOffset; break; - case 7: folderOffset = (directoryHeader as DirectoryHeader7).FolderOffset; break; - default: return null; - } - - // Adjust the folders offset based on the directory - folderOffset += currentOffset; - - // Validate the offset - if (folderOffset < 0 || folderOffset >= data.Length) - return null; - - // Seek to the folders - data.Seek(folderOffset, SeekOrigin.Begin); - - // Get the folder count - uint folderCount; - switch (majorVersion) - { - case 4: folderCount = (directoryHeader as DirectoryHeader4).FolderCount; break; - case 5: folderCount = (directoryHeader as DirectoryHeader5).FolderCount; break; - case 6: folderCount = (directoryHeader as DirectoryHeader5).FolderCount; break; - case 7: folderCount = (directoryHeader as DirectoryHeader7).FolderCount; break; - default: return null; - } - - // Create the folders array - object[] folders; - switch (majorVersion) - { - case 4: folders = new Folder4[folderCount]; break; - case 5: folders = new Folder5[folderCount]; break; - case 6: folders = new Folder5[folderCount]; break; - case 7: folders = new Folder5[folderCount]; break; - default: return null; - } - - // Try to parse the folders - for (int i = 0; i < folders.Length; i++) - { - switch (majorVersion) - { - case 4: folders[i] = ParseFolder4(data); break; - case 5: folders[i] = ParseFolder5(data); break; - case 6: folders[i] = ParseFolder5(data); break; - case 7: folders[i] = 
ParseFolder5(data); break; - default: return null; - } - } - - // Assign the folders - switch (majorVersion) - { - case 4: (directory as Directory4).Folders = folders as Folder4[]; break; - case 5: (directory as Directory5).Folders = folders as Folder5[]; break; - case 6: (directory as Directory6).Folders = folders as Folder5[]; break; - case 7: (directory as Directory7).Folders = folders as Folder5[]; break; - default: return null; - } - - #endregion - - #region Files - - // Get the files offset - long fileOffset; - switch (majorVersion) - { - case 4: fileOffset = (directoryHeader as DirectoryHeader4).FileOffset; break; - case 5: fileOffset = (directoryHeader as DirectoryHeader5).FileOffset; break; - case 6: fileOffset = (directoryHeader as DirectoryHeader5).FileOffset; break; - case 7: fileOffset = (directoryHeader as DirectoryHeader7).FileOffset; break; - default: return null; - } - - // Adjust the files offset based on the directory - fileOffset += currentOffset; - - // Validate the offset - if (fileOffset < 0 || fileOffset >= data.Length) - return null; - - // Seek to the files - data.Seek(fileOffset, SeekOrigin.Begin); - - // Get the file count - uint fileCount; - switch (majorVersion) - { - case 4: fileCount = (directoryHeader as DirectoryHeader4).FileCount; break; - case 5: fileCount = (directoryHeader as DirectoryHeader5).FileCount; break; - case 6: fileCount = (directoryHeader as DirectoryHeader5).FileCount; break; - case 7: fileCount = (directoryHeader as DirectoryHeader7).FileCount; break; - default: return null; - } - - // Create the files array - object[] files; - switch (majorVersion) - { - case 4: files = new File4[fileCount]; break; - case 5: files = new File4[fileCount]; break; - case 6: files = new File6[fileCount]; break; - case 7: files = new File7[fileCount]; break; - default: return null; - } - - // Try to parse the files - for (int i = 0; i < files.Length; i++) - { - switch (majorVersion) - { - case 4: files[i] = ParseFile4(data); break; - case 5: files[i] = ParseFile4(data); break; - case 6: files[i] = ParseFile6(data); break; - case 7: files[i] = ParseFile7(data); break; - default: return null; - } - } - - // Assign the files - switch (majorVersion) - { - case 4: (directory as Directory4).Files = files as File4[]; break; - case 5: (directory as Directory5).Files = files as File4[]; break; - case 6: (directory as Directory6).Files = files as File6[]; break; - case 7: (directory as Directory7).Files = files as File7[]; break; - default: return null; - } - - #endregion - - #region String Table - - // Get the string table offset - long stringTableOffset; - switch (majorVersion) - { - case 4: stringTableOffset = (directoryHeader as DirectoryHeader4).StringTableOffset; break; - case 5: stringTableOffset = (directoryHeader as DirectoryHeader5).StringTableOffset; break; - case 6: stringTableOffset = (directoryHeader as DirectoryHeader5).StringTableOffset; break; - case 7: stringTableOffset = (directoryHeader as DirectoryHeader7).StringTableOffset; break; - default: return null; - } - - // Adjust the string table offset based on the directory - stringTableOffset += currentOffset; - - // Validate the offset - if (stringTableOffset < 0 || stringTableOffset >= data.Length) - return null; - - // Seek to the string table - data.Seek(stringTableOffset, SeekOrigin.Begin); - - // Get the string table count - uint stringCount; - switch (majorVersion) - { - case 4: stringCount = (directoryHeader as DirectoryHeader4).StringTableCount; break; - case 5: stringCount = (directoryHeader 
as DirectoryHeader5).StringTableCount; break; - case 6: stringCount = (directoryHeader as DirectoryHeader5).StringTableCount; break; - case 7: stringCount = (directoryHeader as DirectoryHeader7).StringTableCount; break; - default: return null; - } - - // TODO: Are these strings actually indexed by number and not position? - // TODO: If indexed by position, I think it needs to be adjusted by start of table - - // Create the strings dictionary - Dictionary strings = new Dictionary((int)stringCount); - - // Get the current position to adjust the offsets - long stringTableStart = data.Position; - - // Try to parse the strings - for (int i = 0; i < stringCount; i++) - { - long currentPosition = data.Position - stringTableStart; - strings[currentPosition] = data.ReadString(Encoding.ASCII); - } - - // Assign the files - switch (majorVersion) - { - case 4: (directory as Directory4).StringTable = strings; break; - case 5: (directory as Directory5).StringTable = strings; break; - case 6: (directory as Directory6).StringTable = strings; break; - case 7: (directory as Directory7).StringTable = strings; break; - default: return null; - } - - // Loop through all folders to assign names - for (int i = 0; i < folderCount; i++) - { - switch (majorVersion) - { - case 4: (directory as Directory4).Folders[i].Name = strings[(directory as Directory4).Folders[i].NameOffset]; break; - case 5: (directory as Directory5).Folders[i].Name = strings[(directory as Directory5).Folders[i].NameOffset]; break; - case 6: (directory as Directory6).Folders[i].Name = strings[(directory as Directory6).Folders[i].NameOffset]; break; - case 7: (directory as Directory7).Folders[i].Name = strings[(directory as Directory7).Folders[i].NameOffset]; break; - default: return null; - } - } - - // Loop through all files to assign names - for (int i = 0; i < fileCount; i++) - { - switch (majorVersion) - { - case 4: (directory as Directory4).Files[i].Name = strings[(directory as Directory4).Files[i].NameOffset]; break; - case 5: (directory as Directory5).Files[i].Name = strings[(directory as Directory5).Files[i].NameOffset]; break; - case 6: (directory as Directory6).Files[i].Name = strings[(directory as Directory6).Files[i].NameOffset]; break; - case 7: (directory as Directory7).Files[i].Name = strings[(directory as Directory7).Files[i].NameOffset]; break; - default: return null; - } - } - - #endregion - - return directory; - } - - /// - /// Parse a Stream into an SGA directory header - /// - /// Stream to parse - /// SGA major version - /// Filled SGA directory header on success, null on error - private static object ParseDirectoryHeader(Stream data, ushort majorVersion) - { - switch (majorVersion) - { - case 4: return ParseDirectory4Header(data); - case 5: return ParseDirectory5Header(data); - case 6: return ParseDirectory5Header(data); - case 7: return ParseDirectory7Header(data); - default: return null; - } - } - - /// - /// Parse a Stream into an SGA directory header version 4 - /// - /// Stream to parse - /// Filled SGA directory header version 4 on success, null on error - private static DirectoryHeader4 ParseDirectory4Header(Stream data) - { - DirectoryHeader4 directoryHeader4 = new DirectoryHeader4(); - - directoryHeader4.SectionOffset = data.ReadUInt32(); - directoryHeader4.SectionCount = data.ReadUInt16(); - directoryHeader4.FolderOffset = data.ReadUInt32(); - directoryHeader4.FolderCount = data.ReadUInt16(); - directoryHeader4.FileOffset = data.ReadUInt32(); - directoryHeader4.FileCount = data.ReadUInt16(); - 
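// Note the field widths here: the version 4 directory header pairs 32-bit
// offsets with 16-bit counts, whereas the version 5 and 7 headers parsed
// below widen every count to 32 bits as well.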
directoryHeader4.StringTableOffset = data.ReadUInt32(); - directoryHeader4.StringTableCount = data.ReadUInt16(); - - return directoryHeader4; - } - - /// - /// Parse a Stream into an SGA directory header version 5 - /// - /// Stream to parse - /// Filled SGA directory header version 5 on success, null on error - private static DirectoryHeader5 ParseDirectory5Header(Stream data) - { - DirectoryHeader5 directoryHeader5 = new DirectoryHeader5(); - - directoryHeader5.SectionOffset = data.ReadUInt32(); - directoryHeader5.SectionCount = data.ReadUInt32(); - directoryHeader5.FolderOffset = data.ReadUInt32(); - directoryHeader5.FolderCount = data.ReadUInt32(); - directoryHeader5.FileOffset = data.ReadUInt32(); - directoryHeader5.FileCount = data.ReadUInt32(); - directoryHeader5.StringTableOffset = data.ReadUInt32(); - directoryHeader5.StringTableCount = data.ReadUInt32(); - - return directoryHeader5; - } - - /// - /// Parse a Stream into an SGA directory header version 7 - /// - /// Stream to parse - /// Filled SGA directory header version 7 on success, null on error - private static DirectoryHeader7 ParseDirectory7Header(Stream data) - { - DirectoryHeader7 directoryHeader7 = new DirectoryHeader7(); - - directoryHeader7.SectionOffset = data.ReadUInt32(); - directoryHeader7.SectionCount = data.ReadUInt32(); - directoryHeader7.FolderOffset = data.ReadUInt32(); - directoryHeader7.FolderCount = data.ReadUInt32(); - directoryHeader7.FileOffset = data.ReadUInt32(); - directoryHeader7.FileCount = data.ReadUInt32(); - directoryHeader7.StringTableOffset = data.ReadUInt32(); - directoryHeader7.StringTableCount = data.ReadUInt32(); - directoryHeader7.HashTableOffset = data.ReadUInt32(); - directoryHeader7.BlockSize = data.ReadUInt32(); - - return directoryHeader7; - } - - /// - /// Parse a Stream into an SGA section version 4 - /// - /// Stream to parse - /// SGA major version - /// Filled SGA section version 4 on success, null on error - private static Section4 ParseSection4(Stream data) - { - Section4 section4 = new Section4(); - - byte[] section4Alias = data.ReadBytes(count: 64); - section4.Alias = Encoding.ASCII.GetString(section4Alias).TrimEnd('\0'); - byte[] section4Name = data.ReadBytes(64); - section4.Name = Encoding.ASCII.GetString(section4Name).TrimEnd('\0'); - section4.FolderStartIndex = data.ReadUInt16(); - section4.FolderEndIndex = data.ReadUInt16(); - section4.FileStartIndex = data.ReadUInt16(); - section4.FileEndIndex = data.ReadUInt16(); - section4.FolderRootIndex = data.ReadUInt16(); - - return section4; - } - - /// - /// Parse a Stream into an SGA section version 5 - /// - /// Stream to parse - /// SGA major version - /// Filled SGA section version 5 on success, null on error - private static Section5 ParseSection5(Stream data) - { - Section5 section5 = new Section5(); - - byte[] section5Alias = data.ReadBytes(count: 64); - section5.Alias = Encoding.ASCII.GetString(section5Alias).TrimEnd('\0'); - byte[] section5Name = data.ReadBytes(64); - section5.Name = Encoding.ASCII.GetString(section5Name).TrimEnd('\0'); - section5.FolderStartIndex = data.ReadUInt32(); - section5.FolderEndIndex = data.ReadUInt32(); - section5.FileStartIndex = data.ReadUInt32(); - section5.FileEndIndex = data.ReadUInt32(); - section5.FolderRootIndex = data.ReadUInt32(); - - return section5; - } - - /// - /// Parse a Stream into an SGA folder version 4 - /// - /// Stream to parse - /// SGA major version - /// Filled SGA folder version 4 on success, null on error - private static Folder4 ParseFolder4(Stream data) - { - 
Folder4 folder4 = new Folder4(); - - folder4.NameOffset = data.ReadUInt32(); - folder4.Name = null; // Read from string table - folder4.FolderStartIndex = data.ReadUInt16(); - folder4.FolderEndIndex = data.ReadUInt16(); - folder4.FileStartIndex = data.ReadUInt16(); - folder4.FileEndIndex = data.ReadUInt16(); - - return folder4; - } - - /// - /// Parse a Stream into an SGA folder version 5 - /// - /// Stream to parse - /// SGA major version - /// Filled SGA folder version 5 on success, null on error - private static Folder5 ParseFolder5(Stream data) - { - Folder5 folder5 = new Folder5(); - - folder5.NameOffset = data.ReadUInt32(); - folder5.Name = null; // Read from string table - folder5.FolderStartIndex = data.ReadUInt32(); - folder5.FolderEndIndex = data.ReadUInt32(); - folder5.FileStartIndex = data.ReadUInt32(); - folder5.FileEndIndex = data.ReadUInt32(); - - return folder5; - } - - /// - /// Parse a Stream into an SGA file version 4 - /// - /// Stream to parse - /// SGA major version - /// Filled SGA file version 4 on success, null on error - private static File4 ParseFile4(Stream data) - { - File4 file4 = new File4(); - - file4.NameOffset = data.ReadUInt32(); - file4.Name = null; // Read from string table - file4.Offset = data.ReadUInt32(); - file4.SizeOnDisk = data.ReadUInt32(); - file4.Size = data.ReadUInt32(); - file4.TimeModified = data.ReadUInt32(); - file4.Dummy0 = data.ReadByteValue(); - file4.Type = data.ReadByteValue(); - - return file4; - } - - /// - /// Parse a Stream into an SGA file version 6 - /// - /// Stream to parse - /// SGA major version - /// Filled SGA file version 6 on success, null on error - private static File6 ParseFile6(Stream data) - { - File6 file6 = new File6(); - - file6.NameOffset = data.ReadUInt32(); - file6.Name = null; // Read from string table - file6.Offset = data.ReadUInt32(); - file6.SizeOnDisk = data.ReadUInt32(); - file6.Size = data.ReadUInt32(); - file6.TimeModified = data.ReadUInt32(); - file6.Dummy0 = data.ReadByteValue(); - file6.Type = data.ReadByteValue(); - file6.CRC32 = data.ReadUInt32(); - - return file6; - } - - /// - /// Parse a Stream into an SGA file version 7 - /// - /// Stream to parse - /// SGA major version - /// Filled SGA file version 7 on success, null on error - private static File7 ParseFile7(Stream data) - { - File7 file7 = new File7(); - - file7.NameOffset = data.ReadUInt32(); - file7.Name = null; // Read from string table - file7.Offset = data.ReadUInt32(); - file7.SizeOnDisk = data.ReadUInt32(); - file7.Size = data.ReadUInt32(); - file7.TimeModified = data.ReadUInt32(); - file7.Dummy0 = data.ReadByteValue(); - file7.Type = data.ReadByteValue(); - file7.CRC32 = data.ReadUInt32(); - file7.HashOffset = data.ReadUInt32(); - - return file7; - } - - #endregion - } -} diff --git a/BinaryObjectScanner.Builders/VBSP.cs b/BinaryObjectScanner.Builders/VBSP.cs deleted file mode 100644 index c4ca53ec..00000000 --- a/BinaryObjectScanner.Builders/VBSP.cs +++ /dev/null @@ -1,141 +0,0 @@ -using System.IO; -using System.Text; -using SabreTools.IO; -using SabreTools.Models.VBSP; -using static SabreTools.Models.VBSP.Constants; - -namespace BinaryObjectScanner.Builders -{ - public static class VBSP - { - #region Byte Data - - /// - /// Parse a byte array into a Half-Life 2 Level - /// - /// Byte array to parse - /// Offset into the byte array - /// Filled Half-Life 2 Level on success, null on error - public static SabreTools.Models.VBSP.File ParseFile(byte[] data, int offset) - { - // If the data is invalid - if (data == null) - return 
null; - - // If the offset is out of bounds - if (offset < 0 || offset >= data.Length) - return null; - - // Create a memory stream and parse that - MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset); - return ParseFile(dataStream); - } - - #endregion - - #region Stream Data - - /// - /// Parse a Stream into a Half-Life 2 Level - /// - /// Stream to parse - /// Filled Half-Life 2 Level on success, null on error - public static SabreTools.Models.VBSP.File ParseFile(Stream data) - { - // If the data is invalid - if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead) - return null; - - // If the offset is out of bounds - if (data.Position < 0 || data.Position >= data.Length) - return null; - - // Cache the current offset - long initialOffset = data.Position; - - // Create a new Half-Life 2 Level to fill - var file = new SabreTools.Models.VBSP.File(); - - #region Header - - // Try to parse the header - var header = ParseHeader(data); - if (header == null) - return null; - - // Set the package header - file.Header = header; - - #endregion - - return file; - } - - /// - /// Parse a Stream into a Half-Life 2 Level header - /// - /// Stream to parse - /// Filled Half-Life 2 Level header on success, null on error - private static Header ParseHeader(Stream data) - { - // TODO: Use marshalling here instead of building - Header header = new Header(); - - byte[] signature = data.ReadBytes(4); - header.Signature = Encoding.ASCII.GetString(signature); - if (header.Signature != SignatureString) - return null; - - header.Version = data.ReadInt32(); - if ((header.Version < 19 || header.Version > 22) && header.Version != 0x00040014) - return null; - - header.Lumps = new Lump[HL_VBSP_LUMP_COUNT]; - for (int i = 0; i < HL_VBSP_LUMP_COUNT; i++) - { - header.Lumps[i] = ParseLump(data, header.Version); - } - - header.MapRevision = data.ReadInt32(); - - return header; - } - - /// - /// Parse a Stream into a Half-Life 2 Level lump - /// - /// Stream to parse - /// VBSP version - /// Filled Half-Life 2 Level lump on success, null on error - private static Lump ParseLump(Stream data, int version) - { - // TODO: Use marshalling here instead of building - Lump lump = new Lump(); - - lump.Offset = data.ReadUInt32(); - lump.Length = data.ReadUInt32(); - lump.Version = data.ReadUInt32(); - lump.FourCC = new char[4]; - for (int i = 0; i < 4; i++) - { - lump.FourCC[i] = (char)data.ReadByte(); - } - - // This block was commented out because test VBSPs with header - // version 21 had the values in the "right" order already and - // were causing decompression issues - - //if (version >= 21 && version != 0x00040014) - //{ - // uint temp = lump.Version; - // lump.Version = lump.Offset; - // lump.Offset = lump.Length; - // lump.Length = temp; - //} - - return lump; - } - - #endregion - } -} diff --git a/BinaryObjectScanner.Builders/VPK.cs b/BinaryObjectScanner.Builders/VPK.cs deleted file mode 100644 index 4a4087dd..00000000 --- a/BinaryObjectScanner.Builders/VPK.cs +++ /dev/null @@ -1,318 +0,0 @@ -using System.Collections.Generic; -using System.IO; -using System.Text; -using SabreTools.IO; -using SabreTools.Models.VPK; -using static SabreTools.Models.VPK.Constants; - -namespace BinaryObjectScanner.Builders -{ - public static class VPK - { - #region Byte Data - - /// - /// Parse a byte array into a Valve Package - /// - /// Byte array to parse - /// Offset into the byte array - /// Filled Valve Package on success, null on error - public static SabreTools.Models.VPK.File 
ParseFile(byte[] data, int offset) - { - // If the data is invalid - if (data == null) - return null; - - // If the offset is out of bounds - if (offset < 0 || offset >= data.Length) - return null; - - // Create a memory stream and parse that - MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset); - return ParseFile(dataStream); - } - - #endregion - - #region Stream Data - - /// - /// Parse a Stream into a Valve Package - /// - /// Stream to parse - /// Filled Valve Package on success, null on error - public static SabreTools.Models.VPK.File ParseFile(Stream data) - { - // If the data is invalid - if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead) - return null; - - // If the offset is out of bounds - if (data.Position < 0 || data.Position >= data.Length) - return null; - - // Cache the current offset - long initialOffset = data.Position; - - // Create a new Valve Package to fill - var file = new SabreTools.Models.VPK.File(); - - #region Header - - // Try to parse the header - // The original version had no signature. - var header = ParseHeader(data); - - // Set the package header - file.Header = header; - - #endregion - - #region Extended Header - - if (header?.Version == 2) - { - // Try to parse the extended header - var extendedHeader = ParseExtendedHeader(data); - if (extendedHeader == null) - return null; - - // Set the package extended header - file.ExtendedHeader = extendedHeader; - } - - #endregion - - #region Directory Items - - // Create the directory items tree - var directoryItems = ParseDirectoryItemTree(data); - - // Set the directory items - file.DirectoryItems = directoryItems; - - #endregion - - #region Archive Hashes - - if (header?.Version == 2 && file.ExtendedHeader != null && file.ExtendedHeader.ArchiveHashLength > 0) - { - // Create the archive hashes list - var archiveHashes = new List(); - - // Cache the current offset - initialOffset = data.Position; - - // Try to parse the directory items - while (data.Position < initialOffset + file.ExtendedHeader.ArchiveHashLength) - { - var archiveHash = ParseArchiveHash(data); - archiveHashes.Add(archiveHash); - } - - file.ArchiveHashes = archiveHashes.ToArray(); - } - - #endregion - - return file; - } - - /// - /// Parse a Stream into a Valve Package header - /// - /// Stream to parse - /// Filled Valve Package header on success, null on error - private static Header ParseHeader(Stream data) - { - // TODO: Use marshalling here instead of building - Header header = new Header(); - - header.Signature = data.ReadUInt32(); - if (header.Signature != SignatureUInt32) - return null; - - header.Version = data.ReadUInt32(); - if (header.Version > 2) - return null; - - header.DirectoryLength = data.ReadUInt32(); - - return header; - } - - /// - /// Parse a Stream into a Valve Package extended header - /// - /// Stream to parse - /// Filled Valve Package extended header on success, null on error - private static ExtendedHeader ParseExtendedHeader(Stream data) - { - // TODO: Use marshalling here instead of building - ExtendedHeader extendedHeader = new ExtendedHeader(); - - extendedHeader.Dummy0 = data.ReadUInt32(); - extendedHeader.ArchiveHashLength = data.ReadUInt32(); - extendedHeader.ExtraLength = data.ReadUInt32(); - extendedHeader.Dummy1 = data.ReadUInt32(); - - return extendedHeader; - } - - /// - /// Parse a Stream into a Valve Package archive hash - /// - /// Stream to parse - /// Filled Valve Package archive hash on success, null on error - private static ArchiveHash 
ParseArchiveHash(Stream data) - { - // TODO: Use marshalling here instead of building - ArchiveHash archiveHash = new ArchiveHash(); - - archiveHash.ArchiveIndex = data.ReadUInt32(); - archiveHash.ArchiveOffset = data.ReadUInt32(); - archiveHash.Length = data.ReadUInt32(); - archiveHash.Hash = data.ReadBytes(0x10); - - return archiveHash; - } - - /// - /// Parse a Stream into a Valve Package directory item tree - /// - /// Stream to parse - /// Filled Valve Package directory item tree on success, null on error - private static DirectoryItem[] ParseDirectoryItemTree(Stream data) - { - // Create the directory items list - var directoryItems = new List(); - - while (true) - { - // Get the extension - string extensionString = data.ReadString(Encoding.ASCII); - if (string.IsNullOrEmpty(extensionString)) - break; - - // Sanitize the extension - for (int i = 0; i < 0x20; i++) - { - extensionString = extensionString.Replace($"{(char)i}", string.Empty); - } - - while (true) - { - // Get the path - string pathString = data.ReadString(Encoding.ASCII); - if (string.IsNullOrEmpty(pathString)) - break; - - // Sanitize the path - for (int i = 0; i < 0x20; i++) - { - pathString = pathString.Replace($"{(char)i}", string.Empty); - } - - while (true) - { - // Get the name - string nameString = data.ReadString(Encoding.ASCII); - if (string.IsNullOrEmpty(nameString)) - break; - - // Sanitize the name - for (int i = 0; i < 0x20; i++) - { - nameString = nameString.Replace($"{(char)i}", string.Empty); - } - - // Get the directory item - var directoryItem = ParseDirectoryItem(data, extensionString, pathString, nameString); - - // Add the directory item - directoryItems.Add(directoryItem); - } - } - } - - return directoryItems.ToArray(); - } - - /// - /// Parse a Stream into a Valve Package directory item - /// - /// Stream to parse - /// Filled Valve Package directory item on success, null on error - private static DirectoryItem ParseDirectoryItem(Stream data, string extension, string path, string name) - { - DirectoryItem directoryItem = new DirectoryItem(); - - directoryItem.Extension = extension; - directoryItem.Path = path; - directoryItem.Name = name; - - // Get the directory entry - var directoryEntry = ParseDirectoryEntry(data); - - // Set the directory entry - directoryItem.DirectoryEntry = directoryEntry; - - // Get the preload data pointer - long preloadDataPointer = -1; int preloadDataLength = -1; - if (directoryEntry.ArchiveIndex == HL_VPK_NO_ARCHIVE && directoryEntry.EntryLength > 0) - { - preloadDataPointer = directoryEntry.EntryOffset; - preloadDataLength = (int)directoryEntry.EntryLength; - } - else if (directoryEntry.PreloadBytes > 0) - { - preloadDataPointer = data.Position; - preloadDataLength = directoryEntry.PreloadBytes; - } - - // If we had a valid preload data pointer - byte[] preloadData = null; - if (preloadDataPointer >= 0 && preloadDataLength > 0) - { - // Cache the current offset - long initialOffset = data.Position; - - // Seek to the preload data offset - data.Seek(preloadDataPointer, SeekOrigin.Begin); - - // Read the preload data - preloadData = data.ReadBytes(preloadDataLength); - - // Seek back to the original offset - data.Seek(initialOffset, SeekOrigin.Begin); - } - - // Set the preload data - directoryItem.PreloadData = preloadData; - - return directoryItem; - } - - /// - /// Parse a Stream into a Valve Package directory entry - /// - /// Stream to parse - /// Filled Valve Package directory entry on success, null on error - private static DirectoryEntry 
ParseDirectoryEntry(Stream data) - { - // TODO: Use marshalling here instead of building - DirectoryEntry directoryEntry = new DirectoryEntry(); - - directoryEntry.CRC = data.ReadUInt32(); - directoryEntry.PreloadBytes = data.ReadUInt16(); - directoryEntry.ArchiveIndex = data.ReadUInt16(); - directoryEntry.EntryOffset = data.ReadUInt32(); - directoryEntry.EntryLength = data.ReadUInt32(); - directoryEntry.Dummy0 = data.ReadUInt16(); - - return directoryEntry; - } - - #endregion - } -} diff --git a/BinaryObjectScanner.Builders/WAD.cs b/BinaryObjectScanner.Builders/WAD.cs deleted file mode 100644 index 374194a6..00000000 --- a/BinaryObjectScanner.Builders/WAD.cs +++ /dev/null @@ -1,266 +0,0 @@ -using System.IO; -using System.Text; -using SabreTools.IO; -using SabreTools.Models.WAD; -using static SabreTools.Models.WAD.Constants; - -namespace BinaryObjectScanner.Builders -{ - public static class WAD - { - #region Byte Data - - /// - /// Parse a byte array into a Half-Life Texture Package - /// - /// Byte array to parse - /// Offset into the byte array - /// Filled Half-Life Texture Package on success, null on error - public static SabreTools.Models.WAD.File ParseFile(byte[] data, int offset) - { - // If the data is invalid - if (data == null) - return null; - - // If the offset is out of bounds - if (offset < 0 || offset >= data.Length) - return null; - - // Create a memory stream and parse that - MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset); - return ParseFile(dataStream); - } - - #endregion - - #region Stream Data - - /// - /// Parse a Stream into a Half-Life Texture Package - /// - /// Stream to parse - /// Filled Half-Life Texture Package on success, null on error - public static SabreTools.Models.WAD.File ParseFile(Stream data) - { - // If the data is invalid - if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead) - return null; - - // If the offset is out of bounds - if (data.Position < 0 || data.Position >= data.Length) - return null; - - // Cache the current offset - long initialOffset = data.Position; - - // Create a new Half-Life Texture Package to fill - var file = new SabreTools.Models.WAD.File(); - - #region Header - - // Try to parse the header - var header = ParseHeader(data); - if (header == null) - return null; - - // Set the package header - file.Header = header; - - #endregion - - #region Lumps - - // Get the lump offset - uint lumpOffset = header.LumpOffset; - if (lumpOffset < 0 || lumpOffset >= data.Length) - return null; - - // Seek to the lump offset - data.Seek(lumpOffset, SeekOrigin.Begin); - - // Create the lump array - file.Lumps = new Lump[header.LumpCount]; - for (int i = 0; i < header.LumpCount; i++) - { - var lump = ParseLump(data); - file.Lumps[i] = lump; - } - - #endregion - - #region Lump Infos - - // Create the lump info array - file.LumpInfos = new LumpInfo[header.LumpCount]; - for (int i = 0; i < header.LumpCount; i++) - { - var lump = file.Lumps[i]; - if (lump.Compression != 0) - { - file.LumpInfos[i] = null; - continue; - } - - // Get the lump info offset - uint lumpInfoOffset = lump.Offset; - if (lumpInfoOffset < 0 || lumpInfoOffset >= data.Length) - { - file.LumpInfos[i] = null; - continue; - } - - // Seek to the lump info offset - data.Seek(lumpInfoOffset, SeekOrigin.Begin); - - // Try to parse the lump info -- TODO: Do we ever set the mipmap level? 
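// ParseLumpInfo is only ever invoked here with its default mipmap level of 0,
// so this initial pass only attempts to read the base (full-resolution) image
// for each uncompressed lump.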
- var lumpInfo = ParseLumpInfo(data, lump.Type); - file.LumpInfos[i] = lumpInfo; - } - - #endregion - - return file; - } - - /// - /// Parse a Stream into a Half-Life Texture Package header - /// - /// Stream to parse - /// Filled Half-Life Texture Package header on success, null on error - private static Header ParseHeader(Stream data) - { - // TODO: Use marshalling here instead of building - Header header = new Header(); - - byte[] signature = data.ReadBytes(4); - header.Signature = Encoding.ASCII.GetString(signature); - if (header.Signature != SignatureString) - return null; - - header.LumpCount = data.ReadUInt32(); - header.LumpOffset = data.ReadUInt32(); - - return header; - } - - /// - /// Parse a Stream into a Half-Life Texture Package lump - /// - /// Stream to parse - /// Filled Half-Life Texture Package lump on success, null on error - private static Lump ParseLump(Stream data) - { - // TODO: Use marshalling here instead of building - Lump lump = new Lump(); - - lump.Offset = data.ReadUInt32(); - lump.DiskLength = data.ReadUInt32(); - lump.Length = data.ReadUInt32(); - lump.Type = data.ReadByteValue(); - lump.Compression = data.ReadByteValue(); - lump.Padding0 = data.ReadByteValue(); - lump.Padding1 = data.ReadByteValue(); - byte[] name = data.ReadBytes(16); - lump.Name = Encoding.ASCII.GetString(name).TrimEnd('\0'); - - return lump; - } - - /// - /// Parse a Stream into a Half-Life Texture Package lump info - /// - /// Stream to parse - /// Lump type - /// Mipmap level - /// Filled Half-Life Texture Package lump info on success, null on error - private static LumpInfo ParseLumpInfo(Stream data, byte type, uint mipmap = 0) - { - // TODO: Use marshalling here instead of building - LumpInfo lumpInfo = new LumpInfo(); - - // Cache the initial offset - long initialOffset = data.Position; - - // Type 0x42 has no name, type 0x43 does. Are these flags? - if (type == 0x42) - { - if (mipmap > 0) - return null; - - lumpInfo.Width = data.ReadUInt32(); - lumpInfo.Height = data.ReadUInt32(); - lumpInfo.PixelData = data.ReadBytes((int)(lumpInfo.Width * lumpInfo.Height)); - lumpInfo.PaletteSize = data.ReadUInt16(); - } - else if (type == 0x43) - { - if (mipmap > 3) - return null; - - byte[] name = data.ReadBytes(16); - lumpInfo.Name = Encoding.ASCII.GetString(name); - lumpInfo.Width = data.ReadUInt32(); - lumpInfo.Height = data.ReadUInt32(); - lumpInfo.PixelOffset = data.ReadUInt32(); - _ = data.ReadBytes(12); // Unknown data - - // Cache the current offset - long currentOffset = data.Position; - - // Seek to the pixel data - data.Seek(initialOffset + lumpInfo.PixelOffset, SeekOrigin.Begin); - - // Read the pixel data - lumpInfo.PixelData = data.ReadBytes((int)(lumpInfo.Width * lumpInfo.Height)); - - // Seek back to the offset - data.Seek(currentOffset, SeekOrigin.Begin); - - uint pixelSize = lumpInfo.Width * lumpInfo.Height; - - // Mipmap data -- TODO: How do we determine this during initial parsing? 
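// Each mipmap level after the base image quarters the pixel count (width and
// height are both halved), so the cases below skip the cumulative pixel data of
// every level before the requested one: level 0 is Width * Height bytes, level
// 1 a quarter of that, and so on. As written, the default mipmap level of 0
// falls through to the default case and returns null, so a completed type 0x43
// lump info is only produced for explicit levels 1 through 3.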
- switch (mipmap) - { - case 1: _ = data.ReadBytes((int)pixelSize); break; - case 2: _ = data.ReadBytes((int)(pixelSize + (pixelSize / 4))); break; - case 3: _ = data.ReadBytes((int)(pixelSize + (pixelSize / 4) + (pixelSize / 16))); break; - default: return null; - } - - _ = data.ReadBytes((int)(pixelSize + (pixelSize / 4) + (pixelSize / 16) + (pixelSize / 64))); // Pixel data - lumpInfo.PaletteSize = data.ReadUInt16(); - lumpInfo.PaletteData = data.ReadBytes((int)lumpInfo.PaletteSize * 3); - } - else - { - return null; - } - - // Adjust based on mipmap level - switch (mipmap) - { - case 1: - lumpInfo.Width /= 2; - lumpInfo.Height /= 2; - break; - - case 2: - lumpInfo.Width /= 4; - lumpInfo.Height /= 4; - break; - - case 3: - lumpInfo.Width /= 8; - lumpInfo.Height /= 8; - break; - - default: - return null; - } - - return lumpInfo; - } - - #endregion - } -} diff --git a/BinaryObjectScanner.Builders/XZP.cs b/BinaryObjectScanner.Builders/XZP.cs deleted file mode 100644 index 87fcc827..00000000 --- a/BinaryObjectScanner.Builders/XZP.cs +++ /dev/null @@ -1,274 +0,0 @@ -using System.IO; -using System.Text; -using SabreTools.IO; -using SabreTools.Models.XZP; -using static SabreTools.Models.XZP.Constants; - -namespace BinaryObjectScanner.Builders -{ - public static class XZP - { - #region Byte Data - - /// - /// Parse a byte array into a XBox Package File - /// - /// Byte array to parse - /// Offset into the byte array - /// Filled XBox Package File on success, null on error - public static SabreTools.Models.XZP.File ParseFile(byte[] data, int offset) - { - // If the data is invalid - if (data == null) - return null; - - // If the offset is out of bounds - if (offset < 0 || offset >= data.Length) - return null; - - // Create a memory stream and parse that - MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset); - return ParseFile(dataStream); - } - - #endregion - - #region Stream Data - - /// - /// Parse a Stream into a XBox Package File - /// - /// Stream to parse - /// Filled XBox Package File on success, null on error - public static SabreTools.Models.XZP.File ParseFile(Stream data) - { - // If the data is invalid - if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead) - return null; - - // If the offset is out of bounds - if (data.Position < 0 || data.Position >= data.Length) - return null; - - // Cache the current offset - long initialOffset = data.Position; - - // Create a new XBox Package File to fill - var file = new SabreTools.Models.XZP.File(); - - #region Header - - // Try to parse the header - var header = ParseHeader(data); - if (header == null) - return null; - - // Set the package header - file.Header = header; - - #endregion - - #region Directory Entries - - // Create the directory entry array - file.DirectoryEntries = new DirectoryEntry[header.DirectoryEntryCount]; - - // Try to parse the directory entries - for (int i = 0; i < header.DirectoryEntryCount; i++) - { - var directoryEntry = ParseDirectoryEntry(data); - file.DirectoryEntries[i] = directoryEntry; - } - - #endregion - - #region Preload Directory Entries - - if (header.PreloadBytes > 0) - { - // Create the preload directory entry array - file.PreloadDirectoryEntries = new DirectoryEntry[header.PreloadDirectoryEntryCount]; - - // Try to parse the preload directory entries - for (int i = 0; i < header.PreloadDirectoryEntryCount; i++) - { - var directoryEntry = ParseDirectoryEntry(data); - file.PreloadDirectoryEntries[i] = directoryEntry; - } - } - - #endregion - - #region Preload 
Directory Mappings - - if (header.PreloadBytes > 0) - { - // Create the preload directory mapping array - file.PreloadDirectoryMappings = new DirectoryMapping[header.PreloadDirectoryEntryCount]; - - // Try to parse the preload directory mappings - for (int i = 0; i < header.PreloadDirectoryEntryCount; i++) - { - var directoryMapping = ParseDirectoryMapping(data); - file.PreloadDirectoryMappings[i] = directoryMapping; - } - } - - #endregion - - #region Directory Items - - if (header.DirectoryItemCount > 0) - { - // Get the directory item offset - uint directoryItemOffset = header.DirectoryItemOffset; - if (directoryItemOffset < 0 || directoryItemOffset >= data.Length) - return null; - - // Seek to the directory items - data.Seek(directoryItemOffset, SeekOrigin.Begin); - - // Create the directory item array - file.DirectoryItems = new DirectoryItem[header.DirectoryItemCount]; - - // Try to parse the directory items - for (int i = 0; i < header.DirectoryItemCount; i++) - { - var directoryItem = ParseDirectoryItem(data); - file.DirectoryItems[i] = directoryItem; - } - } - - #endregion - - #region Footer - - // Seek to the footer - data.Seek(-8, SeekOrigin.End); - - // Try to parse the footer - var footer = ParseFooter(data); - if (footer == null) - return null; - - // Set the package footer - file.Footer = footer; - - #endregion - - return file; - } - - /// - /// Parse a Stream into a XBox Package File header - /// - /// Stream to parse - /// Filled XBox Package File header on success, null on error - private static Header ParseHeader(Stream data) - { - // TODO: Use marshalling here instead of building - Header header = new Header(); - - byte[] signature = data.ReadBytes(4); - header.Signature = Encoding.ASCII.GetString(signature); - if (header.Signature != HeaderSignatureString) - return null; - - header.Version = data.ReadUInt32(); - if (header.Version != 6) - return null; - - header.PreloadDirectoryEntryCount = data.ReadUInt32(); - header.DirectoryEntryCount = data.ReadUInt32(); - header.PreloadBytes = data.ReadUInt32(); - header.HeaderLength = data.ReadUInt32(); - header.DirectoryItemCount = data.ReadUInt32(); - header.DirectoryItemOffset = data.ReadUInt32(); - header.DirectoryItemLength = data.ReadUInt32(); - - return header; - } - - /// - /// Parse a Stream into a XBox Package File directory entry - /// - /// Stream to parse - /// Filled XBox Package File directory entry on success, null on error - private static DirectoryEntry ParseDirectoryEntry(Stream data) - { - // TODO: Use marshalling here instead of building - DirectoryEntry directoryEntry = new DirectoryEntry(); - - directoryEntry.FileNameCRC = data.ReadUInt32(); - directoryEntry.EntryLength = data.ReadUInt32(); - directoryEntry.EntryOffset = data.ReadUInt32(); - - return directoryEntry; - } - - /// - /// Parse a Stream into a XBox Package File directory mapping - /// - /// Stream to parse - /// Filled XBox Package File directory mapping on success, null on error - private static DirectoryMapping ParseDirectoryMapping(Stream data) - { - // TODO: Use marshalling here instead of building - DirectoryMapping directoryMapping = new DirectoryMapping(); - - directoryMapping.PreloadDirectoryEntryIndex = data.ReadUInt16(); - - return directoryMapping; - } - - /// - /// Parse a Stream into a XBox Package File directory item - /// - /// Stream to parse - /// Filled XBox Package File directory item on success, null on error - private static DirectoryItem ParseDirectoryItem(Stream data) - { - // TODO: Use marshalling here instead of building 
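// Only fixed-size fields are stored in the directory item record itself: a
// FileNameCRC, a NameOffset, and a TimeCreated value. The name is resolved
// immediately below by seeking to NameOffset, reading an ASCII string, and
// then restoring the original read position.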
- DirectoryItem directoryItem = new DirectoryItem(); - - directoryItem.FileNameCRC = data.ReadUInt32(); - directoryItem.NameOffset = data.ReadUInt32(); - directoryItem.TimeCreated = data.ReadUInt32(); - - // Cache the current offset - long currentPosition = data.Position; - - // Seek to the name offset - data.Seek(directoryItem.NameOffset, SeekOrigin.Begin); - - // Read the name - directoryItem.Name = data.ReadString(Encoding.ASCII); - - // Seek back to the right position - data.Seek(currentPosition, SeekOrigin.Begin); - - return directoryItem; - } - - /// - /// Parse a Stream into a XBox Package File footer - /// - /// Stream to parse - /// Filled XBox Package File footer on success, null on error - private static Footer ParseFooter(Stream data) - { - // TODO: Use marshalling here instead of building - Footer footer = new Footer(); - - footer.FileLength = data.ReadUInt32(); - byte[] signature = data.ReadBytes(4); - footer.Signature = Encoding.ASCII.GetString(signature); - if (footer.Signature != FooterSignatureString) - return null; - - return footer; - } - - #endregion - } -} diff --git a/BinaryObjectScanner.Wrappers/AACSMediaKeyBlock.cs b/BinaryObjectScanner.Wrappers/AACSMediaKeyBlock.cs index 9f194457..ea0d23ec 100644 --- a/BinaryObjectScanner.Wrappers/AACSMediaKeyBlock.cs +++ b/BinaryObjectScanner.Wrappers/AACSMediaKeyBlock.cs @@ -72,7 +72,7 @@ namespace BinaryObjectScanner.Wrappers if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead) return null; - var mediaKeyBlock = Builders.AACS.ParseMediaKeyBlock(data); + var mediaKeyBlock = new SabreTools.Serialization.Streams.AACS().Deserialize(data); if (mediaKeyBlock == null) return null; diff --git a/BinaryObjectScanner.Wrappers/BDPlusSVM.cs b/BinaryObjectScanner.Wrappers/BDPlusSVM.cs index b64e22dd..e67bc7e7 100644 --- a/BinaryObjectScanner.Wrappers/BDPlusSVM.cs +++ b/BinaryObjectScanner.Wrappers/BDPlusSVM.cs @@ -89,7 +89,7 @@ namespace BinaryObjectScanner.Wrappers if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead) return null; - var svm = Builders.BDPlus.ParseSVM(data); + var svm = new SabreTools.Serialization.Streams.BDPlus().Deserialize(data); if (svm == null) return null; diff --git a/BinaryObjectScanner.Wrappers/BFPK.cs b/BinaryObjectScanner.Wrappers/BFPK.cs index 5780d597..9c340489 100644 --- a/BinaryObjectScanner.Wrappers/BFPK.cs +++ b/BinaryObjectScanner.Wrappers/BFPK.cs @@ -86,7 +86,7 @@ namespace BinaryObjectScanner.Wrappers if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead) return null; - var archive = Builders.BFPK.ParseArchive(data); + var archive = new SabreTools.Serialization.Streams.BFPK().Deserialize(data); if (archive == null) return null; diff --git a/BinaryObjectScanner.Wrappers/BSP.cs b/BinaryObjectScanner.Wrappers/BSP.cs index 31c0e39f..729c05f1 100644 --- a/BinaryObjectScanner.Wrappers/BSP.cs +++ b/BinaryObjectScanner.Wrappers/BSP.cs @@ -98,7 +98,7 @@ namespace BinaryObjectScanner.Wrappers if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead) return null; - var file = Builders.BSP.ParseFile(data); + var file = new SabreTools.Serialization.Streams.BSP().Deserialize(data); if (file == null) return null; diff --git a/BinaryObjectScanner.Wrappers/BinaryObjectScanner.Wrappers.csproj b/BinaryObjectScanner.Wrappers/BinaryObjectScanner.Wrappers.csproj index 67a2bc15..c20ff14b 100644 --- a/BinaryObjectScanner.Wrappers/BinaryObjectScanner.Wrappers.csproj +++ b/BinaryObjectScanner.Wrappers/BinaryObjectScanner.Wrappers.csproj @@ -22,13 +22,13 @@ - + 
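Every wrapper change in this patch follows the same call-site substitution: the static Builders parse method is replaced by a stream deserializer from the SabreTools.Serialization package, with the surrounding null check left untouched. A minimal sketch of the pattern, using the BFPK hunk above as the example:

// Before: static builder call
// var archive = Builders.BFPK.ParseArchive(data);

// After: Serialization package stream deserializer
var archive = new SabreTools.Serialization.Streams.BFPK().Deserialize(data);
if (archive == null)
    return null;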
diff --git a/BinaryObjectScanner.Wrappers/CFB.cs b/BinaryObjectScanner.Wrappers/CFB.cs index 3379f318..5f1c765f 100644 --- a/BinaryObjectScanner.Wrappers/CFB.cs +++ b/BinaryObjectScanner.Wrappers/CFB.cs @@ -186,7 +186,7 @@ namespace BinaryObjectScanner.Wrappers if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead) return null; - var binary = Builders.CFB.ParseBinary(data); + var binary = new SabreTools.Serialization.Streams.CFB().Deserialize(data); if (binary == null) return null; diff --git a/BinaryObjectScanner.Wrappers/CIA.cs b/BinaryObjectScanner.Wrappers/CIA.cs index 6ceed2b9..dee2c104 100644 --- a/BinaryObjectScanner.Wrappers/CIA.cs +++ b/BinaryObjectScanner.Wrappers/CIA.cs @@ -308,7 +308,7 @@ namespace BinaryObjectScanner.Wrappers if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead) return null; - var archive = Builders.N3DS.ParseCIA(data); + var archive = new SabreTools.Serialization.Streams.CIA().Deserialize(data); if (archive == null) return null; diff --git a/BinaryObjectScanner.Wrappers/GCF.cs b/BinaryObjectScanner.Wrappers/GCF.cs index 71de7215..273938c2 100644 --- a/BinaryObjectScanner.Wrappers/GCF.cs +++ b/BinaryObjectScanner.Wrappers/GCF.cs @@ -459,7 +459,7 @@ namespace BinaryObjectScanner.Wrappers if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead) return null; - var file = Builders.GCF.ParseFile(data); + var file = new SabreTools.Serialization.Streams.GCF().Deserialize(data); if (file == null) return null; diff --git a/BinaryObjectScanner.Wrappers/InstallShieldCabinet.cs b/BinaryObjectScanner.Wrappers/InstallShieldCabinet.cs index a4bd3190..9a12e0e2 100644 --- a/BinaryObjectScanner.Wrappers/InstallShieldCabinet.cs +++ b/BinaryObjectScanner.Wrappers/InstallShieldCabinet.cs @@ -286,7 +286,7 @@ namespace BinaryObjectScanner.Wrappers if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead) return null; - var cabinet = Builders.InstallShieldCabinet.ParseCabinet(data); + var cabinet = new SabreTools.Serialization.Streams.InstallShieldCabinet().Deserialize(data); if (cabinet == null) return null; diff --git a/BinaryObjectScanner.Wrappers/LinearExecutable.cs b/BinaryObjectScanner.Wrappers/LinearExecutable.cs index 51c30c1f..fba16e90 100644 --- a/BinaryObjectScanner.Wrappers/LinearExecutable.cs +++ b/BinaryObjectScanner.Wrappers/LinearExecutable.cs @@ -338,7 +338,7 @@ namespace BinaryObjectScanner.Wrappers if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead) return null; - var executable = Builders.LinearExecutable.ParseExecutable(data); + var executable = new SabreTools.Serialization.Streams.LinearExecutable().Deserialize(data); if (executable == null) return null; diff --git a/BinaryObjectScanner.Wrappers/MSDOS.cs b/BinaryObjectScanner.Wrappers/MSDOS.cs index f189f484..a975f55b 100644 --- a/BinaryObjectScanner.Wrappers/MSDOS.cs +++ b/BinaryObjectScanner.Wrappers/MSDOS.cs @@ -136,7 +136,7 @@ namespace BinaryObjectScanner.Wrappers if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead) return null; - var executable = Builders.MSDOS.ParseExecutable(data); + var executable = new SabreTools.Serialization.Streams.MSDOS().Deserialize(data); if (executable == null) return null; diff --git a/BinaryObjectScanner.Wrappers/MicrosoftCabinet.cs b/BinaryObjectScanner.Wrappers/MicrosoftCabinet.cs index 7b6b7073..d53d944b 100644 --- a/BinaryObjectScanner.Wrappers/MicrosoftCabinet.cs +++ b/BinaryObjectScanner.Wrappers/MicrosoftCabinet.cs @@ -147,7 +147,7 @@ namespace 
BinaryObjectScanner.Wrappers if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead) return null; - var cabinet = Builders.MicrosoftCabinet.ParseCabinet(data); + var cabinet = new SabreTools.Serialization.Streams.MicrosoftCabinet().Deserialize(data); if (cabinet == null) return null; diff --git a/BinaryObjectScanner.Wrappers/N3DS.cs b/BinaryObjectScanner.Wrappers/N3DS.cs index b35bc2e0..04555a34 100644 --- a/BinaryObjectScanner.Wrappers/N3DS.cs +++ b/BinaryObjectScanner.Wrappers/N3DS.cs @@ -269,7 +269,7 @@ namespace BinaryObjectScanner.Wrappers if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead) return null; - var archive = Builders.N3DS.ParseCart(data); + var archive = new SabreTools.Serialization.Streams.N3DS().Deserialize(data); if (archive == null) return null; diff --git a/BinaryObjectScanner.Wrappers/NCF.cs b/BinaryObjectScanner.Wrappers/NCF.cs index aaed3aa3..bfb3a042 100644 --- a/BinaryObjectScanner.Wrappers/NCF.cs +++ b/BinaryObjectScanner.Wrappers/NCF.cs @@ -246,7 +246,7 @@ namespace BinaryObjectScanner.Wrappers if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead) return null; - var file = Builders.NCF.ParseFile(data); + var file = new SabreTools.Serialization.Streams.NCF().Deserialize(data); if (file == null) return null; diff --git a/BinaryObjectScanner.Wrappers/NewExecutable.cs b/BinaryObjectScanner.Wrappers/NewExecutable.cs index e227ee4a..a33162f8 100644 --- a/BinaryObjectScanner.Wrappers/NewExecutable.cs +++ b/BinaryObjectScanner.Wrappers/NewExecutable.cs @@ -2,7 +2,7 @@ using System.Collections.Generic; using System.IO; using System.Text; -using static BinaryObjectScanner.Builders.Extensions; +using static SabreTools.Serialization.Extensions; namespace BinaryObjectScanner.Wrappers { @@ -264,7 +264,7 @@ namespace BinaryObjectScanner.Wrappers if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead) return null; - var executable = Builders.NewExecutable.ParseExecutable(data); + var executable = new SabreTools.Serialization.Streams.NewExecutable().Deserialize(data); if (executable == null) return null; diff --git a/BinaryObjectScanner.Wrappers/Nitro.cs b/BinaryObjectScanner.Wrappers/Nitro.cs index 4e919370..154db9fe 100644 --- a/BinaryObjectScanner.Wrappers/Nitro.cs +++ b/BinaryObjectScanner.Wrappers/Nitro.cs @@ -364,7 +364,7 @@ namespace BinaryObjectScanner.Wrappers if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead) return null; - var archive = Builders.Nitro.ParseCart(data); + var archive = new SabreTools.Serialization.Streams.Nitro().Deserialize(data); if (archive == null) return null; diff --git a/BinaryObjectScanner.Wrappers/PAK.cs b/BinaryObjectScanner.Wrappers/PAK.cs index ffbf7d26..b499ea4e 100644 --- a/BinaryObjectScanner.Wrappers/PAK.cs +++ b/BinaryObjectScanner.Wrappers/PAK.cs @@ -90,7 +90,7 @@ namespace BinaryObjectScanner.Wrappers if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead) return null; - var file = Builders.PAK.ParseFile(data); + var file = new SabreTools.Serialization.Streams.PAK().Deserialize(data); if (file == null) return null; diff --git a/BinaryObjectScanner.Wrappers/PFF.cs b/BinaryObjectScanner.Wrappers/PFF.cs index a6ff98bd..eb86b9de 100644 --- a/BinaryObjectScanner.Wrappers/PFF.cs +++ b/BinaryObjectScanner.Wrappers/PFF.cs @@ -103,7 +103,7 @@ namespace BinaryObjectScanner.Wrappers if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead) return null; - var archive = Builders.PFF.ParseArchive(data); + var archive = new 
SabreTools.Serialization.Streams.PFF().Deserialize(data); if (archive == null) return null; diff --git a/BinaryObjectScanner.Wrappers/PlayJAudioFile.cs b/BinaryObjectScanner.Wrappers/PlayJAudioFile.cs index 2880a15f..6ef9e165 100644 --- a/BinaryObjectScanner.Wrappers/PlayJAudioFile.cs +++ b/BinaryObjectScanner.Wrappers/PlayJAudioFile.cs @@ -270,7 +270,7 @@ namespace BinaryObjectScanner.Wrappers if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead) return null; - var audioFile = Builders.PlayJ.ParseAudioFile(data); + var audioFile = new SabreTools.Serialization.Streams.PlayJAudio().Deserialize(data); if (audioFile == null) return null; diff --git a/BinaryObjectScanner.Wrappers/PortableExecutable.cs b/BinaryObjectScanner.Wrappers/PortableExecutable.cs index 99073b54..7505092a 100644 --- a/BinaryObjectScanner.Wrappers/PortableExecutable.cs +++ b/BinaryObjectScanner.Wrappers/PortableExecutable.cs @@ -6,7 +6,7 @@ using System.Text; using System.Xml; using BinaryObjectScanner.ASN1; using SabreTools.IO; -using static BinaryObjectScanner.Builders.Extensions; +using static SabreTools.Serialization.Extensions; namespace BinaryObjectScanner.Wrappers { @@ -1034,7 +1034,7 @@ namespace BinaryObjectScanner.Wrappers if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead) return null; - var executable = Builders.PortableExecutable.ParseExecutable(data); + var executable = new SabreTools.Serialization.Streams.PortableExecutable().Deserialize(data); if (executable == null) return null; diff --git a/BinaryObjectScanner.Wrappers/Quantum.cs b/BinaryObjectScanner.Wrappers/Quantum.cs index bd1fde7c..06879033 100644 --- a/BinaryObjectScanner.Wrappers/Quantum.cs +++ b/BinaryObjectScanner.Wrappers/Quantum.cs @@ -96,7 +96,7 @@ namespace BinaryObjectScanner.Wrappers if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead) return null; - var archive = Builders.Quantum.ParseArchive(data); + var archive = new SabreTools.Serialization.Streams.Quantum().Deserialize(data); if (archive == null) return null; diff --git a/BinaryObjectScanner.Wrappers/SGA.cs b/BinaryObjectScanner.Wrappers/SGA.cs index a9cef8c5..3091e122 100644 --- a/BinaryObjectScanner.Wrappers/SGA.cs +++ b/BinaryObjectScanner.Wrappers/SGA.cs @@ -404,7 +404,7 @@ namespace BinaryObjectScanner.Wrappers if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead) return null; - var file = Builders.SGA.ParseFile(data); + var file = new SabreTools.Serialization.Streams.SGA().Deserialize(data); if (file == null) return null; diff --git a/BinaryObjectScanner.Wrappers/VBSP.cs b/BinaryObjectScanner.Wrappers/VBSP.cs index 6bd03860..1f01012d 100644 --- a/BinaryObjectScanner.Wrappers/VBSP.cs +++ b/BinaryObjectScanner.Wrappers/VBSP.cs @@ -83,7 +83,7 @@ namespace BinaryObjectScanner.Wrappers if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead) return null; - var file = Builders.VBSP.ParseFile(data); + var file = new SabreTools.Serialization.Streams.VBSP().Deserialize(data); if (file == null) return null; diff --git a/BinaryObjectScanner.Wrappers/VPK.cs b/BinaryObjectScanner.Wrappers/VPK.cs index c5766c56..74fecd76 100644 --- a/BinaryObjectScanner.Wrappers/VPK.cs +++ b/BinaryObjectScanner.Wrappers/VPK.cs @@ -166,7 +166,7 @@ namespace BinaryObjectScanner.Wrappers if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead) return null; - var file = Builders.VPK.ParseFile(data); + var file = new SabreTools.Serialization.Streams.VPK().Deserialize(data); if (file == null) return null; diff 
--git a/BinaryObjectScanner.Wrappers/WAD.cs b/BinaryObjectScanner.Wrappers/WAD.cs index 23473f69..765675f3 100644 --- a/BinaryObjectScanner.Wrappers/WAD.cs +++ b/BinaryObjectScanner.Wrappers/WAD.cs @@ -97,7 +97,7 @@ namespace BinaryObjectScanner.Wrappers if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead) return null; - var file = Builders.WAD.ParseFile(data); + var file = new SabreTools.Serialization.Streams.WAD().Deserialize(data); if (file == null) return null; diff --git a/BinaryObjectScanner.Wrappers/XZP.cs b/BinaryObjectScanner.Wrappers/XZP.cs index a0c62995..a78bb6e5 100644 --- a/BinaryObjectScanner.Wrappers/XZP.cs +++ b/BinaryObjectScanner.Wrappers/XZP.cs @@ -140,7 +140,7 @@ namespace BinaryObjectScanner.Wrappers if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead) return null; - var file = Builders.XZP.ParseFile(data); + var file = new SabreTools.Serialization.Streams.XZP().Deserialize(data); if (file == null) return null; diff --git a/BurnOutSharp.sln b/BurnOutSharp.sln index 246fc2b5..375fc1b7 100644 --- a/BurnOutSharp.sln +++ b/BurnOutSharp.sln @@ -16,8 +16,6 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Solution Items", "Solution README.md = README.md EndProjectSection EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "BinaryObjectScanner.Builders", "BinaryObjectScanner.Builders\BinaryObjectScanner.Builders.csproj", "{7577733A-CC8D-4E7C-8B6D-FFC7EC1B3D07}" -EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "BinaryObjectScanner.Wrappers", "BinaryObjectScanner.Wrappers\BinaryObjectScanner.Wrappers.csproj", "{35BD489F-E58D-45DD-9929-DC4B32414750}" EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "BinaryObjectScanner.Matching", "BinaryObjectScanner.Matching\BinaryObjectScanner.Matching.csproj", "{563BC37B-8E02-4178-B6FE-F3F6F65E0096}" @@ -54,10 +52,6 @@ Global {88735BA2-778D-4192-8EB2-FFF6843719E2}.Debug|Any CPU.Build.0 = Debug|Any CPU {88735BA2-778D-4192-8EB2-FFF6843719E2}.Release|Any CPU.ActiveCfg = Release|Any CPU {88735BA2-778D-4192-8EB2-FFF6843719E2}.Release|Any CPU.Build.0 = Release|Any CPU - {7577733A-CC8D-4E7C-8B6D-FFC7EC1B3D07}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {7577733A-CC8D-4E7C-8B6D-FFC7EC1B3D07}.Debug|Any CPU.Build.0 = Debug|Any CPU - {7577733A-CC8D-4E7C-8B6D-FFC7EC1B3D07}.Release|Any CPU.ActiveCfg = Release|Any CPU - {7577733A-CC8D-4E7C-8B6D-FFC7EC1B3D07}.Release|Any CPU.Build.0 = Release|Any CPU {35BD489F-E58D-45DD-9929-DC4B32414750}.Debug|Any CPU.ActiveCfg = Debug|Any CPU {35BD489F-E58D-45DD-9929-DC4B32414750}.Debug|Any CPU.Build.0 = Debug|Any CPU {35BD489F-E58D-45DD-9929-DC4B32414750}.Release|Any CPU.ActiveCfg = Release|Any CPU diff --git a/BurnOutSharp/BurnOutSharp.csproj b/BurnOutSharp/BurnOutSharp.csproj index 104f0fac..c31acaf1 100644 --- a/BurnOutSharp/BurnOutSharp.csproj +++ b/BurnOutSharp/BurnOutSharp.csproj @@ -44,10 +44,6 @@ runtime; build; native; contentfiles; analyzers; buildtransitive all - - runtime; build; native; contentfiles; analyzers; buildtransitive - all - runtime; build; native; contentfiles; analyzers; buildtransitive all diff --git a/Test/Test.csproj b/Test/Test.csproj index de3684e1..08b47eca 100644 --- a/Test/Test.csproj +++ b/Test/Test.csproj @@ -9,7 +9,6 @@ - @@ -20,6 +19,7 @@ +
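For reference, a minimal usage sketch of the migrated call path, assuming only what the hunks above show (each format gets a deserializer in SabreTools.Serialization.Streams whose Deserialize returns null on failure); the class name and file path below are illustrative placeholders:

using System.IO;

internal static class SgaCheck
{
    // Returns true when the given file deserializes as an SGA archive via the
    // SabreTools.Serialization.Streams.SGA deserializer referenced above.
    internal static bool IsSga(string path)
    {
        using (Stream data = System.IO.File.OpenRead(path))
        {
            var sga = new SabreTools.Serialization.Streams.SGA().Deserialize(data);
            return sga != null;
        }
    }
}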