using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
using SabreTools.Data.Extensions;
using SabreTools.Data.Models.COFF;
using SabreTools.Data.Models.PortableExecutable;
using SabreTools.Data.Models.PortableExecutable.Resource.Entries;
using SabreTools.IO.Extensions;
using SabreTools.Matching;
using SabreTools.Numerics.Extensions;
using SabreTools.Text.Extensions;
#pragma warning disable IDE0330 // Use 'System.Threading.Lock'
namespace SabreTools.Wrappers
{
public partial class PortableExecutable : WrapperBase<Executable>
{
#region Descriptive Properties

/// <inheritdoc/>
public override string DescriptionString
{
    get { return "Portable Executable (PE)"; }
}

#endregion
#region Extension Properties
/// <inheritdoc cref="Executable.FileHeader"/>
public FileHeader COFFFileHeader
{
    get { return Model.FileHeader; }
}
/// <summary>
/// Dictionary of debug data
/// </summary>
/// <remarks>Parsed on first access and cached; an empty dictionary means no data.</remarks>
public Dictionary<int, object> DebugData
{
    get
    {
        lock (_debugDataLock)
        {
            // Serve the cached dictionary once it has been populated
            if (field.Count > 0)
                return field;

            // Without a debug directory table there is nothing to parse
            var directoryTable = DebugDirectoryTable;
            if (directoryTable is null || directoryTable.Length == 0)
                return field;

            // Build the cache on first real access
            field = ParseDebugTable();
            return field;
        }
    }
} = [];
/// <inheritdoc cref="Models.PortableExecutable.DebugData.Table.Table"/>
public Data.Models.PortableExecutable.DebugData.Entry[]? DebugDirectoryTable
{
    get { return Model.DebugTable?.DebugDirectoryTable; }
}
/// <summary>
/// Entry point data, if it exists
/// </summary>
/// <remarks>Caches up to 128 bytes, or a whole section when the entry point starts one</remarks>
public byte[] EntryPointData
{
    get
    {
        lock (_entryPointDataLock)
        {
            // Serve the cached bytes on every access after the first
            if (field is not null)
                return field;

            // A zero physical address means there is no entry point to read
            int physicalAddress = (int)OptionalHeader.AddressOfEntryPoint.ConvertVirtualAddress(SectionTable);
            if (physicalAddress == 0)
            {
                field = [];
                return field;
            }

            // When the entry point lines up with a section start, cache that section's data
            int sectionIndex = FindEntryPointSectionIndex();
            if (sectionIndex >= 0 && OptionalHeader.AddressOfEntryPoint == SectionTable[sectionIndex].VirtualAddress)
            {
                field = GetSectionData(sectionIndex) ?? [];
                return field;
            }

            // Otherwise cache the first 128 bytes at the entry point
            field = ReadRangeFromSource(physicalAddress, length: 128) ?? [];
            return field;
        }
    }
} = null;
/// <inheritdoc cref="Executable.ExportAddressTable"/>
public Data.Models.PortableExecutable.Export.AddressTableEntry[]? ExportTable
{
    get { return Model.ExportAddressTable; }
}
/// <inheritdoc cref="Executable.ExportDirectoryTable"/>
public Data.Models.PortableExecutable.Export.DirectoryTable? ExportDirectoryTable
{
    get { return Model.ExportDirectoryTable; }
}
/// <inheritdoc cref="Executable.NamePointerTable"/>
public Data.Models.PortableExecutable.Export.NamePointerTable? ExportNamePointerTable
{
    get { return Model.NamePointerTable; }
}
/// <inheritdoc cref="Executable.ExportNameTable"/>
public Data.Models.PortableExecutable.Export.NameTable? ExportNameTable
{
    get { return Model.ExportNameTable; }
}
/// <inheritdoc cref="Executable.OrdinalTable"/>
public Data.Models.PortableExecutable.Export.OrdinalTable? ExportOrdinalTable
{
    get { return Model.OrdinalTable; }
}
/// <summary>
/// Header padding data, if it exists
/// </summary>
/// <remarks>
/// The padding spans from the new executable header start to the lowest
/// section raw-data pointer at or beyond it. Cached after the first read;
/// an empty array is cached when no padding region exists.
/// </remarks>
public byte[] HeaderPaddingData
{
    get
    {
        lock (_headerPaddingDataLock)
        {
            // If we already have cached data, just use that immediately
            if (field is not null)
                return field;

            // TODO: Don't scan the known header data as well

            // Find the lowest raw-data pointer at or past the header start
            uint headerStartAddress = Stub.Header.NewExeHeaderAddr;
            uint firstSectionAddress = uint.MaxValue;
            foreach (var section in SectionTable)
            {
                if (section.PointerToRawData == 0)
                    continue;
                if (section.PointerToRawData < headerStartAddress)
                    continue;

                if (section.PointerToRawData < firstSectionAddress)
                    firstSectionAddress = section.PointerToRawData;
            }

            // If no section qualified, there is no padding region to read.
            // FIX: previously this case was only rejected because the signed
            // cast of the uint.MaxValue sentinel happened to go non-positive;
            // make the check explicit instead of relying on overflow.
            if (firstSectionAddress == uint.MaxValue)
            {
                field = [];
                return field;
            }

            // Check if the header length is more than 0 before reading data
            int headerLength = (int)(firstSectionAddress - headerStartAddress);
            if (headerLength <= 0)
            {
                field = [];
                return field;
            }

            // Read and cache the padding bytes; cache empty on a failed read
            field = ReadRangeFromSource((int)headerStartAddress, headerLength) ?? [];
            return field;
        }
    }
} = null;
/// <summary>
/// Header padding strings, if they exist
/// </summary>
public List<string> HeaderPaddingStrings
{
    get
    {
        lock (_headerPaddingStringsLock)
        {
            // Serve the cached list on every access after the first
            if (field is not null)
                return field;

            // Without padding bytes there are no strings to extract
            byte[] paddingBytes = HeaderPaddingData;
            if (paddingBytes.Length == 0)
            {
                field = [];
                return field;
            }

            // Extract and cache the strings found in the padding
            field = paddingBytes.ReadStringsFrom(charLimit: 3) ?? [];
            return field;
        }
    }
} = null;
/// <inheritdoc cref="Executable.ImportAddressTables"/>
public Dictionary<int, Data.Models.PortableExecutable.Import.AddressTableEntry[]?>? ImportAddressTables
{
    get { return Model.ImportAddressTables; }
}
/// <inheritdoc cref="Executable.ImportDirectoryTable"/>
public Data.Models.PortableExecutable.Import.DirectoryTableEntry[]? ImportDirectoryTable
{
    get { return Model.ImportDirectoryTable; }
}
/// <inheritdoc cref="Executable.HintNameTable"/>
public Data.Models.PortableExecutable.Import.HintNameTableEntry[]? ImportHintNameTable
{
    get { return Model.HintNameTable; }
}
/// <inheritdoc cref="Executable.ImportLookupTables"/>
public Dictionary<int, Data.Models.PortableExecutable.Import.LookupTableEntry[]?>? ImportLookupTables
{
    get { return Model.ImportLookupTables; }
}
/// <summary>
/// SecuROM Matroschka package wrapper, if it exists
/// </summary>
/// <remarks>
/// Lazily parsed from the "matrosch" or "rcpacker" section on first access
/// and cached; a failed attempt is recorded so parsing is not retried.
/// </remarks>
public SecuROMMatroschkaPackage? MatroschkaPackage
{
    get
    {
        lock (_matroschkaPackageLock)
        {
            // Use the cached data if possible
            if (field is not null)
                return field;

            // Check to see if creation has already been attempted
            if (_matroschkaPackageFailed)
                return null;

            // Get the available source length, if possible
            var dataLength = Length;
            if (dataLength == -1)
            {
                _matroschkaPackageFailed = true;
                return null;
            }

            // Find the matrosch or rcpacker section
            SectionHeader? section = null;
            foreach (var searchedSection in SectionTable)
            {
                string sectionName = Encoding.ASCII.GetString(searchedSection.Name).TrimEnd('\0');
                if (sectionName != "matrosch" && sectionName != "rcpacker")
                    continue;

                section = searchedSection;
                break;
            }

            // Otherwise, it could not be found
            if (section is null)
            {
                _matroschkaPackageFailed = true;
                return null;
            }

            // Get the physical offset of the section
            // FIX: bound the offset against the cached dataLength instead of
            // re-reading the Length property a second time
            long offset = section.VirtualAddress.ConvertVirtualAddress(SectionTable);
            if (offset < 0 || offset >= dataLength)
            {
                _matroschkaPackageFailed = true;
                return null;
            }

            // Read the section into a local array
            // FIX: guard against a null read result before touching Length —
            // other call sites in this file treat ReadRangeFromSource as
            // nullable (`?? []`), so a failed read would have thrown here
            var sectionLength = (int)section.SizeOfRawData;
            var sectionData = ReadRangeFromSource(offset, sectionLength);
            if (sectionData is null || sectionData.Length == 0)
            {
                _matroschkaPackageFailed = true;
                return null;
            }

            // Parse the package; record failure when no entries were produced
            field = SecuROMMatroschkaPackage.Create(sectionData, 0);
            if (field?.Entries is null)
                _matroschkaPackageFailed = true;

            return field;
        }
    }
} = null;
/// <summary>
/// InstallShield Executable wrapper, if it exists
/// </summary>
/// <remarks>
/// Lazily parsed from the overlay on first access and cached; a failed
/// attempt is recorded so parsing is not retried.
/// </remarks>
public InstallShieldExecutable? ISEXE
{
    get
    {
        lock (_installshieldExecutableLock)
        {
            // Use the cached data if possible
            if (field is not null)
                return field;

            // Check to see if creation has already been attempted
            if (_installshieldExecutableFailed)
                return null;

            // Get the available source length, if possible
            var dataLength = Length;
            if (dataLength == -1)
            {
                _installshieldExecutableFailed = true;
                return null;
            }

            // Check if there's a valid OverlayAddress
            if (OverlayAddress < 0 || OverlayAddress > dataLength)
            {
                _installshieldExecutableFailed = true;
                return null;
            }

            // Parse the package from the overlay
            lock (_dataSourceLock)
            {
                _dataSource.SeekIfPossible(OverlayAddress, SeekOrigin.Begin);
                field = InstallShieldExecutable.Create(_dataSource);
            }

            // FIX: treat a null result or an empty entry table as failure.
            // Previously a zero-entry wrapper stayed cached, so the first call
            // returned null while every later call returned the stale instance;
            // and a null result never set the failure flag, so parsing was
            // re-attempted on every access (unlike MatroschkaPackage).
            if (field is null || field.Entries.Length == 0)
            {
                field = null;
                _installshieldExecutableFailed = true;
                return null;
            }

            return field;
        }
    }
} = null;
2025-07-31 14:00:55 -04:00
/// <inheritdoc cref="Executable.OptionalHeader"/>
2025-10-30 23:29:24 -04:00
public Data.Models.PortableExecutable.OptionalHeader OptionalHeader => Model.OptionalHeader;
        /// <summary>
        /// Address of the overlay, if it exists
        /// </summary>
        /// <remarks>
        /// The overlay is any data appended after the last section's raw data.
        /// A value of -1 is both the "not yet computed" sentinel and the
        /// "no overlay present" result.
        /// </remarks>
        /// <see href="https://www.autoitscript.com/forum/topic/153277-pe-file-overlay-extraction/"/>
        public long OverlayAddress
        {
            get
            {
                lock (_overlayAddressLock)
                {
                    // Use the cached data if possible
                    if (field >= 0)
                        return field;

                    // Get the available source length, if possible
                    long dataLength = Length;
                    if (dataLength == -1)
                    {
                        // Unknown source length; cache -1 so this is not retried
                        field = -1;
                        return field;
                    }

                    // If we have certificate data, use that as the end
                    // (the certificate table sits after the image data, so it is
                    // treated as the effective end of file for overlay purposes)
                    if (OptionalHeader.CertificateTable is not null)
                    {
                        long certificateTableAddress = OptionalHeader.CertificateTable.VirtualAddress;
                        if (certificateTableAddress != 0 && certificateTableAddress < dataLength)
                            dataLength = certificateTableAddress;
                    }

                    // Start from the first section with a valid raw data size and add all section sizes
                    // (sections skipped by Array.Find have SizeOfRawData == 0, so summing
                    // over the whole table adds nothing for them)
                    // TODO: Handle cases where sections are overlapping
                    var firstSection = Array.Find(SectionTable, s => s.SizeOfRawData != 0);
                    long endOfSectionData = firstSection?.PointerToRawData ?? 0;
                    Array.ForEach(SectionTable, s => endOfSectionData += s.SizeOfRawData);

                    // If we didn't find the end of section data
                    if (endOfSectionData <= 0)
                        endOfSectionData = -1;

                    // If the section data is followed by the end of the data,
                    // there is no overlay at all
                    if (endOfSectionData >= dataLength)
                        endOfSectionData = -1;

                    // Cache and return the position
                    field = endOfSectionData;
                    return field;
                }
            }
        } = -1;
        /// <summary>
        /// Overlay data, if it exists
        /// </summary>
        /// <remarks>Caches up to 0x10000 bytes</remarks>
        /// <see href="https://www.autoitscript.com/forum/topic/153277-pe-file-overlay-extraction/"/>
        public byte[] OverlayData
        {
            get
            {
                lock (_overlayDataLock)
                {
                    // Use the cached data if possible
                    if (field is not null)
                        return field;

                    // Get the available source length, if possible
                    long dataLength = Length;
                    if (dataLength == -1)
                    {
                        // Unknown source length; cache an empty array so this is not retried
                        field = [];
                        return field;
                    }

                    // If we have certificate data, use that as the end
                    // (the certificate table start is treated as the effective end of file)
                    if (OptionalHeader.CertificateTable is not null)
                    {
                        long certificateTableAddress = OptionalHeader.CertificateTable.VirtualAddress;
                        if (certificateTableAddress != 0 && certificateTableAddress < dataLength)
                            dataLength = certificateTableAddress;
                    }

                    // Get the overlay address and size if possible
                    long endOfSectionData = OverlayAddress;
                    long overlaySize = OverlaySize;

                    // If we didn't find the address or size, there is no overlay
                    if (endOfSectionData <= 0 || overlaySize <= 0)
                    {
                        field = [];
                        return field;
                    }

                    // If we're at the end of the file, cache an empty byte array
                    if (endOfSectionData >= dataLength)
                    {
                        field = [];
                        return field;
                    }

                    // Otherwise, cache and return the data, capped at 0x10000
                    // bytes per the remarks above
                    overlaySize = Math.Min(overlaySize, 0x10000);

                    field = ReadRangeFromSource(endOfSectionData, (int)overlaySize) ?? [];
                    return field;
                }
            }
        } = null;
2025-09-25 13:50:51 -04:00
        /// <summary>
        /// Size of the overlay data, if it exists
        /// </summary>
        /// <remarks>
        /// -1 is the "not yet computed" sentinel; 0 is cached when no overlay
        /// exists. Unlike <see cref="OverlayData"/>, this is the full size and
        /// is not capped at 0x10000 bytes.
        /// </remarks>
        /// <see href="https://www.autoitscript.com/forum/topic/153277-pe-file-overlay-extraction/"/>
        public long OverlaySize
        {
            get
            {
                lock (_overlaySizeLock)
                {
                    // Use the cached data if possible
                    if (field >= 0)
                        return field;

                    // Get the available source length, if possible
                    long dataLength = Length;
                    if (dataLength == -1)
                    {
                        // Unknown source length; cache 0 so this is not retried
                        field = 0;
                        return field;
                    }

                    // If we have certificate data, use that as the end
                    // (the certificate table start is treated as the effective end of file)
                    if (OptionalHeader.CertificateTable is not null)
                    {
                        long certificateTableAddress = OptionalHeader.CertificateTable.VirtualAddress;
                        if (certificateTableAddress != 0 && certificateTableAddress < dataLength)
                            dataLength = certificateTableAddress;
                    }

                    // Get the overlay address if possible
                    long endOfSectionData = OverlayAddress;

                    // If we didn't find the end of section data
                    if (endOfSectionData <= 0)
                    {
                        field = 0;
                        return field;
                    }

                    // If we're at the end of the file, there is no overlay
                    if (endOfSectionData >= dataLength)
                    {
                        field = 0;
                        return field;
                    }

                    // Otherwise, cache and return the length
                    field = dataLength - endOfSectionData;
                    return field;
                }
            }
        } = -1;
/// <summary>
/// Overlay strings, if they exist
/// </summary>
public List<string> OverlayStrings
{
get
{
2025-09-02 23:51:02 -04:00
lock (_overlayStringsLock)
{
// Use the cached data if possible
2026-01-25 14:32:49 -05:00
if (field is not null)
2025-11-14 09:48:00 -05:00
return field;
// Get the overlay data, if possible
var overlayData = OverlayData;
if (overlayData.Length == 0)
{
2025-11-14 09:48:00 -05:00
field = [];
return field;
}
// Otherwise, cache and return the strings
2025-11-14 09:48:00 -05:00
field = overlayData.ReadStringsFrom(charLimit: 3) ?? [];
return field;
}
}
2025-11-14 09:48:00 -05:00
} = null;
/// <inheritdoc cref="Executable.ResourceDirectoryTable"/>
2025-09-26 13:06:18 -04:00
public Data.Models.PortableExecutable.Resource.DirectoryTable? ResourceDirectoryTable => Model.ResourceDirectoryTable;
2025-08-01 07:58:54 -04:00
/// <summary>
/// Sanitized section names
/// </summary>
public string[] SectionNames
{
get
{
2025-09-02 23:51:02 -04:00
lock (_sectionNamesLock)
{
// Use the cached data if possible
2026-01-25 14:32:49 -05:00
if (field is not null)
2025-11-14 09:48:00 -05:00
return field;
// Otherwise, build and return the cached array
2025-11-14 09:48:00 -05:00
field = new string[SectionTable.Length];
for (int i = 0; i < field.Length; i++)
{
// TODO: Handle long section names with leading `/`
2025-10-01 20:06:05 -04:00
var section = SectionTable[i];
string sectionNameString = Encoding.UTF8.GetString(section.Name).TrimEnd('\0');
2025-11-14 09:48:00 -05:00
field[i] = sectionNameString;
}
2025-11-14 09:48:00 -05:00
return field;
}
}
2025-11-14 09:48:00 -05:00
} = null;
/// <inheritdoc cref="Executable.SectionTable"/>
2025-10-30 23:29:24 -04:00
public SectionHeader[] SectionTable => Model.SectionTable;
/// <summary>
/// Data after the section table, if it exists
/// </summary>
2025-09-11 12:32:44 -04:00
public byte[] SectionTableTrailerData
{
get
{
2025-09-11 12:32:44 -04:00
lock (_sectionTableTrailerDataLock)
{
// If we already have cached data, just use that immediately
2026-01-25 14:32:49 -05:00
if (field is not null)
2025-11-14 09:48:00 -05:00
return field;
// Get the offset from the end of the section table
long endOfSectionTable = Stub.Header.NewExeHeaderAddr
2025-09-22 12:56:19 -04:00
+ 24 // Signature size + file header size
+ COFFFileHeader.SizeOfOptionalHeader
+ (COFFFileHeader.NumberOfSections * 40); // Size of a section header
2025-09-11 12:32:44 -04:00
// Assume the extra data aligns to 512-byte segments
int alignment = (int)(OptionalHeader?.FileAlignment ?? 0x200);
int trailerDataSize = alignment - (int)(endOfSectionTable % alignment);
2025-09-11 12:32:44 -04:00
// Cache and return the section table trailer data, even if null
2025-11-14 09:48:00 -05:00
field = ReadRangeFromSource(endOfSectionTable, trailerDataSize);
return field;
}
}
2025-11-14 09:48:00 -05:00
} = null;
/// <inheritdoc cref="Executable.Stub"/>
2025-10-30 23:29:24 -04:00
public Data.Models.MSDOS.Executable Stub => Model.Stub;
/// <summary>
/// Stub executable data, if it exists
/// </summary>
public byte[] StubExecutableData
{
get
{
2025-09-02 23:51:02 -04:00
lock (_stubExecutableDataLock)
{
// If we already have cached data, just use that immediately
2026-01-25 14:32:49 -05:00
if (field is not null)
2025-11-14 09:48:00 -05:00
return field;
// Populate the raw stub executable data based on the source
int endOfStubHeader = 0x40;
int lengthOfStubExecutableData = (int)Stub.Header.NewExeHeaderAddr - endOfStubHeader;
2025-11-14 09:48:00 -05:00
field = ReadRangeFromSource(endOfStubHeader, lengthOfStubExecutableData);
// Cache and return the stub executable data, even if null
2025-11-14 09:48:00 -05:00
return field;
}
}
2025-11-14 09:48:00 -05:00
} = null;
        /// <summary>
        /// Dictionary of resource data
        /// </summary>
        /// <remarks>
        /// Built lazily by walking the resource directory tree. NOTE(review):
        /// an executable whose resource table parses to zero entries leaves
        /// the cache empty, so the walk is repeated on each access — confirm
        /// whether that case occurs in practice.
        /// </remarks>
        public Dictionary<string, ResourceDataType?> ResourceData
        {
            get
            {
                lock (_resourceDataLock)
                {
                    // Use the cached data if possible
                    if (_resourceData.Count != 0)
                        return _resourceData;

                    // If we have no resource table, just return the empty dictionary
                    if (OptionalHeader.ResourceTable is null
                        || OptionalHeader.ResourceTable.VirtualAddress == 0
                        || ResourceDirectoryTable is null)
                    {
                        return _resourceData;
                    }

                    // Otherwise, build and return the cached dictionary
                    // (ParseResourceDirectoryTable populates _resourceData as a side effect)
                    ParseResourceDirectoryTable(ResourceDirectoryTable, types: []);
                    return _resourceData;
                }
            }
        }
2025-09-18 09:40:15 -04:00
        /// <summary>
        /// Wise section wrapper, if it exists
        /// </summary>
        /// <remarks>
        /// Looks for a section literally named ".WISE". A failed lookup or
        /// parse sets <c>_wiseSectionHeaderMissing</c> so the search is not
        /// repeated on later accesses.
        /// </remarks>
        public WiseSectionHeader? WiseSection
        {
            get
            {
                lock (_wiseSectionHeaderLock)
                {
                    // If we already have cached data, just use that immediately
                    if (field is not null)
                        return field;

                    // If the header will not be found due to missing section data
                    if (_wiseSectionHeaderMissing)
                        return null;

                    // Find the .WISE section by exact (null-trimmed) name match
                    SectionHeader? wiseSection = null;
                    foreach (var section in SectionTable)
                    {
                        string sectionName = Encoding.ASCII.GetString(section.Name).TrimEnd('\0');
                        if (sectionName != ".WISE")
                            continue;

                        wiseSection = section;
                        break;
                    }

                    // If the section cannot be found
                    if (wiseSection is null)
                    {
                        _wiseSectionHeaderMissing = true;
                        return null;
                    }

                    // Get the physical offset of the section
                    long offset = wiseSection.VirtualAddress.ConvertVirtualAddress(SectionTable);
                    if (offset < 0 || offset >= Length)
                    {
                        _wiseSectionHeaderMissing = true;
                        return null;
                    }

                    // Read the section into a local array
                    // NOTE(review): this reads VirtualSize bytes, not SizeOfRawData —
                    // confirm that is intentional for the Wise format
                    int sectionLength = (int)wiseSection.VirtualSize;
                    byte[] sectionData = ReadRangeFromSource(offset, sectionLength);
                    if (sectionData.Length == 0)
                    {
                        _wiseSectionHeaderMissing = true;
                        return null;
                    }

                    // Parse the section header; a null result is also cached as missing
                    field = WiseSectionHeader.Create(sectionData, 0);
                    if (field is null)
                        _wiseSectionHeaderMissing = true;

                    return field;
                }
            }
        } = null;
2025-09-18 09:40:15 -04:00
#region Version Information
/// <summary>
/// "Build GUID"
/// </summary/>
2025-11-14 14:06:43 -05:00
public string? BuildGuid
{
get
{
// Use the cached data if possible
2026-01-25 14:32:49 -05:00
if (field is not null)
2025-11-14 14:06:43 -05:00
return field;
field = GetVersionInfoString("BuildGuid");
return field;
}
} = null;
/// <summary>
/// "Build signature"
/// </summary/>
2025-11-14 14:06:43 -05:00
public string? BuildSignature
{
get
{
// Use the cached data if possible
2026-01-25 14:32:49 -05:00
if (field is not null)
2025-11-14 14:06:43 -05:00
return field;
field = GetVersionInfoString("BuildSignature");
return field;
}
} = null;
/// <summary>
/// Additional information that should be displayed for diagnostic purposes.
/// </summary/>
2025-11-14 14:06:43 -05:00
public string? Comments
{
get
{
// Use the cached data if possible
2026-01-25 14:32:49 -05:00
if (field is not null)
2025-11-14 14:06:43 -05:00
return field;
field = GetVersionInfoString("Comments");
return field;
}
} = null;
/// <summary>
/// Company that produced the file—for example, "Microsoft Corporation" or
/// "Standard Microsystems Corporation, Inc." This string is required.
/// </summary/>
2025-11-14 14:06:43 -05:00
public string? CompanyName
{
get
{
// Use the cached data if possible
2026-01-25 14:32:49 -05:00
if (field is not null)
2025-11-14 14:06:43 -05:00
return field;
field = GetVersionInfoString("CompanyName");
return field;
}
} = null;
/// <summary>
/// "Debug version"
/// </summary/>
2025-11-14 14:06:43 -05:00
public string? DebugVersion
{
get
{
// Use the cached data if possible
2026-01-25 14:32:49 -05:00
if (field is not null)
2025-11-14 14:06:43 -05:00
return field;
field = GetVersionInfoString("DebugVersion");
return field;
}
} = null;
/// <summary>
/// File description to be presented to users. This string may be displayed in a
/// list box when the user is choosing files to install—for example, "Keyboard
/// Driver for AT-Style Keyboards". This string is required.
/// </summary/>
2025-11-14 14:06:43 -05:00
public string? FileDescription
{
get
{
// Use the cached data if possible
2026-01-25 14:32:49 -05:00
if (field is not null)
2025-11-14 14:06:43 -05:00
return field;
field = GetVersionInfoString("FileDescription");
return field;
}
} = null;
/// <summary>
/// Version number of the file—for example, "3.10" or "5.00.RC2". This string
/// is required.
/// </summary/>
2025-11-14 14:06:43 -05:00
public string? FileVersion
{
get
{
// Use the cached data if possible
2026-01-25 14:32:49 -05:00
if (field is not null)
2025-11-14 14:06:43 -05:00
return field;
field = GetVersionInfoString("FileVersion");
return field;
}
} = null;
/// <summary>
/// Internal name of the file, if one exists—for example, a module name if the
/// file is a dynamic-link library. If the file has no internal name, this
/// string should be the original filename, without extension. This string is required.
/// </summary/>
2025-11-14 14:06:43 -05:00
public string? InternalName
{
get
{
// Use the cached data if possible
2026-01-25 14:32:49 -05:00
if (field is not null)
2025-11-14 14:06:43 -05:00
return field;
field = GetVersionInfoString("InternalName");
return field;
}
} = null;
/// <summary>
/// Copyright notices that apply to the file. This should include the full text of
/// all notices, legal symbols, copyright dates, and so on. This string is optional.
/// </summary/>
2025-11-14 14:06:43 -05:00
public string? LegalCopyright
{
get
{
// Use the cached data if possible
2026-01-25 14:32:49 -05:00
if (field is not null)
2025-11-14 14:06:43 -05:00
return field;
field = GetVersionInfoString("LegalCopyright");
return field;
}
} = null;
/// <summary>
/// Trademarks and registered trademarks that apply to the file. This should include
/// the full text of all notices, legal symbols, trademark numbers, and so on. This
/// string is optional.
/// </summary/>
2025-11-14 14:06:43 -05:00
public string? LegalTrademarks
{
get
{
// Use the cached data if possible
2026-01-25 14:32:49 -05:00
if (field is not null)
2025-11-14 14:06:43 -05:00
return field;
field = GetVersionInfoString("LegalTrademarks");
return field;
}
} = null;
/// <summary>
/// Original name of the file, not including a path. This information enables an
/// application to determine whether a file has been renamed by a user. The format of
/// the name depends on the file system for which the file was created. This string
/// is required.
/// </summary/>
2025-11-14 14:06:43 -05:00
public string? OriginalFilename
{
get
{
// Use the cached data if possible
2026-01-25 14:32:49 -05:00
if (field is not null)
2025-11-14 14:06:43 -05:00
return field;
field = GetVersionInfoString("OriginalFilename");
return field;
}
} = null;
/// <summary>
/// Information about a private version of the file—for example, "Built by TESTER1 on
/// \TESTBED". This string should be present only if VS_FF_PRIVATEBUILD is specified in
/// the fileflags parameter of the root block.
/// </summary/>
2025-11-14 14:06:43 -05:00
public string? PrivateBuild
{
get
{
// Use the cached data if possible
2026-01-25 14:32:49 -05:00
if (field is not null)
2025-11-14 14:06:43 -05:00
return field;
field = GetVersionInfoString("PrivateBuild");
return field;
}
} = null;
/// <summary>
/// "Product GUID"
/// </summary/>
2025-11-14 14:06:43 -05:00
public string? ProductGuid
{
get
{
// Use the cached data if possible
2026-01-25 14:32:49 -05:00
if (field is not null)
2025-11-14 14:06:43 -05:00
return field;
field = GetVersionInfoString("ProductGuid");
return field;
}
} = null;
/// <summary>
/// Name of the product with which the file is distributed. This string is required.
/// </summary/>
2025-11-14 14:06:43 -05:00
public string? ProductName
{
get
{
// Use the cached data if possible
2026-01-25 14:32:49 -05:00
if (field is not null)
2025-11-14 14:06:43 -05:00
return field;
field = GetVersionInfoString("ProductName");
return field;
}
} = null;
/// <summary>
/// Version of the product with which the file is distributed—for example, "3.10" or
/// "5.00.RC2". This string is required.
/// </summary/>
2025-11-14 14:06:43 -05:00
public string? ProductVersion
{
get
{
// Use the cached data if possible
2026-01-25 14:32:49 -05:00
if (field is not null)
2025-11-14 14:06:43 -05:00
return field;
field = GetVersionInfoString("ProductVersion");
return field;
}
} = null;
/// <summary>
/// Text that specifies how this version of the file differs from the standard
/// version—for example, "Private build for TESTER1 solving mouse problems on M250 and
/// M250E computers". This string should be present only if VS_FF_SPECIALBUILD is
/// specified in the fileflags parameter of the root block.
/// </summary/>
2025-11-14 14:06:43 -05:00
public string? SpecialBuild
{
get
{
// Use the cached data if possible
2026-01-25 14:32:49 -05:00
if (field is not null)
2025-11-14 14:06:43 -05:00
return field;
field = GetVersionInfoString("SpecialBuild") ?? GetVersionInfoString("Special Build");
return field;
}
} = null;
/// <summary>
/// "Trade name"
/// </summary/>
2025-11-14 14:06:43 -05:00
public string? TradeName
{
get
{
// Use the cached data if possible
2026-01-25 14:32:49 -05:00
if (field is not null)
2025-11-14 14:06:43 -05:00
return field;
field = GetVersionInfoString("TradeName");
return field;
}
} = null;
/// <summary>
/// Get the internal version as reported by the resources
/// </summary>
/// <returns>Version string, null on error</returns>
/// <remarks>The internal version is either the file version, product version, or assembly version, in that order</remarks>
public string? GetInternalVersion()
{
2024-06-05 22:49:27 -04:00
string? version = FileVersion;
2023-11-21 20:59:20 -05:00
if (!string.IsNullOrEmpty(version))
2023-11-07 23:30:26 -05:00
return version!.Replace(", ", ".");
2024-06-05 22:49:27 -04:00
version = ProductVersion;
2023-11-21 20:59:20 -05:00
if (!string.IsNullOrEmpty(version))
2023-11-07 23:30:26 -05:00
return version!.Replace(", ", ".");
2024-06-05 22:49:27 -04:00
version = AssemblyVersion;
2023-11-21 20:59:20 -05:00
if (!string.IsNullOrEmpty(version))
return version;
return null;
}
#endregion
#region Manifest Information
/// <summary>
/// Description as derived from the assembly manifest
/// </summary>
public string? AssemblyDescription
{
get
{
var manifest = GetAssemblyManifest();
return manifest?
.Description?
.Value;
}
}
/// <summary>
/// Name as derived from the assembly manifest
/// </summary>
/// <remarks>
/// If there are multiple identities included in the manifest,
/// this will only retrieve the value from the first that doesn't
/// have a null or empty name.
/// </remarks>
public string? AssemblyName
{
get
{
var manifest = GetAssemblyManifest();
var identities = manifest?.AssemblyIdentities ?? [];
var nameIdentity = Array.Find(identities, ai => !string.IsNullOrEmpty(ai?.Name));
return nameIdentity?.Name;
}
}
/// <summary>
/// Version as derived from the assembly manifest
/// </summary>
/// <remarks>
/// If there are multiple identities included in the manifest,
/// this will only retrieve the value from the first that doesn't
/// have a null or empty version.
/// </remarks>
public string? AssemblyVersion
{
get
{
var manifest = GetAssemblyManifest();
2024-11-15 22:22:22 -05:00
var identities = manifest?.AssemblyIdentities ?? [];
var versionIdentity = Array.Find(identities, ai => !string.IsNullOrEmpty(ai?.Version));
return versionIdentity?.Version;
}
}
#endregion
#endregion
        #region Instance Variables

        /// <summary>
        /// Lock object for <see cref="DebugData"/>
        /// </summary>
        private readonly object _debugDataLock = new();

        /// <summary>
        /// Lock object for <see cref="EntryPointData"/>
        /// </summary>
        private readonly object _entryPointDataLock = new();

        /// <summary>
        /// Lock object for <see cref="HeaderPaddingData"/>
        /// </summary>
        private readonly object _headerPaddingDataLock = new();

        /// <summary>
        /// Lock object for <see cref="HeaderPaddingStrings"/>
        /// </summary>
        private readonly object _headerPaddingStringsLock = new();

        /// <summary>
        /// Lock object for <see cref="ISEXE"/>
        /// </summary>
        private readonly object _installshieldExecutableLock = new();

        /// <summary>
        /// Cached attempt at creation for <see cref="ISEXE"/>
        /// </summary>
        private bool _installshieldExecutableFailed = false;

        /// <summary>
        /// Lock object for <see cref="MatroschkaPackage"/>
        /// </summary>
        private readonly object _matroschkaPackageLock = new();

        /// <summary>
        /// Cached attempt at creation for <see cref="MatroschkaPackage"/>
        /// </summary>
        private bool _matroschkaPackageFailed = false;

        /// <summary>
        /// Lock object for <see cref="OverlayAddress"/>
        /// </summary>
        private readonly object _overlayAddressLock = new();

        /// <summary>
        /// Lock object for <see cref="OverlayData"/>
        /// </summary>
        private readonly object _overlayDataLock = new();

        /// <summary>
        /// Lock object for <see cref="OverlaySize"/>
        /// </summary>
        private readonly object _overlaySizeLock = new();

        /// <summary>
        /// Lock object for <see cref="OverlayStrings"/>
        /// </summary>
        private readonly object _overlayStringsLock = new();

        /// <summary>
        /// Cached resource data, keyed by resource path
        /// </summary>
        private readonly Dictionary<string, ResourceDataType?> _resourceData = [];

        /// <summary>
        /// Lock object for <see cref="_resourceData"/>
        /// </summary>
        private readonly object _resourceDataLock = new();

        /// <summary>
        /// Lock object for <see cref="SectionNames"/>
        /// </summary>
        private readonly object _sectionNamesLock = new();

        /// <summary>
        /// Cached raw section data, indexed by section table position
        /// </summary>
        private byte[][]? _sectionData = null;

        /// <summary>
        /// Cached found string data in sections, indexed by section table position
        /// </summary>
        private List<string>?[]? _sectionStringData = null;

        /// <summary>
        /// Lock object for <see cref="_sectionStringData"/>
        /// </summary>
        private readonly object _sectionStringDataLock = new();

        /// <summary>
        /// Lock object for <see cref="SectionTableTrailerData"/>
        /// </summary>
        private readonly object _sectionTableTrailerDataLock = new();

        /// <summary>
        /// Lock object for <see cref="StubExecutableData"/>
        /// </summary>
        private readonly object _stubExecutableDataLock = new();

        /// <summary>
        /// Cached raw table data (16 slots, one per data directory entry)
        /// </summary>
        private readonly byte[][] _tableData = new byte[16][];

        /// <summary>
        /// Cached found string data in tables (16 slots, one per data directory entry)
        /// </summary>
        private readonly List<string>?[] _tableStringData = new List<string>?[16];

        /// <summary>
        /// Lock object for <see cref="WiseSection"/>
        /// </summary>
        private readonly object _wiseSectionHeaderLock = new();
/// <summary>
2025-11-14 09:48:00 -05:00
/// Indicates if <see cref="WiseSection"/> cannot be found
2025-09-18 09:40:15 -04:00
/// </summary>
private bool _wiseSectionHeaderMissing = false;
2025-09-08 08:03:07 -04:00
#region Version Information
/// <summary>
/// Cached version info data
/// </summary>
2025-09-01 16:43:21 -04:00
private VersionInfo? _versionInfo = null;
2025-09-08 08:03:07 -04:00
#endregion
#region Manifest Information
/// <summary>
/// Cached assembly manifest data
/// </summary>
2025-09-01 16:43:21 -04:00
private AssemblyManifest? _assemblyManifest = null;
#endregion
2025-09-08 08:03:07 -04:00
#endregion
        #region Constructors

        // All constructors delegate directly to WrapperBase<Executable>

        /// <inheritdoc/>
        public PortableExecutable(Executable model, byte[] data) : base(model, data) { }

        /// <inheritdoc/>
        public PortableExecutable(Executable model, byte[] data, int offset) : base(model, data, offset) { }

        /// <inheritdoc/>
        public PortableExecutable(Executable model, byte[] data, int offset, int length) : base(model, data, offset, length) { }

        /// <inheritdoc/>
        public PortableExecutable(Executable model, Stream data) : base(model, data) { }

        /// <inheritdoc/>
        public PortableExecutable(Executable model, Stream data, long offset) : base(model, data, offset) { }

        /// <inheritdoc/>
        public PortableExecutable(Executable model, Stream data, long offset, long length) : base(model, data, offset, length) { }

        #endregion
#region Static Constructors
/// <summary>
/// Create a PE executable from a byte array and offset
/// </summary>
/// <param name="data">Byte array representing the executable</param>
/// <param name="offset">Offset within the array to parse</param>
/// <returns>A PE executable wrapper on success, null on failure</returns>
public static PortableExecutable? Create(byte[]? data, int offset)
{
// If the data is invalid
2026-01-25 14:30:18 -05:00
if (data is null || data.Length == 0)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and use that
2023-11-21 21:10:43 -05:00
var dataStream = new MemoryStream(data, offset, data.Length - offset);
return Create(dataStream);
}
/// <summary>
/// Create a PE executable from a Stream
/// </summary>
/// <param name="data">Stream representing the executable</param>
/// <returns>A PE executable wrapper on success, null on failure</returns>
public static PortableExecutable? Create(Stream? data)
{
// If the data is invalid
2026-01-25 14:30:18 -05:00
if (data is null || !data.CanRead)
return null;
try
{
// Cache the current offset
long currentOffset = data.Position;
2026-03-18 16:37:59 -04:00
var model = new Serialization.Readers.PortableExecutable().Deserialize(data);
2026-01-25 14:30:18 -05:00
if (model is null)
return null;
return new PortableExecutable(model, data, currentOffset);
}
catch
{
return null;
}
}
#endregion
#region Data
// TODO: Cache all certificate objects
/// <summary>
/// Get the version info string associated with a key, if possible
/// </summary>
/// <param name="key">Case-insensitive key to find in the version info</param>
/// <returns>String representing the data, null on error</returns>
/// <remarks>
/// This code does not take into account the locale and will find and return
/// the first available value. This may not actually matter for version info,
/// but it is worth mentioning.
/// </remarks>
public string? GetVersionInfoString(string key)
{
// If we have an invalid key, we can't do anything
if (string.IsNullOrEmpty(key))
return null;
// Ensure the resource table has been parsed
2026-01-25 14:30:18 -05:00
if (ResourceData is null)
return null;
// If we don't have string version info in this executable
var stringTable = _versionInfo?.StringFileInfo?.Children;
2026-01-25 14:30:18 -05:00
if (stringTable is null || stringTable.Length == 0)
return null;
// Try to find a key that matches
2025-09-01 16:43:21 -04:00
StringData? match = null;
foreach (var st in stringTable)
{
2025-10-30 23:29:24 -04:00
if (st.Length == 0)
continue;
2024-12-30 20:47:12 -05:00
// Return the match if found
2025-07-31 11:52:39 -04:00
match = Array.Find(st.Children, sd => key.Equals(sd.Key, StringComparison.OrdinalIgnoreCase));
2026-01-25 14:32:49 -05:00
if (match is not null)
return match.Value?.TrimEnd('\0');
}
2024-11-20 20:01:28 -05:00
return null;
}
        /// <summary>
        /// Get the assembly manifest, if possible
        /// </summary>
        /// <returns>Assembly manifest object, null on error</returns>
        private AssemblyManifest? GetAssemblyManifest()
        {
            // Use the cached data if possible
            if (_assemblyManifest is not null)
                return _assemblyManifest;

            // Accessing ResourceData triggers resource parsing, which assigns
            // _assemblyManifest as a side effect when an RT_MANIFEST resource exists
            var resourceData = ResourceData;
            if (resourceData.Count == 0)
                return null;

            // Return the now-cached assembly manifest (may still be null if no
            // manifest resource was present)
            return _assemblyManifest;
        }
#endregion
#region Debug Data
/// <summary>
/// Find CodeView debug data by path
/// </summary>
/// <param name="path">Partial path to check for</param>
2024-11-20 20:02:36 -05:00
/// <returns>List of matching debug data</returns>
public List<object?> FindCodeViewDebugTableByPath(string path)
{
// Cache the debug data for easier reading
var debugData = DebugData;
if (debugData.Count == 0)
2024-06-05 22:48:42 -04:00
return [];
2024-11-15 22:22:22 -05:00
var debugFound = new List<object?>();
foreach (var data in debugData.Values)
2024-11-15 22:22:22 -05:00
{
2026-01-25 14:30:18 -05:00
if (data is null)
2024-11-15 22:22:22 -05:00
continue;
2025-09-26 13:06:18 -04:00
if (data is Data.Models.PortableExecutable.DebugData.NB10ProgramDatabase n)
2024-11-15 22:22:22 -05:00
{
2026-01-25 14:30:18 -05:00
if (n.PdbFileName is null || !n.PdbFileName.Contains(path))
2024-11-15 22:22:22 -05:00
continue;
debugFound.Add(n);
}
2025-09-26 13:06:18 -04:00
else if (data is Data.Models.PortableExecutable.DebugData.RSDSProgramDatabase r)
2024-11-15 22:22:22 -05:00
{
2026-01-25 14:30:18 -05:00
if (r.PathAndFileName is null || !r.PathAndFileName.Contains(path))
2024-11-15 22:22:22 -05:00
continue;
2024-11-15 22:22:22 -05:00
debugFound.Add(r);
}
}
return debugFound;
}
/// <summary>
/// Find unparsed debug data by string value
/// </summary>
/// <param name="value">String value to check for</param>
2024-11-20 20:02:36 -05:00
/// <returns>List of matching debug data</returns>
public List<byte[]?> FindGenericDebugTableByValue(string value)
{
// Cache the debug data for easier reading
var debugData = DebugData;
if (debugData.Count == 0)
2024-06-05 22:48:42 -04:00
return [];
2024-11-15 22:22:22 -05:00
var table = new List<byte[]?>();
foreach (var data in debugData.Values)
2024-11-15 22:22:22 -05:00
{
2026-01-25 14:30:18 -05:00
if (data is null)
2024-11-15 22:22:22 -05:00
continue;
2026-01-25 14:30:18 -05:00
if (data is not byte[] b || b is null)
2024-11-15 22:22:22 -05:00
continue;
try
{
2024-11-15 22:22:22 -05:00
string? arrayAsASCII = Encoding.ASCII.GetString(b);
if (arrayAsASCII.Contains(value))
{
2024-11-15 22:22:22 -05:00
table.Add(b);
continue;
}
2024-11-15 22:22:22 -05:00
}
catch { }
2024-11-15 22:22:22 -05:00
try
{
string? arrayAsUTF8 = Encoding.UTF8.GetString(b);
if (arrayAsUTF8.Contains(value))
{
2024-11-15 22:22:22 -05:00
table.Add(b);
continue;
}
2024-11-15 22:22:22 -05:00
}
catch { }
2024-11-15 22:22:22 -05:00
try
{
string? arrayAsUnicode = Encoding.Unicode.GetString(b);
if (arrayAsUnicode.Contains(value))
{
2024-11-15 22:22:22 -05:00
table.Add(b);
continue;
}
2024-11-15 22:22:22 -05:00
}
catch { }
}
2024-11-15 22:22:22 -05:00
return table;
}
#endregion
#region Debug Parsing
        /// <summary>
        /// Parse the debug directory table information
        /// </summary>
        /// <returns>Map of debug directory index to parsed (or raw) debug data</returns>
        private Dictionary<int, object> ParseDebugTable()
        {
            // If there is no debug table
            if (DebugDirectoryTable is null || DebugDirectoryTable.Length == 0)
                return [];

            // Create a new debug table
            Dictionary<int, object> debugData = [];

            // Loop through all debug table entries
            for (int i = 0; i < DebugDirectoryTable.Length; i++)
            {
                var entry = DebugDirectoryTable[i];
                uint address = entry.PointerToRawData;
                uint size = entry.SizeOfData;

                // Read the entry data until we have the end of the stream
                byte[]? entryData;
                try
                {
                    entryData = ReadRangeFromSource((int)address, (int)size);

                    // At least 4 bytes are needed for the signature check below
                    if (entryData.Length < 4)
                        continue;
                }
                catch (EndOfStreamException)
                {
                    // Truncated source: return whatever was parsed so far
                    return debugData;
                }

                // If we have CodeView debug data, try to parse it
                if (entry.DebugType == DebugType.IMAGE_DEBUG_TYPE_CODEVIEW)
                {
                    // Read the signature
                    int offset = 0;
                    uint signature = entryData.ReadUInt32LittleEndian(ref offset);

                    // Reset the offset so the parsers re-read from the start
                    offset = 0;

                    // NB10 ("NB10" in little-endian ASCII)
                    if (signature == 0x3031424E)
                    {
                        var nb10ProgramDatabase = Serialization.Readers.PortableExecutable.ParseNB10ProgramDatabase(entryData, ref offset);
                        if (nb10ProgramDatabase is not null)
                        {
                            debugData[i] = nb10ProgramDatabase;
                            continue;
                        }
                    }

                    // RSDS ("RSDS" in little-endian ASCII)
                    else if (signature == 0x53445352)
                    {
                        var rsdsProgramDatabase = Serialization.Readers.PortableExecutable.ParseRSDSProgramDatabase(entryData, ref offset);
                        if (rsdsProgramDatabase is not null)
                        {
                            debugData[i] = rsdsProgramDatabase;
                            continue;
                        }
                    }

                    // NOTE: unparseable CodeView entries are intentionally not
                    // stored as raw bytes, unlike non-CodeView entries below
                }
                else
                {
                    // Non-CodeView entries are kept as raw byte arrays
                    debugData[i] = entryData;
                }
            }

            return debugData;
        }
#endregion
#region Resource Data
/// <summary>
/// Find dialog box resources by title
/// </summary>
/// <param name="title">Dialog box title to check for</param>
2024-11-20 20:02:36 -05:00
/// <returns>List of matching resources</returns>
2025-09-01 16:43:21 -04:00
public List<DialogBoxResource?> FindDialogByTitle(string title)
{
// Cache the resource data for easier reading
var resourceData = ResourceData;
if (resourceData.Count == 0)
2024-06-05 22:48:42 -04:00
return [];
2025-09-01 16:43:21 -04:00
var resources = new List<DialogBoxResource?>();
foreach (var resource in resourceData.Values)
2024-11-15 22:22:22 -05:00
{
2026-01-25 14:30:18 -05:00
if (resource is null)
2024-11-15 22:22:22 -05:00
continue;
2026-01-25 14:30:18 -05:00
if (resource is not DialogBoxResource dbr || dbr is null)
2024-11-15 22:22:22 -05:00
continue;
if (dbr.DialogTemplate?.TitleResource?.Contains(title) ?? false)
resources.Add(dbr);
else if (dbr.ExtendedDialogTemplate?.TitleResource?.Contains(title) ?? false)
resources.Add(dbr);
}
return resources;
}
/// <summary>
/// Find dialog box resources by contained item title
/// </summary>
/// <param name="title">Dialog box item title to check for</param>
2024-11-20 20:02:36 -05:00
/// <returns>List of matching resources</returns>
2025-09-01 16:43:21 -04:00
public List<DialogBoxResource?> FindDialogBoxByItemTitle(string title)
{
// Cache the resource data for easier reading
var resourceData = ResourceData;
if (resourceData.Count == 0)
2024-06-05 22:48:42 -04:00
return [];
2025-09-01 16:43:21 -04:00
var resources = new List<DialogBoxResource?>();
foreach (var resource in resourceData.Values)
2024-11-15 22:22:22 -05:00
{
2026-01-25 14:30:18 -05:00
if (resource is null)
2024-11-15 22:22:22 -05:00
continue;
2026-01-25 14:30:18 -05:00
if (resource is not DialogBoxResource dbr || dbr is null)
2024-11-15 22:22:22 -05:00
continue;
2026-01-25 14:32:49 -05:00
if (dbr.DialogItemTemplates is not null)
{
2026-01-25 14:32:49 -05:00
var templates = Array.FindAll(dbr.DialogItemTemplates, dit => dit?.TitleResource is not null);
2024-11-15 22:22:22 -05:00
if (Array.FindIndex(templates, dit => dit?.TitleResource?.Contains(title) == true) > -1)
resources.Add(dbr);
}
2026-01-25 14:32:49 -05:00
else if (dbr.ExtendedDialogItemTemplates is not null)
2024-11-15 22:22:22 -05:00
{
2026-01-25 14:32:49 -05:00
var templates = Array.FindAll(dbr.ExtendedDialogItemTemplates, edit => edit?.TitleResource is not null);
2024-11-15 22:22:22 -05:00
if (Array.FindIndex(templates, edit => edit?.TitleResource?.Contains(title) == true) > -1)
resources.Add(dbr);
}
}
2024-11-15 22:22:22 -05:00
return resources;
}
/// <summary>
/// Find string table resources by contained string entry
/// </summary>
/// <param name="entry">String entry to check for</param>
2024-11-20 20:02:36 -05:00
/// <returns>List of matching resources</returns>
2026-02-12 18:25:24 -05:00
public List<StringTableResource?> FindStringTableByEntry(string entry)
{
// Cache the resource data for easier reading
var resourceData = ResourceData;
if (resourceData.Count == 0)
2024-06-05 22:48:42 -04:00
return [];
2026-02-12 18:25:24 -05:00
var stringTables = new List<StringTableResource?>();
foreach (var resource in resourceData.Values)
2024-11-15 22:22:22 -05:00
{
2026-01-25 14:30:18 -05:00
if (resource is null)
2024-11-15 22:22:22 -05:00
continue;
2026-02-12 18:25:24 -05:00
if (resource is not StringTableResource st || st is null)
2024-11-15 22:22:22 -05:00
continue;
2026-02-12 18:25:24 -05:00
foreach (string? s in st.Data.Values)
2024-11-20 20:01:28 -05:00
{
2025-07-31 11:57:20 -04:00
#if NETFRAMEWORK || NETSTANDARD
2026-01-25 14:30:18 -05:00
if (s is null || !s.Contains(entry))
2025-07-31 11:57:20 -04:00
#else
2026-01-25 14:30:18 -05:00
if (s is null || !s.Contains(entry, StringComparison.OrdinalIgnoreCase))
2025-07-31 11:57:20 -04:00
#endif
2025-08-25 13:06:22 -04:00
continue;
2025-07-31 11:57:20 -04:00
2024-11-20 20:01:28 -05:00
stringTables.Add(st);
break;
}
2024-11-15 22:22:22 -05:00
}
return stringTables;
}
/// <summary>
/// Find unparsed resources by type name
/// </summary>
/// <param name="typeName">Type name to check for</param>
2024-11-20 20:02:36 -05:00
/// <returns>List of matching resources</returns>
public List<byte[]?> FindResourceByNamedType(string typeName)
{
// Cache the resource data for easier reading
var resourceData = ResourceData;
if (resourceData.Count == 0)
2024-06-05 22:48:42 -04:00
return [];
2024-11-15 22:22:22 -05:00
var resources = new List<byte[]?>();
foreach (var kvp in resourceData)
2024-11-15 22:22:22 -05:00
{
if (!kvp.Key.Contains(typeName))
continue;
2026-02-12 18:25:24 -05:00
if (kvp.Value is null || kvp.Value is not GenericResourceEntry b || b is null)
2024-11-15 22:22:22 -05:00
continue;
2026-02-12 18:25:24 -05:00
resources.Add(b.Data);
2024-11-15 22:22:22 -05:00
}
return resources;
}
/// <summary>
/// Find unparsed resources by string value
/// </summary>
/// <param name="value">String value to check for</param>
2024-11-20 20:02:36 -05:00
/// <returns>List of matching resources</returns>
public List<byte[]?> FindGenericResource(string value)
{
// Cache the resource data for easier reading
var resourceData = ResourceData;
if (resourceData.Count == 0)
2024-06-05 22:48:42 -04:00
return [];
2024-11-15 22:22:22 -05:00
var resources = new List<byte[]?>();
foreach (var resource in resourceData.Values)
2024-11-15 22:22:22 -05:00
{
2026-01-25 14:30:18 -05:00
if (resource is null)
2024-11-15 22:22:22 -05:00
continue;
2026-02-12 18:25:24 -05:00
if (resource is not GenericResourceEntry b || b is null)
2024-11-15 22:22:22 -05:00
continue;
try
{
2026-02-12 18:25:24 -05:00
string? arrayAsASCII = Encoding.ASCII.GetString(b!.Data);
2024-11-15 22:22:22 -05:00
if (arrayAsASCII.Contains(value))
{
2026-02-12 18:25:24 -05:00
resources.Add(b.Data);
2024-11-15 22:22:22 -05:00
continue;
}
2024-11-15 22:22:22 -05:00
}
catch { }
2024-11-15 22:22:22 -05:00
try
{
2026-02-12 18:25:24 -05:00
string? arrayAsUTF8 = Encoding.UTF8.GetString(b!.Data);
2024-11-15 22:22:22 -05:00
if (arrayAsUTF8.Contains(value))
{
2026-02-12 18:25:24 -05:00
resources.Add(b.Data);
2024-11-15 22:22:22 -05:00
continue;
}
2024-11-15 22:22:22 -05:00
}
catch { }
2024-11-15 22:22:22 -05:00
try
{
2026-02-12 18:25:24 -05:00
string? arrayAsUnicode = Encoding.Unicode.GetString(b!.Data);
2024-11-15 22:22:22 -05:00
if (arrayAsUnicode.Contains(value))
{
2026-02-12 18:25:24 -05:00
resources.Add(b.Data);
2024-11-15 22:22:22 -05:00
continue;
}
2024-11-15 22:22:22 -05:00
}
catch { }
}
2024-11-15 22:22:22 -05:00
return resources;
}
2025-09-01 18:38:43 -04:00
        /// <summary>
        /// Find the location of a Wise overlay header, if it exists
        /// </summary>
        /// <returns>Offset to the overlay header on success, -1 otherwise</returns>
        /// <remarks>
        /// Probes, in order: the overlay, each section start, data after the
        /// .rsrc section, then an embedded executable inside the resource table
        /// (recursing into it if found).
        /// </remarks>
        public long FindWiseOverlayHeader()
        {
            // Get the overlay offset
            long overlayOffset = OverlayAddress;

            lock (_dataSourceLock)
            {
                // Attempt to get the overlay header at the overlay offset first
                if (overlayOffset >= 0 && overlayOffset < Length)
                {
                    _dataSource.SeekIfPossible(overlayOffset, SeekOrigin.Begin);
                    var header = WiseOverlayHeader.Create(_dataSource);
                    if (header is not null)
                        return overlayOffset;
                }

                // Check the start of each section's raw data
                foreach (var section in SectionTable)
                {
                    string sectionName = Encoding.ASCII.GetString(section.Name).TrimEnd('\0');
                    long sectionOffset = section.VirtualAddress.ConvertVirtualAddress(SectionTable);
                    _dataSource.SeekIfPossible(sectionOffset, SeekOrigin.Begin);
                    var header = WiseOverlayHeader.Create(_dataSource);
                    if (header is not null)
                        return sectionOffset;

                    // Check after the resource table
                    if (sectionName == ".rsrc")
                    {
                        // Data immediately following the section's raw data
                        long afterResourceOffset = sectionOffset + section.SizeOfRawData;
                        _dataSource.SeekIfPossible(afterResourceOffset, SeekOrigin.Begin);
                        header = WiseOverlayHeader.Create(_dataSource);
                        if (header is not null)
                            return afterResourceOffset;

                        // Data following a null-terminated padding string
                        _dataSource.SeekIfPossible(afterResourceOffset, SeekOrigin.Begin);
                        _ = _dataSource.ReadNullTerminatedAnsiString();
                        afterResourceOffset = _dataSource.Position;
                        header = WiseOverlayHeader.Create(_dataSource);
                        if (header is not null)
                            return afterResourceOffset;
                    }
                }
            }

            // If there are no resources
            if (OptionalHeader.ResourceTable is null)
                return -1;

            // Cache the resource data for easier reading; this also forces parsing
            var resourceData = ResourceData;
            if (resourceData.Count == 0)
                return -1;

            // Determine whether any resource starts with an MZ signature
            bool exeResources = false;
            foreach (var kvp in resourceData)
            {
                if (kvp.Value is null || kvp.Value is not GenericResourceEntry ba)
                    continue;
                if (!ba.Data.StartsWith(Data.Models.MSDOS.Constants.SignatureBytes))
                    continue;

                exeResources = true;
                break;
            }

            // If there are no executable resources
            if (!exeResources)
                return -1;

            // Get the raw resource table offset
            long resourceTableOffset = OptionalHeader.ResourceTable.VirtualAddress.ConvertVirtualAddress(SectionTable);
            if (resourceTableOffset <= 0)
                return -1;

            lock (_dataSourceLock)
            {
                // Scan the resource table byte-by-byte for an MZ signature
                // (each miss rewinds one byte so overlapping positions are tested)
                long resourceOffset = -1;
                _dataSource.SeekIfPossible(resourceTableOffset, SeekOrigin.Begin);
                while (_dataSource.Position < resourceTableOffset + OptionalHeader.ResourceTable.Size && _dataSource.Position < _dataSource.Length)
                {
                    ushort possibleSignature = _dataSource.ReadUInt16LittleEndian();
                    if (possibleSignature == Data.Models.MSDOS.Constants.SignatureUInt16)
                    {
                        resourceOffset = _dataSource.Position - 2;
                        break;
                    }

                    _dataSource.SeekIfPossible(-1, SeekOrigin.Current);
                }

                // If there was no valid offset, somehow
                if (resourceOffset == -1)
                    return -1;

                // Parse the embedded executable and recurse into it
                _dataSource.SeekIfPossible(resourceOffset, SeekOrigin.Begin);
                var resourceExe = WrapperFactory.CreateExecutableWrapper(_dataSource);
                if (resourceExe is not PortableExecutable resourcePex)
                    return -1;

                // NOTE(review): the returned offset is relative to the embedded
                // executable, not this file — confirm callers expect that
                return resourcePex.FindWiseOverlayHeader();
            }
        }
#endregion
#region Resource Parsing
/// <summary>
/// Parse the resource directory table information
/// </summary>
2025-09-26 13:06:18 -04:00
private void ParseResourceDirectoryTable(Data.Models.PortableExecutable.Resource.DirectoryTable table, List<object> types)
{
for (int i = 0; i < table.Entries.Length; i++)
{
var entry = table.Entries[i];
2026-02-12 12:46:49 -05:00
var newTypes = new List<object>(types);
2026-01-25 14:32:49 -05:00
if (entry.Name?.UnicodeString is not null)
2024-05-08 12:09:11 -04:00
newTypes.Add(Encoding.Unicode.GetString(entry.Name.UnicodeString));
else
newTypes.Add(entry.IntegerID);
ParseResourceDirectoryEntry(entry, newTypes);
}
}
/// <summary>
/// Parse the name resource directory entry information
/// </summary>
2025-09-26 13:06:18 -04:00
private void ParseResourceDirectoryEntry(Data.Models.PortableExecutable.Resource.DirectoryEntry entry, List<object> types)
{
2026-01-25 14:32:49 -05:00
if (entry.DataEntry is not null)
ParseResourceDataEntry(entry.DataEntry, types);
2026-01-25 14:32:49 -05:00
else if (entry.Subdirectory is not null)
ParseResourceDirectoryTable(entry.Subdirectory, types);
}
        /// <summary>
        /// Parse the resource data entry information and store it in the
        /// resource cache keyed by the comma-joined type path
        /// </summary>
        /// <param name="entry">Data entry whose payload should be parsed</param>
        /// <param name="types">Accumulated resource type path; the first element
        /// (uint ID or string name) selects the parser</param>
        /// <remarks>
        /// When caching the version information and assembly manifest, this code assumes that there is only one of each
        /// of those resources in the entire exectuable. This means that only the last found version or manifest will
        /// ever be cached.
        /// </remarks>
        private void ParseResourceDataEntry(Data.Models.PortableExecutable.Resource.DataEntry entry, List<object> types)
        {
            // Create the key and value objects; the generic parse is the
            // default used when no type-specific parser applies
            string key = string.Join(", ", Array.ConvertAll([.. types], t => t.ToString()));
            ResourceDataType? value = Serialization.Readers.PortableExecutable.ParseGenericResourceEntry(entry.Data);

            // If we have a known (integer) resource type
            if (types.Count > 0 && types[0] is uint resourceType)
            {
                try
                {
                    switch ((ResourceType)resourceType)
                    {
                        case ResourceType.RT_CURSOR:
                            // TODO: Implement specific parsing
                            break;
                        case ResourceType.RT_BITMAP:
                        case ResourceType.RT_NEWBITMAP:
                            // TODO: Implement specific parsing
                            break;
                        case ResourceType.RT_ICON:
                            // TODO: Implement specific parsing
                            break;
                        case ResourceType.RT_MENU:
                        case ResourceType.RT_NEWMENU:
                            value = Serialization.Readers.PortableExecutable.ParseMenuResource(entry.Data);
                            break;
                        case ResourceType.RT_DIALOG:
                        case ResourceType.RT_NEWDIALOG:
                            value = Serialization.Readers.PortableExecutable.ParseDialogBoxResource(entry.Data);
                            break;
                        case ResourceType.RT_STRING:
                            value = Serialization.Readers.PortableExecutable.ParseStringTableResource(entry.Data);
                            break;
                        case ResourceType.RT_FONTDIR:
                            // TODO: Implement specific parsing
                            break;
                        case ResourceType.RT_FONT:
                            // TODO: Implement specific parsing
                            break;
                        case ResourceType.RT_ACCELERATOR:
                            value = Serialization.Readers.PortableExecutable.ParseAcceleratorTable(entry.Data);
                            break;
                        case ResourceType.RT_RCDATA:
                            // TODO: Implement specific parsing
                            break;
                        case ResourceType.RT_MESSAGETABLE:
                            value = Serialization.Readers.PortableExecutable.ParseMessageResourceData(entry.Data);
                            break;
                        case ResourceType.RT_GROUP_CURSOR:
                            // TODO: Implement specific parsing
                            break;
                        case ResourceType.RT_GROUP_ICON:
                            // TODO: Implement specific parsing
                            break;
                        case ResourceType.RT_VERSION:
                            // Also caches the parsed version info on the wrapper
                            _versionInfo = Serialization.Readers.PortableExecutable.ParseVersionInfo(entry.Data);
                            value = _versionInfo;
                            break;
                        case ResourceType.RT_DLGINCLUDE:
                            // TODO: Implement specific parsing
                            break;
                        case ResourceType.RT_PLUGPLAY:
                            // TODO: Implement specific parsing
                            break;
                        case ResourceType.RT_VXD:
                            // TODO: Implement specific parsing
                            break;
                        case ResourceType.RT_ANICURSOR:
                            // TODO: Implement specific parsing
                            break;
                        case ResourceType.RT_ANIICON:
                            // TODO: Implement specific parsing
                            break;
                        case ResourceType.RT_HTML:
                            // TODO: Implement specific parsing
                            break;
                        case ResourceType.RT_MANIFEST:
                            // Also caches the parsed assembly manifest on the wrapper
                            _assemblyManifest = Serialization.Readers.PortableExecutable.ParseAssemblyManifest(entry.Data);
                            value = _assemblyManifest;
                            break;

                        // Bitflag, ignore
                        case ResourceType.RT_NEWRESOURCE:
                            break;

                        // Error state, ignore
                        case ResourceType.RT_ERROR:
                            // TODO: Implement specific parsing
                            break;

                        default:
                            // TODO: Implement specific parsing
                            break;
                    }
                }
                catch
                {
                    // Fall back on byte array data for malformed items
                    value = Serialization.Readers.PortableExecutable.ParseGenericResourceEntry(entry.Data);
                }
            }

            // If we have a custom (string-named) resource type
            else if (types.Count > 0 && types[0] is string)
            {
                value = Serialization.Readers.PortableExecutable.ParseGenericResourceEntry(entry.Data);
            }

            // Add the key and value to the cache
            _resourceData[key] = value;
        }
#endregion
#region Sections
/// <summary>
/// Determine if a section is contained within the section table
/// </summary>
/// <param name="sectionName">Name of the section to check for</param>
/// <param name="exact">True to enable exact matching of names, false for starts-with</param>
/// <returns>True if the section is in the executable, false otherwise</returns>
public bool ContainsSection(string? sectionName, bool exact = false)
{
// If no section name is provided
2026-01-25 14:30:18 -05:00
if (sectionName is null)
return false;
// Get all section names first
if (SectionNames.Length == 0)
return false;
// If we're checking exactly, return only exact matches
if (exact)
2024-11-15 22:22:22 -05:00
return Array.FindIndex(SectionNames, n => n.Equals(sectionName)) > -1;
// Otherwise, check if section name starts with the value
else
2024-11-15 22:22:22 -05:00
return Array.FindIndex(SectionNames, n => n.StartsWith(sectionName)) > -1;
}
        /// <summary>
        /// Get the section index corresponding to the entry point, if possible
        /// </summary>
        /// <returns>Section index on success, -1 on error</returns>
        public int FindEntryPointSectionIndex()
        {
            // If the entry point does not map to any physical file offset
            if (OptionalHeader.AddressOfEntryPoint.ConvertVirtualAddress(SectionTable) == 0)
                return -1;

            // Otherwise, find the section it exists within
            return OptionalHeader.AddressOfEntryPoint.ContainingSectionIndex(SectionTable);
        }
/// <summary>
/// Get the first section based on name, if possible
/// </summary>
/// <param name="name">Name of the section to check for</param>
/// <param name="exact">True to enable exact matching of names, false for starts-with</param>
/// <returns>Section data on success, null on error</returns>
public SectionHeader? GetFirstSection(string? name, bool exact = false)
{
// If we have no sections
2025-10-30 23:29:24 -04:00
if (SectionNames.Length == 0 || SectionTable.Length == 0)
return null;
// If the section doesn't exist
if (!ContainsSection(name, exact))
return null;
// Get the first index of the section
int index = Array.IndexOf(SectionNames, name);
if (index == -1)
return null;
// Return the section
return SectionTable[index];
}
/// <summary>
/// Get the last section based on name, if possible
/// </summary>
/// <param name="name">Name of the section to check for</param>
/// <param name="exact">True to enable exact matching of names, false for starts-with</param>
/// <returns>Section data on success, null on error</returns>
public SectionHeader? GetLastSection(string? name, bool exact = false)
{
// If we have no sections
2025-10-30 23:29:24 -04:00
if (SectionNames.Length == 0 || SectionTable.Length == 0)
return null;
// If the section doesn't exist
if (!ContainsSection(name, exact))
return null;
// Get the last index of the section
int index = Array.LastIndexOf(SectionNames, name);
if (index == -1)
return null;
// Return the section
return SectionTable[index];
}
/// <summary>
/// Get the section based on index, if possible
/// </summary>
/// <param name="index">Index of the section to check for</param>
/// <returns>Section data on success, null on error</returns>
public SectionHeader? GetSection(int index)
{
// If we have no sections
2025-10-30 23:29:24 -04:00
if (SectionTable.Length == 0)
return null;
// If the section doesn't exist
if (index < 0 || index >= SectionTable.Length)
return null;
// Return the section
return SectionTable[index];
}
/// <summary>
/// Get the first section data based on name, if possible
/// </summary>
/// <param name="name">Name of the section to check for</param>
/// <param name="exact">True to enable exact matching of names, false for starts-with</param>
/// <returns>Section data on success, null on error</returns>
public byte[]? GetFirstSectionData(string? name, bool exact = false)
{
// If we have no sections
2025-10-30 23:29:24 -04:00
if (SectionNames.Length == 0 || SectionTable.Length == 0)
return null;
// If the section doesn't exist
if (!ContainsSection(name, exact))
return null;
// Get the first index of the section
int index = Array.IndexOf(SectionNames, name);
return GetSectionData(index);
}
/// <summary>
/// Get the last section data based on name, if possible
/// </summary>
/// <param name="name">Name of the section to check for</param>
/// <param name="exact">True to enable exact matching of names, false for starts-with</param>
/// <returns>Section data on success, null on error</returns>
public byte[]? GetLastSectionData(string? name, bool exact = false)
{
    // If we have no sections
    if (SectionNames.Length == 0 || SectionTable.Length == 0)
        return null;

    // If the section doesn't exist
    if (!ContainsSection(name, exact))
        return null;

    // Get the last index of the section by exact name
    int index = Array.LastIndexOf(SectionNames, name);

    // Fall back to a starts-with scan when exact matching is disabled;
    // ContainsSection advertises a starts-with contract, but the exact
    // LastIndexOf above would miss prefix-only matches.
    if (index == -1 && !exact && name != null)
    {
        for (int i = SectionNames.Length - 1; i >= 0; i--)
        {
            // NOTE(review): assumes ordinal comparison matches ContainsSection -- confirm
            if (SectionNames[i] != null && SectionNames[i].StartsWith(name, StringComparison.Ordinal))
            {
                index = i;
                break;
            }
        }
    }

    // GetSectionData handles an unresolved (-1) index by returning null
    return GetSectionData(index);
}
/// <summary>
/// Get the section data based on index, if possible
/// </summary>
/// <param name="index">Index of the section to check for</param>
/// <returns>Section data on success, null on error</returns>
public byte[]? GetSectionData(int index)
{
    // If we have no sections
    if (SectionNames.Length == 0 || SectionTable.Length == 0)
        return null;

    // If the section doesn't exist
    if (index < 0 || index >= SectionTable.Length)
        return null;

    // Get the section entry and map its virtual address to a file offset
    var section = SectionTable[index];
    uint address = section.VirtualAddress.ConvertVirtualAddress(SectionTable);
    if (address == 0)
        return null;

    // Set the section size
    uint size = section.SizeOfRawData;

    // Create the section data cache if we have to; the cache is sized by
    // SectionNames, which is expected to parallel SectionTable
    _sectionData ??= new byte[SectionNames.Length][];

    // Guard against a name table shorter than the section table, which
    // would otherwise overrun the cache below
    if (index >= _sectionData.Length)
        return null;

    // If we already have cached data, just use that immediately
    if (_sectionData[index] is not null && _sectionData[index].Length > 0)
        return _sectionData[index];

    // Populate the raw section data based on the source
    var sectionData = ReadRangeFromSource((int)address, (int)size);

    // Cache and return the section data
    _sectionData[index] = sectionData;
    return sectionData;
}
/// <summary>
/// Get the first section strings based on name, if possible
/// </summary>
/// <param name="name">Name of the section to check for</param>
/// <param name="exact">True to enable exact matching of names, false for starts-with</param>
/// <returns>Section strings on success, null on error</returns>
public List<string>? GetFirstSectionStrings(string? name, bool exact = false)
{
    // If we have no sections
    if (SectionNames.Length == 0 || SectionTable.Length == 0)
        return null;

    // If the section doesn't exist
    if (!ContainsSection(name, exact))
        return null;

    // Get the first index of the section by exact name
    int index = Array.IndexOf(SectionNames, name);

    // Fall back to a starts-with scan when exact matching is disabled;
    // ContainsSection advertises a starts-with contract, but the exact
    // IndexOf above would miss prefix-only matches.
    if (index == -1 && !exact && name != null)
    {
        for (int i = 0; i < SectionNames.Length; i++)
        {
            // NOTE(review): assumes ordinal comparison matches ContainsSection -- confirm
            if (SectionNames[i] != null && SectionNames[i].StartsWith(name, StringComparison.Ordinal))
            {
                index = i;
                break;
            }
        }
    }

    // GetSectionStrings handles an unresolved (-1) index by returning null
    return GetSectionStrings(index);
}
/// <summary>
/// Get the last section strings based on name, if possible
/// </summary>
/// <param name="name">Name of the section to check for</param>
/// <param name="exact">True to enable exact matching of names, false for starts-with</param>
/// <returns>Section strings on success, null on error</returns>
public List<string>? GetLastSectionStrings(string? name, bool exact = false)
{
    // If we have no sections
    if (SectionNames.Length == 0 || SectionTable.Length == 0)
        return null;

    // If the section doesn't exist
    if (!ContainsSection(name, exact))
        return null;

    // Get the last index of the section by exact name
    int index = Array.LastIndexOf(SectionNames, name);

    // Fall back to a starts-with scan when exact matching is disabled;
    // ContainsSection advertises a starts-with contract, but the exact
    // LastIndexOf above would miss prefix-only matches.
    if (index == -1 && !exact && name != null)
    {
        for (int i = SectionNames.Length - 1; i >= 0; i--)
        {
            // NOTE(review): assumes ordinal comparison matches ContainsSection -- confirm
            if (SectionNames[i] != null && SectionNames[i].StartsWith(name, StringComparison.Ordinal))
            {
                index = i;
                break;
            }
        }
    }

    // GetSectionStrings handles an unresolved (-1) index by returning null
    return GetSectionStrings(index);
}
/// <summary>
/// Get the section strings based on index, if possible
/// </summary>
/// <param name="index">Index of the section to check for</param>
/// <returns>Section strings on success, null on error</returns>
public List<string>? GetSectionStrings(int index)
{
    // If we have no sections
    if (SectionNames.Length == 0 || SectionTable.Length == 0)
        return null;

    // If the section doesn't exist
    if (index < 0 || index >= SectionTable.Length)
        return null;

    lock (_sectionStringDataLock)
    {
        // Create the section string cache if we have to; the cache is sized
        // by SectionNames, which is expected to parallel SectionTable
        _sectionStringData ??= new List<string>?[SectionNames.Length];

        // Guard against a name table shorter than the section table, which
        // would otherwise overrun the cache below
        if (index >= _sectionStringData.Length)
            return null;

        // If we already have cached data, just use that immediately
        if (_sectionStringData[index] is not null)
            return _sectionStringData[index];

        // Get the section data, if possible
        byte[]? sectionData = GetSectionData(index);
        if (sectionData is null || sectionData.Length == 0)
        {
            // Cache an empty list so the read is not retried on every call
            _sectionStringData[index] = [];
            return _sectionStringData[index];
        }

        // Otherwise, cache and return the strings
        _sectionStringData[index] = sectionData.ReadStringsFrom(charLimit: 3) ?? [];
        return _sectionStringData[index];
    }
}
#endregion
#region Tables
/// <summary>
/// Get the table based on index, if possible
/// </summary>
/// <param name="index">Index of the table to check for (tables occupy 1-16)</param>
/// <returns>Table on success, null on error</returns>
public DataDirectory? GetTable(int index)
{
    // If the table doesn't exist
    if (index < 0 || index > 16)
        return null;

    switch (index)
    {
        case 1: return OptionalHeader.ExportTable;
        case 2: return OptionalHeader.ImportTable;
        case 3: return OptionalHeader.ResourceTable;
        case 4: return OptionalHeader.ExceptionTable;
        case 5: return OptionalHeader.CertificateTable;
        case 6: return OptionalHeader.BaseRelocationTable;
        case 7: return OptionalHeader.Debug;
        case 9: return OptionalHeader.GlobalPtr;
        case 10: return OptionalHeader.ThreadLocalStorageTable;
        case 11: return OptionalHeader.LoadConfigTable;
        case 12: return OptionalHeader.BoundImport;
        case 13: return OptionalHeader.ImportAddressTable;
        case 14: return OptionalHeader.DelayImportDescriptor;
        case 15: return OptionalHeader.CLRRuntimeHeader;

        case 8: // Architecture Table -- never populated
        case 16: // Reserved
        default: // Index 0 passes the range check above but maps to no table
            return null;
    }
}
/// <summary>
/// Get the table data based on index, if possible
/// </summary>
/// <param name="index">Index of the table to check for</param>
/// <returns>Table data on success, null on error</returns>
public byte[]? GetTableData(int index)
{
    // If the table doesn't exist
    if (index < 0 || index > 16)
        return null;

    // Serve from the cache when a previous call populated it
    // NOTE(review): assumes _tableData has at least 17 slots -- confirm against its declaration
    var cached = _tableData[index];
    if (cached is not null && cached.Length > 0)
        return cached;

    // Resolve the directory entry from the optional header
    var table = GetTable(index);

    // Pull the virtual address and size from the entry, if any
    uint virtualAddress = table?.VirtualAddress ?? 0;
    uint size = table?.Size ?? 0;

    // Translate the virtual address into a file offset
    uint address = virtualAddress.ConvertVirtualAddress(SectionTable);
    if (address == 0 || size == 0)
        return null;

    // Read, cache, and return the raw table data
    var tableData = ReadRangeFromSource((int)address, (int)size);
    _tableData[index] = tableData;
    return tableData;
}
/// <summary>
/// Get the table strings based on index, if possible
/// </summary>
/// <param name="index">Index of the table to check for</param>
/// <returns>Table strings on success, null on error</returns>
public List<string>? GetTableStrings(int index)
{
    // If the table doesn't exist
    if (index < 0 || index > 16)
        return null;

    // Serve from the cache when a previous call populated it
    var cached = _tableStringData[index];
    if (cached is not null)
        return cached;

    // Pull the raw table data; no data means no strings, which is
    // cached as an empty list so the read is not retried
    byte[]? tableData = GetTableData(index);
    List<string> strings = tableData is null || tableData.Length == 0
        ? []
        : tableData.ReadStringsFrom(charLimit: 5) ?? [];

    // Cache and return the strings
    _tableStringData[index] = strings;
    return strings;
}
#endregion
}
}