Compare commits

...

11 Commits
1.7.3 ... 1.7.5

Author SHA1 Message Date
Matt Nadareski
2776928946 Bump version 2024-11-15 22:38:35 -05:00
Matt Nadareski
8cc87c6540 Recombine WrapperBase files 2024-11-15 22:26:45 -05:00
Matt Nadareski
3c212022aa Use safe enumeration 2024-11-15 22:25:28 -05:00
Matt Nadareski
511c4d09e5 Update ASN1 to 1.4.1 and IO to 1.5.1 2024-11-15 22:22:22 -05:00
Matt Nadareski
d7eba27dc5 Framework only matters for executable 2024-11-15 21:10:27 -05:00
Matt Nadareski
09370618ca Reorder some methods 2024-11-14 20:51:25 -05:00
Matt Nadareski
2197167088 Add remaining easy sizes per partition 2024-11-14 20:48:42 -05:00
Matt Nadareski
b527635fe7 Add remaining easy offsets per partition 2024-11-14 20:46:56 -05:00
Matt Nadareski
695309bc32 Bump version 2024-11-14 13:32:19 -05:00
Matt Nadareski
97b2f68ec7 Use offsets instead of guessing... 2024-11-14 13:27:02 -05:00
Matt Nadareski
593044dbf3 Fix code binary check in N3DS 2024-11-14 12:49:16 -05:00
11 changed files with 742 additions and 501 deletions

View File

@@ -9,7 +9,7 @@
<Nullable>enable</Nullable>
<SuppressTfmSupportBuildWarnings>true</SuppressTfmSupportBuildWarnings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<Version>1.7.3</Version>
<Version>1.7.4</Version>
</PropertyGroup>
<!-- Support All Frameworks -->

View File

@@ -48,11 +48,7 @@ namespace InfoPrint
}
else if (Directory.Exists(path))
{
#if NET20 || NET35
foreach (string file in Directory.GetFiles(path, "*", SearchOption.AllDirectories))
#else
foreach (string file in Directory.EnumerateFiles(path, "*", SearchOption.AllDirectories))
#endif
foreach (string file in IOExtensions.SafeEnumerateFiles(path, "*", SearchOption.AllDirectories))
{
PrintFileInfo(file, json, debug);
}

View File

@@ -78,8 +78,14 @@ namespace SabreTools.Serialization.Deserializers
// Iterate and build the partitions
for (int i = 0; i < 8; i++)
{
// Cache the offset of the partition
initialOffset = data.Position;
// Find the offset to the partition
long partitionOffset = cart.Header.PartitionsTable?[i]?.Offset ?? 0;
partitionOffset *= mediaUnitSize;
if (partitionOffset == 0)
continue;
// Seek to the start of the partition
data.Seek(partitionOffset, SeekOrigin.Begin);
// Handle the normal header
var partition = ParseNCCHHeader(data);
@@ -101,7 +107,7 @@ namespace SabreTools.Serialization.Deserializers
if (partition.ExeFSSizeInMediaUnits > 0)
{
long offset = partition.ExeFSOffsetInMediaUnits * mediaUnitSize;
data.Seek(initialOffset + offset, SeekOrigin.Begin);
data.Seek(partitionOffset + offset, SeekOrigin.Begin);
var exeFsHeader = ParseExeFSHeader(data);
if (exeFsHeader == null)
@@ -114,7 +120,7 @@ namespace SabreTools.Serialization.Deserializers
if (partition.RomFSSizeInMediaUnits > 0)
{
long offset = partition.RomFSOffsetInMediaUnits * mediaUnitSize;
data.Seek(initialOffset + offset, SeekOrigin.Begin);
data.Seek(partitionOffset + offset, SeekOrigin.Begin);
var romFsHeader = ParseRomFSHeader(data);
if (romFsHeader == null)
@@ -124,10 +130,6 @@ namespace SabreTools.Serialization.Deserializers
cart.RomFSHeaders[i] = romFsHeader;
}
// Skip past other data
long partitionSize = partition.ContentSizeInMediaUnits * mediaUnitSize;
data.Seek(initialOffset + partitionSize, SeekOrigin.Begin);
}
#endregion

View File

@@ -1,7 +1,9 @@
using System;
using System.Collections.Generic;
using System.IO;
#if NET35_OR_GREATER || NETCOREAPP
using System.Linq;
#endif
using System.Text;
using SabreTools.IO.Extensions;
using SabreTools.Models.PortableExecutable;
@@ -1052,28 +1054,62 @@ namespace SabreTools.Serialization.Deserializers
// If we have import lookup tables
if (importTable.ImportLookupTables != null && importLookupTables.Count > 0)
{
#if NET20
var addresses = new List<int>();
foreach (var kvp in importTable.ImportLookupTables)
{
if (kvp.Value == null)
continue;
var vaddrs = Array.ConvertAll(kvp.Value,
ilte => ilte == null ? 0 : (int)ilte.HintNameTableRVA.ConvertVirtualAddress(sections));
addresses.AddRange(vaddrs);
}
#else
var addresses = importTable.ImportLookupTables
.SelectMany(kvp => kvp.Value ?? [])
.Where(ilte => ilte != null)
.Select(ilte => (int)ilte!.HintNameTableRVA.ConvertVirtualAddress(sections));
#endif
hintNameTableEntryAddresses.AddRange(addresses);
}
// If we have import address tables
if (importTable.ImportAddressTables != null && importTable.ImportAddressTables.Count > 0)
{
#if NET20
var addresses = new List<int>();
foreach (var kvp in importTable.ImportAddressTables)
{
if (kvp.Value == null)
continue;
var vaddrs = Array.ConvertAll(kvp.Value,
iate => iate == null ? 0 : (int)iate.HintNameTableRVA.ConvertVirtualAddress(sections));
addresses.AddRange(vaddrs);
}
#else
var addresses = importTable.ImportAddressTables
.SelectMany(kvp => kvp.Value ?? [])
.Where(iate => iate != null)
.Select(iate => (int)iate!.HintNameTableRVA.ConvertVirtualAddress(sections));
#endif
hintNameTableEntryAddresses.AddRange(addresses);
}
// Sanitize the addresses
hintNameTableEntryAddresses = hintNameTableEntryAddresses.Where(addr => addr != 0)
.Distinct()
.OrderBy(a => a)
.ToList();
hintNameTableEntryAddresses = hintNameTableEntryAddresses.FindAll(addr => addr != 0);
#if NET20
var temp = new List<int>();
foreach (int value in hintNameTableEntryAddresses)
{
if (!temp.Contains(value))
temp.Add(value);
}
#else
hintNameTableEntryAddresses = hintNameTableEntryAddresses.Distinct().ToList();
#endif
hintNameTableEntryAddresses.Sort();
// If we have any addresses, add them to the table
if (hintNameTableEntryAddresses.Count > 0)
@@ -1214,11 +1250,12 @@ namespace SabreTools.Serialization.Deserializers
return resourceDirectoryTable;
// If we're not aligned to a section
if (!sections.Any(s => s != null && s.PointerToRawData == initialOffset))
var firstSection = Array.Find(sections, s => s != null && s.PointerToRawData == initialOffset);
if (firstSection == null)
return resourceDirectoryTable;
// Get the section size
int size = (int)sections.First(s => s != null && s.PointerToRawData == initialOffset)!.SizeOfRawData;
int size = (int)firstSection.SizeOfRawData;
// Read until we reach a 512-byte boundary, find the start of an MS-DOS header, or hit the end of the file
while (data.Position - initialOffset < size && data.Position % 0x200 != 0 && data.Position < data.Length - 1)

View File

@@ -0,0 +1,9 @@
#if NET20
namespace System.Runtime.CompilerServices
{
[AttributeUsage(AttributeTargets.Assembly | AttributeTargets.Class | AttributeTargets.Method)]
internal sealed class ExtensionAttribute : Attribute {}
}
#endif

View File

@@ -10,7 +10,7 @@
<Nullable>enable</Nullable>
<SuppressTfmSupportBuildWarnings>true</SuppressTfmSupportBuildWarnings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<Version>1.7.3</Version>
<Version>1.7.5</Version>
<!-- Package Properties -->
<Authors>Matt Nadareski</Authors>
@@ -24,34 +24,15 @@
<PackageLicenseExpression>MIT</PackageLicenseExpression>
</PropertyGroup>
<!-- Support All Frameworks -->
<PropertyGroup Condition="$(TargetFramework.StartsWith(`net2`)) OR $(TargetFramework.StartsWith(`net3`)) OR $(TargetFramework.StartsWith(`net4`))">
<RuntimeIdentifiers>win-x86;win-x64</RuntimeIdentifiers>
</PropertyGroup>
<PropertyGroup Condition="$(TargetFramework.StartsWith(`netcoreapp`)) OR $(TargetFramework.StartsWith(`net5`))">
<RuntimeIdentifiers>win-x86;win-x64;win-arm64;linux-x64;linux-arm64;osx-x64</RuntimeIdentifiers>
</PropertyGroup>
<PropertyGroup Condition="$(TargetFramework.StartsWith(`net6`)) OR $(TargetFramework.StartsWith(`net7`)) OR $(TargetFramework.StartsWith(`net8`)) OR $(TargetFramework.StartsWith(`net9`))">
<RuntimeIdentifiers>win-x86;win-x64;win-arm64;linux-x64;linux-arm64;osx-x64;osx-arm64</RuntimeIdentifiers>
</PropertyGroup>
<PropertyGroup Condition="$(RuntimeIdentifier.StartsWith(`osx-arm`))">
<TargetFrameworks>net6.0;net7.0;net8.0;net9.0</TargetFrameworks>
</PropertyGroup>
<ItemGroup>
<None Include="../README.md" Pack="true" PackagePath="" />
</ItemGroup>
<!-- Support for old .NET versions -->
<ItemGroup Condition="$(TargetFramework.StartsWith(`net2`))">
<PackageReference Include="Net30.LinqBridge" Version="1.3.0" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Newtonsoft.Json" Version="13.0.3" />
<PackageReference Include="SabreTools.ASN1" Version="1.4.0" />
<PackageReference Include="SabreTools.ASN1" Version="1.4.1" />
<PackageReference Include="SabreTools.Hashing" Version="1.4.0" />
<PackageReference Include="SabreTools.IO" Version="1.5.0" />
<PackageReference Include="SabreTools.IO" Version="1.5.1" />
<PackageReference Include="SabreTools.Models" Version="1.5.1" />
</ItemGroup>

View File

@@ -1,5 +1,5 @@
using System;
using System.IO;
using System.Linq;
using SabreTools.Models.InstallShieldCabinet;
namespace SabreTools.Serialization.Wrappers
@@ -289,7 +289,7 @@ namespace SabreTools.Serialization.Wrappers
if (Model.FileGroups == null)
return null;
return Model.FileGroups.FirstOrDefault(fg => fg != null && string.Equals(fg.Name, name));
return Array.Find(Model.FileGroups, fg => fg != null && string.Equals(fg.Name, name));
}
/// <summary>

View File

@@ -327,7 +327,43 @@ namespace SabreTools.Serialization.Wrappers
if (fileHeader == null)
return false;
return fileHeader.FileName == ".code\0\0\0";
return fileHeader.FileName == ".code";
}
/// <summary>
/// Get if the NoCrypto bit is set
/// </summary>
public bool PossiblyDecrypted(int index)
{
var bitMasks = GetBitMasks(index);
#if NET20 || NET35
return (bitMasks & BitMasks.NoCrypto) != 0;
#else
return bitMasks.HasFlag(BitMasks.NoCrypto);
#endif
}
#endregion
#region Encryption
/// <summary>
/// Get the initial value for the ExeFS counter
/// </summary>
public byte[] ExeFSIV(int index)
{
if (Partitions == null)
return [];
if (index < 0 || index >= Partitions.Length)
return [];
var header = Partitions[index];
if (header == null || header.MagicID != NCCHMagicNumber)
return [];
byte[] partitionIdBytes = BitConverter.GetBytes(header.PartitionId);
Array.Reverse(partitionIdBytes);
return [.. partitionIdBytes, .. ExefsCounter];
}
/// <summary>
@@ -349,25 +385,6 @@ namespace SabreTools.Serialization.Wrappers
return [.. partitionIdBytes, .. PlainCounter];
}
/// <summary>
/// Get the initial value for the ExeFS counter
/// </summary>
public byte[] ExeFSIV(int index)
{
if (Partitions == null)
return [];
if (index < 0 || index >= Partitions.Length)
return [];
var header = Partitions[index];
if (header == null || header.MagicID != NCCHMagicNumber)
return [];
byte[] partitionIdBytes = BitConverter.GetBytes(header.PartitionId);
Array.Reverse(partitionIdBytes);
return [.. partitionIdBytes, .. ExefsCounter];
}
/// <summary>
/// Get the initial value for the RomFS counter
/// </summary>
@@ -387,19 +404,6 @@ namespace SabreTools.Serialization.Wrappers
return [.. partitionIdBytes, .. RomfsCounter];
}
/// <summary>
/// Get if the NoCrypto bit is set
/// </summary>
public bool PossiblyDecrypted(int index)
{
var bitMasks = GetBitMasks(index);
#if NET20 || NET35
return (bitMasks & BitMasks.NoCrypto) != 0;
#else
return bitMasks.HasFlag(BitMasks.NoCrypto);
#endif
}
#endregion
#region Offsets
@@ -436,6 +440,38 @@ namespace SabreTools.Serialization.Wrappers
return (partitionOffsetMU + exeFsOffsetMU) * MediaUnitSize;
}
/// <summary>
/// Get the offset of a partition logo region
/// </summary>
/// <param name="index">Index of the partition to inspect</param>
/// <returns>Offset to the logo region of the partition, 0 on error</returns>
public uint GetLogoRegionOffset(int index)
{
// No partitions means no offset is available
if (PartitionsTable == null || Partitions == null)
return 0;
if (index < 0 || index >= Partitions.Length)
return 0;
// Invalid partition table entry means no offset is available
var entry = PartitionsTable[index];
if (entry == null)
return 0;
// Invalid partition means no offset is available
var header = Partitions[index];
if (header == null || header.MagicID != NCCHMagicNumber)
return 0;
// If the offset is 0, return 0
uint logoOffsetMU = header.LogoRegionOffsetInMediaUnits;
if (logoOffsetMU == 0)
return 0;
// Return the adjusted offset (media units converted to bytes)
uint partitionOffsetMU = entry.Offset;
return (partitionOffsetMU + logoOffsetMU) * MediaUnitSize;
}
/// <summary>
/// Get the offset of a partition
/// </summary>
@@ -462,6 +498,38 @@ namespace SabreTools.Serialization.Wrappers
return partitionOffsetMU * MediaUnitSize;
}
/// <summary>
/// Get the offset of a partition plain region
/// </summary>
/// <param name="index">Index of the partition to inspect</param>
/// <returns>Offset to the plain region of the partition, 0 on error</returns>
public uint GetPlainRegionOffset(int index)
{
// No partitions means no offset is available
if (PartitionsTable == null || Partitions == null)
return 0;
if (index < 0 || index >= Partitions.Length)
return 0;
// Invalid partition table entry means no offset is available
var entry = PartitionsTable[index];
if (entry == null)
return 0;
// Invalid partition means no offset is available
var header = Partitions[index];
if (header == null || header.MagicID != NCCHMagicNumber)
return 0;
// If the offset is 0, return 0
uint prOffsetMU = header.PlainRegionOffsetInMediaUnits;
if (prOffsetMU == 0)
return 0;
// Return the adjusted offset (media units converted to bytes)
uint partitionOffsetMU = entry.Offset;
return (partitionOffsetMU + prOffsetMU) * MediaUnitSize;
}
/// <summary>
/// Get the offset of a partition RomFS
/// </summary>
@@ -540,6 +608,48 @@ namespace SabreTools.Serialization.Wrappers
return header.ExtendedHeaderSizeInBytes;
}
/// <summary>
/// Get the size of a partition logo region
/// </summary>
/// <param name="index">Index of the partition to inspect</param>
/// <returns>Size of the partition logo region in bytes, 0 on error</returns>
public uint GetLogoRegionSize(int index)
{
    // Bail out when the partitions array is missing or the index is out of range
    var partitions = Partitions;
    if (partitions == null || index < 0 || index >= partitions.Length)
        return 0;

    // A missing partition header means no size can be derived
    var partitionHeader = partitions[index];
    if (partitionHeader == null)
        return 0;

    // Convert the stored media-unit count into a byte count
    return partitionHeader.LogoRegionSizeInMediaUnits * MediaUnitSize;
}
/// <summary>
/// Get the size of a partition plain region
/// </summary>
/// <param name="index">Index of the partition to inspect</param>
/// <returns>Size of the partition plain region in bytes, 0 on error</returns>
public uint GetPlainRegionSize(int index)
{
    // Bail out when the partitions array is missing or the index is out of range
    var partitions = Partitions;
    if (partitions == null || index < 0 || index >= partitions.Length)
        return 0;

    // A missing partition header means no size can be derived
    var partitionHeader = partitions[index];
    if (partitionHeader == null)
        return 0;

    // Convert the stored media-unit count into a byte count
    return partitionHeader.PlainRegionSizeInMediaUnits * MediaUnitSize;
}
/// <summary>
/// Get the size of a partition RomFS
/// </summary>

View File

@@ -1,7 +1,9 @@
using System;
using System.Collections.Generic;
using System.IO;
#if NET35_OR_GREATER || NETCOREAPP
using System.Linq;
#endif
using System.Text;
using SabreTools.IO.Extensions;
@@ -41,10 +43,17 @@ namespace SabreTools.Serialization.Wrappers
// Populate the raw header padding data based on the source
uint headerStartAddress = Model.Stub.Header.NewExeHeaderAddr;
uint firstSectionAddress = Model.SectionTable
.Select(s => s?.PointerToRawData ?? 0)
.Where(s => s != 0 && s >= headerStartAddress)
.Min();
uint firstSectionAddress = uint.MaxValue;
foreach (var s in Model.SectionTable)
{
if (s == null || s.PointerToRawData == 0)
continue;
if (s.PointerToRawData < headerStartAddress)
continue;
if (s.PointerToRawData < firstSectionAddress)
firstSectionAddress = s.PointerToRawData;
}
// Check if the header length is more than 0 before reading data
int headerLength = (int)(firstSectionAddress - headerStartAddress);
@@ -82,10 +91,17 @@ namespace SabreTools.Serialization.Wrappers
// Populate the header padding strings based on the source
uint headerStartAddress = Model.Stub.Header.NewExeHeaderAddr;
uint firstSectionAddress = Model.SectionTable
.Select(s => s?.PointerToRawData ?? 0)
.Where(s => s != 0 && s >= headerStartAddress)
.Min();
uint firstSectionAddress = uint.MaxValue;
foreach (var s in Model.SectionTable)
{
if (s == null || s.PointerToRawData == 0)
continue;
if (s.PointerToRawData < headerStartAddress)
continue;
if (s.PointerToRawData < firstSectionAddress)
firstSectionAddress = s.PointerToRawData;
}
// Check if the header length is more than 0 before reading strings
int headerLength = (int)(firstSectionAddress - headerStartAddress);
@@ -643,10 +659,9 @@ namespace SabreTools.Serialization.Wrappers
get
{
var manifest = GetAssemblyManifest();
return manifest?
.AssemblyIdentities?
.FirstOrDefault(ai => !string.IsNullOrEmpty(ai?.Version))?
.Version;
var identities = manifest?.AssemblyIdentities ?? [];
var versionIdentity = Array.Find(identities, ai => !string.IsNullOrEmpty(ai?.Version));
return versionIdentity?.Version;
}
}
@@ -837,9 +852,22 @@ namespace SabreTools.Serialization.Wrappers
return null;
// Try to find a key that matches
#if NET20
Models.PortableExecutable.StringData? match = null;
foreach (var st in stringTable)
{
if (st?.Children == null)
continue;
match = Array.Find(st.Children, sd => sd != null && key.Equals(sd.Key, StringComparison.OrdinalIgnoreCase));
if (match != null)
break;
}
#else
var match = stringTable
.SelectMany(st => st?.Children ?? [])
.FirstOrDefault(sd => sd != null && key.Equals(sd.Key, StringComparison.OrdinalIgnoreCase));
#endif
// Return either the match or null
return match?.Value?.TrimEnd('\0');
@@ -878,19 +906,29 @@ namespace SabreTools.Serialization.Wrappers
if (DebugData == null)
return [];
var nb10Found = DebugData.Select(r => r.Value)
.Select(r => r as SabreTools.Models.PortableExecutable.NB10ProgramDatabase)
.Where(n => n != null)
.Where(n => n?.PdbFileName?.Contains(path) == true)
.Select(n => n as object);
var debugFound = new List<object?>();
foreach (var data in DebugData.Values)
{
if (data == null)
continue;
var rsdsFound = DebugData.Select(r => r.Value)
.Select(r => r as SabreTools.Models.PortableExecutable.RSDSProgramDatabase)
.Where(r => r != null)
.Where(r => r?.PathAndFileName?.Contains(path) == true)
.Select(r => r as object);
if (data is Models.PortableExecutable.NB10ProgramDatabase n)
{
if (n.PdbFileName == null || !n.PdbFileName.Contains(path))
continue;
return nb10Found.Concat(rsdsFound);
debugFound.Add(n);
}
else if (data is Models.PortableExecutable.RSDSProgramDatabase r)
{
if (r.PathAndFileName == null || !r.PathAndFileName.Contains(path))
continue;
debugFound.Add(r);
}
}
return debugFound;
}
/// <summary>
@@ -904,37 +942,49 @@ namespace SabreTools.Serialization.Wrappers
if (DebugData == null)
return [];
return DebugData.Select(r => r.Value)
.Select(b => b as byte[])
.Where(b => b != null)
.Where(b =>
var table = new List<byte[]?>();
foreach (var data in DebugData.Values)
{
if (data == null)
continue;
if (data is not byte[] b || b == null)
continue;
try
{
try
string? arrayAsASCII = Encoding.ASCII.GetString(b);
if (arrayAsASCII.Contains(value))
{
string? arrayAsASCII = Encoding.ASCII.GetString(b!);
if (arrayAsASCII.Contains(value))
return true;
table.Add(b);
continue;
}
catch { }
}
catch { }
try
try
{
string? arrayAsUTF8 = Encoding.UTF8.GetString(b);
if (arrayAsUTF8.Contains(value))
{
string? arrayAsUTF8 = Encoding.UTF8.GetString(b!);
if (arrayAsUTF8.Contains(value))
return true;
table.Add(b);
continue;
}
catch { }
}
catch { }
try
try
{
string? arrayAsUnicode = Encoding.Unicode.GetString(b);
if (arrayAsUnicode.Contains(value))
{
string? arrayAsUnicode = Encoding.Unicode.GetString(b!);
if (arrayAsUnicode.Contains(value))
return true;
table.Add(b);
continue;
}
catch { }
}
catch { }
}
return false;
});
return table;
}
#endregion
@@ -1027,14 +1077,21 @@ namespace SabreTools.Serialization.Wrappers
if (ResourceData == null)
return [];
return ResourceData.Select(r => r.Value)
.Select(r => r as SabreTools.Models.PortableExecutable.DialogBoxResource)
.Where(d => d != null)
.Where(d =>
{
return (d?.DialogTemplate?.TitleResource?.Contains(title) ?? false)
|| (d?.ExtendedDialogTemplate?.TitleResource?.Contains(title) ?? false);
});
var resources = new List<Models.PortableExecutable.DialogBoxResource?>();
foreach (var resource in ResourceData.Values)
{
if (resource == null)
continue;
if (resource is not Models.PortableExecutable.DialogBoxResource dbr || dbr == null)
continue;
if (dbr.DialogTemplate?.TitleResource?.Contains(title) ?? false)
resources.Add(dbr);
else if (dbr.ExtendedDialogTemplate?.TitleResource?.Contains(title) ?? false)
resources.Add(dbr);
}
return resources;
}
/// <summary>
@@ -1048,26 +1105,29 @@ namespace SabreTools.Serialization.Wrappers
if (ResourceData == null)
return [];
return ResourceData.Select(r => r.Value)
.Select(r => r as SabreTools.Models.PortableExecutable.DialogBoxResource)
.Where(d => d != null)
.Where(d =>
{
if (d?.DialogItemTemplates != null)
{
return d.DialogItemTemplates
.Where(dit => dit?.TitleResource != null)
.Any(dit => dit?.TitleResource?.Contains(title) == true);
}
else if (d?.ExtendedDialogItemTemplates != null)
{
return d.ExtendedDialogItemTemplates
.Where(edit => edit?.TitleResource != null)
.Any(edit => edit?.TitleResource?.Contains(title) == true);
}
var resources = new List<Models.PortableExecutable.DialogBoxResource?>();
foreach (var resource in ResourceData.Values)
{
if (resource == null)
continue;
if (resource is not Models.PortableExecutable.DialogBoxResource dbr || dbr == null)
continue;
return false;
});
if (dbr.DialogItemTemplates != null)
{
var templates = Array.FindAll(dbr.DialogItemTemplates, dit => dit?.TitleResource != null);
if (Array.FindIndex(templates, dit => dit?.TitleResource?.Contains(title) == true) > -1)
resources.Add(dbr);
}
else if (dbr.ExtendedDialogItemTemplates != null)
{
var templates = Array.FindAll(dbr.ExtendedDialogItemTemplates, edit => edit?.TitleResource != null);
if (Array.FindIndex(templates, edit => edit?.TitleResource?.Contains(title) == true) > -1)
resources.Add(dbr);
}
}
return resources;
}
/// <summary>
@@ -1081,11 +1141,26 @@ namespace SabreTools.Serialization.Wrappers
if (ResourceData == null)
return [];
return ResourceData.Select(r => r.Value)
#if NET20
var stringTables = new List<Dictionary<int, string?>?>();
foreach (var resource in ResourceData.Values)
{
if (resource == null)
continue;
if (resource is not Dictionary<int, string?> st || st == null)
continue;
}
return stringTables;
#else
return ResourceData.Values
.Select(r => r as Dictionary<int, string?>)
.Where(st => st != null)
.Where(st => st?.Select(kvp => kvp.Value)?
.Any(s => s != null && s.Contains(entry)) == true);
#endif
}
/// <summary>
@@ -1099,9 +1174,24 @@ namespace SabreTools.Serialization.Wrappers
if (ResourceData == null)
return [];
#if NET20
var resources = new List<byte[]?>();
foreach (var kvp in ResourceData)
{
if (!kvp.Key.Contains(typeName))
continue;
if (kvp.Value == null || kvp.Value is not byte[] b || b == null)
continue;
resources.Add(b);
}
return resources;
#else
return ResourceData.Where(kvp => kvp.Key.Contains(typeName))
.Select(kvp => kvp.Value as byte[])
.Where(b => b != null);
#endif
}
/// <summary>
@@ -1115,37 +1205,49 @@ namespace SabreTools.Serialization.Wrappers
if (ResourceData == null)
return [];
return ResourceData.Select(r => r.Value)
.Select(r => r as byte[])
.Where(b => b != null)
.Where(b =>
var resources = new List<byte[]?>();
foreach (var resource in ResourceData.Values)
{
if (resource == null)
continue;
if (resource is not byte[] b || b == null)
continue;
try
{
try
string? arrayAsASCII = Encoding.ASCII.GetString(b!);
if (arrayAsASCII.Contains(value))
{
string? arrayAsASCII = Encoding.ASCII.GetString(b!);
if (arrayAsASCII.Contains(value))
return true;
resources.Add(b);
continue;
}
catch { }
}
catch { }
try
try
{
string? arrayAsUTF8 = Encoding.UTF8.GetString(b!);
if (arrayAsUTF8.Contains(value))
{
string? arrayAsUTF8 = Encoding.UTF8.GetString(b!);
if (arrayAsUTF8.Contains(value))
return true;
resources.Add(b);
continue;
}
catch { }
}
catch { }
try
try
{
string? arrayAsUnicode = Encoding.Unicode.GetString(b!);
if (arrayAsUnicode.Contains(value))
{
string? arrayAsUnicode = Encoding.Unicode.GetString(b!);
if (arrayAsUnicode.Contains(value))
return true;
resources.Add(b);
continue;
}
catch { }
}
catch { }
}
return false;
});
return resources;
}
#endregion
@@ -1321,11 +1423,11 @@ namespace SabreTools.Serialization.Wrappers
// If we're checking exactly, return only exact matches
if (exact)
return SectionNames.Any(n => n.Equals(sectionName));
return Array.FindIndex(SectionNames, n => n.Equals(sectionName)) > -1;
// Otherwise, check if section name starts with the value
else
return SectionNames.Any(n => n.StartsWith(sectionName));
return Array.FindIndex(SectionNames, n => n.StartsWith(sectionName)) > -1;
}
/// <summary>

View File

@@ -1,3 +1,8 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
using SabreTools.IO.Extensions;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Wrappers
@@ -27,4 +32,332 @@ namespace SabreTools.Serialization.Wrappers
#endregion
}
public abstract class WrapperBase<T> : WrapperBase, IWrapper<T>
{
#region Properties
/// <inheritdoc/>
public T GetModel() => Model;
/// <summary>
/// Internal model
/// </summary>
public T Model { get; private set; }
#endregion
#region Instance Variables
/// <summary>
/// Source of the original data
/// </summary>
protected DataSource _dataSource = DataSource.UNKNOWN;
/// <summary>
/// Lock object for reading from the source
/// </summary>
private readonly object _streamDataLock = new();
/// <summary>
/// Source byte array data
/// </summary>
/// <remarks>This is only populated if <see cref="_dataSource"/> is <see cref="DataSource.ByteArray"/></remarks>
protected byte[]? _byteArrayData = null;
/// <summary>
/// Source byte array data offset
/// </summary>
/// <remarks>This is only populated if <see cref="_dataSource"/> is <see cref="DataSource.ByteArray"/></remarks>
protected int _byteArrayOffset = -1;
/// <summary>
/// Source Stream data
/// </summary>
/// <remarks>This is only populated if <see cref="_dataSource"/> is <see cref="DataSource.Stream"/></remarks>
protected Stream? _streamData = null;
#if !NETFRAMEWORK
/// <summary>
/// JSON serializer options for output printing
/// </summary>
protected System.Text.Json.JsonSerializerOptions _jsonSerializerOptions
{
get
{
#if NETCOREAPP3_1
var serializer = new System.Text.Json.JsonSerializerOptions { WriteIndented = true };
#else
var serializer = new System.Text.Json.JsonSerializerOptions { IncludeFields = true, WriteIndented = true };
#endif
serializer.Converters.Add(new ConcreteAbstractSerializer());
serializer.Converters.Add(new ConcreteInterfaceSerializer());
serializer.Converters.Add(new System.Text.Json.Serialization.JsonStringEnumConverter());
return serializer;
}
}
#endif
#endregion
#region Constructors
/// <summary>
/// Construct a new instance of the wrapper from a byte array
/// </summary>
protected WrapperBase(T? model, byte[]? data, int offset)
{
if (model == null)
throw new ArgumentNullException(nameof(model));
if (data == null)
throw new ArgumentNullException(nameof(data));
if (offset < 0 || offset >= data.Length)
throw new ArgumentOutOfRangeException(nameof(offset));
Model = model;
_dataSource = DataSource.ByteArray;
_byteArrayData = data;
_byteArrayOffset = offset;
}
/// <summary>
/// Construct a new instance of the wrapper from a Stream
/// </summary>
protected WrapperBase(T? model, Stream? data)
{
if (model == null)
throw new ArgumentNullException(nameof(model));
if (data == null)
throw new ArgumentNullException(nameof(data));
if (data.Length == 0 || !data.CanSeek || !data.CanRead)
throw new ArgumentOutOfRangeException(nameof(data));
Model = model;
_dataSource = DataSource.Stream;
_streamData = data;
}
#endregion
#region Data
/// <summary>
/// Validate the backing data source
/// </summary>
/// <returns>True if the data source is valid, false otherwise</returns>
public bool DataSourceIsValid()
{
return _dataSource switch
{
// Byte array data requires both a valid array and offset
DataSource.ByteArray => _byteArrayData != null && _byteArrayOffset >= 0,
// Stream data requires a non-null stream that is both readable and seekable
DataSource.Stream => _streamData != null && _streamData.CanRead && _streamData.CanSeek,
// Everything else is invalid
_ => false,
};
}
/// <summary>
/// Check if a data segment is valid in the data source
/// </summary>
/// <param name="position">Position in the source</param>
/// <param name="length">Length of the data to check</param>
/// <returns>True if the positional data is valid, false otherwise</returns>
public bool SegmentValid(int position, int length)
{
// Validate the data source
if (!DataSourceIsValid())
return false;
// If we have an invalid position
if (position < 0 || position >= GetEndOfFile())
return false;
// The requested segment must end within the backing data
return _dataSource switch
{
DataSource.ByteArray => _byteArrayOffset + position + length <= _byteArrayData!.Length,
DataSource.Stream => position + length <= _streamData!.Length,
// Everything else is invalid
_ => false,
};
}
/// <summary>
/// Read data from the source
/// </summary>
/// <param name="position">Position in the source to read from</param>
/// <param name="length">Length of the requested data</param>
/// <returns>Byte array containing the requested data, null on error</returns>
public byte[]? ReadFromDataSource(int position, int length)
{
// Validate the data source
if (!DataSourceIsValid())
return null;
// Validate the requested segment
if (!SegmentValid(position, length))
return null;
// Read and return the data
byte[]? sectionData = null;
switch (_dataSource)
{
case DataSource.ByteArray:
sectionData = new byte[length];
Array.Copy(_byteArrayData!, _byteArrayOffset + position, sectionData, 0, length);
break;
case DataSource.Stream:
lock (_streamDataLock)
{
long currentLocation = _streamData!.Position;
_streamData.Seek(position, SeekOrigin.Begin);
sectionData = _streamData.ReadBytes(length);
_streamData.Seek(currentLocation, SeekOrigin.Begin);
break;
}
}
return sectionData;
}
/// <summary>
/// Read string data from the source
/// </summary>
/// <param name="position">Position in the source to read from</param>
/// <param name="length">Length of the requested data</param>
/// <param name="charLimit">Number of characters needed to be a valid string</param>
/// <returns>String list containing the requested data, null on error</returns>
public List<string>? ReadStringsFromDataSource(int position, int length, int charLimit = 5)
{
// Read the data as a byte array first
byte[]? sourceData = ReadFromDataSource(position, length);
if (sourceData == null)
return null;
// Check for ASCII strings
var asciiStrings = ReadStringsWithEncoding(sourceData, charLimit, Encoding.ASCII);
// Check for UTF-8 strings
// We are limiting the check for Unicode characters with a second byte of 0x00 for now
var utf8Strings = ReadStringsWithEncoding(sourceData, charLimit, Encoding.UTF8);
// Check for Unicode strings
// We are limiting the check for Unicode characters with a second byte of 0x00 for now
var unicodeStrings = ReadStringsWithEncoding(sourceData, charLimit, Encoding.Unicode);
// Ignore duplicate strings across encodings
List<string> sourceStrings = [.. asciiStrings, .. utf8Strings, .. unicodeStrings];
// Sort the strings and return
sourceStrings.Sort();
return sourceStrings;
}
/// <summary>
/// Get the ending offset of the source
/// </summary>
/// <returns>Value greater than 0 for a valid end of file, -1 on error</returns>
public int GetEndOfFile()
{
// Validate the data source
if (!DataSourceIsValid())
return -1;
// Return the effective endpoint
return _dataSource switch
{
// For byte arrays, the endpoint is relative to the wrapper's starting offset
DataSource.ByteArray => _byteArrayData!.Length - _byteArrayOffset,
DataSource.Stream => (int)_streamData!.Length,
_ => -1,
};
}
/// <summary>
/// Read string data from the source with an encoding
/// </summary>
/// <param name="sourceData">Byte array representing the source data</param>
/// <param name="charLimit">Number of characters needed to be a valid string</param>
/// <param name="encoding">Character encoding to use for checking</param>
/// <returns>String list containing the requested data, empty on error</returns>
/// <remarks>TODO: Move to IO?</remarks>
#if NET20
private List<string> ReadStringsWithEncoding(byte[] sourceData, int charLimit, Encoding encoding)
#else
private HashSet<string> ReadStringsWithEncoding(byte[] sourceData, int charLimit, Encoding encoding)
#endif
{
    // If we have an invalid character limit, default to 5
    if (charLimit <= 0)
        charLimit = 5;

    // Create the string collection to return (List on NET20, which lacks HashSet)
#if NET20
    var sourceStrings = new List<string>();
#else
    var sourceStrings = new HashSet<string>();
#endif

    // Setup cached data
    int sourceDataIndex = 0;
    List<char> cachedChars = [];

    // Check for strings
    while (sourceDataIndex < sourceData.Length)
    {
        // Read the next character and advance past the consumed byte.
        // BUGFIX: the index must advance on every iteration; previously the
        // invalid-character and repeated-character branches hit `continue`
        // without incrementing, so the first control character caused an
        // infinite loop.
        char ch = encoding.GetChars(sourceData, sourceDataIndex, 1)[0];
        sourceDataIndex++;

        // If we have a control character or an invalid byte
        bool isValid = !char.IsControl(ch) && (ch & 0xFF00) == 0;
        if (!isValid)
        {
            // If we have no cached string
            if (cachedChars.Count == 0)
                continue;

            // If we have a cached string greater than the limit
            if (cachedChars.Count >= charLimit)
                sourceStrings.Add(new string([.. cachedChars]));

            cachedChars.Clear();
            continue;
        }

        // If a long repeating string is found, discard it
        if (cachedChars.Count >= 64 && cachedChars.TrueForAll(c => c == cachedChars[0]))
        {
            cachedChars.Clear();
            continue;
        }

        // Append the character to the cached string
        cachedChars.Add(ch);
    }

    // If we have a cached string greater than the limit
    if (cachedChars.Count >= charLimit)
        sourceStrings.Add(new string([.. cachedChars]));

    return sourceStrings;
}
#endregion
#region JSON Export
#if !NETFRAMEWORK
/// <summary>
/// Export the item information as JSON
/// </summary>
public override string ExportJSON()
{
    var options = _jsonSerializerOptions;
    return System.Text.Json.JsonSerializer.Serialize(Model, options);
}
#endif
#endregion
}
}

View File

@@ -1,329 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
using SabreTools.IO.Extensions;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Wrappers
{
public abstract class WrapperBase<T> : WrapperBase, IWrapper<T>
{
    #region Properties

    /// <inheritdoc/>
    public T GetModel() => Model;

    /// <summary>
    /// Internal model
    /// </summary>
    public T Model { get; private set; }

    #endregion

    #region Instance Variables

    /// <summary>
    /// Source of the original data
    /// </summary>
    protected DataSource _dataSource = DataSource.UNKNOWN;

    /// <summary>
    /// Lock object for reading from the source
    /// </summary>
    private readonly object _streamDataLock = new();

    /// <summary>
    /// Source byte array data
    /// </summary>
    /// <remarks>This is only populated if <see cref="_dataSource"/> is <see cref="DataSource.ByteArray"/></remarks>
    protected byte[]? _byteArrayData = null;

    /// <summary>
    /// Source byte array data offset
    /// </summary>
    /// <remarks>This is only populated if <see cref="_dataSource"/> is <see cref="DataSource.ByteArray"/></remarks>
    protected int _byteArrayOffset = -1;

    /// <summary>
    /// Source Stream data
    /// </summary>
    /// <remarks>This is only populated if <see cref="_dataSource"/> is <see cref="DataSource.Stream"/></remarks>
    protected Stream? _streamData = null;

#if !NETFRAMEWORK
    /// <summary>
    /// JSON serializer options for output printing
    /// </summary>
    protected System.Text.Json.JsonSerializerOptions _jsonSerializerOptions
    {
        get
        {
#if NETCOREAPP3_1
            // IncludeFields is unavailable before .NET 5
            var serializer = new System.Text.Json.JsonSerializerOptions { WriteIndented = true };
#else
            var serializer = new System.Text.Json.JsonSerializerOptions { IncludeFields = true, WriteIndented = true };
#endif
            serializer.Converters.Add(new ConcreteAbstractSerializer());
            serializer.Converters.Add(new ConcreteInterfaceSerializer());
            serializer.Converters.Add(new System.Text.Json.Serialization.JsonStringEnumConverter());
            return serializer;
        }
    }
#endif

    #endregion

    #region Constructors

    /// <summary>
    /// Construct a new instance of the wrapper from a byte array
    /// </summary>
    /// <param name="model">Parsed model to wrap, may not be null</param>
    /// <param name="data">Backing byte array, may not be null</param>
    /// <param name="offset">Offset into the array where the data begins</param>
    /// <exception cref="ArgumentNullException">If the model or data is null</exception>
    /// <exception cref="ArgumentOutOfRangeException">If the offset is outside the array</exception>
    protected WrapperBase(T? model, byte[]? data, int offset)
    {
        if (model == null)
            throw new ArgumentNullException(nameof(model));
        if (data == null)
            throw new ArgumentNullException(nameof(data));
        if (offset < 0 || offset >= data.Length)
            throw new ArgumentOutOfRangeException(nameof(offset));

        Model = model;
        _dataSource = DataSource.ByteArray;
        _byteArrayData = data;
        _byteArrayOffset = offset;
    }

    /// <summary>
    /// Construct a new instance of the wrapper from a Stream
    /// </summary>
    /// <param name="model">Parsed model to wrap, may not be null</param>
    /// <param name="data">Backing stream; must be non-empty, seekable, and readable</param>
    /// <exception cref="ArgumentNullException">If the model or data is null</exception>
    /// <exception cref="ArgumentOutOfRangeException">If the stream is empty or unusable</exception>
    protected WrapperBase(T? model, Stream? data)
    {
        if (model == null)
            throw new ArgumentNullException(nameof(model));
        if (data == null)
            throw new ArgumentNullException(nameof(data));
        if (data.Length == 0 || !data.CanSeek || !data.CanRead)
            throw new ArgumentOutOfRangeException(nameof(data));

        Model = model;
        _dataSource = DataSource.Stream;
        _streamData = data;
    }

    #endregion

    #region Data

    /// <summary>
    /// Validate the backing data source
    /// </summary>
    /// <returns>True if the data source is valid, false otherwise</returns>
    public bool DataSourceIsValid()
    {
        return _dataSource switch
        {
            // Byte array data requires both a valid array and offset
            DataSource.ByteArray => _byteArrayData != null && _byteArrayOffset >= 0,

            // Stream data requires a readable, seekable stream
            DataSource.Stream => _streamData != null && _streamData.CanRead && _streamData.CanSeek,

            // Everything else is invalid
            _ => false,
        };
    }

    /// <summary>
    /// Check if a data segment is valid in the data source
    /// </summary>
    /// <param name="position">Position in the source</param>
    /// <param name="length">Length of the data to check</param>
    /// <returns>True if the positional data is valid, false otherwise</returns>
    public bool SegmentValid(int position, int length)
    {
        // Validate the data source
        if (!DataSourceIsValid())
            return false;

        // If we have an invalid position
        if (position < 0 || position >= GetEndOfFile())
            return false;

        return _dataSource switch
        {
            DataSource.ByteArray => _byteArrayOffset + position + length <= _byteArrayData!.Length,
            DataSource.Stream => position + length <= _streamData!.Length,

            // Everything else is invalid
            _ => false,
        };
    }

    /// <summary>
    /// Read data from the source
    /// </summary>
    /// <param name="position">Position in the source to read from</param>
    /// <param name="length">Length of the requested data</param>
    /// <returns>Byte array containing the requested data, null on error</returns>
    public byte[]? ReadFromDataSource(int position, int length)
    {
        // Validate the data source
        if (!DataSourceIsValid())
            return null;

        // Validate the requested segment
        if (!SegmentValid(position, length))
            return null;

        // Read and return the data
        byte[]? sectionData = null;
        switch (_dataSource)
        {
            case DataSource.ByteArray:
                sectionData = new byte[length];
                Array.Copy(_byteArrayData!, _byteArrayOffset + position, sectionData, 0, length);
                break;

            case DataSource.Stream:
                // Serialize stream access and restore the caller's position
                lock (_streamDataLock)
                {
                    long currentLocation = _streamData!.Position;
                    _streamData.Seek(position, SeekOrigin.Begin);
                    sectionData = _streamData.ReadBytes(length);
                    _streamData.Seek(currentLocation, SeekOrigin.Begin);
                    break;
                }
        }

        return sectionData;
    }

    /// <summary>
    /// Read string data from the source
    /// </summary>
    /// <param name="position">Position in the source to read from</param>
    /// <param name="length">Length of the requested data</param>
    /// <param name="charLimit">Number of characters needed to be a valid string</param>
    /// <returns>String list containing the requested data, null on error</returns>
    public List<string>? ReadStringsFromDataSource(int position, int length, int charLimit = 5)
    {
        // Read the data as a byte array first
        byte[]? sourceData = ReadFromDataSource(position, length);
        if (sourceData == null)
            return null;

        // Check for ASCII strings
        var asciiStrings = ReadStringsWithEncoding(sourceData, charLimit, Encoding.ASCII);

        // Check for UTF-8 strings
        // We are limiting the check for Unicode characters with a second byte of 0x00 for now
        var utf8Strings = ReadStringsWithEncoding(sourceData, charLimit, Encoding.UTF8);

        // Check for Unicode strings
        // We are limiting the check for Unicode characters with a second byte of 0x00 for now
        var unicodeStrings = ReadStringsWithEncoding(sourceData, charLimit, Encoding.Unicode);

        // Ignore duplicate strings across encodings. Previously the three
        // sets were only concatenated, so strings valid in multiple encodings
        // (every ASCII string is also valid UTF-8) appeared more than once.
        var merged = new HashSet<string>(asciiStrings);
        merged.UnionWith(utf8Strings);
        merged.UnionWith(unicodeStrings);

        // Sort the strings and return
        List<string> sourceStrings = [.. merged];
        sourceStrings.Sort();
        return sourceStrings;
    }

    /// <summary>
    /// Get the ending offset of the source
    /// </summary>
    /// <returns>Value greater than 0 for a valid end of file, -1 on error</returns>
    public int GetEndOfFile()
    {
        // Validate the data source
        if (!DataSourceIsValid())
            return -1;

        // Return the effective endpoint
        return _dataSource switch
        {
            DataSource.ByteArray => _byteArrayData!.Length - _byteArrayOffset,
            DataSource.Stream => (int)_streamData!.Length,
            _ => -1,
        };
    }

    /// <summary>
    /// Read string data from the source with an encoding
    /// </summary>
    /// <param name="sourceData">Byte array representing the source data</param>
    /// <param name="charLimit">Number of characters needed to be a valid string</param>
    /// <param name="encoding">Character encoding to use for checking</param>
    /// <returns>String list containing the requested data, empty on error</returns>
    /// <remarks>TODO: Move to IO?</remarks>
    private HashSet<string> ReadStringsWithEncoding(byte[] sourceData, int charLimit, Encoding encoding)
    {
        // If we have an invalid character limit, default to 5
        if (charLimit <= 0)
            charLimit = 5;

        // Create the string hash set to return
        var sourceStrings = new HashSet<string>();

        // Setup cached data
        int sourceDataIndex = 0;
        List<char> cachedChars = [];

        // Check for strings
        while (sourceDataIndex < sourceData.Length)
        {
            // Read the next character
            // NOTE(review): bytes are decoded one at a time, which only works
            // sensibly for single-byte encodings — confirm intended behavior
            char ch = encoding.GetChars(sourceData, sourceDataIndex, 1)[0];

            // A control character or a character outside the single-byte
            // range terminates the current candidate string
            bool isValid = !char.IsControl(ch) && (ch & 0xFF00) == 0;
            if (!isValid)
            {
                // Keep the cached string if it meets the limit
                if (cachedChars.Count >= charLimit)
                    sourceStrings.Add(new string([.. cachedChars]));

                cachedChars.Clear();

                // BUGFIX: always advance past the invalid byte. Previously
                // the index was not incremented on this path, so the first
                // invalid byte encountered caused an infinite loop.
                sourceDataIndex++;
                continue;
            }

            // If a long repeating string is found, discard it; the current
            // character is re-read next iteration as the start of a new string
            if (cachedChars.Count >= 64 && cachedChars.TrueForAll(c => c == cachedChars[0]))
            {
                cachedChars.Clear();
                continue;
            }

            // Append the character to the cached string
            cachedChars.Add(ch);
            sourceDataIndex++;
        }

        // Keep any trailing cached string that meets the limit
        if (cachedChars.Count >= charLimit)
            sourceStrings.Add(new string([.. cachedChars]));

        return sourceStrings;
    }

    #endregion

    #region JSON Export

#if !NETFRAMEWORK
    /// <summary>
    /// Export the item information as JSON
    /// </summary>
    public override string ExportJSON() => System.Text.Json.JsonSerializer.Serialize(Model, _jsonSerializerOptions);
#endif

    #endregion
}
}