Fix build and nullability issues

Matt Nadareski
2023-09-10 23:01:45 -04:00
parent 14c86fce7f
commit cf1e866d58
15 changed files with 251 additions and 33 deletions
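
Most of the changes below apply one pattern: the project multi-targets net48, which has no nullable reference types, and newer frameworks where nullability annotations are enabled, so nullable signatures go behind #if NET48 / #else blocks and null guards are added wherever a converter or parser may now receive or return null. A minimal standalone sketch of that pattern, using hypothetical Source and Widget types:

internal class Source { public string Name { get; set; } = string.Empty; }
internal class Widget { public string Name { get; set; } = string.Empty; }

internal static class Converter
{
#if NET48
    // net48 (C# 7.3): no nullable reference types, so plain signatures
    internal static Widget ConvertToInternalModel(Source item)
#else
    // newer targets: both the parameter and the return type are annotated
    internal static Widget? ConvertToInternalModel(Source? item)
#endif
    {
        // Shared guard: an unusable input yields no model
        if (item == null)
            return null;

        return new Widget { Name = item.Name };
    }
}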

View File

@@ -24,6 +24,7 @@ namespace SabreTools.Serialization.CrossModel
metadataFile[Models.Metadata.MetadataFile.MachineKey] = item.File
.Where(f => f != null)
.Select(ConvertMachineToInternalModel)
.Where(m => m != null)
.ToArray();
}
@@ -45,7 +46,11 @@ namespace SabreTools.Serialization.CrossModel
/// <summary>
/// Convert from <cref="Models.ArchiveDotOrg.File"/> to <cref="Models.Metadata.Machine"/>
/// </summary>
#if NET48
private static Models.Metadata.Machine ConvertMachineToInternalModel(Models.ArchiveDotOrg.File item)
#else
private static Models.Metadata.Machine ConvertMachineToInternalModel(Models.ArchiveDotOrg.File? item)
#endif
{
var machine = new Models.Metadata.Machine
{
@@ -57,8 +62,15 @@ namespace SabreTools.Serialization.CrossModel
/// <summary>
/// Convert from <cref="Models.ArchiveDotOrg.File"/> to <cref="Models.Metadata.Rom"/>
/// </summary>
#if NET48
private static Models.Metadata.Rom ConvertToInternalModel(Models.ArchiveDotOrg.File item)
#else
private static Models.Metadata.Rom? ConvertToInternalModel(Models.ArchiveDotOrg.File? item)
#endif
{
if (item == null)
return null;
var rom = new Models.Metadata.Rom
{
[Models.Metadata.Rom.NameKey] = item.Name,
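
The two Where calls above are not redundant: the first drops null source entries before conversion, and the second drops entries where the converter itself returned null, which also keeps the nullable flow analysis satisfied on the newer targets. A compact sketch of the same pipeline, reusing the hypothetical Source and Widget types from the sketch under the commit summary:

using System.Linq;

internal static class Pipeline
{
    // Mirrors the converters above: a null or otherwise unusable input yields no model
    private static Widget ToWidget(Source item)
        => item == null ? null : new Widget { Name = item.Name };

    internal static Widget[] ConvertAll(Source[] items)
    {
        return items
            .Where(i => i != null)   // drop null source entries up front
            .Select(ToWidget)        // the converter may still report null
            .Where(w => w != null)   // keep only successfully converted models
            .ToArray();
    }
}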

View File

@@ -25,6 +25,7 @@ namespace SabreTools.Serialization.CrossModel
metadataFile[Models.Metadata.MetadataFile.MachineKey] = obj.Row
.Where(r => r != null)
.Select(ConvertMachineToInternalModel)
.Where(m => m != null)
.ToArray();
}
@@ -46,8 +47,15 @@ namespace SabreTools.Serialization.CrossModel
/// <summary>
/// Convert from <cref="Models.AttractMode.Row"/> to <cref="Models.Metadata.Machine"/>
/// </summary>
#if NET48
private static Models.Metadata.Machine ConvertMachineToInternalModel(Row item)
#else
private static Models.Metadata.Machine? ConvertMachineToInternalModel(Row? item)
#endif
{
if (item == null)
return null;
var machine = new Models.Metadata.Machine
{
[Models.Metadata.Machine.NameKey] = item.Name,

View File

@@ -25,6 +25,7 @@ namespace SabreTools.Serialization.CrossModel
metadataFile[Models.Metadata.MetadataFile.MachineKey] = obj.Game
.Where(g => g != null)
.Select(ConvertMachineToInternalModel)
.Where(m => m != null)
.ToArray();
}
@@ -60,8 +61,15 @@ namespace SabreTools.Serialization.CrossModel
/// <summary>
/// Convert from <cref="Models.ClrMamePro.GameBase"/> to <cref="Models.Metadata.Machine"/>
/// </summary>
#if NET48
private static Models.Metadata.Machine ConvertMachineToInternalModel(GameBase item)
#else
private static Models.Metadata.Machine? ConvertMachineToInternalModel(GameBase? item)
#endif
{
if (item == null)
return null;
var machine = new Models.Metadata.Machine
{
[Models.Metadata.Machine.NameKey] = item.Name,

View File

@@ -75,7 +75,11 @@ namespace SabreTools.Serialization
/// <param name="rva">Relative virtual address to convert</param>
/// <param name="sections">Array of sections to check against</param>
/// <returns>Physical address, 0 on error</returns>
#if NET48
public static uint ConvertVirtualAddress(this uint rva, Models.PortableExecutable.SectionHeader[] sections)
#else
public static uint ConvertVirtualAddress(this uint rva, Models.PortableExecutable.SectionHeader?[] sections)
#endif
{
// If we have an invalid section table, we can't do anything
if (sections == null || sections.Length == 0)
@@ -86,7 +90,7 @@ namespace SabreTools.Serialization
return 0;
// If the RVA matches a section start exactly, use that
var matchingSection = sections.FirstOrDefault(s => s.VirtualAddress == rva);
var matchingSection = sections.FirstOrDefault(s => s != null && s.VirtualAddress == rva);
if (matchingSection != null)
return rva - matchingSection.VirtualAddress + matchingSection.PointerToRawData;
@@ -98,11 +102,19 @@ namespace SabreTools.Serialization
continue;
// If the section "starts" at 0, just skip it
#if NET48
if (sections[i].PointerToRawData == 0)
#else
if (sections[i]!.PointerToRawData == 0)
#endif
continue;
// Attempt to derive the physical address from the current section
#if NET48
var section = sections[i];
#else
var section = sections[i]!;
#endif
if (rva >= section.VirtualAddress && section.VirtualSize != 0 && rva <= section.VirtualAddress + section.VirtualSize)
return rva - section.VirtualAddress + section.PointerToRawData;
else if (rva >= section.VirtualAddress && section.SizeOfRawData != 0 && rva <= section.VirtualAddress + section.SizeOfRawData)
@@ -1146,6 +1158,9 @@ namespace SabreTools.Serialization
for (int i = 0; i < messageResourceData.Blocks.Length; i++)
{
var messageResourceBlock = messageResourceData.Blocks[i];
if (messageResourceBlock == null)
continue;
offset = (int)messageResourceBlock.OffsetToEntries;
for (uint j = messageResourceBlock.LowId; j <= messageResourceBlock.HighId; j++)
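
For reference, the ConvertVirtualAddress logic above reduces to: find the section whose virtual range contains the RVA, then physical = rva - VirtualAddress + PointerToRawData. A simplified sketch of that core calculation with a hypothetical Section type (the real method also handles exact section-start matches and a couple of other edge cases):

internal class Section
{
    public uint VirtualAddress;
    public uint VirtualSize;
    public uint PointerToRawData;
    public uint SizeOfRawData;
}

internal static class Rva
{
    // Returns 0 when the section table is unusable or the RVA falls outside every section
    internal static uint ToPhysicalAddress(uint rva, Section[] sections)
    {
        if (sections == null || sections.Length == 0)
            return 0;

        foreach (var section in sections)
        {
            if (section == null || section.PointerToRawData == 0)
                continue;

            // Prefer the virtual size; fall back to the raw size when it is zero
            uint size = section.VirtualSize != 0 ? section.VirtualSize : section.SizeOfRawData;
            if (size != 0 && rva >= section.VirtualAddress && rva <= section.VirtualAddress + size)
                return rva - section.VirtualAddress + section.PointerToRawData;
        }

        return 0;
    }
}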

View File

@@ -48,7 +48,7 @@ namespace SabreTools.Serialization.Streams
#if NET48
private static void WriteRows(Row[] rows, SeparatedValueWriter writer)
#else
private static void WriteRows(Row[]? rows, SeparatedValueWriter writer)
private static void WriteRows(Row?[]? rows, SeparatedValueWriter writer)
#endif
{
// If the games information is missing, we can't do anything
@@ -58,6 +58,9 @@ namespace SabreTools.Serialization.Streams
// Loop through and write out the rows
foreach (var row in rows)
{
if (row == null)
continue;
#if NET48
var rowArray = new string[]
#else

View File

@@ -60,7 +60,7 @@ namespace SabreTools.Serialization.Streams
// Try to get the texture header lump
var textureDataLump = file.Lumps[HL_BSP_LUMP_TEXTUREDATA];
if (textureDataLump.Offset == 0 || textureDataLump.Length == 0)
if (textureDataLump == null || textureDataLump.Offset == 0 || textureDataLump.Length == 0)
return null;
// Seek to the texture header
@@ -88,7 +88,7 @@ namespace SabreTools.Serialization.Streams
#if NET48
int offset = (int)(textureHeader.Offsets[i] + file.Lumps[HL_BSP_LUMP_TEXTUREDATA].Offset);
#else
int offset = (int)(textureHeader.Offsets![i] + file.Lumps[HL_BSP_LUMP_TEXTUREDATA].Offset);
int offset = (int)(textureHeader.Offsets![i] + file.Lumps[HL_BSP_LUMP_TEXTUREDATA]!.Offset);
#endif
if (offset < 0 || offset >= data.Length)
continue;
@@ -154,7 +154,7 @@ namespace SabreTools.Serialization.Streams
TextureHeader textureHeader = new TextureHeader();
textureHeader.TextureCount = data.ReadUInt32();
var offsets = new uint[textureHeader.TextureCount];
for (int i = 0; i < textureHeader.TextureCount; i++)

View File

@@ -46,14 +46,18 @@ namespace SabreTools.Serialization.Streams
#region DIFAT Sector Numbers
// Create a DIFAT sector table
#if NET48
var difatSectors = new List<SectorNumber>();
#else
var difatSectors = new List<SectorNumber?>();
#endif
// Add the sectors from the header
if (fileHeader.DIFAT != null)
difatSectors.AddRange(fileHeader.DIFAT);
// Loop through and add the DIFAT sectors
SectorNumber currentSector = (SectorNumber)fileHeader.FirstDIFATSectorLocation;
var currentSector = (SectorNumber?)fileHeader.FirstDIFATSectorLocation;
for (int i = 0; i < fileHeader.NumberOfDIFATSectors; i++)
{
// If we have a readable sector
@@ -88,7 +92,11 @@ namespace SabreTools.Serialization.Streams
#region FAT Sector Numbers
// Create a FAT sector table
#if NET48
var fatSectors = new List<SectorNumber>();
#else
var fatSectors = new List<SectorNumber?>();
#endif
// Loop through and add the FAT sectors
currentSector = binary.DIFATSectorNumbers[0];
@@ -126,7 +134,11 @@ namespace SabreTools.Serialization.Streams
#region Mini FAT Sector Numbers
// Create a mini FAT sector table
#if NET48
var miniFatSectors = new List<SectorNumber>();
#else
var miniFatSectors = new List<SectorNumber?>();
#endif
// Loop through and add the mini FAT sectors
currentSector = (SectorNumber)fileHeader.FirstMiniFATSectorLocation;
@@ -274,7 +286,11 @@ namespace SabreTools.Serialization.Streams
header.NumberOfMiniFATSectors = data.ReadUInt32();
header.FirstDIFATSectorLocation = data.ReadUInt32();
header.NumberOfDIFATSectors = data.ReadUInt32();
#if NET48
header.DIFAT = new SectorNumber[109];
#else
header.DIFAT = new SectorNumber?[109];
#endif
for (int i = 0; i < header.DIFAT.Length; i++)
{
header.DIFAT[i] = (SectorNumber)data.ReadUInt32();
@@ -293,11 +309,19 @@ namespace SabreTools.Serialization.Streams
/// <param name="data">Stream to parse</param>
/// <param name="sectorShift">Sector shift from the header</param>
/// <returns>Filled sector full of sector numbers on success, null on error</returns>
#if NET48
private static SectorNumber[] ParseSectorNumbers(Stream data, ushort sectorShift)
#else
private static SectorNumber?[] ParseSectorNumbers(Stream data, ushort sectorShift)
#endif
{
// TODO: Use marshalling here instead of building
int sectorCount = (int)(Math.Pow(2, sectorShift) / sizeof(uint));
SectorNumber[] sectorNumbers = new SectorNumber[sectorCount];
#if NET48
var sectorNumbers = new SectorNumber[sectorCount];
#else
var sectorNumbers = new SectorNumber?[sectorCount];
#endif
for (int i = 0; i < sectorNumbers.Length; i++)
{
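
The sector arithmetic above follows from the header: a sector is 2^sectorShift bytes, and each table sector holds 2^sectorShift / 4 sector numbers because every entry is a 32-bit value; the common shift of 9 gives 512-byte sectors holding 128 entries. A small sketch of the same calculation with a plain BinaryReader (hypothetical helper, not the library's API):

using System.IO;

internal static class CfbSectors
{
    // Number of 32-bit sector numbers per sector of size 2^sectorShift
    internal static int EntriesPerSector(ushort sectorShift)
        => (1 << sectorShift) / sizeof(uint);

    // Read one sector's worth of raw sector numbers
    internal static uint[] ReadSectorNumbers(BinaryReader reader, ushort sectorShift)
    {
        var numbers = new uint[EntriesPerSector(sectorShift)];
        for (int i = 0; i < numbers.Length; i++)
            numbers[i] = reader.ReadUInt32();
        return numbers;
    }
}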

View File

@@ -87,7 +87,7 @@ namespace SabreTools.Serialization.Streams
#if NET48
private static void WriteGames(GameBase[] games, ClrMameProWriter writer)
#else
private static void WriteGames(GameBase[]? games, ClrMameProWriter writer)
private static void WriteGames(GameBase?[]? games, ClrMameProWriter writer)
#endif
{
// If the games information is missing, we can't do anything
@@ -97,6 +97,9 @@ namespace SabreTools.Serialization.Streams
// Loop through and write out the games
foreach (var game in games)
{
if (game == null)
continue;
WriteGame(game, writer);
writer.Flush();
}

View File

@@ -204,7 +204,8 @@ namespace SabreTools.Serialization.Streams
// Loop and assign to entries
foreach (var directoryEntry in file.DirectoryEntries)
{
directoryEntry.Name = file.DirectoryNames[directoryEntry.NameOffset];
if (directoryEntry != null)
directoryEntry.Name = file.DirectoryNames[directoryEntry.NameOffset];
}
}

View File

@@ -93,15 +93,27 @@ namespace SabreTools.Serialization.Streams
for (int i = 0; i < 8; i++)
{
// If we have an encrypted or invalid partition
#if NET48
if (cart.Partitions[i].MagicID != NCCHMagicNumber)
#else
if (cart.Partitions[i]!.MagicID != NCCHMagicNumber)
#endif
continue;
// If we have no partitions table
#if NET48
if (cart.Header.PartitionsTable == null)
#else
if (cart.Header!.PartitionsTable == null)
#endif
continue;
// Get the extended header offset
#if NET48
long offset = (cart.Header.PartitionsTable[i].Offset * mediaUnitSize) + 0x200;
#else
long offset = (cart.Header.PartitionsTable[i]!.Offset * mediaUnitSize) + 0x200;
#endif
if (offset < 0 || offset >= data.Length)
continue;
@@ -125,15 +137,27 @@ namespace SabreTools.Serialization.Streams
for (int i = 0; i < 8; i++)
{
// If we have an encrypted or invalid partition
#if NET48
if (cart.Partitions[i].MagicID != NCCHMagicNumber)
#else
if (cart.Partitions[i]!.MagicID != NCCHMagicNumber)
#endif
continue;
// If we have no partitions table
#if NET48
if (cart.Header.PartitionsTable == null)
#else
if (cart.Header!.PartitionsTable == null)
#endif
continue;
// Get the ExeFS header offset
#if NET48
long offset = (cart.Header.PartitionsTable[i].Offset + cart.Partitions[i].ExeFSOffsetInMediaUnits) * mediaUnitSize;
#else
long offset = (cart.Header.PartitionsTable[i]!.Offset + cart.Partitions[i]!.ExeFSOffsetInMediaUnits) * mediaUnitSize;
#endif
if (offset < 0 || offset >= data.Length)
continue;
@@ -155,15 +179,27 @@ namespace SabreTools.Serialization.Streams
for (int i = 0; i < 8; i++)
{
// If we have an encrypted or invalid partition
#if NET48
if (cart.Partitions[i].MagicID != NCCHMagicNumber)
#else
if (cart.Partitions[i]!.MagicID != NCCHMagicNumber)
#endif
continue;
// If we have no partitions table
#if NET48
if (cart.Header.PartitionsTable == null)
#else
if (cart.Header!.PartitionsTable == null)
#endif
continue;
// Get the RomFS header offset
#if NET48
long offset = (cart.Header.PartitionsTable[i].Offset + cart.Partitions[i].RomFSOffsetInMediaUnits) * mediaUnitSize;
#else
long offset = (cart.Header.PartitionsTable[i]!.Offset + cart.Partitions[i]!.RomFSOffsetInMediaUnits) * mediaUnitSize;
#endif
if (offset < 0 || offset >= data.Length)
continue;

View File

@@ -116,7 +116,8 @@ namespace SabreTools.Serialization.Streams
// Loop and assign to entries
foreach (var directoryEntry in file.DirectoryEntries)
{
directoryEntry.Name = file.DirectoryNames[directoryEntry.NameOffset];
if (directoryEntry != null)
directoryEntry.Name = file.DirectoryNames[directoryEntry.NameOffset];
}
}

View File

@@ -333,12 +333,25 @@ namespace SabreTools.Serialization.Streams
// Get the full list of unique string offsets
var stringOffsets = resourceTable.ResourceTypes
.Where(rt => rt != null)
#if NET48
.Where(rt => rt.IsIntegerType() == false)
.Select(rt => rt.TypeID)
#else
.Where(rt => rt!.IsIntegerType() == false)
.Select(rt => rt!.TypeID)
#endif
.Union(resourceTable.ResourceTypes
.Where(rt => rt != null)
#if NET48
.SelectMany(rt => rt.Resources)
.Where(r => r.IsIntegerType() == false)
.Select(r => r.ResourceID))
#else
.SelectMany(rt => rt!.Resources ?? System.Array.Empty<ResourceTypeResourceEntry>())
.Where(r => r!.IsIntegerType() == false)
.Select(r => r!.ResourceID))
#endif
.Distinct()
.OrderBy(o => o)
.ToList();
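
SelectMany needs a non-null inner sequence for every element, which is why the nullable branch substitutes an empty array when Resources is null. A compact sketch of the same flattening with hypothetical ResourceType and ResourceEntry shapes:

using System;
using System.Linq;

internal class ResourceType
{
    public ushort TypeID;
    public ResourceEntry[] Resources;
}

internal class ResourceEntry
{
    public ushort ResourceID;
}

internal static class ResourceIds
{
    // Collect every distinct ID referenced by the types and their entries
    internal static ushort[] Collect(ResourceType[] types)
    {
        return types
            .Where(rt => rt != null)
            .Select(rt => rt.TypeID)
            .Union(types
                .Where(rt => rt != null)
                .SelectMany(rt => rt.Resources ?? Array.Empty<ResourceEntry>())
                .Select(r => r.ResourceID))
            .Distinct()
            .OrderBy(o => o)
            .ToArray();
    }
}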

View File

@@ -778,7 +778,11 @@ namespace SabreTools.Serialization.Streams
/// <param name="endOffset">First address not part of the base relocation table</param>
/// <param name="sections">Section table to use for virtual address translation</param>
/// <returns>Filled base relocation table on success, null on error</returns>
#if NET48
private static BaseRelocationBlock[] ParseBaseRelocationTable(Stream data, int endOffset, SectionHeader[] sections)
#else
private static BaseRelocationBlock[] ParseBaseRelocationTable(Stream data, int endOffset, SectionHeader?[] sections)
#endif
{
// TODO: Use marshalling here instead of building
var baseRelocationTable = new List<BaseRelocationBlock>();
@@ -819,7 +823,11 @@ namespace SabreTools.Serialization.Streams
/// <param name="endOffset">First address not part of the debug table</param>
/// <param name="sections">Section table to use for virtual address translation</param>
/// <returns>Filled debug table on success, null on error</returns>
#if NET48
private static DebugTable ParseDebugTable(Stream data, int endOffset, SectionHeader[] sections)
#else
private static DebugTable ParseDebugTable(Stream data, int endOffset, SectionHeader?[] sections)
#endif
{
// TODO: Use marshalling here instead of building
var debugTable = new DebugTable();
@@ -856,7 +864,11 @@ namespace SabreTools.Serialization.Streams
/// <param name="data">Stream to parse</param>
/// <param name="sections">Section table to use for virtual address translation</param>
/// <returns>Filled export table on success, null on error</returns>
#if NET48
private static ExportTable ParseExportTable(Stream data, SectionHeader[] sections)
#else
private static ExportTable ParseExportTable(Stream data, SectionHeader?[] sections)
#endif
{
// TODO: Use marshalling here instead of building
var exportTable = new ExportTable();
@@ -981,7 +993,11 @@ namespace SabreTools.Serialization.Streams
/// <param name="magic">Optional header magic number indicating PE32 or PE32+</param>
/// <param name="sections">Section table to use for virtual address translation</param>
/// <returns>Filled import table on success, null on error</returns>
#if NET48
private static ImportTable ParseImportTable(Stream data, OptionalHeaderMagicNumber magic, SectionHeader[] sections)
#else
private static ImportTable ParseImportTable(Stream data, OptionalHeaderMagicNumber magic, SectionHeader?[] sections)
#endif
{
// TODO: Use marshalling here instead of building
var importTable = new ImportTable();
@@ -1017,6 +1033,9 @@ namespace SabreTools.Serialization.Streams
for (int i = 0; i < importTable.ImportDirectoryTable.Length; i++)
{
var importDirectoryTableEntry = importTable.ImportDirectoryTable[i];
if (importDirectoryTableEntry == null)
continue;
if (importDirectoryTableEntry.NameRVA.ConvertVirtualAddress(sections) == 0)
continue;
@@ -1041,6 +1060,9 @@ namespace SabreTools.Serialization.Streams
for (int i = 0; i < importTable.ImportDirectoryTable.Length; i++)
{
var importDirectoryTableEntry = importTable.ImportDirectoryTable[i];
if (importDirectoryTableEntry == null)
continue;
if (importDirectoryTableEntry.ImportLookupTableRVA.ConvertVirtualAddress(sections) == 0)
continue;
@@ -1096,6 +1118,9 @@ namespace SabreTools.Serialization.Streams
for (int i = 0; i < importTable.ImportDirectoryTable.Length; i++)
{
var importDirectoryTableEntry = importTable.ImportDirectoryTable[i];
if (importDirectoryTableEntry == null)
continue;
if (importDirectoryTableEntry.ImportAddressTableRVA.ConvertVirtualAddress(sections) == 0)
continue;
@@ -1154,8 +1179,18 @@ namespace SabreTools.Serialization.Streams
if (importTable.ImportLookupTables != null && importLookupTables.Count > 0)
{
var addresses = importTable.ImportLookupTables
.Where(kvp => kvp.Value != null)
#if NET48
.SelectMany(kvp => kvp.Value)
#else
.SelectMany(kvp => kvp.Value!)
#endif
.Where(ilte => ilte != null)
#if NET48
.Select(ilte => (int)ilte.HintNameTableRVA.ConvertVirtualAddress(sections));
#else
.Select(ilte => (int)ilte!.HintNameTableRVA.ConvertVirtualAddress(sections));
#endif
hintNameTableEntryAddresses.AddRange(addresses);
}
@@ -1163,8 +1198,18 @@ namespace SabreTools.Serialization.Streams
if (importTable.ImportAddressTables != null && importTable.ImportAddressTables.Count > 0)
{
var addresses = importTable.ImportAddressTables
.Where(kvp => kvp.Value != null)
#if NET48
.SelectMany(kvp => kvp.Value)
#else
.SelectMany(kvp => kvp.Value!)
#endif
.Where(iate => iate != null)
#if NET48
.Select(iate => (int)iate.HintNameTableRVA.ConvertVirtualAddress(sections));
#else
.Select(iate => (int)iate!.HintNameTableRVA.ConvertVirtualAddress(sections));
#endif
hintNameTableEntryAddresses.AddRange(addresses);
}
@@ -1208,7 +1253,7 @@ namespace SabreTools.Serialization.Streams
#if NET48
private static ResourceDirectoryTable ParseResourceDirectoryTable(Stream data, long initialOffset, SectionHeader[] sections, bool topLevel = false)
#else
private static ResourceDirectoryTable? ParseResourceDirectoryTable(Stream data, long initialOffset, SectionHeader[] sections, bool topLevel = false)
private static ResourceDirectoryTable? ParseResourceDirectoryTable(Stream data, long initialOffset, SectionHeader?[] sections, bool topLevel = false)
#endif
{
// TODO: Use marshalling here instead of building
@@ -1270,6 +1315,9 @@ namespace SabreTools.Serialization.Streams
// Loop through and process the entries
foreach (var entry in resourceDirectoryTable.Entries)
{
if (entry == null)
continue;
if (entry.DataEntryOffset > 0)
{
uint offset = entry.DataEntryOffset + (uint)initialOffset;
@@ -1305,11 +1353,15 @@ namespace SabreTools.Serialization.Streams
return resourceDirectoryTable;
// If we're not aligned to a section
if (!sections.Any(s => s.PointerToRawData == initialOffset))
if (!sections.Any(s => s != null && s.PointerToRawData == initialOffset))
return resourceDirectoryTable;
// Get the section size
int size = (int)sections.First(s => s.PointerToRawData == initialOffset).SizeOfRawData;
#if NET48
int size = (int)sections.First(s => s != null && s.PointerToRawData == initialOffset).SizeOfRawData;
#else
int size = (int)sections.First(s => s != null && s.PointerToRawData == initialOffset)!.SizeOfRawData;
#endif
// Align to the 512-byte boundary, we find the start of an MS-DOS header, or the end of the file
while (data.Position - initialOffset < size && data.Position % 0x200 != 0 && data.Position < data.Length - 1)
@@ -1328,7 +1380,9 @@ namespace SabreTools.Serialization.Streams
// If we have not used up the full size, parse the remaining chunk as a single resource
if (data.Position - initialOffset < size)
{
Array.Resize(ref resourceDirectoryTable.Entries, totalEntryCount + 1);
var localEntries = resourceDirectoryTable.Entries;
Array.Resize(ref localEntries, totalEntryCount + 1);
resourceDirectoryTable.Entries = localEntries;
int length = (int)(size - (data.Position - initialOffset));
resourceDirectoryTable.Entries[totalEntryCount] = new ResourceDirectoryEntry
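
The Array.Resize change at the end of this hunk works around a C# rule: Array.Resize takes the array by ref, and a property (which Entries appears to be on the model) cannot be passed as a ref argument, so the code copies to a local, resizes, and assigns back. A minimal sketch of that workaround with a hypothetical container:

using System;

internal class Table
{
    public int[] Entries { get; set; }
}

internal static class TableOps
{
    internal static void Grow(Table table, int newLength)
    {
        // Array.Resize(ref table.Entries, newLength) would not compile (CS0206):
        // a property cannot be passed by ref. Copy out, resize, write back.
        var local = table.Entries;
        Array.Resize(ref local, newLength);
        table.Entries = local;
    }
}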

View File

@@ -554,18 +554,35 @@ namespace SabreTools.Serialization.Streams
// Loop through all folders to assign names
for (int i = 0; i < folderCount; i++)
{
uint nameOffset;
switch (majorVersion)
{
#if NET48
case 4: (directory as Directory4).Folders[i].Name = strings[(directory as Directory4).Folders[i].NameOffset]; break;
case 5: (directory as Directory5).Folders[i].Name = strings[(directory as Directory5).Folders[i].NameOffset]; break;
case 6: (directory as Directory6).Folders[i].Name = strings[(directory as Directory6).Folders[i].NameOffset]; break;
case 7: (directory as Directory7).Folders[i].Name = strings[(directory as Directory7).Folders[i].NameOffset]; break;
case 4: nameOffset = (directory as Directory4).Folders[i].NameOffset; break;
case 5: nameOffset = (directory as Directory5).Folders[i].NameOffset; break;
case 6: nameOffset = (directory as Directory6).Folders[i].NameOffset; break;
case 7: nameOffset = (directory as Directory7).Folders[i].NameOffset; break;
#else
case 4: (directory as Directory4)!.Folders[i]!.Name = strings[(directory as Directory4)!.Folders[i]!.NameOffset] ?? string.Empty; break;
case 5: (directory as Directory5)!.Folders[i]!.Name = strings[(directory as Directory5)!.Folders[i]!.NameOffset] ?? string.Empty; break;
case 6: (directory as Directory6)!.Folders[i]!.Name = strings[(directory as Directory6)!.Folders[i]!.NameOffset] ?? string.Empty; break;
case 7: (directory as Directory7)!.Folders[i]!.Name = strings[(directory as Directory7)!.Folders[i]!.NameOffset] ?? string.Empty; break;
case 4: nameOffset = (directory as Directory4)!.Folders![i]!.NameOffset; break;
case 5: nameOffset = (directory as Directory5)!.Folders![i]!.NameOffset; break;
case 6: nameOffset = (directory as Directory6)!.Folders![i]!.NameOffset; break;
case 7: nameOffset = (directory as Directory7)!.Folders![i]!.NameOffset; break;
#endif
default: return null;
}
switch (majorVersion)
{
#if NET48
case 4: (directory as Directory4).Folders[i].Name = strings[nameOffset]; break;
case 5: (directory as Directory5).Folders[i].Name = strings[nameOffset]; break;
case 6: (directory as Directory6).Folders[i].Name = strings[nameOffset]; break;
case 7: (directory as Directory7).Folders[i].Name = strings[nameOffset]; break;
#else
case 4: (directory as Directory4)!.Folders![i]!.Name = strings[nameOffset]; break;
case 5: (directory as Directory5)!.Folders![i]!.Name = strings[nameOffset]; break;
case 6: (directory as Directory6)!.Folders![i]!.Name = strings[nameOffset]; break;
case 7: (directory as Directory7)!.Folders![i]!.Name = strings[nameOffset]; break;
#endif
default: return null;
}
@@ -574,18 +591,35 @@ namespace SabreTools.Serialization.Streams
// Loop through all files to assign names
for (int i = 0; i < fileCount; i++)
{
uint nameOffset;
switch (majorVersion)
{
#if NET48
case 4: (directory as Directory4).Files[i].Name = strings[(directory as Directory4).Files[i].NameOffset]; break;
case 5: (directory as Directory5).Files[i].Name = strings[(directory as Directory5).Files[i].NameOffset]; break;
case 6: (directory as Directory6).Files[i].Name = strings[(directory as Directory6).Files[i].NameOffset]; break;
case 7: (directory as Directory7).Files[i].Name = strings[(directory as Directory7).Files[i].NameOffset]; break;
case 4: nameOffset = (directory as Directory4).Files[i].NameOffset; break;
case 5: nameOffset = (directory as Directory5).Files[i].NameOffset; break;
case 6: nameOffset = (directory as Directory6).Files[i].NameOffset; break;
case 7: nameOffset = (directory as Directory7).Files[i].NameOffset; break;
#else
case 4: (directory as Directory4)!.Files[i]!.Name = strings[(directory as Directory4)!.Files[i]!.NameOffset] ?? string.Empty; break;
case 5: (directory as Directory5)!.Files[i]!.Name = strings[(directory as Directory5)!.Files[i]!.NameOffset] ?? string.Empty; break;
case 6: (directory as Directory6)!.Files[i]!.Name = strings[(directory as Directory6)!.Files[i]!.NameOffset] ?? string.Empty; break;
case 7: (directory as Directory7)!.Files[i]!.Name = strings[(directory as Directory7)!.Files[i]!.NameOffset] ?? string.Empty; break;
case 4: nameOffset = (directory as Directory4)!.Files![i]!.NameOffset; break;
case 5: nameOffset = (directory as Directory5)!.Files![i]!.NameOffset; break;
case 6: nameOffset = (directory as Directory6)!.Files![i]!.NameOffset; break;
case 7: nameOffset = (directory as Directory7)!.Files![i]!.NameOffset; break;
#endif
default: return null;
}
switch (majorVersion)
{
#if NET48
case 4: (directory as Directory4).Files[i].Name = strings[nameOffset]; break;
case 5: (directory as Directory5).Files[i].Name = strings[nameOffset]; break;
case 6: (directory as Directory6).Files[i].Name = strings[nameOffset]; break;
case 7: (directory as Directory7).Files[i].Name = strings[nameOffset]; break;
#else
case 4: (directory as Directory4)!.Files![i]!.Name = strings[nameOffset]; break;
case 5: (directory as Directory5)!.Files![i]!.Name = strings[nameOffset]; break;
case 6: (directory as Directory6)!.Files![i]!.Name = strings[nameOffset]; break;
case 7: (directory as Directory7)!.Files![i]!.Name = strings[nameOffset]; break;
#endif
default: return null;
}

View File

@@ -64,14 +64,20 @@ namespace SabreTools.Serialization.Streams
#region Lump Infos
// Create the lump info array
#if NET48
file.LumpInfos = new LumpInfo[header.LumpCount];
#else
file.LumpInfos = new LumpInfo?[header.LumpCount];
#endif
for (int i = 0; i < header.LumpCount; i++)
{
var lump = file.Lumps[i];
if (lump == null)
{
file.LumpInfos[i] = null;
continue;
}
if (lump.Compression != 0)
{
file.LumpInfos[i] = null;