Mirror of https://github.com/SabreTools/SabreTools.Serialization.git (synced 2026-02-04 05:36:12 +00:00)

Compare commits

21 Commits
| Author | SHA1 | Date |
|---|---|---|
|  | 9214a22cc9 |  |
|  | 5ba1156245 |  |
|  | cb91cdff1d |  |
|  | 4df6a4e79d |  |
|  | 5b82a48267 |  |
|  | 8f70c50a48 |  |
|  | 5fe4d81fa4 |  |
|  | 7d3addbf0a |  |
|  | b7d5873eb7 |  |
|  | 4e40cc19d5 |  |
|  | 1bc9316bc1 |  |
|  | c995ec1dca |  |
|  | 4d2fbbae04 |  |
|  | 2776928946 |  |
|  | 8cc87c6540 |  |
|  | 3c212022aa |  |
|  | 511c4d09e5 |  |
|  | d7eba27dc5 |  |
|  | 09370618ca |  |
|  | 2197167088 |  |
|  | b527635fe7 |  |
@@ -9,7 +9,7 @@
     <Nullable>enable</Nullable>
     <SuppressTfmSupportBuildWarnings>true</SuppressTfmSupportBuildWarnings>
     <TreatWarningsAsErrors>true</TreatWarningsAsErrors>
-    <Version>1.7.4</Version>
+    <Version>1.7.6</Version>
   </PropertyGroup>

   <!-- Support All Frameworks -->
@@ -30,4 +30,8 @@
     <ProjectReference Include="..\SabreTools.Serialization\SabreTools.Serialization.csproj" />
   </ItemGroup>

+  <ItemGroup>
+    <PackageReference Include="SabreTools.IO" Version="1.5.1" />
+  </ItemGroup>
+
 </Project>
@@ -48,11 +48,7 @@ namespace InfoPrint
|
||||
}
|
||||
else if (Directory.Exists(path))
|
||||
{
|
||||
#if NET20 || NET35
|
||||
foreach (string file in Directory.GetFiles(path, "*", SearchOption.AllDirectories))
|
||||
#else
|
||||
foreach (string file in Directory.EnumerateFiles(path, "*", SearchOption.AllDirectories))
|
||||
#endif
|
||||
foreach (string file in IOExtensions.SafeEnumerateFiles(path, "*", SearchOption.AllDirectories))
|
||||
{
|
||||
PrintFileInfo(file, json, debug);
|
||||
}
|
||||
|
||||
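Editorial note: the change above collapses the framework-conditional `Directory` calls into a single `SafeEnumerateFiles` helper from SabreTools.IO. A minimal sketch of what such a helper can look like — the recursion and exception handling here are assumptions for illustration, not the library's actual implementation:

```csharp
using System;
using System.Collections.Generic;
using System.IO;

internal static class SafeEnumeration
{
    // Hypothetical stand-in for IOExtensions.SafeEnumerateFiles: walk the tree
    // recursively, skipping directories that throw (permissions, broken links).
    public static IEnumerable<string> SafeEnumerateFiles(string root, string pattern, SearchOption option)
    {
        string[] files;
        try { files = Directory.GetFiles(root, pattern); }
        catch (Exception) { files = Array.Empty<string>(); } // unreadable directory

        foreach (string file in files)
            yield return file;

        if (option != SearchOption.AllDirectories)
            yield break;

        string[] subdirs;
        try { subdirs = Directory.GetDirectories(root); }
        catch (Exception) { subdirs = Array.Empty<string>(); }

        foreach (string dir in subdirs)
            foreach (string file in SafeEnumerateFiles(dir, pattern, option))
                yield return file;
    }
}
```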
@@ -26,8 +26,8 @@
       <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
       <PrivateAssets>all</PrivateAssets>
     </PackageReference>
-    <PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.11.1" />
-    <PackageReference Include="SabreTools.Models" Version="1.5.1" />
+    <PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.12.0" />
+    <PackageReference Include="SabreTools.Hashing" Version="1.4.0" />
     <PackageReference Include="xunit" Version="2.9.2" />
     <PackageReference Include="xunit.runner.visualstudio" Version="2.8.2">
       <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
@@ -1,4 +1,3 @@
-using System;
 using System.Collections.Generic;
 using System.IO;
 using System.Text;
@@ -20,9 +19,6 @@ namespace SabreTools.Serialization.Deserializers
             if (data.Position < 0 || data.Position >= data.Length)
                 return null;

-            // Cache the current offset
-            int initialOffset = (int)data.Position;
-
             // Create a new media key block to fill
             var mediaKeyBlock = new MediaKeyBlock();

@@ -73,38 +69,27 @@ namespace SabreTools.Serialization.Deserializers
         /// <returns>Filled record on success, null on error</returns>
         private static Record? ParseRecord(Stream data)
         {
-            // TODO: Use marshalling here instead of building
-
-            // The first 4 bytes make up the type and length
-            byte[]? typeAndLength = data.ReadBytes(4);
-            if (typeAndLength == null)
-                return null;
-
-            RecordType type = (RecordType)typeAndLength[0];
-
-            // Remove the first byte and parse as big-endian
-            typeAndLength[0] = 0x00;
-            Array.Reverse(typeAndLength);
-            uint length = BitConverter.ToUInt32(typeAndLength, 0);
+            RecordType type = (RecordType)data.ReadByteValue();
+            uint length = data.ReadUInt24();

             // Create a record based on the type
-            switch (type)
+            return type switch
             {
-                // Recognized record types
-                case RecordType.EndOfMediaKeyBlock: return ParseEndOfMediaKeyBlockRecord(data, type, length);
-                case RecordType.ExplicitSubsetDifference: return ParseExplicitSubsetDifferenceRecord(data, type, length);
-                case RecordType.MediaKeyData: return ParseMediaKeyDataRecord(data, type, length);
-                case RecordType.SubsetDifferenceIndex: return ParseSubsetDifferenceIndexRecord(data, type, length);
-                case RecordType.TypeAndVersion: return ParseTypeAndVersionRecord(data, type, length);
-                case RecordType.DriveRevocationList: return ParseDriveRevocationListRecord(data, type, length);
-                case RecordType.HostRevocationList: return ParseHostRevocationListRecord(data, type, length);
-                case RecordType.VerifyMediaKey: return ParseVerifyMediaKeyRecord(data, type, length);
-                case RecordType.Copyright: return ParseCopyrightRecord(data, type, length);
-
-                // Unrecognized record type
-                default:
-                    return null;
-            }
+                // Known record types
+                RecordType.EndOfMediaKeyBlock => ParseEndOfMediaKeyBlockRecord(data, type, length),
+                RecordType.ExplicitSubsetDifference => ParseExplicitSubsetDifferenceRecord(data, type, length),
+                RecordType.MediaKeyData => ParseMediaKeyDataRecord(data, type, length),
+                RecordType.SubsetDifferenceIndex => ParseSubsetDifferenceIndexRecord(data, type, length),
+                RecordType.TypeAndVersion => ParseTypeAndVersionRecord(data, type, length),
+                RecordType.DriveRevocationList => ParseDriveRevocationListRecord(data, type, length),
+                RecordType.HostRevocationList => ParseHostRevocationListRecord(data, type, length),
+                RecordType.VerifyMediaKey => ParseVerifyMediaKeyRecord(data, type, length),
+                RecordType.Copyright => ParseCopyrightRecord(data, type, length),
+
+                // Unknown record type
+                _ => null,
+            };
         }

         /// <summary>
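Editorial note: the refactor above swaps the manual 4-byte assembly (zero the type byte, reverse, `BitConverter.ToUInt32`) for `ReadByteValue` plus `ReadUInt24`. Both decode the same AACS record header layout: one type byte followed by a 24-bit big-endian length. A minimal sketch of the equivalence, assuming the library's `ReadUInt24` reads those three bytes in the same order the removed code did:

```csharp
using System.IO;

internal static class RecordHeader
{
    // Equivalent of the removed code path: read 4 bytes, take byte 0 as the
    // record type, and combine the remaining 3 bytes as a big-endian length.
    public static (byte Type, uint Length) Read(Stream data)
    {
        byte[] typeAndLength = new byte[4];
        if (data.Read(typeAndLength, 0, 4) != 4)
            throw new EndOfStreamException();

        byte type = typeAndLength[0];

        // 24-bit big-endian length, e.g. { 0x10, 0x00, 0x00, 0x2C } => type 0x10, length 0x2C
        uint length = (uint)((typeAndLength[1] << 16) | (typeAndLength[2] << 8) | typeAndLength[3]);
        return (type, length);
    }
}
```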
@@ -118,7 +103,6 @@ namespace SabreTools.Serialization.Deserializers
             if (type != RecordType.EndOfMediaKeyBlock)
                 return null;

-            // TODO: Use marshalling here instead of building
             var record = new EndOfMediaKeyBlockRecord();

             record.RecordType = type;
@@ -140,7 +124,6 @@ namespace SabreTools.Serialization.Deserializers
             if (type != RecordType.ExplicitSubsetDifference)
                 return null;

-            // TODO: Use marshalling here instead of building
             var record = new ExplicitSubsetDifferenceRecord();

             record.RecordType = type;
@@ -184,7 +167,6 @@ namespace SabreTools.Serialization.Deserializers
             if (type != RecordType.MediaKeyData)
                 return null;

-            // TODO: Use marshalling here instead of building
             var record = new MediaKeyDataRecord();

             record.RecordType = type;
@@ -221,7 +203,6 @@ namespace SabreTools.Serialization.Deserializers
             if (type != RecordType.SubsetDifferenceIndex)
                 return null;

-            // TODO: Use marshalling here instead of building
             var record = new SubsetDifferenceIndexRecord();

             record.RecordType = type;
@@ -259,7 +240,6 @@ namespace SabreTools.Serialization.Deserializers
             if (type != RecordType.TypeAndVersion)
                 return null;

-            // TODO: Use marshalling here instead of building
             var record = new TypeAndVersionRecord();

             record.RecordType = type;
@@ -281,7 +261,6 @@ namespace SabreTools.Serialization.Deserializers
             if (type != RecordType.DriveRevocationList)
                 return null;

-            // TODO: Use marshalling here instead of building
             var record = new DriveRevocationListRecord();

             record.RecordType = type;
@@ -342,7 +321,6 @@ namespace SabreTools.Serialization.Deserializers
             if (type != RecordType.HostRevocationList)
                 return null;

-            // TODO: Use marshalling here instead of building
             var record = new HostRevocationListRecord();

             record.RecordType = type;
@@ -403,7 +381,6 @@ namespace SabreTools.Serialization.Deserializers
             if (type != RecordType.VerifyMediaKey)
                 return null;

-            // TODO: Use marshalling here instead of building
             var record = new VerifyMediaKeyRecord();

             record.RecordType = type;
@@ -424,7 +401,6 @@ namespace SabreTools.Serialization.Deserializers
             if (type != RecordType.Copyright)
                 return null;

-            // TODO: Use marshalling here instead of building
             var record = new CopyrightRecord();

             record.RecordType = type;
@@ -56,8 +56,8 @@ namespace SabreTools.Serialization.Deserializers

             svm.Unknown2 = data.ReadBytes(4);
             svm.Length = data.ReadUInt32();
-            // if (svm.Length > 0)
-            //     svm.Data = data.ReadBytes((int)svm.Length);
+            if (svm.Length > 0)
+                svm.Data = data.ReadBytes((int)svm.Length);

             return svm;
         }
@@ -40,24 +40,21 @@ namespace SabreTools.Serialization.Deserializers
             #region Files

-            // If we have any files
-            if (header.Files > 0)
-            {
-                var files = new FileEntry[header.Files];
-
-                // Read all entries in turn
-                for (int i = 0; i < header.Files; i++)
-                {
-                    var file = ParseFileEntry(data);
-                    if (file == null)
-                        return null;
-
-                    files[i] = file;
-                }
-
-                // Set the files
-                archive.Files = files;
-            }
+            var files = new FileEntry[header.Files];
+
+            // Read all entries in turn
+            for (int i = 0; i < header.Files; i++)
+            {
+                var file = ParseFileEntry(data);
+                if (file == null)
+                    return null;
+
+                files[i] = file;
+            }
+
+            // Set the files
+            archive.Files = files;

             #endregion

             return archive;
@@ -72,9 +69,7 @@ namespace SabreTools.Serialization.Deserializers
         {
             var header = data.ReadType<Header>();

-            if (header == null)
-                return null;
-            if (header.Magic != SignatureString)
+            if (header?.Magic != SignatureString)
                 return null;

             return header;
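Editorial note: the `header?.Magic != SignatureString` pattern above (repeated across many files in this compare) folds the null check and the signature check into one comparison: when `ReadType` returns null, `header?.Magic` evaluates to null, which never equals the non-null signature. A small self-contained demonstration of that semantics:

```csharp
using System;

class NullConditionalDemo
{
    class Header { public string? Magic; }

    static Header? Validate(Header? header, string signature)
    {
        // header?.Magic is null when header is null, so a single comparison
        // covers both "failed to read" and "wrong signature".
        if (header?.Magic != signature)
            return null;

        return header;
    }

    static void Main()
    {
        Console.WriteLine(Validate(null, "PAK") == null);                          // True
        Console.WriteLine(Validate(new Header { Magic = "ZIP" }, "PAK") == null);  // True
        Console.WriteLine(Validate(new Header { Magic = "PAK" }, "PAK") == null);  // False
    }
}
```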
@@ -1,3 +1,5 @@
+using System;
+using System.Collections.Generic;
 using System.IO;
 using System.Text;
 using SabreTools.IO.Extensions;
@@ -6,10 +8,10 @@ using static SabreTools.Models.BSP.Constants;

 namespace SabreTools.Serialization.Deserializers
 {
-    public class BSP : BaseBinaryDeserializer<Models.BSP.File>
+    public class BSP : BaseBinaryDeserializer<BspFile>
     {
         /// <inheritdoc/>
-        public override Models.BSP.File? Deserialize(Stream? data)
+        public override BspFile? Deserialize(Stream? data)
         {
             // If the data is invalid
             if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
@@ -19,17 +21,14 @@ namespace SabreTools.Serialization.Deserializers
             if (data.Position < 0 || data.Position >= data.Length)
                 return null;

-            // Cache the current offset
-            int initialOffset = (int)data.Position;
-
             // Create a new Half-Life Level to fill
-            var file = new Models.BSP.File();
+            var file = new BspFile();

             #region Header

             // Try to parse the header
             var header = ParseHeader(data);
-            if (header == null)
+            if (header?.Lumps == null)
                 return null;

             // Set the level header
@@ -39,59 +38,70 @@ namespace SabreTools.Serialization.Deserializers

             #region Lumps

-            // Create the lump array
-            file.Lumps = new Lump[HL_BSP_LUMP_COUNT];
-
             // Try to parse the lumps
-            for (int i = 0; i < HL_BSP_LUMP_COUNT; i++)
+            for (int l = 0; l < BSP_HEADER_LUMPS; l++)
             {
-                var lump = ParseLump(data);
-                if (lump == null)
-                    return null;
-
-                file.Lumps[i] = lump;
-            }
-
-            #endregion
-
-            #region Texture header
-
-            // Try to get the texture header lump
-            var textureDataLump = file.Lumps[HL_BSP_LUMP_TEXTUREDATA];
-            if (textureDataLump == null || textureDataLump.Offset == 0 || textureDataLump.Length == 0)
-                return null;
-
-            // Seek to the texture header
-            data.Seek(textureDataLump.Offset, SeekOrigin.Begin);
-
-            // Try to parse the texture header
-            var textureHeader = ParseTextureHeader(data);
-            if (textureHeader == null)
-                return null;
-
-            // Set the texture header
-            file.TextureHeader = textureHeader;
-
-            #endregion
-
-            #region Textures
-
-            // Create the texture array
-            file.Textures = new Texture[textureHeader.TextureCount];
-
-            // Try to parse the textures
-            for (int i = 0; i < textureHeader.TextureCount; i++)
-            {
-                // Get the texture offset
-                int offset = (int)(textureHeader.Offsets![i] + file.Lumps[HL_BSP_LUMP_TEXTUREDATA]!.Offset);
-                if (offset < 0 || offset >= data.Length)
+                // Get the next lump entry
+                var lumpEntry = header.Lumps[l];
+                if (lumpEntry == null)
                     continue;
+                if (lumpEntry.Offset == 0 || lumpEntry.Length == 0)
+                    continue;

-                // Seek to the texture
-                data.Seek(offset, SeekOrigin.Begin);
+                // Seek to the lump offset
+                data.Seek(lumpEntry.Offset, SeekOrigin.Begin);

-                var texture = ParseTexture(data);
-                file.Textures[i] = texture;
+                // Read according to the lump type
+                switch ((LumpType)l)
+                {
+                    case LumpType.LUMP_ENTITIES:
+                        file.Entities = ParseEntitiesLump(data, lumpEntry.Offset, lumpEntry.Length);
+                        break;
+                    case LumpType.LUMP_PLANES:
+                        file.PlanesLump = ParsePlanesLump(data, lumpEntry.Offset, lumpEntry.Length);
+                        break;
+                    case LumpType.LUMP_TEXTURES:
+                        file.TextureLump = ParseTextureLump(data, lumpEntry.Offset, lumpEntry.Length);
+                        break;
+                    case LumpType.LUMP_VERTICES:
+                        file.VerticesLump = ParseVerticesLump(data, lumpEntry.Offset, lumpEntry.Length);
+                        break;
+                    case LumpType.LUMP_VISIBILITY:
+                        file.VisibilityLump = ParseVisibilityLump(data, lumpEntry.Offset, lumpEntry.Length);
+                        break;
+                    case LumpType.LUMP_NODES:
+                        file.NodesLump = ParseNodesLump(data, lumpEntry.Offset, lumpEntry.Length);
+                        break;
+                    case LumpType.LUMP_TEXINFO:
+                        file.TexinfoLump = ParseTexinfoLump(data, lumpEntry.Offset, lumpEntry.Length);
+                        break;
+                    case LumpType.LUMP_FACES:
+                        file.FacesLump = ParseFacesLump(data, lumpEntry.Offset, lumpEntry.Length);
+                        break;
+                    case LumpType.LUMP_LIGHTING:
+                        file.LightmapLump = ParseLightmapLump(data, lumpEntry.Offset, lumpEntry.Length);
+                        break;
+                    case LumpType.LUMP_CLIPNODES:
+                        file.ClipnodesLump = ParseClipnodesLump(data, lumpEntry.Offset, lumpEntry.Length);
+                        break;
+                    case LumpType.LUMP_LEAVES:
+                        file.LeavesLump = ParseLeavesLump(data, lumpEntry.Offset, lumpEntry.Length);
+                        break;
+                    case LumpType.LUMP_MARKSURFACES:
+                        file.MarksurfacesLump = ParseMarksurfacesLump(data, lumpEntry.Offset, lumpEntry.Length);
+                        break;
+                    case LumpType.LUMP_EDGES:
+                        file.EdgesLump = ParseEdgesLump(data, lumpEntry.Offset, lumpEntry.Length);
+                        break;
+                    case LumpType.LUMP_SURFEDGES:
+                        file.SurfedgesLump = ParseSurfedgesLump(data, lumpEntry.Offset, lumpEntry.Length);
+                        break;
+                    case LumpType.LUMP_MODELS:
+                        file.ModelsLump = ParseModelsLump(data, lumpEntry.Offset, lumpEntry.Length);
+                        break;
+                    default:
+                        // Unsupported LumpType value, ignore
+                        break;
+                }
             }

             #endregion
@@ -105,26 +115,98 @@ namespace SabreTools.Serialization.Deserializers
         /// <param name="data">Stream to parse</param>
         /// <returns>Filled Half-Life Level header on success, null on error</returns>
         /// <remarks>Only recognized versions are 29 and 30</remarks>
-        private static Header? ParseHeader(Stream data)
+        private static BspHeader? ParseHeader(Stream data)
         {
-            var header = data.ReadType<Header>();
+            var header = data.ReadType<BspHeader>();

             if (header == null)
                 return null;
-            if (header.Version != 29 && header.Version != 30)
+            if (header.Version < 29 || header.Version > 30)
                 return null;

             return header;
         }

         /// <summary>
-        /// Parse a Stream into a lump
+        /// Parse a Stream into LUMP_ENTITIES
         /// </summary>
         /// <param name="data">Stream to parse</param>
-        /// <returns>Filled lump on success, null on error</returns>
-        private static Lump? ParseLump(Stream data)
+        /// <returns>Filled LUMP_ENTITIES on success, null on error</returns>
+        private static EntitiesLump? ParseEntitiesLump(Stream data, int offset, int length)
         {
-            return data.ReadType<Lump>();
+            var entities = new List<Entity>();
+
+            // Read the entire lump as text
+            byte[] lumpData = data.ReadBytes(length);
+            string lumpText = Encoding.ASCII.GetString(lumpData);
+
+            // Break the text by ending curly braces
+            string[] lumpSections = lumpText.Split('}');
+            Array.ForEach(lumpSections, s => s.Trim('{', '}'));
+
+            // Loop through all sections
+            for (int i = 0; i < lumpSections.Length; i++)
+            {
+                // Prepare an attributes list
+                var attributes = new List<KeyValuePair<string, string>>();
+
+                // Split the section by newlines
+                string section = lumpSections[i];
+                string[] lines = section.Split('\n');
+                Array.ForEach(lines, l => l.Trim());
+
+                // Convert each line into a key-value pair and add
+                for (int j = 0; j < lines.Length; j++)
+                {
+                    // TODO: Split lines and add
+                }
+
+                // Create a new entity and add
+                var entity = new Entity { Attributes = attributes };
+                entities.Add(entity);
+            }
+
+            return new EntitiesLump { Entities = [.. entities] };
+        }
+
+        /// <summary>
+        /// Parse a Stream into LUMP_PLANES
+        /// </summary>
+        /// <param name="data">Stream to parse</param>
+        /// <returns>Filled LUMP_PLANES on success, null on error</returns>
+        private static PlanesLump? ParsePlanesLump(Stream data, int offset, int length)
+        {
+            var planes = new List<Plane>();
+            while (data.Position < offset + length)
+            {
+                var plane = data.ReadType<Plane>();
+                if (plane != null)
+                    planes.Add(plane);
+            }
+
+            return new PlanesLump { Planes = [.. planes] };
+        }
+
+        /// <summary>
+        /// Parse a Stream into LUMP_TEXTURES
+        /// </summary>
+        /// <param name="data">Stream to parse</param>
+        /// <returns>Filled LUMP_TEXTURES on success, null on error</returns>
+        private static TextureLump? ParseTextureLump(Stream data, int offset, int length)
+        {
+            var lump = new TextureLump();
+
+            lump.Header = ParseTextureHeader(data);
+            var textures = new List<MipTexture>();
+            while (data.Position < offset + length)
+            {
+                var texture = data.ReadType<MipTexture>();
+                if (texture != null)
+                    textures.Add(texture);
+            }
+
+            lump.Textures = [.. textures];
+            return lump;
+        }

         /// <summary>
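Editorial note: one detail worth flagging in `ParseEntitiesLump` as committed: `Array.ForEach(lumpSections, s => s.Trim('{', '}'))` does not modify the array. Strings are immutable, `Trim` returns a new string, and the lambda discards it; the same applies to the line-trimming call below it. A sketch of what an in-place trim has to look like instead:

```csharp
// Array.ForEach runs an action for its side effects only; s.Trim(...) has none.
// To actually strip the braces, the result must be written back by index:
for (int i = 0; i < lumpSections.Length; i++)
{
    lumpSections[i] = lumpSections[i].Trim('{', '}');
}
```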
@@ -134,83 +216,231 @@ namespace SabreTools.Serialization.Deserializers
         /// <returns>Filled Half-Life Level texture header on success, null on error</returns>
         private static TextureHeader ParseTextureHeader(Stream data)
         {
-            // TODO: Use marshalling here instead of building
             var textureHeader = new TextureHeader();

-            textureHeader.TextureCount = data.ReadUInt32();
-
-            var offsets = new uint[textureHeader.TextureCount];
-
-            for (int i = 0; i < textureHeader.TextureCount; i++)
+            textureHeader.MipTextureCount = data.ReadUInt32();
+            textureHeader.Offsets = new int[textureHeader.MipTextureCount];
+            for (int i = 0; i < textureHeader.Offsets.Length; i++)
             {
-                offsets[i] = data.ReadUInt32();
+                if (data.Position >= data.Length)
+                    break;
+                textureHeader.Offsets[i] = data.ReadInt32();
             }

-            textureHeader.Offsets = offsets;
-
             return textureHeader;
         }

         /// <summary>
-        /// Parse a Stream into a texture
+        /// Parse a Stream into LUMP_VERTICES
         /// </summary>
         /// <param name="data">Stream to parse</param>
-        /// <param name="mipmap">Mipmap level</param>
-        /// <returns>Filled texture on success, null on error</returns>
-        private static Texture ParseTexture(Stream data, uint mipmap = 0)
+        /// <returns>Filled LUMP_VERTICES on success, null on error</returns>
+        private static VerticesLump? ParseVerticesLump(Stream data, int offset, int length)
         {
-            // TODO: Use marshalling here instead of building
-            var texture = new Texture();
-
-            byte[]? name = data.ReadBytes(16);
-            if (name != null)
-                texture.Name = Encoding.ASCII.GetString(name).TrimEnd('\0');
-            texture.Width = data.ReadUInt32();
-            texture.Height = data.ReadUInt32();
-            texture.Offsets = new uint[4];
-            for (int i = 0; i < 4; i++)
-            {
-                texture.Offsets[i] = data.ReadUInt32();
-            }
-
-            // Get the size of the pixel data
-            uint pixelSize = 0;
-            for (int i = 0; i < HL_BSP_MIPMAP_COUNT; i++)
-            {
-                if (texture.Offsets[i] != 0)
-                {
-                    pixelSize += (texture.Width >> i) * (texture.Height >> i);
-                }
-            }
-
-            // If we have no pixel data
-            if (pixelSize == 0)
-                return texture;
-
-            texture.TextureData = data.ReadBytes((int)pixelSize);
-            texture.PaletteSize = data.ReadUInt16();
-            texture.PaletteData = data.ReadBytes((int)(texture.PaletteSize * 3));
-
-            // Adjust the dimensions based on mipmap level
-            switch (mipmap)
-            {
-                case 1:
-                    texture.Width /= 2;
-                    texture.Height /= 2;
-                    break;
-                case 2:
-                    texture.Width /= 4;
-                    texture.Height /= 4;
-                    break;
-                case 3:
-                    texture.Width /= 8;
-                    texture.Height /= 8;
-                    break;
-            }
-
-            return texture;
+            var vertices = new List<Vector3D>();
+            while (data.Position < offset + length)
+            {
+                vertices.Add(data.ReadType<Vector3D>());
+            }
+
+            return new VerticesLump { Vertices = [.. vertices] };
+        }
+
+        /// <summary>
+        /// Parse a Stream into LUMP_VISIBILITY
+        /// </summary>
+        /// <param name="data">Stream to parse</param>
+        /// <returns>Filled LUMP_VISIBILITY on success, null on error</returns>
+        private static VisibilityLump? ParseVisibilityLump(Stream data, int offset, int length)
+        {
+            var lump = new VisibilityLump();
+
+            lump.NumClusters = data.ReadInt32();
+            lump.ByteOffsets = new int[lump.NumClusters][];
+            for (int i = 0; i < lump.NumClusters; i++)
+            {
+                lump.ByteOffsets[i] = new int[2];
+                for (int j = 0; j < 2; j++)
+                {
+                    lump.ByteOffsets[i][j] = data.ReadInt32();
+                }
+            }
+
+            return lump;
+        }
+
+        /// <summary>
+        /// Parse a Stream into LUMP_NODES
+        /// </summary>
+        /// <param name="data">Stream to parse</param>
+        /// <returns>Filled LUMP_NODES on success, null on error</returns>
+        private static BspNodesLump? ParseNodesLump(Stream data, int offset, int length)
+        {
+            var nodes = new List<BspNode>();
+            while (data.Position < offset + length)
+            {
+                var node = data.ReadType<BspNode>();
+                if (node != null)
+                    nodes.Add(node);
+            }
+
+            return new BspNodesLump { Nodes = [.. nodes] };
+        }
+
+        /// <summary>
+        /// Parse a Stream into LUMP_TEXINFO
+        /// </summary>
+        /// <param name="data">Stream to parse</param>
+        /// <returns>Filled LUMP_TEXINFO on success, null on error</returns>
+        private static BspTexinfoLump? ParseTexinfoLump(Stream data, int offset, int length)
+        {
+            var texinfos = new List<BspTexinfo>();
+            while (data.Position < offset + length)
+            {
+                var texinfo = data.ReadType<BspTexinfo>();
+                if (texinfo != null)
+                    texinfos.Add(texinfo);
+            }
+
+            return new BspTexinfoLump { Texinfos = [.. texinfos] };
+        }
+
+        /// <summary>
+        /// Parse a Stream into LUMP_FACES
+        /// </summary>
+        /// <param name="data">Stream to parse</param>
+        /// <returns>Filled LUMP_FACES on success, null on error</returns>
+        private static BspFacesLump? ParseFacesLump(Stream data, int offset, int length)
+        {
+            var faces = new List<BspFace>();
+            while (data.Position < offset + length)
+            {
+                var face = data.ReadType<BspFace>();
+                if (face != null)
+                    faces.Add(face);
+            }
+
+            return new BspFacesLump { Faces = [.. faces] };
+        }
+
+        /// <summary>
+        /// Parse a Stream into LUMP_LIGHTING
+        /// </summary>
+        /// <param name="data">Stream to parse</param>
+        /// <returns>Filled LUMP_LIGHTING on success, null on error</returns>
+        private static LightmapLump? ParseLightmapLump(Stream data, int offset, int length)
+        {
+            var lump = new LightmapLump();
+            lump.Lightmap = new byte[length / 3][];
+
+            for (int i = 0; i < length / 3; i++)
+            {
+                lump.Lightmap[i] = data.ReadBytes(3);
+            }
+
+            return lump;
+        }
+
+        /// <summary>
+        /// Parse a Stream into LUMP_CLIPNODES
+        /// </summary>
+        /// <param name="data">Stream to parse</param>
+        /// <returns>Filled LUMP_CLIPNODES on success, null on error</returns>
+        private static ClipnodesLump? ParseClipnodesLump(Stream data, int offset, int length)
+        {
+            var clipnodes = new List<Clipnode>();
+            while (data.Position < offset + length)
+            {
+                var clipnode = data.ReadType<Clipnode>();
+                if (clipnode != null)
+                    clipnodes.Add(clipnode);
+            }
+
+            return new ClipnodesLump { Clipnodes = [.. clipnodes] };
+        }
+
+        /// <summary>
+        /// Parse a Stream into LUMP_LEAVES
+        /// </summary>
+        /// <param name="data">Stream to parse</param>
+        /// <returns>Filled LUMP_LEAVES on success, null on error</returns>
+        private static BspLeavesLump? ParseLeavesLump(Stream data, int offset, int length)
+        {
+            var leaves = new List<BspLeaf>();
+            while (data.Position < offset + length)
+            {
+                var leaf = data.ReadType<BspLeaf>();
+                if (leaf != null)
+                    leaves.Add(leaf);
+            }
+
+            return new BspLeavesLump { Leaves = [.. leaves] };
+        }
+
+        /// <summary>
+        /// Parse a Stream into LUMP_MARKSURFACES
+        /// </summary>
+        /// <param name="data">Stream to parse</param>
+        /// <returns>Filled LUMP_MARKSURFACES on success, null on error</returns>
+        private static MarksurfacesLump? ParseMarksurfacesLump(Stream data, int offset, int length)
+        {
+            var marksurfaces = new List<ushort>();
+            while (data.Position < offset + length)
+            {
+                marksurfaces.Add(data.ReadUInt16());
+            }
+
+            return new MarksurfacesLump { Marksurfaces = [.. marksurfaces] };
+        }
+
+        /// <summary>
+        /// Parse a Stream into LUMP_EDGES
+        /// </summary>
+        /// <param name="data">Stream to parse</param>
+        /// <returns>Filled LUMP_EDGES on success, null on error</returns>
+        private static EdgesLump? ParseEdgesLump(Stream data, int offset, int length)
+        {
+            var edges = new List<Edge>();
+            while (data.Position < offset + length)
+            {
+                var edge = data.ReadType<Edge>();
+                if (edge != null)
+                    edges.Add(edge);
+            }
+
+            return new EdgesLump { Edges = [.. edges] };
+        }
+
+        /// <summary>
+        /// Parse a Stream into LUMP_SURFEDGES
+        /// </summary>
+        /// <param name="data">Stream to parse</param>
+        /// <returns>Filled LUMP_SURFEDGES on success, null on error</returns>
+        private static SurfedgesLump? ParseSurfedgesLump(Stream data, int offset, int length)
+        {
+            var surfedges = new List<int>();
+            while (data.Position < offset + length)
+            {
+                surfedges.Add(data.ReadInt32());
+            }
+
+            return new SurfedgesLump { Surfedges = [.. surfedges] };
+        }
+
+        /// <summary>
+        /// Parse a Stream into LUMP_MODELS
+        /// </summary>
+        /// <param name="data">Stream to parse</param>
+        /// <returns>Filled LUMP_MODELS on success, null on error</returns>
+        private static BspModelsLump? ParseModelsLump(Stream data, int offset, int length)
+        {
+            var models = new List<BspModel>();
+            while (data.Position < offset + length)
+            {
+                var model = data.ReadType<BspModel>();
+                if (model != null)
+                    models.Add(model);
+            }
+
+            return new BspModelsLump { Models = [.. models] };
+        }
     }
 }
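Editorial note: most of the new lump parsers share one shape: seek to the lump, then read fixed-size records until the stream position reaches `offset + length`. A generic sketch of that pattern — the delegate here is a stand-in for a wrapper over the library's `ReadType<T>()`, and the stall guard is an addition, since the loop's termination otherwise depends on every read advancing the stream:

```csharp
using System;
using System.Collections.Generic;
using System.IO;

internal static class LumpReader
{
    // Generic form of the repeated pattern: read T records until the lump's
    // byte range [offset, offset + length) is exhausted.
    public static List<T> ReadAll<T>(Stream data, int offset, int length, Func<Stream, T?> readOne)
        where T : class
    {
        var items = new List<T>();
        data.Seek(offset, SeekOrigin.Begin);

        while (data.Position < offset + length)
        {
            long before = data.Position;
            T? item = readOne(data);
            if (item != null)
                items.Add(item);

            // Guard against a reader that fails without consuming bytes
            if (data.Position == before)
                break;
        }

        return items;
    }
}
```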
@@ -21,9 +21,6 @@ namespace SabreTools.Serialization.Deserializers
             if (data.Position < 0 || data.Position >= data.Length)
                 return null;

-            // Cache the current offset
-            int initialOffset = (int)data.Position;
-
             // Create a new binary to fill
             var binary = new Binary();

@@ -235,9 +232,7 @@ namespace SabreTools.Serialization.Deserializers
         {
             var header = data.ReadType<FileHeader>();

-            if (header == null)
-                return null;
-            if (header.Signature != SignatureUInt64)
+            if (header?.Signature != SignatureUInt64)
                 return null;
             if (header.ByteOrder != 0xFFFE)
                 return null;
@@ -265,7 +260,6 @@ namespace SabreTools.Serialization.Deserializers
         /// <returns>Filled sector full of sector numbers on success, null on error</returns>
         private static SectorNumber[] ParseSectorNumbers(Stream data, ushort sectorShift)
         {
-            // TODO: Use marshalling here instead of building
             int sectorCount = (int)(Math.Pow(2, sectorShift) / sizeof(uint));
             var sectorNumbers = new SectorNumber[sectorCount];

@@ -286,7 +280,7 @@ namespace SabreTools.Serialization.Deserializers
         /// <returns>Filled sector full of directory entries on success, null on error</returns>
         private static DirectoryEntry[]? ParseDirectoryEntries(Stream data, ushort sectorShift, ushort majorVersion)
         {
-            // TODO: Use marshalling here instead of building
+            // TODO: Fix the directory entry size const
             const int directoryEntrySize = 64 + 2 + 1 + 1 + 4 + 4 + 4 + 16 + 4 + 8 + 8 + 4 + 8;
             int sectorCount = (int)(Math.Pow(2, sectorShift) / directoryEntrySize);
             var directoryEntries = new DirectoryEntry[sectorCount];
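Editorial note: in a Compound File Binary stream the sector size is 2^sectorShift bytes, so the number of 4-byte sector numbers per sector is 2^sectorShift / 4 — e.g. sectorShift = 9 gives 512-byte sectors holding 128 entries. The field-by-field `directoryEntrySize` constant above also sums to 128, the fixed directory entry size. An integer-only equivalent of the `Math.Pow` expression, shown as a sketch:

```csharp
// 1 << sectorShift == 2^sectorShift for the shift values CFB uses (9 or 12),
// so the per-sector entry count can be computed without floating point:
int sectorSize = 1 << sectorShift;                 // 512 bytes when sectorShift == 9
int entriesPerSector = sectorSize / sizeof(uint);  // 128 four-byte sector numbers
```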
@@ -19,9 +19,6 @@ namespace SabreTools.Serialization.Deserializers
             if (data.Position < 0 || data.Position >= data.Length)
                 return null;

-            // Cache the current offset
-            int initialOffset = (int)data.Position;
-
             // Create a new CIA archive to fill
             var cia = new Models.N3DS.CIA();

@@ -157,8 +154,7 @@ namespace SabreTools.Serialization.Deserializers
         /// <returns>Filled certificate on success, null on error</returns>
         public static Certificate? ParseCertificate(Stream data)
         {
-            // TODO: Use marshalling here instead of building
-            Certificate certificate = new Certificate();
+            var certificate = new Certificate();

             certificate.SignatureType = (SignatureType)data.ReadUInt32();
             switch (certificate.SignatureType)
@@ -233,8 +229,7 @@ namespace SabreTools.Serialization.Deserializers
         /// <returns>Filled ticket on success, null on error</returns>
         public static Ticket? ParseTicket(Stream data, bool fromCdn = false)
         {
-            // TODO: Use marshalling here instead of building
-            Ticket ticket = new Ticket();
+            var ticket = new Ticket();

             ticket.SignatureType = (SignatureType)data.ReadUInt32();
             switch (ticket.SignatureType)
@@ -338,7 +333,6 @@ namespace SabreTools.Serialization.Deserializers
         /// <returns>Filled title metadata on success, null on error</returns>
         public static TitleMetadata? ParseTitleMetadata(Stream data, bool fromCdn = false)
         {
-            // TODO: Use marshalling here instead of building
             var titleMetadata = new TitleMetadata();

             titleMetadata.SignatureType = (SignatureType)data.ReadUInt32();
@@ -1,4 +1,3 @@
-using System.Collections.Generic;
 using System.IO;
 using System.Text;
 using SabreTools.IO.Extensions;
@@ -173,7 +172,7 @@ namespace SabreTools.Serialization.Deserializers
             long directoryNamesEnd = data.Position + directoryHeader.NameSize;

             // Create the string dictionary
-            file.DirectoryNames = new Dictionary<long, string?>();
+            file.DirectoryNames = [];

             // Loop and read the null-terminated strings
             while (data.Position < directoryNamesEnd)
@@ -184,10 +183,7 @@ namespace SabreTools.Serialization.Deserializers
             {
                 data.Seek(-directoryName?.Length ?? 0, SeekOrigin.Current);
                 byte[]? endingData = data.ReadBytes((int)(directoryNamesEnd - data.Position));
-                if (endingData != null)
-                    directoryName = Encoding.ASCII.GetString(endingData);
-                else
-                    directoryName = null;
+                directoryName = endingData != null ? Encoding.ASCII.GetString(endingData) : null;
             }

             file.DirectoryNames[nameOffset] = directoryName;
@@ -612,7 +608,6 @@ namespace SabreTools.Serialization.Deserializers
         /// <returns>Filled Half-Life Game Cache data block header on success, null on error</returns>
         private static DataBlockHeader? ParseDataBlockHeader(Stream data, uint minorVersion)
         {
-            // TODO: Use marshalling here instead of building
             var dataBlockHeader = new DataBlockHeader();

             // In version 3 the DataBlockHeader is missing the LastVersionPlayed field.
@@ -88,6 +88,8 @@ namespace SabreTools.Serialization.Deserializers

             // Create lists for each hash type
             var sfvList = new List<SFV>();
+            var md2List = new List<MD2>();
+            var md4List = new List<MD4>();
             var md5List = new List<MD5>();
             var sha1List = new List<SHA1>();
             var sha256List = new List<SHA256>();
@@ -115,6 +117,22 @@ namespace SabreTools.Serialization.Deserializers
                     };
                     sfvList.Add(sfv);
                     break;
+                case HashType.MD2:
+                    var md2 = new MD2
+                    {
+                        Hash = lineParts[0],
+                        File = string.Join(" ", lineParts, 1, lineParts.Length - 1),
+                    };
+                    md2List.Add(md2);
+                    break;
+                case HashType.MD4:
+                    var md4 = new MD4
+                    {
+                        Hash = lineParts[0],
+                        File = string.Join(" ", lineParts, 1, lineParts.Length - 1),
+                    };
+                    md4List.Add(md4);
+                    break;
                 case HashType.MD5:
                     var md5 = new MD5
                     {
@@ -172,6 +190,12 @@ namespace SabreTools.Serialization.Deserializers
                 case HashType.CRC32:
                     dat.SFV = [.. sfvList];
                     break;
+                case HashType.MD2:
+                    dat.MD2 = [.. md2List];
+                    break;
+                case HashType.MD4:
+                    dat.MD4 = [.. md4List];
+                    break;
                 case HashType.MD5:
                     dat.MD5 = [.. md5List];
                     break;
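Editorial note: the `string.Join(" ", lineParts, 1, lineParts.Length - 1)` idiom in the new MD2/MD4 cases reassembles filenames that themselves contain spaces — everything after the leading hash token is glued back together. A small worked example:

```csharp
// "<hash> <file name with spaces>" split on ' ' gives multiple tokens:
string line = "d41d8cd98f00b204e9800998ecf8427e My File.bin";
string[] lineParts = line.Split(' ');

string hash = lineParts[0];
// Join the remaining tokens back into the original name:
string file = string.Join(" ", lineParts, 1, lineParts.Length - 1);

// hash == "d41d8cd98f00b204e9800998ecf8427e"
// file == "My File.bin"
```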
@@ -17,9 +17,6 @@ namespace SabreTools.Serialization.Deserializers
             if (data.Position < 0 || data.Position >= data.Length)
                 return null;

-            // Cache the current offset
-            int initialOffset = (int)data.Position;
-
             // Create a new IRD to fill
             var ird = new Models.IRD.File();

@@ -18,9 +18,6 @@ namespace SabreTools.Serialization.Deserializers
             if (data.Position < 0 || data.Position >= data.Length)
                 return null;

-            // Cache the current offset
-            int initialOffset = (int)data.Position;
-
             // Create a new archive to fill
             var archive = new Archive();

@@ -100,7 +97,7 @@ namespace SabreTools.Serialization.Deserializers

             if (header == null)
                 return null;
-            if (header.Signature1 != 0x8C655D13) // TODO: Move constant to Models
+            if (header?.Signature1 != Constants.HeaderSignature)
                 return null;
             if (header.TocAddress >= data.Length)
                 return null;
@@ -21,9 +21,6 @@ namespace SabreTools.Serialization.Deserializers
             if (data.Position < 0 || data.Position >= data.Length)
                 return null;

-            // Cache the current offset
-            int initialOffset = (int)data.Position;
-
             // Create a new cabinet to fill
             var cabinet = new Cabinet();

@@ -337,9 +334,7 @@ namespace SabreTools.Serialization.Deserializers
         {
             var commonHeader = data.ReadType<CommonHeader>();

-            if (commonHeader == null)
-                return null;
-            if (commonHeader.Signature != SignatureString)
+            if (commonHeader?.Signature != SignatureString)
                 return null;

             return commonHeader;
@@ -353,7 +348,7 @@ namespace SabreTools.Serialization.Deserializers
         /// <returns>Filled volume header on success, null on error</returns>
         public static VolumeHeader ParseVolumeHeader(Stream data, int majorVersion)
         {
-            VolumeHeader volumeHeader = new VolumeHeader();
+            var volumeHeader = new VolumeHeader();

             // Read the descriptor based on version
             if (majorVersion <= 5)
@@ -649,7 +644,7 @@ namespace SabreTools.Serialization.Deserializers
         /// <returns>Filled file descriptor on success, null on error</returns>
         public static FileDescriptor ParseFileDescriptor(Stream data, int majorVersion, uint descriptorOffset)
         {
-            FileDescriptor fileDescriptor = new FileDescriptor();
+            var fileDescriptor = new FileDescriptor();

             // Read the descriptor based on version
             if (majorVersion <= 5)
@@ -427,9 +427,7 @@ namespace SabreTools.Serialization.Deserializers
         {
             var informationBlock = data.ReadType<InformationBlock>();

-            if (informationBlock == null)
-                return null;
-            if (informationBlock.Signature != LESignatureString && informationBlock.Signature != LXSignatureString)
+            if (informationBlock?.Signature != LESignatureString && informationBlock?.Signature != LXSignatureString)
                 return null;

             return informationBlock;
@@ -472,15 +470,13 @@ namespace SabreTools.Serialization.Deserializers
         /// <returns>Filled resident names table entry on success, null on error</returns>
         public static ResidentNamesTableEntry ParseResidentNamesTableEntry(Stream data)
         {
-            // TODO: Use marshalling here instead of building
             var entry = new ResidentNamesTableEntry();

             entry.Length = data.ReadByteValue();
             if (entry.Length > 0 && data.Position + entry.Length <= data.Length)
             {
-                byte[]? name = data.ReadBytes(entry.Length);
-                if (name != null)
-                    entry.Name = Encoding.ASCII.GetString(name).TrimEnd('\0');
+                byte[] name = data.ReadBytes(entry.Length);
+                entry.Name = Encoding.ASCII.GetString(name).TrimEnd('\0');
             }
             entry.OrdinalNumber = data.ReadUInt16();

@@ -21,9 +21,6 @@ namespace SabreTools.Serialization.Deserializers
             if (data.Position < 0 || data.Position >= data.Length)
                 return null;

-            // Cache the current offset
-            int initialOffset = (int)data.Position;
-
             // Create a new archive to fill
             var archive = new Archive();

@@ -32,7 +29,7 @@ namespace SabreTools.Serialization.Deserializers
             // Check for User Data
             uint possibleSignature = data.ReadUInt32();
             data.Seek(-4, SeekOrigin.Current);
-            if (possibleSignature == 0x1B51504D)
+            if (possibleSignature == UserDataSignatureUInt32)
             {
                 // Save the current position for offset correction
                 long basePtr = data.Position;
@@ -56,7 +53,7 @@ namespace SabreTools.Serialization.Deserializers
             // Check for the Header
             possibleSignature = data.ReadUInt32();
             data.Seek(-4, SeekOrigin.Current);
-            if (possibleSignature == 0x1A51504D)
+            if (possibleSignature == ArchiveHeaderSignatureUInt32)
             {
                 // Try to parse the archive header
                 var archiveHeader = ParseArchiveHeader(data);
@@ -406,9 +403,7 @@ namespace SabreTools.Serialization.Deserializers
         {
             var userData = data.ReadType<UserData>();

-            if (userData == null)
-                return null;
-            if (userData.Signature != UserDataSignatureString)
+            if (userData?.Signature != UserDataSignatureString)
                 return null;

             return userData;
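Editorial note: the MPQ reader peeks at a possible signature by reading four bytes and seeking back. A sketch of that peek as a reusable helper — the little-endian byte order is an assumption matching how the magic constants decode:

```csharp
using System.IO;

internal static class StreamPeek
{
    // Read a 4-byte little-endian value without consuming it, the same
    // "read then Seek(-4)" dance the deserializer performs inline.
    public static uint PeekUInt32(Stream data)
    {
        byte[] buffer = new byte[4];
        if (data.Read(buffer, 0, 4) != 4)
            throw new EndOfStreamException();
        data.Seek(-4, SeekOrigin.Current);

        // 0x1B51504D decodes to 'M' 'P' 'Q' 0x1B in file order (user data block);
        // 0x1A51504D decodes to 'M' 'P' 'Q' 0x1A (archive header).
        return (uint)(buffer[0] | (buffer[1] << 8) | (buffer[2] << 16) | (buffer[3] << 24));
    }
}
```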
@@ -20,9 +20,6 @@ namespace SabreTools.Serialization.Deserializers
             if (data.Position < 0 || data.Position >= data.Length)
                 return null;

-            // Cache the current offset
-            long initialOffset = data.Position;
-
             // Create a new cart image to fill
             var cart = new Cart();

@@ -144,14 +141,10 @@ namespace SabreTools.Serialization.Deserializers
         /// <returns>Filled NCSD header on success, null on error</returns>
         public static NCSDHeader? ParseNCSDHeader(Stream data)
         {
-            // TODO: Use marshalling here instead of building
             var header = new NCSDHeader();

             header.RSA2048Signature = data.ReadBytes(0x100);
-            byte[]? magicNumber = data.ReadBytes(4);
-            if (magicNumber == null)
-                return null;
-
+            byte[] magicNumber = data.ReadBytes(4);
             header.MagicNumber = Encoding.ASCII.GetString(magicNumber).TrimEnd('\0');
             if (header.MagicNumber != NCSDMagicNumber)
                 return null;
@@ -91,10 +91,7 @@ namespace SabreTools.Serialization.Deserializers
             {
                 data.Seek(-directoryName?.Length ?? 0, SeekOrigin.Current);
                 byte[]? endingData = data.ReadBytes((int)(directoryNamesEnd - data.Position));
-                if (endingData != null)
-                    directoryName = Encoding.ASCII.GetString(endingData);
-                else
-                    directoryName = null;
+                directoryName = endingData != null ? Encoding.ASCII.GetString(endingData) : null;
             }

             file.DirectoryNames[nameOffset] = directoryName;
@@ -214,9 +214,7 @@ namespace SabreTools.Serialization.Deserializers
         {
             var header = data.ReadType<ExecutableHeader>();

-            if (header == null)
-                return null;
-            if (header.Magic != SignatureString)
+            if (header?.Magic != SignatureString)
                 return null;

             return header;
@@ -19,9 +19,6 @@ namespace SabreTools.Serialization.Deserializers
             if (data.Position < 0 || data.Position >= data.Length)
                 return null;

-            // Cache the current offset
-            int initialOffset = (int)data.Position;
-
             // Create a new cart image to fill
             var cart = new Cart();

@@ -18,9 +18,6 @@ namespace SabreTools.Serialization.Deserializers
             if (data.Position < 0 || data.Position >= data.Length)
                 return null;

-            // Cache the current offset
-            long initialOffset = data.Position;
-
             // Create a new Half-Life Package to fill
             var file = new Models.PAK.File();

@@ -73,9 +70,7 @@ namespace SabreTools.Serialization.Deserializers
         {
             var header = data.ReadType<Header>();

-            if (header == null)
-                return null;
-            if (header.Signature != SignatureString)
+            if (header?.Signature != SignatureString)
                 return null;

             return header;
@@ -1,5 +1,4 @@
 using System.IO;
-using System.Runtime.InteropServices;
 using System.Text;
 using SabreTools.IO.Extensions;
 using SabreTools.Models.PFF;
@@ -20,9 +19,6 @@ namespace SabreTools.Serialization.Deserializers
             if (data.Position < 0 || data.Position >= data.Length)
                 return null;

-            // Cache the current offset
-            int initialOffset = (int)data.Position;
-
             // Create a new archive to fill
             var archive = new Archive();

@@ -56,7 +52,7 @@ namespace SabreTools.Serialization.Deserializers
             {
                 var file = ParseSegment(data, header.FileSegmentSize);
                 if (file == null)
-                    return null;
+                    continue;

                 archive.Segments[i] = file;
             }
@@ -168,7 +168,7 @@ namespace SabreTools.Serialization.Deserializers
             #region Local File

             // Setup all of the collections
-            var localFileHeaders = new List<LocalFileHeader?>();
+            var localFileHeaders = new List<LocalFileHeader>();
             var encryptionHeaders = new List<byte[]?>();
             var fileData = new List<byte[]>(); // TODO: Should this data be read here?
             var dataDescriptors = new List<DataDescriptor?>();
@@ -200,7 +200,7 @@ namespace SabreTools.Serialization.Deserializers
                 if (localFileHeader == null)
                 {
                     // Add a placeholder null item
-                    localFileHeaders.Add(null);
+                    localFileHeaders.Add(new LocalFileHeader());
                     encryptionHeaders.Add(null);
                     fileData.Add([]);
                     dataDescriptors.Add(null);
@@ -303,7 +303,7 @@ namespace SabreTools.Serialization.Deserializers
         private static UnknownBlock1 ParseUnknownBlock1(Stream data)
         {
             // TODO: Use marshalling here instead of building
-            UnknownBlock1 unknownBlock1 = new UnknownBlock1();
+            var unknownBlock1 = new UnknownBlock1();

             unknownBlock1.Length = data.ReadUInt32();
             unknownBlock1.Data = data.ReadBytes((int)unknownBlock1.Length);
@@ -319,7 +319,7 @@ namespace SabreTools.Serialization.Deserializers
         private static UnknownBlock3 ParseUnknownBlock3(Stream data)
         {
             // TODO: Use marshalling here instead of building
-            UnknownBlock3 unknownBlock3 = new UnknownBlock3();
+            var unknownBlock3 = new UnknownBlock3();

             // No-op because we don't even know the length

@@ -334,7 +334,7 @@ namespace SabreTools.Serialization.Deserializers
         private static DataFile ParseDataFile(Stream data)
         {
             // TODO: Use marshalling here instead of building
-            DataFile dataFile = new DataFile();
+            var dataFile = new DataFile();

             dataFile.FileNameLength = data.ReadUInt16();
             byte[]? fileName = data.ReadBytes(dataFile.FileNameLength);
@@ -17,9 +17,6 @@ namespace SabreTools.Serialization.Deserializers
             if (data.Position < 0 || data.Position >= data.Length)
                 return null;

-            // Cache the current offset
-            int initialOffset = (int)data.Position;
-
             // Create a new playlist to fill
             var playlist = new Playlist();

@@ -46,7 +43,7 @@ namespace SabreTools.Serialization.Deserializers
                 long currentOffset = data.Position;
                 var entryHeader = PlayJAudio.DeserializeStream(data, currentOffset);
                 if (entryHeader == null)
-                    return null;
+                    continue;

                 playlist.AudioFiles[i] = entryHeader;
             }
@@ -64,7 +61,7 @@ namespace SabreTools.Serialization.Deserializers
         private static PlaylistHeader ParsePlaylistHeader(Stream data)
         {
             // TODO: Use marshalling here instead of building
-            PlaylistHeader playlistHeader = new PlaylistHeader();
+            var playlistHeader = new PlaylistHeader();

             playlistHeader.TrackCount = data.ReadUInt32();
             playlistHeader.Data = data.ReadBytes(52);
@@ -1,7 +1,9 @@
 using System;
 using System.Collections.Generic;
 using System.IO;
+#if NET35_OR_GREATER || NETCOREAPP
 using System.Linq;
+#endif
 using System.Text;
 using SabreTools.IO.Extensions;
 using SabreTools.Models.PortableExecutable;
@@ -1052,28 +1054,64 @@ namespace SabreTools.Serialization.Deserializers
             // If we have import lookup tables
             if (importTable.ImportLookupTables != null && importLookupTables.Count > 0)
             {
+#if NET20
+                var addresses = new List<int>();
+                foreach (var kvp in importTable.ImportLookupTables)
+                {
+                    if (kvp.Value == null)
+                        continue;
+
+                    var vaddrs = Array.ConvertAll(kvp.Value, ilte => ilte == null
+                        ? 0
+                        : (int)ilte.HintNameTableRVA.ConvertVirtualAddress(sections));
+                    addresses.AddRange(vaddrs);
+                }
+#else
                 var addresses = importTable.ImportLookupTables
                     .SelectMany(kvp => kvp.Value ?? [])
                     .Where(ilte => ilte != null)
                     .Select(ilte => (int)ilte!.HintNameTableRVA.ConvertVirtualAddress(sections));
+#endif
                 hintNameTableEntryAddresses.AddRange(addresses);
             }

             // If we have import address tables
             if (importTable.ImportAddressTables != null && importTable.ImportAddressTables.Count > 0)
             {
+#if NET20
+                var addresses = new List<int>();
+                foreach (var kvp in importTable.ImportAddressTables)
+                {
+                    if (kvp.Value == null)
+                        continue;
+
+                    var vaddrs = Array.ConvertAll(kvp.Value, iate => iate == null
+                        ? 0
+                        : (int)iate.HintNameTableRVA.ConvertVirtualAddress(sections));
+                    addresses.AddRange(vaddrs);
+                }
+#else
                 var addresses = importTable.ImportAddressTables
                     .SelectMany(kvp => kvp.Value ?? [])
                     .Where(iate => iate != null)
                     .Select(iate => (int)iate!.HintNameTableRVA.ConvertVirtualAddress(sections));
+#endif
                 hintNameTableEntryAddresses.AddRange(addresses);
             }

             // Sanitize the addresses
-            hintNameTableEntryAddresses = hintNameTableEntryAddresses.Where(addr => addr != 0)
-                .Distinct()
-                .OrderBy(a => a)
-                .ToList();
+            hintNameTableEntryAddresses = hintNameTableEntryAddresses.FindAll(addr => addr != 0);
+#if NET20
+            var temp = new List<int>();
+            foreach (int value in hintNameTableEntryAddresses)
+            {
+                if (!temp.Contains(value))
+                    temp.Add(value);
+            }
+#else
+            hintNameTableEntryAddresses = hintNameTableEntryAddresses.Distinct().ToList();
+#endif
+            hintNameTableEntryAddresses.Sort();

             // If we have any addresses, add them to the table
             if (hintNameTableEntryAddresses.Count > 0)
@@ -1214,11 +1252,12 @@ namespace SabreTools.Serialization.Deserializers
                 return resourceDirectoryTable;

             // If we're not aligned to a section
-            if (!sections.Any(s => s != null && s.PointerToRawData == initialOffset))
+            var firstSection = Array.Find(sections, s => s != null && s.PointerToRawData == initialOffset);
+            if (firstSection == null)
                 return resourceDirectoryTable;

             // Get the section size
-            int size = (int)sections.First(s => s != null && s.PointerToRawData == initialOffset)!.SizeOfRawData;
+            int size = (int)firstSection.SizeOfRawData;

             // Align to the 512-byte boundary, we find the start of an MS-DOS header, or the end of the file
             while (data.Position - initialOffset < size && data.Position % 0x200 != 0 && data.Position < data.Length - 1)
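Editorial note: replacing `Any` + `First` with a single `Array.Find` scans the section array once instead of twice, and it compiles on net20 where LINQ is unavailable — the same motivation as the `#if NET20` import-table paths above. A self-contained sketch of the equivalence:

```csharp
using System;
using System.Linq;

class SectionLookupDemo
{
    record Section(uint PointerToRawData, uint SizeOfRawData);

    static void Main()
    {
        Section?[] sections = { new(0, 512), new(512, 1024) };
        uint initialOffset = 512;

        // Two passes over the array:
        bool anyMatch = sections.Any(s => s != null && s.PointerToRawData == initialOffset);
        Section? viaLinq = anyMatch
            ? sections.First(s => s != null && s.PointerToRawData == initialOffset)
            : null;

        // One pass, no LINQ dependency:
        Section? viaFind = Array.Find(sections, s => s != null && s.PointerToRawData == initialOffset);

        Console.WriteLine(viaLinq == viaFind); // True
    }
}
```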
@@ -19,9 +19,6 @@ namespace SabreTools.Serialization.Deserializers
             if (data.Position < 0 || data.Position >= data.Length)
                 return null;

-            // Cache the current offset
-            int initialOffset = (int)data.Position;
-
             // Create a new archive to fill
             var archive = new Archive();

@@ -40,24 +37,21 @@ namespace SabreTools.Serialization.Deserializers
             #region File List

-            // If we have any files
-            if (header.FileCount > 0)
-            {
-                var fileDescriptors = new FileDescriptor[header.FileCount];
-
-                // Read all entries in turn
-                for (int i = 0; i < header.FileCount; i++)
-                {
-                    var file = ParseFileDescriptor(data, header.MinorVersion);
-                    if (file == null)
-                        return null;
-
-                    fileDescriptors[i] = file;
-                }
-
-                // Set the file list
-                archive.FileList = fileDescriptors;
-            }
+            var fileDescriptors = new FileDescriptor[header.FileCount];
+
+            // Read all entries in turn
+            for (int i = 0; i < header.FileCount; i++)
+            {
+                var file = ParseFileDescriptor(data, header.MinorVersion);
+                if (file == null)
+                    return null;
+
+                fileDescriptors[i] = file;
+            }
+
+            // Set the file list
+            archive.FileList = fileDescriptors;

             #endregion

             // Cache the compressed data offset
@@ -75,9 +69,7 @@ namespace SabreTools.Serialization.Deserializers
         {
             var header = data.ReadType<Header>();

-            if (header == null)
-                return null;
-            if (header.Signature != SignatureString)
+            if (header?.Signature != SignatureString)
                 return null;

             return header;
@@ -17,9 +17,6 @@ namespace SabreTools.Serialization.Deserializers
             if (data.Position < 0 || data.Position >= data.Length)
                 return null;

-            // Cache the current offset
-            int initialOffset = (int)data.Position;
-
             // Deserialize the SFB
             var sfb = data.ReadType<Models.PlayStation3.SFB>();
             if (sfb == null)
@@ -17,9 +17,6 @@ namespace SabreTools.Serialization.Deserializers
             if (data.Position < 0 || data.Position >= data.Length)
                 return null;

-            // Cache the current offset
-            int initialOffset = (int)data.Position;
-
             // Create a new SFO to fill
             var sfo = new Models.PlayStation3.SFO();

@@ -20,9 +20,6 @@ namespace SabreTools.Serialization.Deserializers
             if (data.Position < 0 || data.Position >= data.Length)
                 return null;

-            // Cache the current offset
-            long initialOffset = data.Position;
-
             // Create a new SGA to fill
             var file = new Models.SGA.File();

@@ -78,7 +75,7 @@ namespace SabreTools.Serialization.Deserializers
                 // Versions 4 and 5 share the same header
                 case 4:
                 case 5:
-                    Header4 header4 = new Header4();
+                    var header4 = new Header4();

                     header4.Signature = signature;
                     header4.MajorVersion = majorVersion;
@@ -97,7 +94,7 @@ namespace SabreTools.Serialization.Deserializers
                 // Versions 6 and 7 share the same header
                 case 6:
                 case 7:
-                    Header6 header6 = new Header6();
+                    var header6 = new Header6();

                     header6.Signature = signature;
                     header6.MajorVersion = majorVersion;
@@ -125,20 +122,24 @@ namespace SabreTools.Serialization.Deserializers
         /// <returns>Filled SGA directory on success, null on error</returns>
         private static Models.SGA.Directory? ParseDirectory(Stream data, ushort majorVersion)
         {
-            #region Directory
-
             // Create the appropriate type of directory
-            Models.SGA.Directory directory;
-            switch (majorVersion)
+            return majorVersion switch
             {
-                case 4: directory = new Directory4(); break;
-                case 5: directory = new Directory5(); break;
-                case 6: directory = new Directory6(); break;
-                case 7: directory = new Directory7(); break;
-                default: return null;
-            }
-
-            #endregion
+                4 => ParseDirectory4(data),
+                5 => ParseDirectory5(data),
+                6 => ParseDirectory6(data),
+                7 => ParseDirectory7(data),
+                _ => null,
+            };
+        }

+        /// <summary>
+        /// Parse a Stream into an SGA directory
+        /// </summary>
+        /// <param name="data">Stream to parse</param>
+        /// <returns>Filled SGA directory on success, null on error</returns>
+        private static Directory4? ParseDirectory4(Stream data)
+        {
+            var directory = new Directory4();

             // Cache the current offset
             long currentOffset = data.Position;
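Editorial note: splitting `ParseDirectory` into `ParseDirectory4`/`5`/`6`/`7` trades one cast-heavy method for four type-specific ones — each overload works with its concrete directory header and section types, so the `(directory as Directory4)!`-style downcasts in the following hunks disappear. A minimal, self-contained illustration of the pattern's shape (the types here are invented for the demo):

```csharp
// Version dispatch stays in one switch expression returning the common base
// type; everything type-specific moves into per-version methods.
abstract class Directory { }
sealed class Directory4 : Directory { public int SectionOffset; }
sealed class Directory5 : Directory { public long SectionOffset; }

static class DirectoryParser
{
    public static Directory? Parse(ushort majorVersion)
    {
        return majorVersion switch
        {
            4 => ParseDirectory4(),
            5 => ParseDirectory5(),
            _ => null, // unknown version
        };
    }

    // Each method owns its concrete type; fields like SectionOffset can
    // differ per version without `as` casts at every use site.
    static Directory4 ParseDirectory4() => new Directory4 { SectionOffset = 0 };
    static Directory5 ParseDirectory5() => new Directory5 { SectionOffset = 0L };
}
```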
@@ -146,36 +147,19 @@ namespace SabreTools.Serialization.Deserializers
|
||||
#region Directory Header
|
||||
|
||||
// Try to parse the directory header
|
||||
var directoryHeader = ParseDirectoryHeader(data, majorVersion);
|
||||
var directoryHeader = ParseDirectory4Header(data);
|
||||
if (directoryHeader == null)
|
||||
return null;
|
||||
|
||||
// Set the directory header
|
||||
switch (majorVersion)
|
||||
{
|
||||
case 4: (directory as Directory4)!.DirectoryHeader = directoryHeader as DirectoryHeader4; break;
|
||||
case 5: (directory as Directory5)!.DirectoryHeader = directoryHeader as DirectoryHeader5; break;
|
||||
case 6: (directory as Directory6)!.DirectoryHeader = directoryHeader as DirectoryHeader5; break;
|
||||
case 7: (directory as Directory7)!.DirectoryHeader = directoryHeader as DirectoryHeader7; break;
|
||||
default: return null;
|
||||
}
|
||||
directory.DirectoryHeader = directoryHeader;
|
||||
|
||||
#endregion
|
||||
|
||||
#region Sections
|
||||
|
||||
// Get the sections offset
|
||||
long sectionOffset;
|
||||
switch (majorVersion)
|
||||
{
|
||||
case 4: sectionOffset = (directoryHeader as DirectoryHeader4)!.SectionOffset; break;
|
||||
case 5:
|
||||
case 6: sectionOffset = (directoryHeader as DirectoryHeader5)!.SectionOffset; break;
|
||||
case 7: sectionOffset = (directoryHeader as DirectoryHeader7)!.SectionOffset; break;
|
||||
default: return null;
|
||||
}
|
||||
|
||||
// Adjust the sections offset based on the directory
|
||||
// Get and adjust the sections offset
|
||||
long sectionOffset = directoryHeader.SectionOffset;
|
||||
sectionOffset += currentOffset;
|
||||
|
||||
// Validate the offset
|
||||
@@ -185,67 +169,21 @@ namespace SabreTools.Serialization.Deserializers
// Seek to the sections
data.Seek(sectionOffset, SeekOrigin.Begin);

// Get the section count
uint sectionCount;
switch (majorVersion)
{
case 4: sectionCount = (directoryHeader as DirectoryHeader4)!.SectionCount; break;
case 5:
case 6: sectionCount = (directoryHeader as DirectoryHeader5)!.SectionCount; break;
case 7: sectionCount = (directoryHeader as DirectoryHeader7)!.SectionCount; break;
default: return null;
}

// Create the sections array
object[] sections;
switch (majorVersion)
{
case 4: sections = new Section4[sectionCount]; break;
case 5:
case 6:
case 7: sections = new Section5[sectionCount]; break;
default: return null;
}
directory.Sections = new Section4[directoryHeader.SectionCount];

// Try to parse the sections
for (int i = 0; i < sections.Length; i++)
for (int i = 0; i < directory.Sections.Length; i++)
{
switch (majorVersion)
{
case 4: sections[i] = ParseSection4(data); break;
case 5:
case 6:
case 7: sections[i] = ParseSection5(data); break;
default: return null;
}
}

// Assign the sections
switch (majorVersion)
{
case 4: (directory as Directory4)!.Sections = sections as Section4[]; break;
case 5: (directory as Directory5)!.Sections = sections as Section5[]; break;
case 6: (directory as Directory6)!.Sections = sections as Section5[]; break;
case 7: (directory as Directory7)!.Sections = sections as Section5[]; break;
default: return null;
directory.Sections[i] = ParseSection4(data);
}

#endregion

#region Folders

// Get the folders offset
long folderOffset;
switch (majorVersion)
{
case 4: folderOffset = (directoryHeader as DirectoryHeader4)!.FolderOffset; break;
case 5: folderOffset = (directoryHeader as DirectoryHeader5)!.FolderOffset; break;
case 6: folderOffset = (directoryHeader as DirectoryHeader5)!.FolderOffset; break;
case 7: folderOffset = (directoryHeader as DirectoryHeader7)!.FolderOffset; break;
default: return null;
}

// Adjust the folders offset based on the directory
// Get and adjust the folders offset
long folderOffset = directoryHeader.FolderOffset;
folderOffset += currentOffset;

// Validate the offset
@@ -255,67 +193,21 @@ namespace SabreTools.Serialization.Deserializers
// Seek to the folders
data.Seek(folderOffset, SeekOrigin.Begin);

// Get the folder count
uint folderCount;
switch (majorVersion)
{
case 4: folderCount = (directoryHeader as DirectoryHeader4)!.FolderCount; break;
case 5: folderCount = (directoryHeader as DirectoryHeader5)!.FolderCount; break;
case 6: folderCount = (directoryHeader as DirectoryHeader5)!.FolderCount; break;
case 7: folderCount = (directoryHeader as DirectoryHeader7)!.FolderCount; break;
default: return null;
}

// Create the folders array
object[] folders;
switch (majorVersion)
{
case 4: folders = new Folder4[folderCount]; break;
case 5: folders = new Folder5[folderCount]; break;
case 6: folders = new Folder5[folderCount]; break;
case 7: folders = new Folder5[folderCount]; break;
default: return null;
}
directory.Folders = new Folder4[directoryHeader.FolderCount];

// Try to parse the folders
for (int i = 0; i < folders.Length; i++)
for (int i = 0; i < directory.Folders.Length; i++)
{
switch (majorVersion)
{
case 4: folders[i] = ParseFolder4(data); break;
case 5: folders[i] = ParseFolder5(data); break;
case 6: folders[i] = ParseFolder5(data); break;
case 7: folders[i] = ParseFolder5(data); break;
default: return null;
}
}

// Assign the folders
switch (majorVersion)
{
case 4: (directory as Directory4)!.Folders = folders as Folder4[]; break;
case 5: (directory as Directory5)!.Folders = folders as Folder5[]; break;
case 6: (directory as Directory6)!.Folders = folders as Folder5[]; break;
case 7: (directory as Directory7)!.Folders = folders as Folder5[]; break;
default: return null;
directory.Folders[i] = ParseFolder4(data);
}

#endregion

#region Files

// Get the files offset
long fileOffset;
switch (majorVersion)
{
case 4: fileOffset = (directoryHeader as DirectoryHeader4)!.FileOffset; break;
case 5: fileOffset = (directoryHeader as DirectoryHeader5)!.FileOffset; break;
case 6: fileOffset = (directoryHeader as DirectoryHeader5)!.FileOffset; break;
case 7: fileOffset = (directoryHeader as DirectoryHeader7)!.FileOffset; break;
default: return null;
}

// Adjust the files offset based on the directory
// Get and adjust the files offset
long fileOffset = directoryHeader.FileOffset;
fileOffset += currentOffset;

// Validate the offset
@@ -326,66 +218,23 @@ namespace SabreTools.Serialization.Deserializers
data.Seek(fileOffset, SeekOrigin.Begin);

// Get the file count
uint fileCount;
switch (majorVersion)
{
case 4: fileCount = (directoryHeader as DirectoryHeader4)!.FileCount; break;
case 5: fileCount = (directoryHeader as DirectoryHeader5)!.FileCount; break;
case 6: fileCount = (directoryHeader as DirectoryHeader5)!.FileCount; break;
case 7: fileCount = (directoryHeader as DirectoryHeader7)!.FileCount; break;
default: return null;
}
uint fileCount = directoryHeader.FileCount;

// Create the files array
object[] files;
switch (majorVersion)
{
case 4: files = new File4[fileCount]; break;
case 5: files = new File4[fileCount]; break;
case 6: files = new File6[fileCount]; break;
case 7: files = new File7[fileCount]; break;
default: return null;
}
directory.Files = new File4[fileCount];

// Try to parse the files
for (int i = 0; i < files.Length; i++)
for (int i = 0; i < directory.Files.Length; i++)
{
switch (majorVersion)
{
case 4: files[i] = ParseFile4(data); break;
case 5: files[i] = ParseFile4(data); break;
case 6: files[i] = ParseFile6(data); break;
case 7: files[i] = ParseFile7(data); break;
default: return null;
}
}

// Assign the files
switch (majorVersion)
{
case 4: (directory as Directory4)!.Files = files as File4[]; break;
case 5: (directory as Directory5)!.Files = files as File4[]; break;
case 6: (directory as Directory6)!.Files = files as File6[]; break;
case 7: (directory as Directory7)!.Files = files as File7[]; break;
default: return null;
directory.Files[i] = ParseFile4(data);
}

#endregion

#region String Table

// Get the string table offset
long stringTableOffset;
switch (majorVersion)
{
case 4: stringTableOffset = (directoryHeader as DirectoryHeader4)!.StringTableOffset; break;
case 5: stringTableOffset = (directoryHeader as DirectoryHeader5)!.StringTableOffset; break;
case 6: stringTableOffset = (directoryHeader as DirectoryHeader5)!.StringTableOffset; break;
case 7: stringTableOffset = (directoryHeader as DirectoryHeader7)!.StringTableOffset; break;
default: return null;
}

// Adjust the string table offset based on the directory
// Get and adjust the string table offset
long stringTableOffset = directoryHeader.StringTableOffset;
stringTableOffset += currentOffset;

// Validate the offset
@@ -395,87 +244,40 @@ namespace SabreTools.Serialization.Deserializers
// Seek to the string table
data.Seek(stringTableOffset, SeekOrigin.Begin);

// Get the string table count
uint stringCount;
switch (majorVersion)
{
case 4: stringCount = (directoryHeader as DirectoryHeader4)!.StringTableCount; break;
case 5: stringCount = (directoryHeader as DirectoryHeader5)!.StringTableCount; break;
case 6: stringCount = (directoryHeader as DirectoryHeader5)!.StringTableCount; break;
case 7: stringCount = (directoryHeader as DirectoryHeader7)!.StringTableCount; break;
default: return null;
}

// TODO: Are these strings actually indexed by number and not position?
// TODO: If indexed by position, I think it needs to be adjusted by start of table

// Create the strings dictionary
Dictionary<long, string?> strings = new Dictionary<long, string?>((int)stringCount);
directory.StringTable = new Dictionary<long, string?>((int)directoryHeader.StringTableCount);

// Get the current position to adjust the offsets
long stringTableStart = data.Position;

// Try to parse the strings
for (int i = 0; i < stringCount; i++)
for (int i = 0; i < directoryHeader.StringTableCount; i++)
{
long currentPosition = data.Position - stringTableStart;
strings[currentPosition] = data.ReadNullTerminatedAnsiString();
}

// Assign the files
switch (majorVersion)
{
case 4: (directory as Directory4)!.StringTable = strings; break;
case 5: (directory as Directory5)!.StringTable = strings; break;
case 6: (directory as Directory6)!.StringTable = strings; break;
case 7: (directory as Directory7)!.StringTable = strings; break;
default: return null;
directory.StringTable[currentPosition] = data.ReadNullTerminatedAnsiString();
}

// Loop through all folders to assign names
for (int i = 0; i < folderCount; i++)
for (int i = 0; i < directory.Folders.Length; i++)
{
uint nameOffset;
switch (majorVersion)
{
case 4: nameOffset = (directory as Directory4)!.Folders![i]!.NameOffset; break;
case 5: nameOffset = (directory as Directory5)!.Folders![i]!.NameOffset; break;
case 6: nameOffset = (directory as Directory6)!.Folders![i]!.NameOffset; break;
case 7: nameOffset = (directory as Directory7)!.Folders![i]!.NameOffset; break;
default: return null;
}
var folder = directory.Folders[i];
if (folder == null)
continue;

switch (majorVersion)
{
case 4: (directory as Directory4)!.Folders![i]!.Name = strings[nameOffset]; break;
case 5: (directory as Directory5)!.Folders![i]!.Name = strings[nameOffset]; break;
case 6: (directory as Directory6)!.Folders![i]!.Name = strings[nameOffset]; break;
case 7: (directory as Directory7)!.Folders![i]!.Name = strings[nameOffset]; break;
default: return null;
}
folder.Name = directory.StringTable[folder.NameOffset];
}

// Loop through all files to assign names
for (int i = 0; i < fileCount; i++)
for (int i = 0; i < directory.Files.Length; i++)
{
uint nameOffset;
switch (majorVersion)
{
case 4: nameOffset = (directory as Directory4)!.Files![i]!.NameOffset; break;
case 5: nameOffset = (directory as Directory5)!.Files![i]!.NameOffset; break;
case 6: nameOffset = (directory as Directory6)!.Files![i]!.NameOffset; break;
case 7: nameOffset = (directory as Directory7)!.Files![i]!.NameOffset; break;
default: return null;
}
var file = directory.Files[i];
if (file == null)
continue;

switch (majorVersion)
{
case 4: (directory as Directory4)!.Files![i]!.Name = strings[nameOffset]; break;
case 5: (directory as Directory5)!.Files![i]!.Name = strings[nameOffset]; break;
case 6: (directory as Directory6)!.Files![i]!.Name = strings[nameOffset]; break;
case 7: (directory as Directory7)!.Files![i]!.Name = strings[nameOffset]; break;
default: return null;
}
file.Name = directory.StringTable[file.NameOffset];
}

#endregion
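The string table is read as null-terminated ANSI strings keyed by their byte offset from the table start, and folder and file names are then resolved through NameOffset. A self-contained sketch of the same scheme, written against plain Stream APIs rather than the repository's IO extensions:

    using System.Collections.Generic;
    using System.IO;
    using System.Text;

    static Dictionary<long, string> ReadStringTable(Stream data, uint count)
    {
        var table = new Dictionary<long, string>((int)count);
        long start = data.Position;

        for (int i = 0; i < count; i++)
        {
            long key = data.Position - start; // offset relative to table start
            var bytes = new List<byte>();
            int b;
            while ((b = data.ReadByte()) > 0) // stop at NUL or end of stream
                bytes.Add((byte)b);
            table[key] = Encoding.ASCII.GetString(bytes.ToArray());
        }

        return table;
    }

A name lookup is then just table[entry.NameOffset], matching the assignment loops above.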
@@ -484,21 +286,453 @@ namespace SabreTools.Serialization.Deserializers
}

/// <summary>
/// Parse a Stream into an SGA directory header
/// Parse a Stream into an SGA directory
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="majorVersion">SGA major version</param>
/// <returns>Filled SGA directory header on success, null on error</returns>
private static object? ParseDirectoryHeader(Stream data, ushort majorVersion)
/// <returns>Filled SGA directory on success, null on error</returns>
private static Directory5? ParseDirectory5(Stream data)
{
switch (majorVersion)
var directory = new Directory5();

// Cache the current offset
long currentOffset = data.Position;

#region Directory Header

// Try to parse the directory header
var directoryHeader = ParseDirectory5Header(data);
if (directoryHeader == null)
return null;

// Set the directory header
directory.DirectoryHeader = directoryHeader;

#endregion

#region Sections

// Get and adjust the sections offset
long sectionOffset = directoryHeader.SectionOffset;
sectionOffset += currentOffset;

// Validate the offset
if (sectionOffset < 0 || sectionOffset >= data.Length)
return null;

// Seek to the sections
data.Seek(sectionOffset, SeekOrigin.Begin);

// Create the sections array
directory.Sections = new Section5[directoryHeader.SectionCount];

// Try to parse the sections
for (int i = 0; i < directory.Sections.Length; i++)
{
case 4: return ParseDirectory4Header(data);
case 5: return ParseDirectory5Header(data);
case 6: return ParseDirectory5Header(data);
case 7: return ParseDirectory7Header(data);
default: return null;
directory.Sections[i] = ParseSection5(data);
}

#endregion

#region Folders

// Get and adjust the folders offset
long folderOffset = directoryHeader.FolderOffset;
folderOffset += currentOffset;

// Validate the offset
if (folderOffset < 0 || folderOffset >= data.Length)
return null;

// Seek to the folders
data.Seek(folderOffset, SeekOrigin.Begin);

// Create the folders array
directory.Folders = new Folder5[directoryHeader.FolderCount];

// Try to parse the folders
for (int i = 0; i < directory.Folders.Length; i++)
{
directory.Folders[i] = ParseFolder5(data);
}

#endregion

#region Files

// Get and adjust the files offset
long fileOffset = directoryHeader.FileOffset;
fileOffset += currentOffset;

// Validate the offset
if (fileOffset < 0 || fileOffset >= data.Length)
return null;

// Seek to the files
data.Seek(fileOffset, SeekOrigin.Begin);

// Create the files array
directory.Files = new File4[directoryHeader.FileCount];

// Try to parse the files
for (int i = 0; i < directory.Files.Length; i++)
{
directory.Files[i] = ParseFile4(data);
}

#endregion

#region String Table

// Get and adjust the string table offset
long stringTableOffset = directoryHeader.StringTableOffset;
stringTableOffset += currentOffset;

// Validate the offset
if (stringTableOffset < 0 || stringTableOffset >= data.Length)
return null;

// Seek to the string table
data.Seek(stringTableOffset, SeekOrigin.Begin);

// TODO: Are these strings actually indexed by number and not position?
// TODO: If indexed by position, I think it needs to be adjusted by start of table

// Create the strings dictionary
directory.StringTable = new Dictionary<long, string?>((int)directoryHeader.StringTableCount);

// Get the current position to adjust the offsets
long stringTableStart = data.Position;

// Try to parse the strings
for (int i = 0; i < directoryHeader.StringTableCount; i++)
{
long currentPosition = data.Position - stringTableStart;
directory.StringTable[currentPosition] = data.ReadNullTerminatedAnsiString();
}

// Loop through all folders to assign names
for (int i = 0; i < directory.Folders.Length; i++)
{
var folder = directory.Folders[i];
if (folder == null)
continue;

folder.Name = directory.StringTable[folder.NameOffset];
}

// Loop through all files to assign names
for (int i = 0; i < directory.Files.Length; i++)
{
var file = directory.Files[i];
if (file == null)
continue;

file.Name = directory.StringTable[file.NameOffset];
}

#endregion

return directory;
}
/// <summary>
/// Parse a Stream into an SGA directory
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled SGA directory on success, null on error</returns>
private static Directory6? ParseDirectory6(Stream data)
{
var directory = new Directory6();

// Cache the current offset
long currentOffset = data.Position;

#region Directory Header

// Try to parse the directory header
var directoryHeader = ParseDirectory5Header(data);
if (directoryHeader == null)
return null;

// Set the directory header
directory.DirectoryHeader = directoryHeader;

#endregion

#region Sections

// Get and adjust the sections offset
long sectionOffset = directoryHeader.SectionOffset;
sectionOffset += currentOffset;

// Validate the offset
if (sectionOffset < 0 || sectionOffset >= data.Length)
return null;

// Seek to the sections
data.Seek(sectionOffset, SeekOrigin.Begin);

// Create the sections array
directory.Sections = new Section5[directoryHeader.SectionCount];

// Try to parse the sections
for (int i = 0; i < directory.Sections.Length; i++)
{
directory.Sections[i] = ParseSection5(data);
}

#endregion

#region Folders

// Get and adjust the folders offset
long folderOffset = directoryHeader.FolderOffset;
folderOffset += currentOffset;

// Validate the offset
if (folderOffset < 0 || folderOffset >= data.Length)
return null;

// Seek to the folders
data.Seek(folderOffset, SeekOrigin.Begin);

// Create the folders array
directory.Folders = new Folder5[directoryHeader.FolderCount];

// Try to parse the folders
for (int i = 0; i < directory.Folders.Length; i++)
{
directory.Folders[i] = ParseFolder5(data);
}

#endregion

#region Files

// Get and adjust the files offset
long fileOffset = directoryHeader.FileOffset;
fileOffset += currentOffset;

// Validate the offset
if (fileOffset < 0 || fileOffset >= data.Length)
return null;

// Seek to the files
data.Seek(fileOffset, SeekOrigin.Begin);

// Create the files array
directory.Files = new File6[directoryHeader.FileCount];

// Try to parse the files
for (int i = 0; i < directory.Files.Length; i++)
{
directory.Files[i] = ParseFile6(data);
}

#endregion

#region String Table

// Get and adjust the string table offset
long stringTableOffset = directoryHeader.StringTableOffset;
stringTableOffset += currentOffset;

// Validate the offset
if (stringTableOffset < 0 || stringTableOffset >= data.Length)
return null;

// Seek to the string table
data.Seek(stringTableOffset, SeekOrigin.Begin);

// TODO: Are these strings actually indexed by number and not position?
// TODO: If indexed by position, I think it needs to be adjusted by start of table

// Create the strings dictionary
directory.StringTable = new Dictionary<long, string?>((int)directoryHeader.StringTableCount);

// Get the current position to adjust the offsets
long stringTableStart = data.Position;

// Try to parse the strings
for (int i = 0; i < directoryHeader.StringTableCount; i++)
{
long currentPosition = data.Position - stringTableStart;
directory.StringTable[currentPosition] = data.ReadNullTerminatedAnsiString();
}

// Loop through all folders to assign names
for (int i = 0; i < directory.Folders.Length; i++)
{
var folder = directory.Folders[i];
if (folder == null)
continue;

folder.Name = directory.StringTable[folder.NameOffset];
}

// Loop through all files to assign names
for (int i = 0; i < directory.Files.Length; i++)
{
var file = directory.Files[i];
if (file == null)
continue;

file.Name = directory.StringTable[file.NameOffset];
}

#endregion

return directory;
}
/// <summary>
/// Parse a Stream into an SGA directory
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled SGA directory on success, null on error</returns>
private static Directory7? ParseDirectory7(Stream data)
{
var directory = new Directory7();

// Cache the current offset
long currentOffset = data.Position;

#region Directory Header

// Try to parse the directory header
var directoryHeader = ParseDirectory7Header(data);
if (directoryHeader == null)
return null;

// Set the directory header
directory.DirectoryHeader = directoryHeader;

#endregion

#region Sections

// Get and adjust the sections offset
long sectionOffset = directoryHeader.SectionOffset;
sectionOffset += currentOffset;

// Validate the offset
if (sectionOffset < 0 || sectionOffset >= data.Length)
return null;

// Seek to the sections
data.Seek(sectionOffset, SeekOrigin.Begin);

// Create the sections array
directory.Sections = new Section5[directoryHeader.SectionCount];

// Try to parse the sections
for (int i = 0; i < directory.Sections.Length; i++)
{
directory.Sections[i] = ParseSection5(data);
}

#endregion

#region Folders

// Get and adjust the folders offset
long folderOffset = directoryHeader.FolderOffset;
folderOffset += currentOffset;

// Validate the offset
if (folderOffset < 0 || folderOffset >= data.Length)
return null;

// Seek to the folders
data.Seek(folderOffset, SeekOrigin.Begin);

// Create the folders array
directory.Folders = new Folder5[directoryHeader.FolderCount];

// Try to parse the folders
for (int i = 0; i < directory.Folders.Length; i++)
{
directory.Folders[i] = ParseFolder5(data);
}

#endregion

#region Files

// Get and adjust the files offset
long fileOffset = directoryHeader.FileOffset;
fileOffset += currentOffset;

// Validate the offset
if (fileOffset < 0 || fileOffset >= data.Length)
return null;

// Seek to the files
data.Seek(fileOffset, SeekOrigin.Begin);

// Create the files array
directory.Files = new File7[directoryHeader.FileCount];

// Try to parse the files
for (int i = 0; i < directory.Files.Length; i++)
{
directory.Files[i] = ParseFile7(data);
}

#endregion

#region String Table

// Get and adjust the string table offset
long stringTableOffset = directoryHeader.StringTableOffset;
stringTableOffset += currentOffset;

// Validate the offset
if (stringTableOffset < 0 || stringTableOffset >= data.Length)
return null;

// Seek to the string table
data.Seek(stringTableOffset, SeekOrigin.Begin);

// TODO: Are these strings actually indexed by number and not position?
// TODO: If indexed by position, I think it needs to be adjusted by start of table

// Create the strings dictionary
directory.StringTable = new Dictionary<long, string?>((int)directoryHeader.StringTableCount);

// Get the current position to adjust the offsets
long stringTableStart = data.Position;

// Try to parse the strings
for (int i = 0; i < directoryHeader.StringTableCount; i++)
{
long currentPosition = data.Position - stringTableStart;
directory.StringTable[currentPosition] = data.ReadNullTerminatedAnsiString();
}

// Loop through all folders to assign names
for (int i = 0; i < directory.Folders.Length; i++)
{
var folder = directory.Folders[i];
if (folder == null)
continue;

folder.Name = directory.StringTable[folder.NameOffset];
}

// Loop through all files to assign names
for (int i = 0; i < directory.Files.Length; i++)
{
var file = directory.Files[i];
if (file == null)
continue;

file.Name = directory.StringTable[file.NameOffset];
}

#endregion

return directory;
}
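ParseDirectory5, ParseDirectory6, and ParseDirectory7 differ only in the concrete directory, file, and header-parser types; the repeated adjust-validate-seek step could in principle be factored out. A hypothetical helper illustrating that shared step (not part of this commit):

    using System.IO;

    // Adjusts a header-relative offset by the directory base, validates it,
    // and seeks there; returns false when the target falls outside the stream
    static bool TrySeek(Stream data, long baseOffset, long relativeOffset)
    {
        long target = baseOffset + relativeOffset;
        if (target < 0 || target >= data.Length)
            return false;

        data.Seek(target, SeekOrigin.Begin);
        return true;
    }

The commit instead keeps the three parsers fully separate, trading duplication for one straight-line method per version.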
/// <summary>
@@ -508,7 +742,7 @@ namespace SabreTools.Serialization.Deserializers
/// <returns>Filled SGA directory header version 4 on success, null on error</returns>
private static DirectoryHeader4 ParseDirectory4Header(Stream data)
{
DirectoryHeader4 directoryHeader4 = new DirectoryHeader4();
var directoryHeader4 = new DirectoryHeader4();

directoryHeader4.SectionOffset = data.ReadUInt32();
directoryHeader4.SectionCount = data.ReadUInt16();
@@ -529,7 +763,7 @@ namespace SabreTools.Serialization.Deserializers
/// <returns>Filled SGA directory header version 5 on success, null on error</returns>
private static DirectoryHeader5 ParseDirectory5Header(Stream data)
{
DirectoryHeader5 directoryHeader5 = new DirectoryHeader5();
var directoryHeader5 = new DirectoryHeader5();

directoryHeader5.SectionOffset = data.ReadUInt32();
directoryHeader5.SectionCount = data.ReadUInt32();
@@ -550,7 +784,7 @@ namespace SabreTools.Serialization.Deserializers
/// <returns>Filled SGA directory header version 7 on success, null on error</returns>
private static DirectoryHeader7 ParseDirectory7Header(Stream data)
{
DirectoryHeader7 directoryHeader7 = new DirectoryHeader7();
var directoryHeader7 = new DirectoryHeader7();

directoryHeader7.SectionOffset = data.ReadUInt32();
directoryHeader7.SectionCount = data.ReadUInt32();
@@ -574,7 +808,7 @@ namespace SabreTools.Serialization.Deserializers
/// <returns>Filled SGA section version 4 on success, null on error</returns>
private static Section4 ParseSection4(Stream data)
{
Section4 section4 = new Section4();
var section4 = new Section4();

byte[]? section4Alias = data.ReadBytes(64);
if (section4Alias != null)
@@ -599,7 +833,7 @@ namespace SabreTools.Serialization.Deserializers
/// <returns>Filled SGA section version 5 on success, null on error</returns>
private static Section5 ParseSection5(Stream data)
{
Section5 section5 = new Section5();
var section5 = new Section5();

byte[]? section5Alias = data.ReadBytes(64);
if (section5Alias != null)
@@ -624,7 +858,7 @@ namespace SabreTools.Serialization.Deserializers
/// <returns>Filled SGA folder version 4 on success, null on error</returns>
private static Folder4 ParseFolder4(Stream data)
{
Folder4 folder4 = new Folder4();
var folder4 = new Folder4();

folder4.NameOffset = data.ReadUInt32();
folder4.Name = null; // Read from string table
@@ -644,7 +878,7 @@ namespace SabreTools.Serialization.Deserializers
/// <returns>Filled SGA folder version 5 on success, null on error</returns>
private static Folder5 ParseFolder5(Stream data)
{
Folder5 folder5 = new Folder5();
var folder5 = new Folder5();

folder5.NameOffset = data.ReadUInt32();
folder5.Name = null; // Read from string table
@@ -664,7 +898,7 @@ namespace SabreTools.Serialization.Deserializers
/// <returns>Filled SGA file version 4 on success, null on error</returns>
private static File4 ParseFile4(Stream data)
{
File4 file4 = new File4();
var file4 = new File4();

file4.NameOffset = data.ReadUInt32();
file4.Name = null; // Read from string table
@@ -686,7 +920,7 @@ namespace SabreTools.Serialization.Deserializers
/// <returns>Filled SGA file version 6 on success, null on error</returns>
private static File6 ParseFile6(Stream data)
{
File6 file6 = new File6();
var file6 = new File6();

file6.NameOffset = data.ReadUInt32();
file6.Name = null; // Read from string table
@@ -709,7 +943,7 @@ namespace SabreTools.Serialization.Deserializers
/// <returns>Filled SGA file version 7 on success, null on error</returns>
private static File7 ParseFile7(Stream data)
{
File7 file7 = new File7();
var file7 = new File7();

file7.NameOffset = data.ReadUInt32();
file7.Name = null; // Read from string table
File diff suppressed because it is too large
@@ -40,7 +40,7 @@ namespace SabreTools.Serialization.Deserializers

#region Extended Header

if (header?.Version == 2)
if (header.Version == 2)
{
// Try to parse the extended header
var extendedHeader = ParseExtendedHeader(data);
@@ -69,8 +69,8 @@ namespace SabreTools.Serialization.Deserializers

if (header?.Version == 2
&& file.ExtendedHeader != null
&& file.ExtendedHeader.ArchiveHashLength > 0
&& data.Position + file.ExtendedHeader.ArchiveHashLength <= data.Length)
&& file.ExtendedHeader.ArchiveMD5SectionSize > 0
&& data.Position + file.ExtendedHeader.ArchiveMD5SectionSize <= data.Length)
{
// Create the archive hashes list
var archiveHashes = new List<ArchiveHash>();
@@ -79,7 +79,7 @@ namespace SabreTools.Serialization.Deserializers
initialOffset = data.Position;

// Try to parse the directory items
while (data.Position < initialOffset + file.ExtendedHeader.ArchiveHashLength)
while (data.Position < initialOffset + file.ExtendedHeader.ArchiveMD5SectionSize)
{
var archiveHash = ParseArchiveHash(data);
if (archiveHash == null)
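These hunks rename ArchiveHashLength to ArchiveMD5SectionSize (apparently aligning with the field name commonly used for the VPK version 2 header) while keeping the size-bounded read loop. That loop pattern in isolation, as a hedged standalone sketch:

    using System;
    using System.Collections.Generic;
    using System.IO;

    // Reads records until the declared section size is exhausted;
    // the record parser and element type are illustrative placeholders
    static List<T> ReadSection<T>(Stream data, long sectionSize, Func<Stream, T?> parseOne)
        where T : class
    {
        var items = new List<T>();
        long start = data.Position;

        while (data.Position < start + sectionSize)
        {
            var item = parseOne(data);
            if (item == null)
                break; // malformed record: stop rather than loop forever

            items.Add(item);
        }

        return items;
    }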
@@ -1,226 +0,0 @@
using System.IO;
using System.Text;
using SabreTools.IO.Extensions;
using SabreTools.Models.WAD;
using static SabreTools.Models.WAD.Constants;

namespace SabreTools.Serialization.Deserializers
{
public class WAD : BaseBinaryDeserializer<Models.WAD.File>
{
/// <inheritdoc/>
public override Models.WAD.File? Deserialize(Stream? data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
return null;

// If the offset is out of bounds
if (data.Position < 0 || data.Position >= data.Length)
return null;

// Cache the current offset
long initialOffset = data.Position;

// Create a new Half-Life Texture Package to fill
var file = new Models.WAD.File();

#region Header

// Try to parse the header
var header = ParseHeader(data);
if (header == null)
return null;

// Set the package header
file.Header = header;

#endregion

#region Lumps

// Get the lump offset
uint lumpOffset = header.LumpOffset;
if (lumpOffset < 0 || lumpOffset >= data.Length)
return null;

// Seek to the lump offset
data.Seek(lumpOffset, SeekOrigin.Begin);

// Create the lump array
file.Lumps = new Lump[header.LumpCount];
for (int i = 0; i < header.LumpCount; i++)
{
var lump = ParseLump(data);
if (lump == null)
return null;

file.Lumps[i] = lump;
}

#endregion

#region Lump Infos

// Create the lump info array
file.LumpInfos = new LumpInfo?[header.LumpCount];
for (int i = 0; i < header.LumpCount; i++)
{
var lump = file.Lumps[i];
if (lump == null)
{
file.LumpInfos[i] = null;
continue;
}

if (lump.Compression != 0)
{
file.LumpInfos[i] = null;
continue;
}

// Get the lump info offset
uint lumpInfoOffset = lump.Offset;
if (lumpInfoOffset < 0 || lumpInfoOffset >= data.Length)
{
file.LumpInfos[i] = null;
continue;
}

// Seek to the lump info offset
data.Seek(lumpInfoOffset, SeekOrigin.Begin);

// Try to parse the lump info -- TODO: Do we ever set the mipmap level?
var lumpInfo = ParseLumpInfo(data, lump.Type);
file.LumpInfos[i] = lumpInfo;
}

#endregion

return file;
}

/// <summary>
/// Parse a Stream into a Half-Life Texture Package header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Texture Package header on success, null on error</returns>
private static Header? ParseHeader(Stream data)
{
var header = data.ReadType<Header>();

if (header == null)
return null;
if (header.Signature != SignatureString)
return null;

return header;
}

/// <summary>
/// Parse a Stream into a Half-Life Texture Package lump
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Texture Package lump on success, null on error</returns>
private static Lump? ParseLump(Stream data)
{
return data.ReadType<Lump>();
}

/// <summary>
/// Parse a Stream into a Half-Life Texture Package lump info
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="type">Lump type</param>
/// <param name="mipmap">Mipmap level</param>
/// <returns>Filled Half-Life Texture Package lump info on success, null on error</returns>
private static LumpInfo? ParseLumpInfo(Stream data, byte type, uint mipmap = 0)
{
// TODO: Use marshalling here instead of building
LumpInfo lumpInfo = new LumpInfo();

// Cache the initial offset
long initialOffset = data.Position;

// Type 0x42 has no name, type 0x43 does. Are these flags?
if (type == 0x42)
{
if (mipmap > 0)
return null;

lumpInfo.Width = data.ReadUInt32();
lumpInfo.Height = data.ReadUInt32();
lumpInfo.PixelData = data.ReadBytes((int)(lumpInfo.Width * lumpInfo.Height));
lumpInfo.PaletteSize = data.ReadUInt16();
}
else if (type == 0x43)
{
if (mipmap > 3)
return null;

byte[]? name = data.ReadBytes(16);
if (name != null)
lumpInfo.Name = Encoding.ASCII.GetString(name);
lumpInfo.Width = data.ReadUInt32();
lumpInfo.Height = data.ReadUInt32();
lumpInfo.PixelOffset = data.ReadUInt32();
lumpInfo.UnknownData = data.ReadBytes(12);

// Cache the current offset
long currentOffset = data.Position;

// Seek to the pixel data
data.Seek(initialOffset + lumpInfo.PixelOffset, SeekOrigin.Begin);

// Read the pixel data
lumpInfo.PixelData = data.ReadBytes((int)(lumpInfo.Width * lumpInfo.Height));

// Seek back to the offset
data.Seek(currentOffset, SeekOrigin.Begin);

uint pixelSize = lumpInfo.Width * lumpInfo.Height;

// Mipmap data -- TODO: How do we determine this during initial parsing?
switch (mipmap)
{
case 1: _ = data.ReadBytes((int)pixelSize); break;
case 2: _ = data.ReadBytes((int)(pixelSize + (pixelSize / 4))); break;
case 3: _ = data.ReadBytes((int)(pixelSize + (pixelSize / 4) + (pixelSize / 16))); break;
default: return null;
}

_ = data.ReadBytes((int)(pixelSize + (pixelSize / 4) + (pixelSize / 16) + (pixelSize / 64))); // Pixel data
lumpInfo.PaletteSize = data.ReadUInt16();
lumpInfo.PaletteData = data.ReadBytes((int)lumpInfo.PaletteSize * 3);
}
else
{
return null;
}

// Adjust based on mipmap level
switch (mipmap)
{
case 1:
lumpInfo.Width /= 2;
lumpInfo.Height /= 2;
break;

case 2:
lumpInfo.Width /= 4;
lumpInfo.Height /= 4;
break;

case 3:
lumpInfo.Width /= 8;
lumpInfo.Height /= 8;
break;

default:
return null;
}

return lumpInfo;
}
}
}
263 SabreTools.Serialization/Deserializers/WAD3.cs Normal file
@@ -0,0 +1,263 @@
using System.IO;
using System.Text;
using SabreTools.IO.Extensions;
using SabreTools.Models.WAD3;
using static SabreTools.Models.WAD3.Constants;

namespace SabreTools.Serialization.Deserializers
{
public class WAD3 : BaseBinaryDeserializer<Models.WAD3.File>
{
/// <inheritdoc/>
public override Models.WAD3.File? Deserialize(Stream? data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
return null;

// If the offset is out of bounds
if (data.Position < 0 || data.Position >= data.Length)
return null;

// Create a new Half-Life Texture Package to fill
var file = new Models.WAD3.File();

#region Header

// Try to parse the header
var header = ParseHeader(data);
if (header == null)
return null;

// Set the package header
file.Header = header;

#endregion

#region Directory Entries

// Get the directory offset
uint dirOffset = header.DirOffset;
if (dirOffset < 0 || dirOffset >= data.Length)
return null;

// Seek to the lump offset
data.Seek(dirOffset, SeekOrigin.Begin);

// Create the lump array
file.DirEntries = new DirEntry[header.NumDirs];
for (int i = 0; i < header.NumDirs; i++)
{
var lump = ParseDirEntry(data);
if (lump == null)
return null;

file.DirEntries[i] = lump;
}

#endregion

#region File Entries

// Create the file entry array
file.FileEntries = new FileEntry?[header.NumDirs];
for (int i = 0; i < header.NumDirs; i++)
{
var dirEntry = file.DirEntries[i];
if (dirEntry == null)
continue;

// TODO: Handle compressed entries
if (dirEntry.Compression != 0)
continue;

// Get the file entry offset
uint fileEntryOffset = dirEntry.Offset;
if (fileEntryOffset < 0 || fileEntryOffset >= data.Length)
continue;

// Seek to the file entry offset
data.Seek(fileEntryOffset, SeekOrigin.Begin);

// Try to parse the file entry
var fileEntry = ParseFileEntry(data, dirEntry.Type);
if (fileEntry != null)
file.FileEntries[i] = fileEntry;
}

#endregion

return file;
}

/// <summary>
/// Parse a Stream into a Half-Life Texture Package header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Texture Package header on success, null on error</returns>
private static Header? ParseHeader(Stream data)
{
var header = data.ReadType<Header>();

if (header == null)
return null;
if (header.Signature != SignatureString)
return null;

return header;
}

/// <summary>
/// Parse a Stream into a Half-Life Texture Package directory entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Texture Package directory entry on success, null on error</returns>
private static DirEntry? ParseDirEntry(Stream data)
{
return data.ReadType<DirEntry>();
}

/// <summary>
/// Parse a Stream into a Half-Life Texture Package file entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="type">File entry type</param>
/// <returns>Filled Half-Life Texture Package file entry on success, null on error</returns>
private static FileEntry? ParseFileEntry(Stream data, FileType type)
{
return type switch
{
FileType.Spraydecal
or FileType.Miptex => ParseMipTex(data),
FileType.Qpic => ParseQpicImage(data),
FileType.Font => ParseFont(data),
_ => null,
};
}
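A hypothetical call site for the new deserializer, using only members that appear in the code above (the file path and variable names are illustrative):

    using System.IO;
    using SabreTools.Serialization.Deserializers;

    // Open a texture package and list where each directory entry points
    using var stream = File.OpenRead("halflife.wad"); // illustrative path
    var wad3 = new WAD3().Deserialize(stream);
    if (wad3?.DirEntries != null)
    {
        foreach (var entry in wad3.DirEntries)
            System.Console.WriteLine(entry?.Offset);
    }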
/// <summary>
/// Parse a Stream into a Half-Life Texture Package MipTex
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Texture Package MipTex on success, null on error</returns>
private static MipTex ParseMipTex(Stream data)
{
var miptex = new MipTex();

byte[] nameBytes = data.ReadBytes(16);
miptex.Name = Encoding.ASCII.GetString(nameBytes).TrimEnd('\0');
miptex.Width = data.ReadUInt32();
miptex.Height = data.ReadUInt32();
miptex.MipOffsets = new uint[4];
for (int i = 0; i < miptex.MipOffsets.Length; i++)
{
miptex.MipOffsets[i] = data.ReadUInt32();
}
miptex.MipImages = new MipMap[4];
for (int i = 0; i < miptex.MipImages.Length; i++)
{
miptex.MipImages[i] = ParseMipMap(data, miptex.Width, miptex.Height);
}
miptex.ColorsUsed = data.ReadUInt16();
miptex.Palette = new byte[miptex.ColorsUsed][];
for (int i = 0; i < miptex.ColorsUsed; i++)
{
miptex.Palette[i] = data.ReadBytes(3);
}

return miptex;
}

/// <summary>
/// Parse a Stream into a Half-Life Texture Package MipMap
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Texture Package MipMap on success, null on error</returns>
private static MipMap ParseMipMap(Stream data, uint width, uint height)
{
var mipmap = new MipMap();

mipmap.Data = new byte[width][];
for (int i = 0; i < width; i++)
{
mipmap.Data[i] = data.ReadBytes((int)height);
}

return mipmap;
}

/// <summary>
/// Parse a Stream into a Half-Life Texture Package Qpic image
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Texture Package Qpic image on success, null on error</returns>
private static QpicImage ParseQpicImage(Stream data)
{
var qpic = new QpicImage();

qpic.Width = data.ReadUInt32();
qpic.Height = data.ReadUInt32();
qpic.Data = new byte[qpic.Height][];
for (int i = 0; i < qpic.Height; i++)
{
qpic.Data[i] = data.ReadBytes((int)qpic.Width);
}
qpic.ColorsUsed = data.ReadUInt16();
qpic.Palette = new byte[qpic.ColorsUsed][];
for (int i = 0; i < qpic.ColorsUsed; i++)
{
qpic.Palette[i] = data.ReadBytes(3);
}

return qpic;
}

/// <summary>
/// Parse a Stream into a Half-Life Texture Package font
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Texture Package font on success, null on error</returns>
private static Font ParseFont(Stream data)
{
var font = new Font();

font.Width = data.ReadUInt32();
font.Height = data.ReadUInt32();
font.RowCount = data.ReadUInt32();
font.RowHeight = data.ReadUInt32();
font.FontInfo = new CharInfo[256];
for (int i = 0; i < font.FontInfo.Length; i++)
{
font.FontInfo[i] = ParseCharInfo(data);
}
font.Data = new byte[font.Height][];
for (int i = 0; i < font.Height; i++)
{
font.Data[i] = data.ReadBytes((int)font.Width);
}
font.ColorsUsed = data.ReadUInt16();
font.Palette = new byte[font.ColorsUsed][];
for (int i = 0; i < font.ColorsUsed; i++)
{
font.Palette[i] = data.ReadBytes(3);
}

return font;
}

/// <summary>
/// Parse a Stream into a Half-Life Texture Package CharInfo
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Texture Package CharInfo on success, null on error</returns>
private static CharInfo ParseCharInfo(Stream data)
{
var charinfo = new CharInfo();

charinfo.StartOffset = data.ReadUInt16();
charinfo.CharWidth = data.ReadUInt16();

return charinfo;
}
}
}
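The palettes above are stored as ColorsUsed consecutive 3-byte RGB triplets, and the pixel data as 8-bit palette indices. A hedged sketch of expanding indexed rows into packed RGB bytes using that layout (the helper itself is illustrative, not from the commit):

    // Expands indexed pixel rows into packed RGB bytes via the 3-byte palette;
    // assumes palette[index] = { r, g, b } exactly as read above
    static byte[] ToRgb(byte[][] rows, byte[][] palette)
    {
        int height = rows.Length;
        int width = height > 0 ? rows[0].Length : 0;
        var rgb = new byte[height * width * 3];

        for (int y = 0; y < height; y++)
        {
            for (int x = 0; x < width; x++)
            {
                byte[] color = palette[rows[y][x]];
                int o = (y * width + x) * 3;
                rgb[o] = color[0];
                rgb[o + 1] = color[1];
                rgb[o + 2] = color[2];
            }
        }

        return rgb;
    }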
@@ -1,5 +1,4 @@
using System.IO;
using System.Text;
using SabreTools.IO.Extensions;
using SabreTools.Models.XZP;
using static SabreTools.Models.XZP.Constants;
@@ -19,9 +18,6 @@ namespace SabreTools.Serialization.Deserializers
if (data.Position < 0 || data.Position >= data.Length)
return null;

// Cache the current offset
long initialOffset = data.Position;

// Create a new XBox Package File to fill
var file = new Models.XZP.File();
@@ -43,11 +39,11 @@ namespace SabreTools.Serialization.Deserializers
file.DirectoryEntries = new DirectoryEntry[header.DirectoryEntryCount];

// Try to parse the directory entries
for (int i = 0; i < header.DirectoryEntryCount; i++)
for (int i = 0; i < file.DirectoryEntries.Length; i++)
{
var directoryEntry = ParseDirectoryEntry(data);
if (directoryEntry == null)
return null;
continue;

file.DirectoryEntries[i] = directoryEntry;
}
@@ -62,11 +58,11 @@ namespace SabreTools.Serialization.Deserializers
file.PreloadDirectoryEntries = new DirectoryEntry[header.PreloadDirectoryEntryCount];

// Try to parse the preload directory entries
for (int i = 0; i < header.PreloadDirectoryEntryCount; i++)
for (int i = 0; i < file.PreloadDirectoryEntries.Length; i++)
{
var directoryEntry = ParseDirectoryEntry(data);
if (directoryEntry == null)
return null;
continue;

file.PreloadDirectoryEntries[i] = directoryEntry;
}
@@ -82,11 +78,11 @@ namespace SabreTools.Serialization.Deserializers
file.PreloadDirectoryMappings = new DirectoryMapping[header.PreloadDirectoryEntryCount];

// Try to parse the preload directory mappings
for (int i = 0; i < header.PreloadDirectoryEntryCount; i++)
for (int i = 0; i < file.PreloadDirectoryMappings.Length; i++)
{
var directoryMapping = ParseDirectoryMapping(data);
if (directoryMapping == null)
return null;
continue;

file.PreloadDirectoryMappings[i] = directoryMapping;
}
@@ -110,7 +106,7 @@ namespace SabreTools.Serialization.Deserializers
file.DirectoryItems = new DirectoryItem[header.DirectoryItemCount];

// Try to parse the directory items
for (int i = 0; i < header.DirectoryItemCount; i++)
for (int i = 0; i < file.DirectoryItems.Length; i++)
{
var directoryItem = ParseDirectoryItem(data);
file.DirectoryItems[i] = directoryItem;
@@ -146,9 +142,7 @@ namespace SabreTools.Serialization.Deserializers
{
var header = data.ReadType<Header>();

if (header == null)
return null;
if (header.Signature != HeaderSignatureString)
if (header?.Signature != HeaderSignatureString)
return null;
if (header.Version != 6)
return null;
@@ -214,9 +208,7 @@ namespace SabreTools.Serialization.Deserializers
{
var footer = data.ReadType<Footer>();

if (footer == null)
return null;
if (footer.Signature != FooterSignatureString)
if (footer?.Signature != FooterSignatureString)
return null;

return footer;
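The header and footer checks collapse the explicit null test into a null-conditional comparison: when ReadType returns null, header?.Signature evaluates to null, the inequality holds, and the method bails out in one branch. A minimal standalone sketch of the idiom (the type and signature constant are illustrative placeholders):

    sealed class PackageHeader
    {
        public string? Signature;
        public int Version;
    }

    static PackageHeader? Validate(PackageHeader? header)
    {
        // Covers both "failed to read" (null) and "wrong signature" in one test
        if (header?.Signature != "SIG0") // placeholder signature
            return null;
        if (header.Version != 6)
            return null;

        return header;
    }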
9 SabreTools.Serialization/ExtensionAttribute.cs Normal file
@@ -0,0 +1,9 @@
#if NET20

namespace System.Runtime.CompilerServices
{
[AttributeUsage(AttributeTargets.Assembly | AttributeTargets.Class | AttributeTargets.Method)]
internal sealed class ExtensionAttribute : Attribute {}
}

#endif
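The C# compiler marks extension methods with System.Runtime.CompilerServices.ExtensionAttribute, which does not ship with .NET Framework 2.0; re-declaring it internally under #if NET20 lets the net20 target compile extension methods too. An illustrative extension that this shim makes possible (not part of the commit):

    namespace SabreTools.Serialization
    {
        internal static class StreamHelpers
        {
            // Compiles on net20 because ExtensionAttribute is declared above
            public static bool IsAtEnd(this System.IO.Stream stream)
                => stream.Position >= stream.Length;
        }
    }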
@@ -63,7 +63,7 @@ namespace SabreTools.Serialization
Wrapper.SGA item => item.PrettyPrint(),
Wrapper.VBSP item => item.PrettyPrint(),
Wrapper.VPK item => item.PrettyPrint(),
Wrapper.WAD item => item.PrettyPrint(),
Wrapper.WAD3 item => item.PrettyPrint(),
Wrapper.XeMID item => item.PrettyPrint(),
Wrapper.XMID item => item.PrettyPrint(),
Wrapper.XZP item => item.PrettyPrint(),
@@ -108,7 +108,7 @@ namespace SabreTools.Serialization
Wrapper.SGA item => item.ExportJSON(),
Wrapper.VBSP item => item.ExportJSON(),
Wrapper.VPK item => item.ExportJSON(),
Wrapper.WAD item => item.ExportJSON(),
Wrapper.WAD3 item => item.ExportJSON(),
Wrapper.XeMID item => item.ExportJSON(),
Wrapper.XMID item => item.ExportJSON(),
Wrapper.XZP item => item.ExportJSON(),
@@ -328,7 +328,7 @@ namespace SabreTools.Serialization
PIC.Print(builder, item.Model);
return builder;
}

/// <summary>
/// Export the item information as pretty-printed text
/// </summary>
@@ -412,10 +412,10 @@ namespace SabreTools.Serialization
/// <summary>
/// Export the item information as pretty-printed text
/// </summary>
private static StringBuilder PrettyPrint(this Wrapper.WAD item)
private static StringBuilder PrettyPrint(this Wrapper.WAD3 item)
{
var builder = new StringBuilder();
WAD.Print(builder, item.Model);
WAD3.Print(builder, item.Model);
return builder;
}
@@ -21,7 +21,7 @@ namespace SabreTools.Serialization.Printers
builder.AppendLine(svm.Day, "Day");
builder.AppendLine(svm.Unknown2, "Unknown 2");
builder.AppendLine(svm.Length, "Length");
//builder.AppendLine(svm.Data, "Data");
builder.AppendLine(svm.Length, "Data skipped...");
builder.AppendLine();
}
}
@@ -1,29 +1,26 @@
using System.Text;
using SabreTools.Models.BSP;
using SabreTools.Serialization.Interfaces;
using static SabreTools.Models.BSP.Constants;

namespace SabreTools.Serialization.Printers
{
public class BSP : IPrinter<File>
public class BSP : IPrinter<BspFile>
{
/// <inheritdoc/>
public void PrintInformation(StringBuilder builder, File model)
public void PrintInformation(StringBuilder builder, BspFile model)
=> Print(builder, model);

public static void Print(StringBuilder builder, File file)
public static void Print(StringBuilder builder, BspFile file)
{
builder.AppendLine("BSP Information:");
builder.AppendLine("-------------------------");
builder.AppendLine();

Print(builder, file.Header);
Print(builder, file.Lumps);
Print(builder, file.TextureHeader);
Print(builder, file.Textures);
PrintLumps(builder, file);
}

private static void Print(StringBuilder builder, Header? header)
private static void Print(StringBuilder builder, BspHeader? header)
{
builder.AppendLine(" Header Information:");
builder.AppendLine(" -------------------------");
@@ -38,30 +35,21 @@ namespace SabreTools.Serialization.Printers
builder.AppendLine();
}

private static void Print(StringBuilder builder, Lump?[]? lumps)
private static void PrintLumps(StringBuilder builder, BspFile? model)
{
builder.AppendLine(" Lumps Information:");
builder.AppendLine(" -------------------------");
if (lumps == null || lumps.Length == 0)
if (model?.Header?.Lumps == null || model.Header.Lumps.Length == 0)
{
builder.AppendLine(" No lumps");
builder.AppendLine();
return;
}

for (int i = 0; i < lumps.Length; i++)
for (int i = 0; i < model.Header.Lumps.Length; i++)
{
var lump = lumps[i];
string specialLumpName = string.Empty;
switch (i)
{
case HL_BSP_LUMP_ENTITIES:
specialLumpName = " (entities)";
break;
case HL_BSP_LUMP_TEXTUREDATA:
specialLumpName = " (texture data)";
break;
}
var lump = model.Header.Lumps[i];
string specialLumpName = GetLumpName(i);

builder.AppendLine($" Lump {i}{specialLumpName}");
if (lump == null)
@@ -72,79 +60,360 @@ namespace SabreTools.Serialization.Printers

                builder.AppendLine(lump.Offset, "    Offset");
                builder.AppendLine(lump.Length, "    Length");
+                switch ((LumpType)i)
+                {
+                    case LumpType.LUMP_ENTITIES:
+                        Print(builder, model.Entities);
+                        break;
+                    case LumpType.LUMP_PLANES:
+                        Print(builder, model.PlanesLump);
+                        break;
+                    case LumpType.LUMP_TEXTURES:
+                        Print(builder, model.TextureLump);
+                        break;
+                    case LumpType.LUMP_VERTICES:
+                        Print(builder, model.VerticesLump);
+                        break;
+                    case LumpType.LUMP_VISIBILITY:
+                        Print(builder, model.VisibilityLump);
+                        break;
+                    case LumpType.LUMP_NODES:
+                        Print(builder, model.NodesLump);
+                        break;
+                    case LumpType.LUMP_TEXINFO:
+                        Print(builder, model.TexinfoLump);
+                        break;
+                    case LumpType.LUMP_FACES:
+                        Print(builder, model.FacesLump);
+                        break;
+                    case LumpType.LUMP_LIGHTING:
+                        Print(builder, model.LightmapLump);
+                        break;
+                    case LumpType.LUMP_CLIPNODES:
+                        Print(builder, model.ClipnodesLump);
+                        break;
+                    case LumpType.LUMP_LEAVES:
+                        Print(builder, model.LeavesLump);
+                        break;
+                    case LumpType.LUMP_MARKSURFACES:
+                        Print(builder, model.MarksurfacesLump);
+                        break;
+                    case LumpType.LUMP_EDGES:
+                        Print(builder, model.EdgesLump);
+                        break;
+                    case LumpType.LUMP_SURFEDGES:
+                        Print(builder, model.SurfedgesLump);
+                        break;
+                    case LumpType.LUMP_MODELS:
+                        Print(builder, model.ModelsLump);
+                        break;
+                    default:
+                        builder.AppendLine($"    Unsupported lump type: {(LumpType)i} (0x{i:X4})");
+                        break;
+                }
            }
            builder.AppendLine();
        }

-        private static void Print(StringBuilder builder, TextureHeader? header)
+        private static string GetLumpName(int i)
        {
-            builder.AppendLine("  Texture Header Information:");
-            builder.AppendLine("  -------------------------");
-            if (header == null)
+            return (LumpType)i switch
            {
-                builder.AppendLine("  No texture header");
-                builder.AppendLine();
-                return;
-            }
-
-            builder.AppendLine(header.TextureCount, "  Texture count");
-            builder.AppendLine("  Offsets:");
-            if (header.Offsets == null || header.Offsets.Length == 0)
-            {
-                builder.AppendLine("    No offsets");
-                builder.AppendLine();
-                return;
-            }
-
-            for (int i = 0; i < header.Offsets.Length; i++)
-            {
-                builder.AppendLine(header.Offsets[i], $"    Offset {i}");
-            }
-            builder.AppendLine();
+                LumpType.LUMP_ENTITIES => " - LUMP_ENTITIES",
+                LumpType.LUMP_PLANES => " - LUMP_PLANES",
+                LumpType.LUMP_TEXTURES => " - LUMP_TEXTURES",
+                LumpType.LUMP_VERTICES => " - LUMP_VERTICES",
+                LumpType.LUMP_VISIBILITY => " - LUMP_VISIBILITY",
+                LumpType.LUMP_NODES => " - LUMP_NODES",
+                LumpType.LUMP_TEXINFO => " - LUMP_TEXINFO",
+                LumpType.LUMP_FACES => " - LUMP_FACES",
+                LumpType.LUMP_LIGHTING => " - LUMP_LIGHTING",
+                LumpType.LUMP_CLIPNODES => " - LUMP_CLIPNODES",
+                LumpType.LUMP_LEAVES => " - LUMP_LEAVES",
+                LumpType.LUMP_MARKSURFACES => " - LUMP_MARKSURFACES",
+                LumpType.LUMP_EDGES => " - LUMP_EDGES",
+                LumpType.LUMP_SURFEDGES => " - LUMP_SURFEDGES",
+                LumpType.LUMP_MODELS => " - LUMP_MODELS",
+                _ => string.Empty,
+            };
        }

-        private static void Print(StringBuilder builder, Texture?[]? textures)
+        private static void Print(StringBuilder builder, EntitiesLump? lump)
        {
-            builder.AppendLine("  Textures Information:");
-            builder.AppendLine("  -------------------------");
-            if (textures == null || textures.Length == 0)
+            if (lump?.Entities == null || lump.Entities.Length == 0)
            {
-                builder.AppendLine("  No textures");
-                builder.AppendLine();
+                builder.AppendLine("    No data");
                return;
            }

-            for (int i = 0; i < textures.Length; i++)
+            for (int j = 0; j < lump.Entities.Length; j++)
            {
-                var texture = textures[i];
-                builder.AppendLine($"  Texture {i}");
-                if (texture == null)
-                {
-                    builder.AppendLine("    [NULL]");
-                    continue;
-                }
-
-                builder.AppendLine(texture.Name, "    Name");
-                builder.AppendLine(texture.Width, "    Width");
-                builder.AppendLine(texture.Height, "    Height");
-                builder.AppendLine("    Offsets:");
-                if (texture.Offsets == null || texture.Offsets.Length == 0)
-                {
-                    builder.AppendLine("      No offsets");
-                    continue;
-                }
-                else
-                {
-                    for (int j = 0; j < texture.Offsets.Length; j++)
-                    {
-                        builder.AppendLine(texture.Offsets[i], $"      Offset {j}");
-                    }
-                }
-                // Skip texture data
-                builder.AppendLine(texture.PaletteSize, "    Palette size");
-                // Skip palette data
+                // TODO: Implement entity printing
+                var entity = lump.Entities[j];
+                builder.AppendLine($"    Entity {j}");
+                builder.AppendLine("      Entity data is not parsed properly");
            }
        }

+        private static void Print(StringBuilder builder, PlanesLump? lump)
+        {
+            if (lump?.Planes == null || lump.Planes.Length == 0)
+            {
+                builder.AppendLine("    No data");
+                return;
+            }
+
+            for (int j = 0; j < lump.Planes.Length; j++)
+            {
+                var plane = lump.Planes[j];
+                builder.AppendLine($"    Plane {j}");
+                builder.AppendLine($"      Normal vector: ({plane.NormalVector.X}, {plane.NormalVector.Y}, {plane.NormalVector.Z})");
+                builder.AppendLine(plane.Distance, "      Distance");
+                builder.AppendLine($"      Plane type: {plane.PlaneType} (0x{plane.PlaneType:X})");
+            }
+        }
+
+        private static void Print(StringBuilder builder, TextureLump? lump)
+        {
+            if (lump == null)
+            {
+                builder.AppendLine("    No data");
+                return;
+            }
+
+            if (lump?.Header == null)
+            {
+                builder.AppendLine("    No texture header");
+            }
+            else
+            {
+                builder.AppendLine("    Texture Header:");
+                builder.AppendLine(lump.Header.MipTextureCount, "      MipTexture count");
+                builder.AppendLine(lump.Header.Offsets, "      Offsets");
+            }
+
+            if (lump?.Textures == null || lump.Textures.Length == 0)
+            {
+                builder.AppendLine("    No texture data");
+            }
+            else
+            {
+                builder.AppendLine("    Textures:");
+                for (int j = 0; j < lump.Textures.Length; j++)
+                {
+                    var texture = lump.Textures[j];
+                    builder.AppendLine($"      Texture {j}");
+                    builder.AppendLine(texture.Name, "        Name");
+                    builder.AppendLine(texture.Width, "        Width");
+                    builder.AppendLine(texture.Height, "        Height");
+                    builder.AppendLine(texture.Offsets, "        Offsets");
+                }
+            }
+        }
+
+        private static void Print(StringBuilder builder, VerticesLump? lump)
+        {
+            if (lump?.Vertices == null || lump.Vertices.Length == 0)
+            {
+                builder.AppendLine("    No data");
+                return;
+            }
+
+            for (int j = 0; j < lump.Vertices.Length; j++)
+            {
+                var vertex = lump.Vertices[j];
+                builder.AppendLine($"    Vertex {j}: ({vertex.X}, {vertex.Y}, {vertex.Z})");
+            }
+        }
+
+        private static void Print(StringBuilder builder, VisibilityLump? lump)
+        {
+            if (lump == null)
+            {
+                builder.AppendLine("    No data");
+                return;
+            }
+
+            builder.AppendLine(lump.NumClusters, "    Cluster count");
+            builder.AppendLine("    Byte offsets skipped...");
+        }
+
+        private static void Print(StringBuilder builder, BspNodesLump? lump)
+        {
+            if (lump?.Nodes == null || lump.Nodes.Length == 0)
+            {
+                builder.AppendLine("    No data");
+                return;
+            }
+
+            for (int j = 0; j < lump.Nodes.Length; j++)
+            {
+                var node = lump.Nodes[j];
+                builder.AppendLine($"    Node {j}");
+                builder.AppendLine(node.Children, "      Children");
+                builder.AppendLine(node.Mins, "      Mins");
+                builder.AppendLine(node.Maxs, "      Maxs");
+                builder.AppendLine(node.FirstFace, "      First face index");
+                builder.AppendLine(node.FaceCount, "      Count of faces");
+            }
+        }
+
+        private static void Print(StringBuilder builder, BspTexinfoLump? lump)
+        {
+            if (lump?.Texinfos == null || lump.Texinfos.Length == 0)
+            {
+                builder.AppendLine("    No data");
+                return;
+            }
+
+            for (int j = 0; j < lump.Texinfos.Length; j++)
+            {
+                var texinfo = lump.Texinfos[j];
+                builder.AppendLine($"    Texinfo {j}");
+                builder.AppendLine($"      S-Vector: ({texinfo.SVector.X}, {texinfo.SVector.Y}, {texinfo.SVector.Z})");
+                builder.AppendLine(texinfo.TextureSShift, "      Texture shift in S direction");
+                builder.AppendLine($"      T-Vector: ({texinfo.TVector.X}, {texinfo.TVector.Y}, {texinfo.TVector.Z})");
+                builder.AppendLine(texinfo.TextureTShift, "      Texture shift in T direction");
+                builder.AppendLine(texinfo.MiptexIndex, "      Miptex index");
+                builder.AppendLine($"      Flags: {texinfo.Flags} (0x{texinfo.Flags:X})");
+            }
+        }
+
+        private static void Print(StringBuilder builder, BspFacesLump? lump)
+        {
+            if (lump?.Faces == null || lump.Faces.Length == 0)
+            {
+                builder.AppendLine("    No data");
+                return;
+            }
+
+            for (int j = 0; j < lump.Faces.Length; j++)
+            {
+                var face = lump.Faces[j];
+                builder.AppendLine($"    Face {j}");
+                builder.AppendLine(face.PlaneIndex, "      Plane index");
+                builder.AppendLine(face.PlaneSideCount, "      Plane side count");
+                builder.AppendLine(face.FirstEdgeIndex, "      First surfedge index");
+                builder.AppendLine(face.NumberOfEdges, "      Surfedge count");
+                builder.AppendLine(face.TextureInfoIndex, "      Texture info index");
+                builder.AppendLine(face.LightingStyles, "      Lighting styles");
+                builder.AppendLine(face.LightmapOffset, "      Lightmap offset");
+            }
+        }
+
+        private static void Print(StringBuilder builder, LightmapLump? lump)
+        {
+            if (lump?.Lightmap == null || lump.Lightmap.Length == 0)
+                builder.AppendLine("    No data");
+            else
+                builder.AppendLine("    Lightmap data skipped...");
+        }
+
+        private static void Print(StringBuilder builder, ClipnodesLump? lump)
+        {
+            if (lump?.Clipnodes == null || lump.Clipnodes.Length == 0)
+            {
+                builder.AppendLine("    No data");
+                return;
+            }
+
+            for (int j = 0; j < lump.Clipnodes.Length; j++)
+            {
+                var clipnode = lump.Clipnodes[j];
+                builder.AppendLine($"    Clipnode {j}");
+                builder.AppendLine(clipnode.PlaneIndex, "      Plane index");
+                builder.AppendLine(clipnode.ChildrenIndices, "      Children indices");
+            }
+        }
+
+        private static void Print(StringBuilder builder, BspLeavesLump? lump)
+        {
+            if (lump?.Leaves == null || lump.Leaves.Length == 0)
+            {
+                builder.AppendLine("    No data");
+                return;
+            }
+
+            for (int j = 0; j < lump.Leaves.Length; j++)
+            {
+                var leaf = lump.Leaves[j];
+                builder.AppendLine($"    Leaf {j}");
+                builder.AppendLine($"      Contents: {leaf.Contents} (0x{leaf.Contents:X})");
+                builder.AppendLine(leaf.VisOffset, "      Visibility offset");
+                builder.AppendLine(leaf.Mins, "      Mins");
+                builder.AppendLine(leaf.Maxs, "      Maxs");
+                builder.AppendLine(leaf.FirstMarkSurfaceIndex, "      First marksurface index");
+                builder.AppendLine(leaf.MarkSurfacesCount, "      Marksurfaces count");
+                builder.AppendLine(leaf.AmbientLevels, "      Ambient sound levels");
+            }
+        }
+
+        private static void Print(StringBuilder builder, MarksurfacesLump? lump)
+        {
+            if (lump?.Marksurfaces == null || lump.Marksurfaces.Length == 0)
+            {
+                builder.AppendLine("    No data");
+                return;
+            }
+
+            for (int j = 0; j < lump.Marksurfaces.Length; j++)
+            {
+                var marksurface = lump.Marksurfaces[j];
+                builder.AppendLine(marksurface, $"    Marksurface {j}");
+            }
+        }
+
+        private static void Print(StringBuilder builder, EdgesLump? lump)
+        {
+            if (lump?.Edges == null || lump.Edges.Length == 0)
+            {
+                builder.AppendLine("    No data");
+                return;
+            }
+
+            for (int j = 0; j < lump.Edges.Length; j++)
+            {
+                var edge = lump.Edges[j];
+                builder.AppendLine($"    Edge {j}");
+                builder.AppendLine(edge.VertexIndices, "      Vertex indices");
+            }
+        }
+
+        private static void Print(StringBuilder builder, SurfedgesLump? lump)
+        {
+            if (lump?.Surfedges == null || lump.Surfedges.Length == 0)
+            {
+                builder.AppendLine("    No data");
+                return;
+            }
+
+            for (int j = 0; j < lump.Surfedges.Length; j++)
+            {
+                var surfedge = lump.Surfedges[j];
+                builder.AppendLine(surfedge, $"    Surfedge {j}");
+            }
+        }
+
+        private static void Print(StringBuilder builder, BspModelsLump? lump)
+        {
+            if (lump?.Models == null || lump.Models.Length == 0)
+            {
+                builder.AppendLine("    No data");
+                return;
+            }
+
+            for (int j = 0; j < lump.Models.Length; j++)
+            {
+                var bmodel = lump.Models[j];
+                builder.AppendLine($"    Model {j}");
+                builder.AppendLine($"      Mins: {bmodel.Mins.X}, {bmodel.Mins.Y}, {bmodel.Mins.Z}");
+                builder.AppendLine($"      Maxs: {bmodel.Maxs.X}, {bmodel.Maxs.Y}, {bmodel.Maxs.Z}");
+                builder.AppendLine($"      Origin vector: {bmodel.OriginVector.X}, {bmodel.OriginVector.Y}, {bmodel.OriginVector.Z}");
+                builder.AppendLine(bmodel.HeadnodesIndex, "      Headnodes index");
+                builder.AppendLine(bmodel.VisLeafsCount, "      ??? (VisLeafsCount)");
+                builder.AppendLine(bmodel.FirstFaceIndex, "      First face index");
+                builder.AppendLine(bmodel.FacesCount, "      Faces count");
+            }
+            builder.AppendLine();
+        }
    }
}
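Taken together, the BSP printer hunks above replace the old flat array printing with typed per-lump Print overloads dispatched on LumpType. A minimal sketch of how the rewritten printer might be driven end to end; the Deserializers.BSP.DeserializeStream entry point is an assumption, inferred from the Deserializers.WAD3.DeserializeStream pattern seen later in this diff:

    using System;
    using System.IO;
    using System.Text;

    class BspReportDemo
    {
        static void Main()
        {
            // "map.bsp" is a hypothetical GoldSrc BSP on disk
            using Stream data = File.OpenRead("map.bsp");

            // Assumed entry point, following the other deserializers in this repo
            var model = SabreTools.Serialization.Deserializers.BSP.DeserializeStream(data);
            if (model == null)
                return;

            // Print dispatches each lump to its typed overload via LumpType
            var builder = new StringBuilder();
            SabreTools.Serialization.Printers.BSP.Print(builder, model);
            Console.WriteLine(builder.ToString());
        }
    }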
@@ -1400,6 +1400,18 @@ namespace SabreTools.Serialization.Printers
            {
                builder.AppendLine($"{padding}Data: [Embedded OLE Library File]"); // TODO: Parse this out and print separately
            }
+            else if (magic[0] == 0x50 && magic[1] == 0x4B && magic[2] == 0x03 && magic[3] == 0x04)
+            {
+                builder.AppendLine($"{padding}Data: [Embedded PKZIP file]"); // TODO: Parse this out and print separately
+            }
+            else if (magic[0] == 0x50 && magic[1] == 0x4B && magic[2] == 0x05 && magic[3] == 0x06)
+            {
+                builder.AppendLine($"{padding}Data: [Embedded empty PKZIP file]"); // TODO: Parse this out and print separately
+            }
+            else if (magic[0] == 0x50 && magic[1] == 0x4B && magic[2] == 0x07 && magic[3] == 0x08)
+            {
+                builder.AppendLine($"{padding}Data: [Embedded spanned PKZIP file]"); // TODO: Parse this out and print separately
+            }
            else
            {
                builder.AppendLine(magic, $"{padding}Data");
@@ -1911,6 +1923,18 @@ namespace SabreTools.Serialization.Printers
            {
                builder.AppendLine($"{padding}Data: [Embedded OLE Library File]"); // TODO: Parse this out and print separately
            }
+            else if (magic[0] == 0x50 && magic[1] == 0x4B && magic[2] == 0x03 && magic[3] == 0x04)
+            {
+                builder.AppendLine($"{padding}Data: [Embedded PKZIP file]"); // TODO: Parse this out and print separately
+            }
+            else if (magic[0] == 0x50 && magic[1] == 0x4B && magic[2] == 0x05 && magic[3] == 0x06)
+            {
+                builder.AppendLine($"{padding}Data: [Embedded empty PKZIP file]"); // TODO: Parse this out and print separately
+            }
+            else if (magic[0] == 0x50 && magic[1] == 0x4B && magic[2] == 0x07 && magic[3] == 0x08)
+            {
+                builder.AppendLine($"{padding}Data: [Embedded spanned PKZIP file]"); // TODO: Parse this out and print separately
+            }
            else
            {
                builder.AppendLine(magic, $"{padding}Data");
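The three four-byte signatures added above are the standard PKZIP magics: "PK\x03\x04" is a local file header, "PK\x05\x06" is an end-of-central-directory record (an archive with nothing but that record is empty), and "PK\x07\x08" marks a spanned/split archive. A self-contained sketch of the same classification outside the printer:

    static class PkzipMagic
    {
        // Classifies the first four bytes of a buffer against the PKZIP
        // signatures checked in the hunks above; returns null when none match
        public static string? Classify(byte[] magic)
        {
            if (magic.Length < 4 || magic[0] != 0x50 || magic[1] != 0x4B)
                return null;

            return (magic[2], magic[3]) switch
            {
                (0x03, 0x04) => "Embedded PKZIP file",         // local file header
                (0x05, 0x06) => "Embedded empty PKZIP file",   // end of central directory only
                (0x07, 0x08) => "Embedded spanned PKZIP file", // spanned/split archive marker
                _ => null,
            };
        }
    }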
File diff suppressed because it is too large
@@ -35,7 +35,7 @@ namespace SabreTools.Serialization.Printers

            builder.AppendLine(header.Signature, "  Signature");
            builder.AppendLine(header.Version, "  Version");
-            builder.AppendLine(header.DirectoryLength, "  Directory length");
+            builder.AppendLine(header.TreeSize, "  Tree size");
            builder.AppendLine();
        }

@@ -50,10 +50,10 @@ namespace SabreTools.Serialization.Printers
                return;
            }

-            builder.AppendLine(header.Dummy0, "  Dummy 0");
-            builder.AppendLine(header.ArchiveHashLength, "  Archive hash length");
-            builder.AppendLine(header.ExtraLength, "  Extra length");
-            builder.AppendLine(header.Dummy1, "  Dummy 1");
+            builder.AppendLine(header.FileDataSectionSize, "  File data section size");
+            builder.AppendLine(header.ArchiveMD5SectionSize, "  Archive MD5 section size");
+            builder.AppendLine(header.OtherMD5SectionSize, "  Other MD5 section size");
+            builder.AppendLine(header.SignatureSectionSize, "  Signature section size");
            builder.AppendLine();
        }
@@ -1,107 +0,0 @@
-using System.Text;
-using SabreTools.Models.WAD;
-using SabreTools.Serialization.Interfaces;
-
-namespace SabreTools.Serialization.Printers
-{
-    public class WAD : IPrinter<File>
-    {
-        /// <inheritdoc/>
-        public void PrintInformation(StringBuilder builder, File model)
-            => Print(builder, model);
-
-        public static void Print(StringBuilder builder, File file)
-        {
-            builder.AppendLine("WAD Information:");
-            builder.AppendLine("-------------------------");
-            builder.AppendLine();
-
-            Print(builder, file.Header);
-            Print(builder, file.Lumps);
-            Print(builder, file.LumpInfos);
-        }
-
-        private static void Print(StringBuilder builder, Header? header)
-        {
-            builder.AppendLine("  Header Information:");
-            builder.AppendLine("  -------------------------");
-            if (header == null)
-            {
-                builder.AppendLine("  No header");
-                builder.AppendLine();
-                return;
-            }
-
-            builder.AppendLine(header.Signature, "  Signature");
-            builder.AppendLine(header.LumpCount, "  Lump count");
-            builder.AppendLine(header.LumpOffset, "  Lump offset");
-            builder.AppendLine();
-        }
-
-        private static void Print(StringBuilder builder, Lump?[]? entries)
-        {
-            builder.AppendLine("  Lumps Information:");
-            builder.AppendLine("  -------------------------");
-            if (entries == null || entries.Length == 0)
-            {
-                builder.AppendLine("  No lumps");
-                builder.AppendLine();
-                return;
-            }
-
-            for (int i = 0; i < entries.Length; i++)
-            {
-                var entry = entries[i];
-                builder.AppendLine($"  Lump {i}");
-                if (entry == null)
-                {
-                    builder.AppendLine("    [NULL]");
-                    continue;
-                }
-
-                builder.AppendLine(entry.Offset, "    Offset");
-                builder.AppendLine(entry.DiskLength, "    Disk length");
-                builder.AppendLine(entry.Length, "    Length");
-                builder.AppendLine(entry.Type, "    Type");
-                builder.AppendLine(entry.Compression, "    Compression");
-                builder.AppendLine(entry.Padding0, "    Padding 0");
-                builder.AppendLine(entry.Padding1, "    Padding 1");
-                builder.AppendLine(entry.Name, "    Name");
-            }
-            builder.AppendLine();
-        }
-
-        private static void Print(StringBuilder builder, LumpInfo?[]? entries)
-        {
-            builder.AppendLine("  Lump Infos Information:");
-            builder.AppendLine("  -------------------------");
-            if (entries == null || entries.Length == 0)
-            {
-                builder.AppendLine("  No lump infos");
-                builder.AppendLine();
-                return;
-            }
-
-            for (int i = 0; i < entries.Length; i++)
-            {
-                var entry = entries[i];
-                builder.AppendLine($"  Lump Info {i}");
-                if (entry == null)
-                {
-                    builder.AppendLine("    Lump is compressed");
-                    continue;
-                }
-
-                builder.AppendLine(entry.Name, "    Name");
-                builder.AppendLine(entry.Width, "    Width");
-                builder.AppendLine(entry.Height, "    Height");
-                builder.AppendLine(entry.PixelOffset, "    Pixel offset");
-                // TODO: Print unknown data?
-                // TODO: Print pixel data?
-                builder.AppendLine(entry.PaletteSize, "    Palette size");
-                // TODO: Print palette data?
-            }
-            builder.AppendLine();
-        }
-    }
-}
SabreTools.Serialization/Printers/WAD3.cs (new file, 131 lines)
@@ -0,0 +1,131 @@
+using System.Text;
+using SabreTools.Models.WAD3;
+using SabreTools.Serialization.Interfaces;
+
+namespace SabreTools.Serialization.Printers
+{
+    public class WAD3 : IPrinter<File>
+    {
+        /// <inheritdoc/>
+        public void PrintInformation(StringBuilder builder, File model)
+            => Print(builder, model);
+
+        public static void Print(StringBuilder builder, File file)
+        {
+            builder.AppendLine("WAD Information:");
+            builder.AppendLine("-------------------------");
+            builder.AppendLine();
+
+            Print(builder, file.Header);
+            Print(builder, file.DirEntries);
+            Print(builder, file.FileEntries);
+        }
+
+        private static void Print(StringBuilder builder, Header? header)
+        {
+            builder.AppendLine("  Header Information:");
+            builder.AppendLine("  -------------------------");
+            if (header == null)
+            {
+                builder.AppendLine("  No header");
+                builder.AppendLine();
+                return;
+            }
+
+            builder.AppendLine(header.Signature, "  Signature");
+            builder.AppendLine(header.NumDirs, "  Number of directory entries");
+            builder.AppendLine(header.DirOffset, "  Offset to first directory entry");
+            builder.AppendLine();
+        }
+
+        private static void Print(StringBuilder builder, DirEntry?[]? entries)
+        {
+            builder.AppendLine("  Directory Entries Information:");
+            builder.AppendLine("  -------------------------");
+            if (entries == null || entries.Length == 0)
+            {
+                builder.AppendLine("  No directory entries");
+                builder.AppendLine();
+                return;
+            }
+
+            for (int i = 0; i < entries.Length; i++)
+            {
+                var entry = entries[i];
+                builder.AppendLine($"  Directory Entry {i}");
+                if (entry == null)
+                {
+                    builder.AppendLine("    [NULL]");
+                    continue;
+                }
+
+                builder.AppendLine(entry.Offset, "    Offset");
+                builder.AppendLine(entry.DiskLength, "    Disk length");
+                builder.AppendLine(entry.Length, "    Length");
+                builder.AppendLine($"    File type: {entry.Type} (0x{entry.Type:X})");
+                builder.AppendLine(entry.Compression, "    Compression");
+                builder.AppendLine(entry.Padding, "    Padding");
+                builder.AppendLine(entry.Name, "    Name");
+            }
+            builder.AppendLine();
+        }
+
+        private static void Print(StringBuilder builder, FileEntry?[]? entries)
+        {
+            builder.AppendLine("  File Entries Information:");
+            builder.AppendLine("  -------------------------");
+            if (entries == null || entries.Length == 0)
+            {
+                builder.AppendLine("  No file entries");
+                builder.AppendLine();
+                return;
+            }
+
+            for (int i = 0; i < entries.Length; i++)
+            {
+                var entry = entries[i];
+                builder.AppendLine($"  File Entry {i}");
+                if (entry == null)
+                {
+                    builder.AppendLine("    [NULL]");
+                    continue;
+                }
+
+                if (entry is MipTex mipTex)
+                {
+                    builder.AppendLine(mipTex.Name, "    Name");
+                    builder.AppendLine(mipTex.Width, "    Width");
+                    builder.AppendLine(mipTex.Height, "    Height");
+                    builder.AppendLine(mipTex.MipOffsets, "    Mipmap Offsets");
+                    builder.AppendLine("    Mipmap Images skipped...");
+                    builder.AppendLine(mipTex.ColorsUsed, "    Colors used");
+                    builder.AppendLine("    Palette skipped...");
+                }
+                else if (entry is QpicImage qpic)
+                {
+                    builder.AppendLine(qpic.Width, "    Width");
+                    builder.AppendLine(qpic.Height, "    Height");
+                    builder.AppendLine("    Image data skipped...");
+                    builder.AppendLine(qpic.ColorsUsed, "    Colors used");
+                    builder.AppendLine("    Palette skipped...");
+                }
+                else if (entry is Font font)
+                {
+                    builder.AppendLine(font.Width, "    Width");
+                    builder.AppendLine(font.Height, "    Height");
+                    builder.AppendLine(font.RowCount, "    Row count");
+                    builder.AppendLine(font.RowHeight, "    Row height");
+                    builder.AppendLine("    Font info skipped...");
+                    builder.AppendLine("    Image data skipped...");
+                    builder.AppendLine(font.ColorsUsed, "    Colors used");
+                    builder.AppendLine("    Palette skipped...");
+                }
+                else
+                {
+                    builder.AppendLine("    Unrecognized entry type");
+                }
+            }
+            builder.AppendLine();
+        }
+    }
+}
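The file-entry printing above leans on C# type patterns to pick the right layout per entry subtype (miptex, qpic image, font). A reduced sketch of the same dispatch shape, with hypothetical stand-in types rather than the real SabreTools.Models.WAD3 classes:

    // Hypothetical stand-ins for the WAD3 entry model types
    abstract class FileEntry { }
    sealed class MipTex : FileEntry { public string? Name; }
    sealed class QpicImage : FileEntry { public uint Width, Height; }

    static class EntryDescriber
    {
        // Mirrors the `entry is MipTex mipTex` chain used by the WAD3 printer
        public static string Describe(FileEntry? entry) => entry switch
        {
            MipTex mipTex => $"miptex '{mipTex.Name}'",
            QpicImage qpic => $"qpic {qpic.Width}x{qpic.Height}",
            null => "[NULL]",
            _ => "unrecognized entry type",
        };
    }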
@@ -10,7 +10,7 @@
    <Nullable>enable</Nullable>
    <SuppressTfmSupportBuildWarnings>true</SuppressTfmSupportBuildWarnings>
    <TreatWarningsAsErrors>true</TreatWarningsAsErrors>
-    <Version>1.7.4</Version>
+    <Version>1.7.6</Version>

    <!-- Package Properties -->
    <Authors>Matt Nadareski</Authors>
@@ -24,35 +24,16 @@
    <PackageLicenseExpression>MIT</PackageLicenseExpression>
  </PropertyGroup>

-  <!-- Support All Frameworks -->
-  <PropertyGroup Condition="$(TargetFramework.StartsWith(`net2`)) OR $(TargetFramework.StartsWith(`net3`)) OR $(TargetFramework.StartsWith(`net4`))">
-    <RuntimeIdentifiers>win-x86;win-x64</RuntimeIdentifiers>
-  </PropertyGroup>
-  <PropertyGroup Condition="$(TargetFramework.StartsWith(`netcoreapp`)) OR $(TargetFramework.StartsWith(`net5`))">
-    <RuntimeIdentifiers>win-x86;win-x64;win-arm64;linux-x64;linux-arm64;osx-x64</RuntimeIdentifiers>
-  </PropertyGroup>
-  <PropertyGroup Condition="$(TargetFramework.StartsWith(`net6`)) OR $(TargetFramework.StartsWith(`net7`)) OR $(TargetFramework.StartsWith(`net8`)) OR $(TargetFramework.StartsWith(`net9`))">
-    <RuntimeIdentifiers>win-x86;win-x64;win-arm64;linux-x64;linux-arm64;osx-x64;osx-arm64</RuntimeIdentifiers>
-  </PropertyGroup>
-  <PropertyGroup Condition="$(RuntimeIdentifier.StartsWith(`osx-arm`))">
-    <TargetFrameworks>net6.0;net7.0;net8.0;net9.0</TargetFrameworks>
-  </PropertyGroup>
-
  <ItemGroup>
    <None Include="../README.md" Pack="true" PackagePath="" />
  </ItemGroup>

  <!-- Support for old .NET versions -->
  <ItemGroup Condition="$(TargetFramework.StartsWith(`net2`))">
    <PackageReference Include="Net30.LinqBridge" Version="1.3.0" />
  </ItemGroup>

  <ItemGroup>
    <PackageReference Include="Newtonsoft.Json" Version="13.0.3" />
-    <PackageReference Include="SabreTools.ASN1" Version="1.4.0" />
+    <PackageReference Include="SabreTools.ASN1" Version="1.4.2" />
    <PackageReference Include="SabreTools.Hashing" Version="1.4.0" />
-    <PackageReference Include="SabreTools.IO" Version="1.5.0" />
-    <PackageReference Include="SabreTools.Models" Version="1.5.1" />
+    <PackageReference Include="SabreTools.IO" Version="1.5.1" />
+    <PackageReference Include="SabreTools.Models" Version="1.5.3" />
  </ItemGroup>

</Project>
@@ -84,6 +84,16 @@ namespace SabreTools.Serialization
            return sb.AppendLine($"{prefixString}: {valueString}");
        }

+        /// <summary>
+        /// Append a line containing a Single to a StringBuilder
+        /// </summary>
+        public static StringBuilder AppendLine(this StringBuilder sb, float? value, string prefixString)
+        {
+            value ??= 0;
+            string valueString = $"{value} (0x{value:X8})";
+            return sb.AppendLine($"{prefixString}: {valueString}");
+        }
+
        /// <summary>
        /// Append a line containing a Int64 to a StringBuilder
        /// </summary>
@@ -104,6 +114,16 @@ namespace SabreTools.Serialization
            return sb.AppendLine($"{prefixString}: {valueString}");
        }

+        /// <summary>
+        /// Append a line containing a Double to a StringBuilder
+        /// </summary>
+        public static StringBuilder AppendLine(this StringBuilder sb, double? value, string prefixString)
+        {
+            value ??= 0;
+            string valueString = $"{value} (0x{value:X16})";
+            return sb.AppendLine($"{prefixString}: {valueString}");
+        }
+
        /// <summary>
        /// Append a line containing a string to a StringBuilder
        /// </summary>
@@ -216,6 +236,21 @@ namespace SabreTools.Serialization
            return sb.AppendLine($"{prefixString}: {valueString}");
        }

+        /// <summary>
+        /// Append a line containing a Single[] value to a StringBuilder
+        /// </summary>
+        public static StringBuilder AppendLine(this StringBuilder sb, float[]? value, string prefixString)
+        {
+            string valueString = "[NULL]";
+            if (value != null)
+            {
+                var valueArr = Array.ConvertAll(value, u => u.ToString());
+                valueString = string.Join(", ", valueArr);
+            }
+
+            return sb.AppendLine($"{prefixString}: {valueString}");
+        }
+
        /// <summary>
        /// Append a line containing a Int64[] value to a StringBuilder
        /// </summary>
@@ -246,6 +281,21 @@ namespace SabreTools.Serialization
            return sb.AppendLine($"{prefixString}: {valueString}");
        }

+        /// <summary>
+        /// Append a line containing a Double[] value to a StringBuilder
+        /// </summary>
+        public static StringBuilder AppendLine(this StringBuilder sb, double[]? value, string prefixString)
+        {
+            string valueString = "[NULL]";
+            if (value != null)
+            {
+                var valueArr = Array.ConvertAll(value, u => u.ToString());
+                valueString = string.Join(", ", valueArr);
+            }
+
+            return sb.AppendLine($"{prefixString}: {valueString}");
+        }
+
        /// <summary>
        /// Append a line containing a UInt64[] value to a StringBuilder
        /// </summary>
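The new array overloads join the elements with ", " and fall back to "[NULL]" for a missing array. A small usage sketch of the array forms (output shown in comments is approximate and culture-dependent):

    using System.Text;
    using SabreTools.Serialization; // namespace from the hunk header above

    var sb = new StringBuilder();
    float[]? mins = new float[] { 0.0f, 16.5f, -8.0f };
    sb.AppendLine(mins, "  Mins");            // "  Mins: 0, 16.5, -8"
    sb.AppendLine((double[]?)null, "  Maxs"); // "  Maxs: [NULL]"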
@@ -2,7 +2,7 @@ using System.IO;

namespace SabreTools.Serialization.Wrappers
{
-    public class BSP : WrapperBase<Models.BSP.File>
+    public class BSP : WrapperBase<Models.BSP.BspFile>
    {
        #region Descriptive Properties

@@ -14,14 +14,14 @@ namespace SabreTools.Serialization.Wrappers
        #region Constructors

        /// <inheritdoc/>
-        public BSP(Models.BSP.File? model, byte[]? data, int offset)
+        public BSP(Models.BSP.BspFile? model, byte[]? data, int offset)
            : base(model, data, offset)
        {
            // All logic is handled by the base class
        }

        /// <inheritdoc/>
-        public BSP(Models.BSP.File? model, Stream? data)
+        public BSP(Models.BSP.BspFile? model, Stream? data)
            : base(model, data)
        {
            // All logic is handled by the base class
@@ -1,5 +1,5 @@
+using System;
using System.IO;
-using System.Linq;
using SabreTools.Models.InstallShieldCabinet;

namespace SabreTools.Serialization.Wrappers
@@ -289,7 +289,7 @@ namespace SabreTools.Serialization.Wrappers
            if (Model.FileGroups == null)
                return null;

-            return Model.FileGroups.FirstOrDefault(fg => fg != null && string.Equals(fg.Name, name));
+            return Array.Find(Model.FileGroups, fg => fg != null && string.Equals(fg.Name, name));
        }

        /// <summary>
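Swapping Enumerable.FirstOrDefault for Array.Find drops the System.Linq dependency, which matters for the net2.0/net3.5 targets this library keeps alive; for arrays the two calls produce the same result. A minimal sketch of the substitution:

    using System;

    // Hypothetical data standing in for Model.FileGroups names
    string[] groupNames = new[] { "Setup Files", "Data Files", "Docs" };

    // First match or null, same observable behavior as FirstOrDefault on an
    // array, but implemented on System.Array with no LINQ in sight
    string? hit = Array.Find(groupNames, n => n != null && n.StartsWith("Data"));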
@@ -2,7 +2,6 @@

namespace SabreTools.Serialization.Wrappers
{
-    // TODO: Figure out extension properties
    public partial class MoPaQ : WrapperBase<Models.MoPaQ.Archive>
    {
        #region Descriptive Properties
@@ -331,24 +331,22 @@ namespace SabreTools.Serialization.Wrappers
        }

        /// <summary>
-        /// Get the initial value for the plain counter
+        /// Get if the NoCrypto bit is set
        /// </summary>
-        public byte[] PlainIV(int index)
+        public bool PossiblyDecrypted(int index)
        {
-            if (Partitions == null)
-                return [];
-            if (index < 0 || index >= Partitions.Length)
-                return [];
-
-            var header = Partitions[index];
-            if (header == null || header.MagicID != NCCHMagicNumber)
-                return [];
-
-            byte[] partitionIdBytes = BitConverter.GetBytes(header.PartitionId);
-            Array.Reverse(partitionIdBytes);
-            return [.. partitionIdBytes, .. PlainCounter];
+            var bitMasks = GetBitMasks(index);
+#if NET20 || NET35
+            return (bitMasks & BitMasks.NoCrypto) != 0;
+#else
+            return bitMasks.HasFlag(BitMasks.NoCrypto);
+#endif
        }

        #endregion

        #region Encryption

        /// <summary>
        /// Get the initial value for the ExeFS counter
        /// </summary>
@@ -368,6 +366,25 @@ namespace SabreTools.Serialization.Wrappers
            return [.. partitionIdBytes, .. ExefsCounter];
        }

+        /// <summary>
+        /// Get the initial value for the plain counter
+        /// </summary>
+        public byte[] PlainIV(int index)
+        {
+            if (Partitions == null)
+                return [];
+            if (index < 0 || index >= Partitions.Length)
+                return [];
+
+            var header = Partitions[index];
+            if (header == null || header.MagicID != NCCHMagicNumber)
+                return [];
+
+            byte[] partitionIdBytes = BitConverter.GetBytes(header.PartitionId);
+            Array.Reverse(partitionIdBytes);
+            return [.. partitionIdBytes, .. PlainCounter];
+        }
+
        /// <summary>
        /// Get the initial value for the RomFS counter
        /// </summary>
@@ -387,19 +404,6 @@ namespace SabreTools.Serialization.Wrappers
            return [.. partitionIdBytes, .. RomfsCounter];
        }

-        /// <summary>
-        /// Get if the NoCrypto bit is set
-        /// </summary>
-        public bool PossiblyDecrypted(int index)
-        {
-            var bitMasks = GetBitMasks(index);
-#if NET20 || NET35
-            return (bitMasks & BitMasks.NoCrypto) != 0;
-#else
-            return bitMasks.HasFlag(BitMasks.NoCrypto);
-#endif
-        }
-
        #endregion

        #region Offsets
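The PossiblyDecrypted body shows the usual framework split in this codebase: Enum.HasFlag does not exist on net2.0/net3.5, so the old targets fall back to a raw bitwise test. Both branches compute the same predicate, as this standalone sketch illustrates (the enum values here are illustrative, not the real NCCH flag values):

    [System.Flags]
    enum BitMasks : byte
    {
        FixedCryptoKey = 0x01, // illustrative value
        NoCrypto = 0x04,       // illustrative value
    }

    static class FlagCheck
    {
        // Equivalent to masks.HasFlag(BitMasks.NoCrypto), without the
        // availability (and historical boxing) concerns on older frameworks
        public static bool IsNoCrypto(BitMasks masks)
            => (masks & BitMasks.NoCrypto) != 0;
    }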
@@ -436,6 +440,38 @@ namespace SabreTools.Serialization.Wrappers
            return (partitionOffsetMU + exeFsOffsetMU) * MediaUnitSize;
        }

+        /// <summary>
+        /// Get the offset of a partition logo region
+        /// </summary>
+        /// <returns>Offset to the logo region of the partition, 0 on error</returns>
+        public uint GetLogoRegionOffset(int index)
+        {
+            // No partitions means no size is available
+            if (PartitionsTable == null || Partitions == null)
+                return 0;
+            if (index < 0 || index >= Partitions.Length)
+                return 0;
+
+            // Invalid partition table entry means no size is available
+            var entry = PartitionsTable[index];
+            if (entry == null)
+                return 0;
+
+            // Invalid partition means no size is available
+            var header = Partitions[index];
+            if (header == null || header.MagicID != NCCHMagicNumber)
+                return 0;
+
+            // If the offset is 0, return 0
+            uint logoOffsetMU = header.LogoRegionOffsetInMediaUnits;
+            if (logoOffsetMU == 0)
+                return 0;
+
+            // Return the adjusted offset
+            uint partitionOffsetMU = entry.Offset;
+            return (partitionOffsetMU + logoOffsetMU) * MediaUnitSize;
+        }
+
        /// <summary>
        /// Get the offset of a partition
        /// </summary>
@@ -462,6 +498,38 @@ namespace SabreTools.Serialization.Wrappers
            return partitionOffsetMU * MediaUnitSize;
        }

+        /// <summary>
+        /// Get the offset of a partition plain region
+        /// </summary>
+        /// <returns>Offset to the plain region of the partition, 0 on error</returns>
+        public uint GetPlainRegionOffset(int index)
+        {
+            // No partitions means no size is available
+            if (PartitionsTable == null || Partitions == null)
+                return 0;
+            if (index < 0 || index >= Partitions.Length)
+                return 0;
+
+            // Invalid partition table entry means no size is available
+            var entry = PartitionsTable[index];
+            if (entry == null)
+                return 0;
+
+            // Invalid partition means no size is available
+            var header = Partitions[index];
+            if (header == null || header.MagicID != NCCHMagicNumber)
+                return 0;
+
+            // If the offset is 0, return 0
+            uint prOffsetMU = header.PlainRegionOffsetInMediaUnits;
+            if (prOffsetMU == 0)
+                return 0;
+
+            // Return the adjusted offset
+            uint partitionOffsetMU = entry.Offset;
+            return (partitionOffsetMU + prOffsetMU) * MediaUnitSize;
+        }
+
        /// <summary>
        /// Get the offset of a partition RomFS
        /// </summary>
@@ -540,6 +608,48 @@ namespace SabreTools.Serialization.Wrappers
            return header.ExtendedHeaderSizeInBytes;
        }

+        /// <summary>
+        /// Get the size of a partition logo region
+        /// </summary>
+        /// <returns>Size of the partition logo region in bytes, 0 on error</returns>
+        public uint GetLogoRegionSize(int index)
+        {
+            // Empty partitions array means no size is available
+            if (Partitions == null)
+                return 0;
+            if (index < 0 || index >= Partitions.Length)
+                return 0;
+
+            // Invalid partition header means no size is available
+            var header = Partitions[index];
+            if (header == null)
+                return 0;
+
+            // Return the adjusted size
+            return header.LogoRegionSizeInMediaUnits * MediaUnitSize;
+        }
+
+        /// <summary>
+        /// Get the size of a partition plain region
+        /// </summary>
+        /// <returns>Size of the partition plain region in bytes, 0 on error</returns>
+        public uint GetPlainRegionSize(int index)
+        {
+            // Empty partitions array means no size is available
+            if (Partitions == null)
+                return 0;
+            if (index < 0 || index >= Partitions.Length)
+                return 0;
+
+            // Invalid partition header means no size is available
+            var header = Partitions[index];
+            if (header == null)
+                return 0;
+
+            // Return the adjusted size
+            return header.PlainRegionSizeInMediaUnits * MediaUnitSize;
+        }
+
        /// <summary>
        /// Get the size of a partition RomFS
        /// </summary>
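All of the new offset and size getters follow one formula: NCCH headers store positions and lengths in media units, and the byte value is (partitionOffsetMU + regionOffsetMU) * MediaUnitSize. A worked example, assuming the common 3DS media unit of 0x200 bytes and hypothetical header values:

    const uint MediaUnitSize = 0x200;  // 512 bytes, the usual 3DS media unit
    uint partitionOffsetMU = 0x0B30;   // hypothetical partition table entry offset
    uint logoOffsetMU = 0x0022;        // hypothetical logo region offset

    // (0x0B30 + 0x0022) * 0x200 = 0x0B52 * 0x200 = 0x16A400 bytes (1,483,776)
    uint logoByteOffset = (partitionOffsetMU + logoOffsetMU) * MediaUnitSize;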
@@ -2,7 +2,6 @@ using System.IO;

namespace SabreTools.Serialization.Wrappers
{
-    // TODO: Figure out extension properties
    public class PIC : WrapperBase<Models.PIC.DiscInformation>
    {
        #region Descriptive Properties
@@ -1,7 +1,9 @@
using System;
using System.Collections.Generic;
using System.IO;
+#if NET35_OR_GREATER || NETCOREAPP
using System.Linq;
+#endif
using System.Text;
using SabreTools.IO.Extensions;

@@ -41,10 +43,17 @@ namespace SabreTools.Serialization.Wrappers

                // Populate the raw header padding data based on the source
                uint headerStartAddress = Model.Stub.Header.NewExeHeaderAddr;
-                uint firstSectionAddress = Model.SectionTable
-                    .Select(s => s?.PointerToRawData ?? 0)
-                    .Where(s => s != 0 && s >= headerStartAddress)
-                    .Min();
+                uint firstSectionAddress = uint.MaxValue;
+                foreach (var s in Model.SectionTable)
+                {
+                    if (s == null || s.PointerToRawData == 0)
+                        continue;
+                    if (s.PointerToRawData < headerStartAddress)
+                        continue;
+
+                    if (s.PointerToRawData < firstSectionAddress)
+                        firstSectionAddress = s.PointerToRawData;
+                }

                // Check if the header length is more than 0 before reading data
                int headerLength = (int)(firstSectionAddress - headerStartAddress);
@@ -82,10 +91,17 @@ namespace SabreTools.Serialization.Wrappers

                // Populate the header padding strings based on the source
                uint headerStartAddress = Model.Stub.Header.NewExeHeaderAddr;
-                uint firstSectionAddress = Model.SectionTable
-                    .Select(s => s?.PointerToRawData ?? 0)
-                    .Where(s => s != 0 && s >= headerStartAddress)
-                    .Min();
+                uint firstSectionAddress = uint.MaxValue;
+                foreach (var s in Model.SectionTable)
+                {
+                    if (s == null || s.PointerToRawData == 0)
+                        continue;
+                    if (s.PointerToRawData < headerStartAddress)
+                        continue;
+
+                    if (s.PointerToRawData < firstSectionAddress)
+                        firstSectionAddress = s.PointerToRawData;
+                }

                // Check if the header length is more than 0 before reading strings
                int headerLength = (int)(firstSectionAddress - headerStartAddress);
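Both padding loops above replace a Select/Where/Min chain with a manual scan, which keeps the net2.0 target (no LINQ) happy. One behavioral difference worth noting: Min() throws InvalidOperationException on an empty sequence, while the loop simply leaves firstSectionAddress at uint.MaxValue when no section qualifies. A condensed, self-contained sketch of the pattern, with a hypothetical Section record standing in for the PE section table entries:

    // Hypothetical stand-in for the PE section entries scanned above
    sealed record Section(uint PointerToRawData);

    static class PaddingHelper
    {
        public static uint FirstSectionAddress(Section?[] sections, uint headerStartAddress)
        {
            // Start at the sentinel; where LINQ's Min() would throw on an
            // empty input, this scan reports uint.MaxValue instead
            uint min = uint.MaxValue;
            foreach (var s in sections)
            {
                if (s == null || s.PointerToRawData == 0)
                    continue;
                if (s.PointerToRawData < headerStartAddress)
                    continue;
                if (s.PointerToRawData < min)
                    min = s.PointerToRawData;
            }
            return min;
        }
    }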
@@ -643,10 +659,9 @@ namespace SabreTools.Serialization.Wrappers
            get
            {
                var manifest = GetAssemblyManifest();
-                return manifest?
-                    .AssemblyIdentities?
-                    .FirstOrDefault(ai => !string.IsNullOrEmpty(ai?.Version))?
-                    .Version;
+                var identities = manifest?.AssemblyIdentities ?? [];
+                var versionIdentity = Array.Find(identities, ai => !string.IsNullOrEmpty(ai?.Version));
+                return versionIdentity?.Version;
            }
        }
@@ -837,12 +852,28 @@ namespace SabreTools.Serialization.Wrappers
                return null;

            // Try to find a key that matches
+#if NET20
+            Models.PortableExecutable.StringData? match = null;
+            foreach (var st in stringTable)
+            {
+                if (st?.Children == null)
+                    continue;
+
+                // Return the match if found
+                match = Array.Find(st.Children, sd => sd != null && key.Equals(sd.Key, StringComparison.OrdinalIgnoreCase));
+                if (match != null)
+                    return match.Value?.TrimEnd('\0');
+            }
+
+            return null;
+#else
            var match = stringTable
                .SelectMany(st => st?.Children ?? [])
                .FirstOrDefault(sd => sd != null && key.Equals(sd.Key, StringComparison.OrdinalIgnoreCase));

            // Return either the match or null
            return match?.Value?.TrimEnd('\0');
+#endif
        }

        /// <summary>
@@ -871,70 +902,92 @@ namespace SabreTools.Serialization.Wrappers
        /// Find CodeView debug data by path
        /// </summary>
        /// <param name="path">Partial path to check for</param>
-        /// <returns>Enumerable of matching debug data</returns>
-        public IEnumerable<object?> FindCodeViewDebugTableByPath(string path)
+        /// <returns>List of matching debug data</returns>
+        public List<object?> FindCodeViewDebugTableByPath(string path)
        {
            // Ensure that we have the debug data cached
            if (DebugData == null)
                return [];

-            var nb10Found = DebugData.Select(r => r.Value)
-                .Select(r => r as SabreTools.Models.PortableExecutable.NB10ProgramDatabase)
-                .Where(n => n != null)
-                .Where(n => n?.PdbFileName?.Contains(path) == true)
-                .Select(n => n as object);
-
-            var rsdsFound = DebugData.Select(r => r.Value)
-                .Select(r => r as SabreTools.Models.PortableExecutable.RSDSProgramDatabase)
-                .Where(r => r != null)
-                .Where(r => r?.PathAndFileName?.Contains(path) == true)
-                .Select(r => r as object);
-
-            return nb10Found.Concat(rsdsFound);
+            var debugFound = new List<object?>();
+            foreach (var data in DebugData.Values)
+            {
+                if (data == null)
+                    continue;
+
+                if (data is Models.PortableExecutable.NB10ProgramDatabase n)
+                {
+                    if (n.PdbFileName == null || !n.PdbFileName.Contains(path))
+                        continue;
+
+                    debugFound.Add(n);
+                }
+                else if (data is Models.PortableExecutable.RSDSProgramDatabase r)
+                {
+                    if (r.PathAndFileName == null || !r.PathAndFileName.Contains(path))
+                        continue;
+
+                    debugFound.Add(r);
+                }
+            }
+
+            return debugFound;
        }

        /// <summary>
        /// Find unparsed debug data by string value
        /// </summary>
        /// <param name="value">String value to check for</param>
-        /// <returns>Enumerable of matching debug data</returns>
-        public IEnumerable<byte[]?> FindGenericDebugTableByValue(string value)
+        /// <returns>List of matching debug data</returns>
+        public List<byte[]?> FindGenericDebugTableByValue(string value)
        {
            // Ensure that we have the resource data cached
            if (DebugData == null)
                return [];

-            return DebugData.Select(r => r.Value)
-                .Select(b => b as byte[])
-                .Where(b => b != null)
-                .Where(b =>
-                {
-                    try
-                    {
-                        string? arrayAsASCII = Encoding.ASCII.GetString(b!);
-                        if (arrayAsASCII.Contains(value))
-                            return true;
-                    }
-                    catch { }
-
-                    try
-                    {
-                        string? arrayAsUTF8 = Encoding.UTF8.GetString(b!);
-                        if (arrayAsUTF8.Contains(value))
-                            return true;
-                    }
-                    catch { }
-
-                    try
-                    {
-                        string? arrayAsUnicode = Encoding.Unicode.GetString(b!);
-                        if (arrayAsUnicode.Contains(value))
-                            return true;
-                    }
-                    catch { }
-
-                    return false;
-                });
+            var table = new List<byte[]?>();
+            foreach (var data in DebugData.Values)
+            {
+                if (data == null)
+                    continue;
+                if (data is not byte[] b || b == null)
+                    continue;
+
+                try
+                {
+                    string? arrayAsASCII = Encoding.ASCII.GetString(b);
+                    if (arrayAsASCII.Contains(value))
+                    {
+                        table.Add(b);
+                        continue;
+                    }
+                }
+                catch { }
+
+                try
+                {
+                    string? arrayAsUTF8 = Encoding.UTF8.GetString(b);
+                    if (arrayAsUTF8.Contains(value))
+                    {
+                        table.Add(b);
+                        continue;
+                    }
+                }
+                catch { }
+
+                try
+                {
+                    string? arrayAsUnicode = Encoding.Unicode.GetString(b);
+                    if (arrayAsUnicode.Contains(value))
+                    {
+                        table.Add(b);
+                        continue;
+                    }
+                }
+                catch { }
+            }
+
+            return table;
        }

        #endregion
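FindGenericDebugTableByValue now probes each blob as ASCII, UTF-8, and then UTF-16 before giving up, with try/catch guarding against decoder failures on arbitrary bytes. The core test, isolated into a standalone helper:

    using System.Text;

    static class BlobSearch
    {
        // True if `needle` appears in `blob` under any of the three decodings
        // tried by the rewritten search methods above
        public static bool ContainsUnderAnyEncoding(byte[] blob, string needle)
        {
            foreach (var enc in new Encoding[] { Encoding.ASCII, Encoding.UTF8, Encoding.Unicode })
            {
                try
                {
                    if (enc.GetString(blob).Contains(needle))
                        return true;
                }
                catch { /* undecodable under this encoding; try the next */ }
            }
            return false;
        }
    }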
@@ -1020,132 +1073,193 @@ namespace SabreTools.Serialization.Wrappers
        /// Find dialog box resources by title
        /// </summary>
        /// <param name="title">Dialog box title to check for</param>
-        /// <returns>Enumerable of matching resources</returns>
-        public IEnumerable<Models.PortableExecutable.DialogBoxResource?> FindDialogByTitle(string title)
+        /// <returns>List of matching resources</returns>
+        public List<Models.PortableExecutable.DialogBoxResource?> FindDialogByTitle(string title)
        {
            // Ensure that we have the resource data cached
            if (ResourceData == null)
                return [];

-            return ResourceData.Select(r => r.Value)
-                .Select(r => r as SabreTools.Models.PortableExecutable.DialogBoxResource)
-                .Where(d => d != null)
-                .Where(d =>
-                {
-                    return (d?.DialogTemplate?.TitleResource?.Contains(title) ?? false)
-                        || (d?.ExtendedDialogTemplate?.TitleResource?.Contains(title) ?? false);
-                });
+            var resources = new List<Models.PortableExecutable.DialogBoxResource?>();
+            foreach (var resource in ResourceData.Values)
+            {
+                if (resource == null)
+                    continue;
+                if (resource is not Models.PortableExecutable.DialogBoxResource dbr || dbr == null)
+                    continue;
+
+                if (dbr.DialogTemplate?.TitleResource?.Contains(title) ?? false)
+                    resources.Add(dbr);
+                else if (dbr.ExtendedDialogTemplate?.TitleResource?.Contains(title) ?? false)
+                    resources.Add(dbr);
+            }
+
+            return resources;
        }

        /// <summary>
        /// Find dialog box resources by contained item title
        /// </summary>
        /// <param name="title">Dialog box item title to check for</param>
-        /// <returns>Enumerable of matching resources</returns>
-        public IEnumerable<Models.PortableExecutable.DialogBoxResource?> FindDialogBoxByItemTitle(string title)
+        /// <returns>List of matching resources</returns>
+        public List<Models.PortableExecutable.DialogBoxResource?> FindDialogBoxByItemTitle(string title)
        {
            // Ensure that we have the resource data cached
            if (ResourceData == null)
                return [];

-            return ResourceData.Select(r => r.Value)
-                .Select(r => r as SabreTools.Models.PortableExecutable.DialogBoxResource)
-                .Where(d => d != null)
-                .Where(d =>
-                {
-                    if (d?.DialogItemTemplates != null)
-                    {
-                        return d.DialogItemTemplates
-                            .Where(dit => dit?.TitleResource != null)
-                            .Any(dit => dit?.TitleResource?.Contains(title) == true);
-                    }
-                    else if (d?.ExtendedDialogItemTemplates != null)
-                    {
-                        return d.ExtendedDialogItemTemplates
-                            .Where(edit => edit?.TitleResource != null)
-                            .Any(edit => edit?.TitleResource?.Contains(title) == true);
-                    }
-
-                    return false;
-                });
+            var resources = new List<Models.PortableExecutable.DialogBoxResource?>();
+            foreach (var resource in ResourceData.Values)
+            {
+                if (resource == null)
+                    continue;
+                if (resource is not Models.PortableExecutable.DialogBoxResource dbr || dbr == null)
+                    continue;
+
+                if (dbr.DialogItemTemplates != null)
+                {
+                    var templates = Array.FindAll(dbr.DialogItemTemplates, dit => dit?.TitleResource != null);
+                    if (Array.FindIndex(templates, dit => dit?.TitleResource?.Contains(title) == true) > -1)
+                        resources.Add(dbr);
+                }
+                else if (dbr.ExtendedDialogItemTemplates != null)
+                {
+                    var templates = Array.FindAll(dbr.ExtendedDialogItemTemplates, edit => edit?.TitleResource != null);
+                    if (Array.FindIndex(templates, edit => edit?.TitleResource?.Contains(title) == true) > -1)
+                        resources.Add(dbr);
+                }
+            }
+
+            return resources;
        }

        /// <summary>
        /// Find string table resources by contained string entry
        /// </summary>
        /// <param name="entry">String entry to check for</param>
-        /// <returns>Enumerable of matching resources</returns>
-        public IEnumerable<Dictionary<int, string?>?> FindStringTableByEntry(string entry)
+        /// <returns>List of matching resources</returns>
+        public List<Dictionary<int, string?>?> FindStringTableByEntry(string entry)
        {
            // Ensure that we have the resource data cached
            if (ResourceData == null)
                return [];

+#if NET20
+            var stringTables = new List<Dictionary<int, string?>?>();
+            foreach (var resource in ResourceData.Values)
+            {
+                if (resource == null)
+                    continue;
+                if (resource is not Dictionary<int, string?> st || st == null)
+                    continue;
+
+                foreach (var s in st.Values)
+                {
+                    if (s == null || !s.Contains(entry))
+                        continue;
+
+                    stringTables.Add(st);
+                    break;
+                }
+            }
+
+            return stringTables;
+#else
-            return ResourceData.Select(r => r.Value)
+            return ResourceData.Values
                .Select(r => r as Dictionary<int, string?>)
                .Where(st => st != null)
                .Where(st => st?.Select(kvp => kvp.Value)?
-                    .Any(s => s != null && s.Contains(entry)) == true);
+                    .Any(s => s != null && s.Contains(entry)) == true)
+                .ToList();
+#endif
        }

        /// <summary>
        /// Find unparsed resources by type name
        /// </summary>
        /// <param name="typeName">Type name to check for</param>
-        /// <returns>Enumerable of matching resources</returns>
-        public IEnumerable<byte[]?> FindResourceByNamedType(string typeName)
+        /// <returns>List of matching resources</returns>
+        public List<byte[]?> FindResourceByNamedType(string typeName)
        {
            // Ensure that we have the resource data cached
            if (ResourceData == null)
                return [];

+#if NET20
+            var resources = new List<byte[]?>();
+            foreach (var kvp in ResourceData)
+            {
+                if (!kvp.Key.Contains(typeName))
+                    continue;
+                if (kvp.Value == null || kvp.Value is not byte[] b || b == null)
+                    continue;
+
+                resources.Add(b);
+            }
+
+            return resources;
+#else
            return ResourceData.Where(kvp => kvp.Key.Contains(typeName))
                .Select(kvp => kvp.Value as byte[])
-                .Where(b => b != null);
+                .Where(b => b != null)
+                .ToList();
+#endif
        }

        /// <summary>
        /// Find unparsed resources by string value
        /// </summary>
        /// <param name="value">String value to check for</param>
-        /// <returns>Enumerable of matching resources</returns>
-        public IEnumerable<byte[]?> FindGenericResource(string value)
+        /// <returns>List of matching resources</returns>
+        public List<byte[]?> FindGenericResource(string value)
        {
            // Ensure that we have the resource data cached
            if (ResourceData == null)
                return [];

-            return ResourceData.Select(r => r.Value)
-                .Select(r => r as byte[])
-                .Where(b => b != null)
-                .Where(b =>
-                {
-                    try
-                    {
-                        string? arrayAsASCII = Encoding.ASCII.GetString(b!);
-                        if (arrayAsASCII.Contains(value))
-                            return true;
-                    }
-                    catch { }
-
-                    try
-                    {
-                        string? arrayAsUTF8 = Encoding.UTF8.GetString(b!);
-                        if (arrayAsUTF8.Contains(value))
-                            return true;
-                    }
-                    catch { }
-
-                    try
-                    {
-                        string? arrayAsUnicode = Encoding.Unicode.GetString(b!);
-                        if (arrayAsUnicode.Contains(value))
-                            return true;
-                    }
-                    catch { }
-
-                    return false;
-                });
+            var resources = new List<byte[]?>();
+            foreach (var resource in ResourceData.Values)
+            {
+                if (resource == null)
+                    continue;
+                if (resource is not byte[] b || b == null)
+                    continue;
+
+                try
+                {
+                    string? arrayAsASCII = Encoding.ASCII.GetString(b!);
+                    if (arrayAsASCII.Contains(value))
+                    {
+                        resources.Add(b);
+                        continue;
+                    }
+                }
+                catch { }
+
+                try
+                {
+                    string? arrayAsUTF8 = Encoding.UTF8.GetString(b!);
+                    if (arrayAsUTF8.Contains(value))
+                    {
+                        resources.Add(b);
+                        continue;
+                    }
+                }
+                catch { }
+
+                try
+                {
+                    string? arrayAsUnicode = Encoding.Unicode.GetString(b!);
+                    if (arrayAsUnicode.Contains(value))
+                    {
+                        resources.Add(b);
+                        continue;
+                    }
+                }
+                catch { }
+            }
+
+            return resources;
        }

        #endregion
@@ -1321,11 +1435,11 @@ namespace SabreTools.Serialization.Wrappers

            // If we're checking exactly, return only exact matches
            if (exact)
-                return SectionNames.Any(n => n.Equals(sectionName));
+                return Array.FindIndex(SectionNames, n => n.Equals(sectionName)) > -1;

            // Otherwise, check if section name starts with the value
            else
-                return SectionNames.Any(n => n.StartsWith(sectionName));
+                return Array.FindIndex(SectionNames, n => n.StartsWith(sectionName)) > -1;
        }

        /// <summary>
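The section check keeps its two modes, exact match versus prefix match, now expressed with Array.FindIndex (which returns -1 when nothing matches) instead of LINQ's Any. For instance:

    using System;

    // Hypothetical section names for a PE file
    string[] sectionNames = new[] { ".text", ".rsrc", ".rdata" };

    // Exact: only ".rsrc" itself matches
    bool exact = Array.FindIndex(sectionNames, n => n.Equals(".rsrc")) > -1;

    // Prefix: ".rsrc" and ".rdata" both satisfy a ".r" probe
    bool prefix = Array.FindIndex(sectionNames, n => n.StartsWith(".r")) > -1;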
@@ -2,7 +2,7 @@ using System.IO;

namespace SabreTools.Serialization.Wrappers
{
-    public class VBSP : WrapperBase<Models.VBSP.File>
+    public class VBSP : WrapperBase<Models.BSP.VbspFile>
    {
        #region Descriptive Properties

@@ -14,14 +14,14 @@ namespace SabreTools.Serialization.Wrappers
        #region Constructors

        /// <inheritdoc/>
-        public VBSP(Models.VBSP.File? model, byte[]? data, int offset)
+        public VBSP(Models.BSP.VbspFile? model, byte[]? data, int offset)
            : base(model, data, offset)
        {
            // All logic is handled by the base class
        }

        /// <inheritdoc/>
-        public VBSP(Models.VBSP.File? model, Stream? data)
+        public VBSP(Models.BSP.VbspFile? model, Stream? data)
            : base(model, data)
        {
            // All logic is handled by the base class
@@ -2,38 +2,38 @@ using System.IO;

namespace SabreTools.Serialization.Wrappers
{
    public class WAD : WrapperBase<Models.WAD.File>
    public class WAD3 : WrapperBase<Models.WAD3.File>
    {
        #region Descriptive Properties

        /// <inheritdoc/>
        public override string DescriptionString => "Half-Life Texture Package File (WAD)";
        public override string DescriptionString => "Half-Life Texture Package File (WAD3)";

        #endregion

        #region Constructors

        /// <inheritdoc/>
        public WAD(Models.WAD.File? model, byte[]? data, int offset)
        public WAD3(Models.WAD3.File? model, byte[]? data, int offset)
            : base(model, data, offset)
        {
            // All logic is handled by the base class
        }

        /// <inheritdoc/>
        public WAD(Models.WAD.File? model, Stream? data)
        public WAD3(Models.WAD3.File? model, Stream? data)
            : base(model, data)
        {
            // All logic is handled by the base class
        }

        /// <summary>
        /// Create a WAD from a byte array and offset
        /// Create a WAD3 from a byte array and offset
        /// </summary>
        /// <param name="data">Byte array representing the WAD</param>
        /// <param name="data">Byte array representing the WAD3</param>
        /// <param name="offset">Offset within the array to parse</param>
        /// <returns>A WAD wrapper on success, null on failure</returns>
        public static WAD? Create(byte[]? data, int offset)
        /// <returns>A WAD3 wrapper on success, null on failure</returns>
        public static WAD3? Create(byte[]? data, int offset)
        {
            // If the data is invalid
            if (data == null)
@@ -49,23 +49,23 @@ namespace SabreTools.Serialization.Wrappers
        }

        /// <summary>
        /// Create a WAD from a Stream
        /// Create a WAD3 from a Stream
        /// </summary>
        /// <param name="data">Stream representing the WAD</param>
        /// <returns>A WAD wrapper on success, null on failure</returns>
        public static WAD? Create(Stream? data)
        /// <param name="data">Stream representing the WAD3</param>
        /// <returns>A WAD3 wrapper on success, null on failure</returns>
        public static WAD3? Create(Stream? data)
        {
            // If the data is invalid
            if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
                return null;

            var file = Deserializers.WAD.DeserializeStream(data);
            var file = Deserializers.WAD3.DeserializeStream(data);
            if (file == null)
                return null;

            try
            {
                return new WAD(file, data);
                return new WAD3(file, data);
            }
            catch
            {

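A short usage sketch for the renamed wrapper; the file name is illustrative.

// Sketch only: WAD3.Create returns null rather than throwing when the input
// is unreadable or fails to deserialize, so a null check is the whole API.
using var stream = File.OpenRead("halflife.wad");
var wad = WAD3.Create(stream);
if (wad != null)
    Console.WriteLine(wad.DescriptionString); // "Half-Life Texture Package File (WAD3)"
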
@@ -1,3 +1,8 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
using SabreTools.IO.Extensions;
using SabreTools.Serialization.Interfaces;

namespace SabreTools.Serialization.Wrappers
@@ -27,4 +32,345 @@ namespace SabreTools.Serialization.Wrappers

        #endregion
    }

    public abstract class WrapperBase<T> : WrapperBase, IWrapper<T>
    {
        #region Properties

        /// <inheritdoc/>
        public T GetModel() => Model;

        /// <summary>
        /// Internal model
        /// </summary>
        public T Model { get; private set; }

        #endregion

        #region Instance Variables

        /// <summary>
        /// Source of the original data
        /// </summary>
        protected DataSource _dataSource = DataSource.UNKNOWN;

        /// <summary>
        /// Lock object for reading from the source
        /// </summary>
        private readonly object _streamDataLock = new();

        /// <summary>
        /// Source byte array data
        /// </summary>
        /// <remarks>This is only populated if <see cref="_dataSource"/> is <see cref="DataSource.ByteArray"/></remarks>
        protected byte[]? _byteArrayData = null;

        /// <summary>
        /// Source byte array data offset
        /// </summary>
        /// <remarks>This is only populated if <see cref="_dataSource"/> is <see cref="DataSource.ByteArray"/></remarks>
        protected int _byteArrayOffset = -1;

        /// <summary>
        /// Source Stream data
        /// </summary>
        /// <remarks>This is only populated if <see cref="_dataSource"/> is <see cref="DataSource.Stream"/></remarks>
        protected Stream? _streamData = null;

#if !NETFRAMEWORK
        /// <summary>
        /// JSON serializer options for output printing
        /// </summary>
        protected System.Text.Json.JsonSerializerOptions _jsonSerializerOptions
        {
            get
            {
#if NETCOREAPP3_1
                var serializer = new System.Text.Json.JsonSerializerOptions { WriteIndented = true };
#else
                var serializer = new System.Text.Json.JsonSerializerOptions { IncludeFields = true, WriteIndented = true };
#endif
                serializer.Converters.Add(new ConcreteAbstractSerializer());
                serializer.Converters.Add(new ConcreteInterfaceSerializer());
                serializer.Converters.Add(new System.Text.Json.Serialization.JsonStringEnumConverter());
                return serializer;
            }
        }
#endif

        #endregion

        #region Constructors

        /// <summary>
        /// Construct a new instance of the wrapper from a byte array
        /// </summary>
        protected WrapperBase(T? model, byte[]? data, int offset)
        {
            if (model == null)
                throw new ArgumentNullException(nameof(model));
            if (data == null)
                throw new ArgumentNullException(nameof(data));
            if (offset < 0 || offset >= data.Length)
                throw new ArgumentOutOfRangeException(nameof(offset));

            Model = model;
            _dataSource = DataSource.ByteArray;
            _byteArrayData = data;
            _byteArrayOffset = offset;
        }

        /// <summary>
        /// Construct a new instance of the wrapper from a Stream
        /// </summary>
        protected WrapperBase(T? model, Stream? data)
        {
            if (model == null)
                throw new ArgumentNullException(nameof(model));
            if (data == null)
                throw new ArgumentNullException(nameof(data));
            if (data.Length == 0 || !data.CanSeek || !data.CanRead)
                throw new ArgumentOutOfRangeException(nameof(data));

            Model = model;
            _dataSource = DataSource.Stream;
            _streamData = data;
        }

        #endregion

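A minimal sketch of deriving from the generic base added above; `Example` and `ExampleModel` are hypothetical stand-ins, not types from this diff.

// Sketch only: the base constructors validate the model and backing data
// (null model/data or a bad offset throws), so derived constructors can be empty.
public class ExampleModel { }

public class Example : WrapperBase<ExampleModel>
{
    /// <inheritdoc/>
    public override string DescriptionString => "Example File";

    /// <inheritdoc/>
    public Example(ExampleModel? model, Stream? data)
        : base(model, data)
    {
        // All logic is handled by the base class
    }
}
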
        #region Data

        /// <summary>
        /// Validate the backing data source
        /// </summary>
        /// <returns>True if the data source is valid, false otherwise</returns>
        public bool DataSourceIsValid()
        {
            return _dataSource switch
            {
                // Byte array data requires both a valid array and offset
                DataSource.ByteArray => _byteArrayData != null && _byteArrayOffset >= 0,

                // Stream data requires a valid, readable, seekable stream
                DataSource.Stream => _streamData != null && _streamData.CanRead && _streamData.CanSeek,

                // Everything else is invalid
                _ => false,
            };
        }

        /// <summary>
        /// Check if a data segment is valid in the data source
        /// </summary>
        /// <param name="position">Position in the source</param>
        /// <param name="length">Length of the data to check</param>
        /// <returns>True if the positional data is valid, false otherwise</returns>
        public bool SegmentValid(int position, int length)
        {
            // Validate the data source
            if (!DataSourceIsValid())
                return false;

            // If we have an invalid position
            if (position < 0 || position >= GetEndOfFile())
                return false;

            return _dataSource switch
            {
                DataSource.ByteArray => _byteArrayOffset + position + length <= _byteArrayData!.Length,
                DataSource.Stream => position + length <= _streamData!.Length,

                // Everything else is invalid
                _ => false,
            };
        }

        /// <summary>
        /// Read data from the source
        /// </summary>
        /// <param name="position">Position in the source to read from</param>
        /// <param name="length">Length of the requested data</param>
        /// <returns>Byte array containing the requested data, null on error</returns>
        public byte[]? ReadFromDataSource(int position, int length)
        {
            // Validate the data source
            if (!DataSourceIsValid())
                return null;

            // Validate the requested segment
            if (!SegmentValid(position, length))
                return null;

            // Read and return the data
            byte[]? sectionData = null;
            switch (_dataSource)
            {
                case DataSource.ByteArray:
                    sectionData = new byte[length];
                    Array.Copy(_byteArrayData!, _byteArrayOffset + position, sectionData, 0, length);
                    break;

                case DataSource.Stream:
                    lock (_streamDataLock)
                    {
                        long currentLocation = _streamData!.Position;
                        _streamData.Seek(position, SeekOrigin.Begin);
                        sectionData = _streamData.ReadBytes(length);
                        _streamData.Seek(currentLocation, SeekOrigin.Begin);
                        break;
                    }
            }

            return sectionData;
        }

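A small sketch of the guarded read path above; `wrapper` stands in for any constructed WrapperBase<T> and the offsets are illustrative.

// Sketch only: the call validates the source and the segment bounds first,
// so an out-of-range request comes back as null instead of throwing.
byte[]? window = wrapper.ReadFromDataSource(0x200, 64);
if (window != null)
    Console.WriteLine(BitConverter.ToString(window));
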
        /// <summary>
        /// Read string data from the source
        /// </summary>
        /// <param name="position">Position in the source to read from</param>
        /// <param name="length">Length of the requested data</param>
        /// <param name="charLimit">Number of characters needed to be a valid string</param>
        /// <returns>String list containing the requested data, null on error</returns>
        public List<string>? ReadStringsFromDataSource(int position, int length, int charLimit = 5)
        {
            // Read the data as a byte array first
            byte[]? sourceData = ReadFromDataSource(position, length);
            if (sourceData == null)
                return null;

            // Check for ASCII strings
            var asciiStrings = ReadStringsWithEncoding(sourceData, charLimit, Encoding.ASCII);

            // Check for UTF-8 strings
            var utf8Strings = ReadStringsWithEncoding(sourceData, charLimit, Encoding.UTF8);

            // Check for Unicode strings
            // We are limiting the check for Unicode characters with a second byte of 0x00 for now
            var unicodeStrings = ReadStringsWithEncoding(sourceData, charLimit, Encoding.Unicode);

            // Combine the strings from all encodings (duplicates may remain across encodings)
            List<string> sourceStrings = [.. asciiStrings, .. utf8Strings, .. unicodeStrings];

            // Sort the strings and return
            sourceStrings.Sort();
            return sourceStrings;
        }

        /// <summary>
        /// Get the ending offset of the source
        /// </summary>
        /// <returns>Value greater than 0 for a valid end of file, -1 on error</returns>
        public int GetEndOfFile()
        {
            // Validate the data source
            if (!DataSourceIsValid())
                return -1;

            // Return the effective endpoint
            return _dataSource switch
            {
                DataSource.ByteArray => _byteArrayData!.Length - _byteArrayOffset,
                DataSource.Stream => (int)_streamData!.Length,
                _ => -1,
            };
        }

        /// <summary>
        /// Read string data from the source with an encoding
        /// </summary>
        /// <param name="sourceData">Byte array representing the source data</param>
        /// <param name="charLimit">Number of characters needed to be a valid string</param>
        /// <param name="encoding">Character encoding to use for checking</param>
        /// <returns>String list containing the requested data, empty on error</returns>
        /// <remarks>TODO: Move to IO?</remarks>
#if NET20
        private List<string> ReadStringsWithEncoding(byte[] sourceData, int charLimit, Encoding encoding)
#else
        private HashSet<string> ReadStringsWithEncoding(byte[] sourceData, int charLimit, Encoding encoding)
#endif
        {
            // If we have an invalid character limit, default to 5
            if (charLimit <= 0)
                charLimit = 5;

            // Create the string collection to return
#if NET20
            var sourceStrings = new List<string>();
#else
            var sourceStrings = new HashSet<string>();
#endif

            // Setup cached data
            int sourceDataIndex = 0;
            List<char> cachedChars = [];

            // Check for strings
            while (sourceDataIndex < sourceData.Length)
            {
                // Read the next character
                char ch = encoding.GetChars(sourceData, sourceDataIndex, 1)[0];

                // If we have a control character or an invalid byte
                bool isValid = !char.IsControl(ch) && (ch & 0xFF00) == 0;
                if (!isValid)
                {
                    // If we have no cached string
                    if (cachedChars.Count == 0)
                    {
                        sourceDataIndex++;
                        continue;
                    }

                    // If we have a cached string greater than the limit
                    if (cachedChars.Count >= charLimit)
                        sourceStrings.Add(new string([.. cachedChars]));

                    cachedChars.Clear();
                    sourceDataIndex++;
                    continue;
                }

                // If a long repeating string is found, discard it
                if (cachedChars.Count >= 64 && cachedChars.TrueForAll(c => c == cachedChars[0]))
                {
                    cachedChars.Clear();
                    sourceDataIndex++;
                    continue;
                }

                // Append the character to the cached string
                cachedChars.Add(ch);
                sourceDataIndex++;
            }

            // If we have a cached string greater than the limit
            if (cachedChars.Count >= charLimit)
            {
                // Get the string from the cached characters
                string cachedString = new([.. cachedChars]);
                cachedString = cachedString.Trim();

                // Only include trimmed strings over the limit
                if (cachedString.Length >= charLimit)
                    sourceStrings.Add(cachedString);
            }

            return sourceStrings;
        }

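A worked example of the scanner above, via the public ReadStringsFromDataSource entry point; `wrapper` and the byte layout are illustrative, and the same string can surface once per encoding that decodes it.

// Sketch only. Backing bytes (17 total): \0 \0 H e l l o ' ' W o r l d ! \0 H i
// "Hello World!" (12 printable chars) passes the default 5-char limit;
// "Hi" (2 chars) is dropped; the NUL bytes terminate each candidate run.
List<string>? found = wrapper.ReadStringsFromDataSource(0, 17);
// found contains "Hello World!" (possibly once per matching encoding)
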
        #endregion

        #region JSON Export

#if !NETFRAMEWORK
        /// <summary>
        /// Export the item information as JSON
        /// </summary>
        public override string ExportJSON() => System.Text.Json.JsonSerializer.Serialize(Model, _jsonSerializerOptions);
#endif

        #endregion
    }
}

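A minimal sketch of the JSON export path; `wrapper` is assumed, and the member is only compiled outside .NET Framework targets.

#if !NETFRAMEWORK
// Sketch only: serializes the wrapped model with the options defined above
// (indented output, fields included, enums rendered as strings).
string json = wrapper.ExportJSON();
File.WriteAllText("model.json", json);
#endif
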
@@ -1,329 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
using SabreTools.IO.Extensions;
using SabreTools.Serialization.Interfaces;

namespace SabreTools.Serialization.Wrappers
{
    public abstract class WrapperBase<T> : WrapperBase, IWrapper<T>
    {
        #region Properties

        /// <inheritdoc/>
        public T GetModel() => Model;

        /// <summary>
        /// Internal model
        /// </summary>
        public T Model { get; private set; }

        #endregion

        #region Instance Variables

        /// <summary>
        /// Source of the original data
        /// </summary>
        protected DataSource _dataSource = DataSource.UNKNOWN;

        /// <summary>
        /// Lock object for reading from the source
        /// </summary>
        private readonly object _streamDataLock = new();

        /// <summary>
        /// Source byte array data
        /// </summary>
        /// <remarks>This is only populated if <see cref="_dataSource"/> is <see cref="DataSource.ByteArray"/></remarks>
        protected byte[]? _byteArrayData = null;

        /// <summary>
        /// Source byte array data offset
        /// </summary>
        /// <remarks>This is only populated if <see cref="_dataSource"/> is <see cref="DataSource.ByteArray"/></remarks>
        protected int _byteArrayOffset = -1;

        /// <summary>
        /// Source Stream data
        /// </summary>
        /// <remarks>This is only populated if <see cref="_dataSource"/> is <see cref="DataSource.Stream"/></remarks>
        protected Stream? _streamData = null;

#if !NETFRAMEWORK
        /// <summary>
        /// JSON serializer options for output printing
        /// </summary>
        protected System.Text.Json.JsonSerializerOptions _jsonSerializerOptions
        {
            get
            {
#if NETCOREAPP3_1
                var serializer = new System.Text.Json.JsonSerializerOptions { WriteIndented = true };
#else
                var serializer = new System.Text.Json.JsonSerializerOptions { IncludeFields = true, WriteIndented = true };
#endif
                serializer.Converters.Add(new ConcreteAbstractSerializer());
                serializer.Converters.Add(new ConcreteInterfaceSerializer());
                serializer.Converters.Add(new System.Text.Json.Serialization.JsonStringEnumConverter());
                return serializer;
            }
        }
#endif

        #endregion

        #region Constructors

        /// <summary>
        /// Construct a new instance of the wrapper from a byte array
        /// </summary>
        protected WrapperBase(T? model, byte[]? data, int offset)
        {
            if (model == null)
                throw new ArgumentNullException(nameof(model));
            if (data == null)
                throw new ArgumentNullException(nameof(data));
            if (offset < 0 || offset >= data.Length)
                throw new ArgumentOutOfRangeException(nameof(offset));

            Model = model;
            _dataSource = DataSource.ByteArray;
            _byteArrayData = data;
            _byteArrayOffset = offset;
        }

        /// <summary>
        /// Construct a new instance of the wrapper from a Stream
        /// </summary>
        protected WrapperBase(T? model, Stream? data)
        {
            if (model == null)
                throw new ArgumentNullException(nameof(model));
            if (data == null)
                throw new ArgumentNullException(nameof(data));
            if (data.Length == 0 || !data.CanSeek || !data.CanRead)
                throw new ArgumentOutOfRangeException(nameof(data));

            Model = model;
            _dataSource = DataSource.Stream;
            _streamData = data;
        }

        #endregion

        #region Data

        /// <summary>
        /// Validate the backing data source
        /// </summary>
        /// <returns>True if the data source is valid, false otherwise</returns>
        public bool DataSourceIsValid()
        {
            return _dataSource switch
            {
                // Byte array data requires both a valid array and offset
                DataSource.ByteArray => _byteArrayData != null && _byteArrayOffset >= 0,

                // Stream data requires both a valid stream
                DataSource.Stream => _streamData != null && _streamData.CanRead && _streamData.CanSeek,

                // Everything else is invalid
                _ => false,
            };
        }

        /// <summary>
        /// Check if a data segment is valid in the data source
        /// </summary>
        /// <param name="position">Position in the source</param>
        /// <param name="length">Length of the data to check</param>
        /// <returns>True if the positional data is valid, false otherwise</returns>
        public bool SegmentValid(int position, int length)
        {
            // Validate the data souece
            if (!DataSourceIsValid())
                return false;

            // If we have an invalid position
            if (position < 0 || position >= GetEndOfFile())
                return false;

            return _dataSource switch
            {
                DataSource.ByteArray => _byteArrayOffset + position + length <= _byteArrayData!.Length,
                DataSource.Stream => position + length <= _streamData!.Length,

                // Everything else is invalid
                _ => false,
            };
        }

        /// <summary>
        /// Read data from the source
        /// </summary>
        /// <param name="position">Position in the source to read from</param>
        /// <param name="length">Length of the requested data</param>
        /// <returns>Byte array containing the requested data, null on error</returns>
        public byte[]? ReadFromDataSource(int position, int length)
        {
            // Validate the data source
            if (!DataSourceIsValid())
                return null;

            // Validate the requested segment
            if (!SegmentValid(position, length))
                return null;

            // Read and return the data
            byte[]? sectionData = null;
            switch (_dataSource)
            {
                case DataSource.ByteArray:
                    sectionData = new byte[length];
                    Array.Copy(_byteArrayData!, _byteArrayOffset + position, sectionData, 0, length);
                    break;

                case DataSource.Stream:
                    lock (_streamDataLock)
                    {
                        long currentLocation = _streamData!.Position;
                        _streamData.Seek(position, SeekOrigin.Begin);
                        sectionData = _streamData.ReadBytes(length);
                        _streamData.Seek(currentLocation, SeekOrigin.Begin);
                        break;
                    }
            }

            return sectionData;
        }

        /// <summary>
        /// Read string data from the source
        /// </summary>
        /// <param name="position">Position in the source to read from</param>
        /// <param name="length">Length of the requested data</param>
        /// <param name="charLimit">Number of characters needed to be a valid string</param>
        /// <returns>String list containing the requested data, null on error</returns>
        public List<string>? ReadStringsFromDataSource(int position, int length, int charLimit = 5)
        {
            // Read the data as a byte array first
            byte[]? sourceData = ReadFromDataSource(position, length);
            if (sourceData == null)
                return null;

            // Check for ASCII strings
            var asciiStrings = ReadStringsWithEncoding(sourceData, charLimit, Encoding.ASCII);

            // Check for UTF-8 strings
            // We are limiting the check for Unicode characters with a second byte of 0x00 for now
            var utf8Strings = ReadStringsWithEncoding(sourceData, charLimit, Encoding.UTF8);

            // Check for Unicode strings
            // We are limiting the check for Unicode characters with a second byte of 0x00 for now
            var unicodeStrings = ReadStringsWithEncoding(sourceData, charLimit, Encoding.Unicode);

            // Ignore duplicate strings across encodings
            List<string> sourceStrings = [.. asciiStrings, .. utf8Strings, .. unicodeStrings];

            // Sort the strings and return
            sourceStrings.Sort();
            return sourceStrings;
        }

        /// <summary>
        /// Get the ending offset of the source
        /// </summary>
        /// <returns>Value greater than 0 for a valid end of file, -1 on error</returns>
        public int GetEndOfFile()
        {
            // Validate the data souece
            if (!DataSourceIsValid())
                return -1;

            // Return the effective endpoint
            return _dataSource switch
            {
                DataSource.ByteArray => _byteArrayData!.Length - _byteArrayOffset,
                DataSource.Stream => (int)_streamData!.Length,
                _ => -1,
            };
        }

        /// <summary>
        /// Read string data from the source with an encoding
        /// </summary>
        /// <param name="sourceData">Byte array representing the source data</param>
        /// <param name="charLimit">Number of characters needed to be a valid string</param>
        /// <param name="encoding">Character encoding to use for checking</param>
        /// <returns>String list containing the requested data, empty on error</returns>
        /// <remarks>TODO: Move to IO?</remarks>
        private HashSet<string> ReadStringsWithEncoding(byte[] sourceData, int charLimit, Encoding encoding)
        {
            // If we have an invalid character limit, default to 5
            if (charLimit <= 0)
                charLimit = 5;

            // Create the string hash set to return
            var sourceStrings = new HashSet<string>();

            // Setup cached data
            int sourceDataIndex = 0;
            List<char> cachedChars = [];

            // Check for strings
            while (sourceDataIndex < sourceData.Length)
            {
                // Read the next character
                char ch = encoding.GetChars(sourceData, sourceDataIndex, 1)[0];

                // If we have a control character or an invalid byte
                bool isValid = !char.IsControl(ch) && (ch & 0xFF00) == 0;
                if (!isValid)
                {
                    // If we have no cached string
                    if (cachedChars.Count == 0)
                        continue;

                    // If we have a cached string greater than the limit
                    if (cachedChars.Count >= charLimit)
                        sourceStrings.Add(new string([.. cachedChars]));

                    cachedChars.Clear();
                    continue;
                }

                // If a long repeating string is found, discard it
                if (cachedChars.Count >= 64 && cachedChars.TrueForAll(c => c == cachedChars[0]))
                {
                    cachedChars.Clear();
                    continue;
                }

                // Append the character to the cached string
                cachedChars.Add(ch);
                sourceDataIndex++;
            }

            // If we have a cached string greater than the limit
            if (cachedChars.Count >= charLimit)
                sourceStrings.Add(new string([.. cachedChars]));

            return sourceStrings;
        }

        #endregion

        #region JSON Export

#if !NETFRAMEWORK
        /// <summary>
        /// Export the item information as JSON
        /// </summary>
        public override string ExportJSON() => System.Text.Json.JsonSerializer.Serialize(Model, _jsonSerializerOptions);
#endif

        #endregion
    }
}

@@ -54,7 +54,7 @@ namespace SabreTools.Serialization.Wrappers
            WrapperType.Textfile => null, // TODO: Implement wrapper
            WrapperType.VBSP => VBSP.Create(data),
            WrapperType.VPK => VPK.Create(data),
            WrapperType.WAD => WAD.Create(data),
            WrapperType.WAD => WAD3.Create(data),
            WrapperType.XZ => null, // TODO: Implement wrapper
            WrapperType.XZP => XZP.Create(data),
            _ => null,