TODO cleanup

This commit is contained in:
Matt Nadareski
2024-11-20 14:54:22 -05:00
parent 1bc9316bc1
commit 4e40cc19d5
11 changed files with 70 additions and 43 deletions

View File

@@ -66,8 +66,7 @@ namespace SabreTools.Serialization.Deserializers
file.VerticesLump = ParseVerticesLump(data, lumpEntry.Offset, lumpEntry.Length);
break;
case LumpType.LUMP_VISIBILITY:
// TODO: Assign when Models supports it
_ = ParseVisibilityLump(data, lumpEntry.Offset, lumpEntry.Length);
file.VisibilityLump = ParseVisibilityLump(data, lumpEntry.Offset, lumpEntry.Length);
break;
case LumpType.LUMP_NODES:
file.NodesLump = ParseNodesLump(data, lumpEntry.Offset, lumpEntry.Length);
@@ -118,19 +117,13 @@ namespace SabreTools.Serialization.Deserializers
/// <summary>
/// Parse a Stream into a BspHeader
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled header on success, null on error</returns>
/// <remarks>Only recognized versions are 29 and 30</remarks>
private static BspHeader? ParseHeader(Stream data)
{
// NOTE(review): this diff hunk is rendered without +/- markers, so the
// removed (manual field-by-field read) and added (ReadType marshalling)
// implementations appear interleaved below. As shown, `header` is declared
// twice and this span is not valid C# — consult the actual commit
// (4e40cc19d5) for the real post-change body.
// TODO: Use marshalling here later
var header = new BspHeader();
header.Version = data.ReadInt32();
var header = data.ReadType<BspHeader>();
// ReadType returns null when the stream cannot supply a full BspHeader
if (header == null)
return null;
// Reject anything outside versions 29-30 (per the <remarks> above;
// presumably the Quake/GoldSrc BSP versions — confirm against the format docs)
if (header.Version < 29 || header.Version > 30)
return null;
// Pre-change path: read each of the BSP_HEADER_LUMPS lump entries individually
header.Lumps = new BspLumpEntry[BSP_HEADER_LUMPS];
for (int i = 0; i < BSP_HEADER_LUMPS; i++)
{
header.Lumps[i] = data.ReadType<BspLumpEntry>()!;
}
return header;
}
@@ -223,7 +216,6 @@ namespace SabreTools.Serialization.Deserializers
/// <returns>Filled Half-Life Level texture header on success, null on error</returns>
private static TextureHeader ParseTextureHeader(Stream data)
{
// TODO: Use marshalling here instead of building
var textureHeader = new TextureHeader();
textureHeader.MipTextureCount = data.ReadUInt32();

View File

@@ -260,7 +260,6 @@ namespace SabreTools.Serialization.Deserializers
/// <returns>Filled sector full of sector numbers on success, null on error</returns>
private static SectorNumber[] ParseSectorNumbers(Stream data, ushort sectorShift)
{
// TODO: Use marshalling here instead of building
int sectorCount = (int)(Math.Pow(2, sectorShift) / sizeof(uint));
var sectorNumbers = new SectorNumber[sectorCount];
@@ -281,7 +280,7 @@ namespace SabreTools.Serialization.Deserializers
/// <returns>Filled sector full of directory entries on success, null on error</returns>
private static DirectoryEntry[]? ParseDirectoryEntries(Stream data, ushort sectorShift, ushort majorVersion)
{
// TODO: Use marshalling here instead of building
// TODO: Fix the directory entry size const
const int directoryEntrySize = 64 + 2 + 1 + 1 + 4 + 4 + 4 + 16 + 4 + 8 + 8 + 4 + 8;
int sectorCount = (int)(Math.Pow(2, sectorShift) / directoryEntrySize);
var directoryEntries = new DirectoryEntry[sectorCount];

View File

@@ -154,8 +154,7 @@ namespace SabreTools.Serialization.Deserializers
/// <returns>Filled certificate on success, null on error</returns>
public static Certificate? ParseCertificate(Stream data)
{
// TODO: Use marshalling here instead of building
Certificate certificate = new Certificate();
var certificate = new Certificate();
certificate.SignatureType = (SignatureType)data.ReadUInt32();
switch (certificate.SignatureType)
@@ -230,8 +229,7 @@ namespace SabreTools.Serialization.Deserializers
/// <returns>Filled ticket on success, null on error</returns>
public static Ticket? ParseTicket(Stream data, bool fromCdn = false)
{
// TODO: Use marshalling here instead of building
Ticket ticket = new Ticket();
var ticket = new Ticket();
ticket.SignatureType = (SignatureType)data.ReadUInt32();
switch (ticket.SignatureType)
@@ -335,7 +333,6 @@ namespace SabreTools.Serialization.Deserializers
/// <returns>Filled title metadata on success, null on error</returns>
public static TitleMetadata? ParseTitleMetadata(Stream data, bool fromCdn = false)
{
// TODO: Use marshalling here instead of building
var titleMetadata = new TitleMetadata();
titleMetadata.SignatureType = (SignatureType)data.ReadUInt32();

View File

@@ -608,7 +608,6 @@ namespace SabreTools.Serialization.Deserializers
/// <returns>Filled Half-Life Game Cache data block header on success, null on error</returns>
private static DataBlockHeader? ParseDataBlockHeader(Stream data, uint minorVersion)
{
// TODO: Use marshalling here instead of building
var dataBlockHeader = new DataBlockHeader();
// In version 3 the DataBlockHeader is missing the LastVersionPlayed field.

View File

@@ -470,15 +470,13 @@ namespace SabreTools.Serialization.Deserializers
/// <returns>Filled resident names table entry on success, null on error</returns>
public static ResidentNamesTableEntry ParseResidentNamesTableEntry(Stream data)
{
// TODO: Use marshalling here instead of building
var entry = new ResidentNamesTableEntry();
entry.Length = data.ReadByteValue();
if (entry.Length > 0 && data.Position + entry.Length <= data.Length)
{
byte[]? name = data.ReadBytes(entry.Length);
if (name != null)
entry.Name = Encoding.ASCII.GetString(name).TrimEnd('\0');
byte[] name = data.ReadBytes(entry.Length);
entry.Name = Encoding.ASCII.GetString(name).TrimEnd('\0');
}
entry.OrdinalNumber = data.ReadUInt16();

View File

@@ -141,14 +141,10 @@ namespace SabreTools.Serialization.Deserializers
/// <returns>Filled NCSD header on success, null on error</returns>
public static NCSDHeader? ParseNCSDHeader(Stream data)
{
// TODO: Use marshalling here instead of building
var header = new NCSDHeader();
header.RSA2048Signature = data.ReadBytes(0x100);
byte[]? magicNumber = data.ReadBytes(4);
if (magicNumber == null)
return null;
byte[] magicNumber = data.ReadBytes(4);
header.MagicNumber = Encoding.ASCII.GetString(magicNumber).TrimEnd('\0'); ;
if (header.MagicNumber != NCSDMagicNumber)
return null;

View File

@@ -84,7 +84,7 @@ namespace SabreTools.Serialization.Deserializers
file.OcclusionLump = ParseOcclusionLump(data, lumpEntry.Offset, lumpEntry.Length);
break;
case LumpType.LUMP_LEAVES:
file.LeavesLump = ParseLeavesLump(data, lumpEntry.Offset, lumpEntry.Length);
file.LeavesLump = ParseLeavesLump(data, lumpEntry.Version, lumpEntry.Offset, lumpEntry.Length);
break;
case LumpType.LUMP_MARKSURFACES:
file.MarksurfacesLump = ParseMarksurfacesLump(data, lumpEntry.Offset, lumpEntry.Length);
@@ -561,13 +561,12 @@ namespace SabreTools.Serialization.Deserializers
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled LUMP_LEAVES on success, null on error</returns>
private static VbspLeavesLump? ParseLeavesLump(Stream data, int offset, int length)
private static VbspLeavesLump? ParseLeavesLump(Stream data, uint version, int offset, int length)
{
var leaves = new List<VbspLeaf>();
while (data.Position < offset + length)
{
// TODO: Fix parsing between V0 and V1+
var leaf = data.ReadType<VbspLeaf>();
var leaf = ParseVbspLeaf(data, version);
if (leaf != null)
leaves.Add(leaf);
}
@@ -575,6 +574,42 @@ namespace SabreTools.Serialization.Deserializers
return new VbspLeavesLump { Leaves = [.. leaves] };
}
/// <summary>
/// Parse a Stream into VbspLeaf
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="version">LUMP_LEAVES lump version; selects whether the trailing
/// bytes are a CompressedLightCube or a 2-byte padding field</param>
/// <returns>Filled VbspLeaf on success, null on error</returns>
private static VbspLeaf? ParseVbspLeaf(Stream data, uint version)
{
var leaf = new VbspLeaf();
// Contents flags are stored as a 32-bit value, then reinterpreted as the enum
leaf.Contents = (VbspContents)data.ReadUInt32();
leaf.Cluster = data.ReadInt16();
leaf.AreaFlags = data.ReadInt16();
// Axis-aligned bounding box: three shorts for mins, three for maxs
leaf.Mins = new short[3];
for (int i = 0; i < leaf.Mins.Length; i++)
{
leaf.Mins[i] = data.ReadInt16();
}
leaf.Maxs = new short[3];
for (int i = 0; i < leaf.Maxs.Length; i++)
{
leaf.Maxs[i] = data.ReadInt16();
}
leaf.FirstLeafFace = data.ReadUInt16();
leaf.NumLeafFaces = data.ReadUInt16();
leaf.FirstLeafBrush = data.ReadUInt16();
leaf.NumLeafBrushes = data.ReadUInt16();
leaf.LeafWaterDataID = data.ReadInt16();
// NOTE(review): this reads the light cube for version == 1, but the
// corresponding printer checks `version == 0` before printing the
// AmbientLighting colors, and Valve's VBSP docs describe leaf version 0 as
// the variant that embeds the CompressedLightCube (version 1 replaced it
// with padding). One of the two checks is likely inverted — confirm which.
if (version == 1)
leaf.AmbientLighting = data.ReadType<CompressedLightCube>();
else
leaf.Padding = data.ReadInt16();
return leaf;
}
/// <summary>
/// Parse a Stream into LUMP_FACEIDS
/// </summary>

View File

@@ -75,8 +75,7 @@ namespace SabreTools.Serialization.Printers
Print(builder, model.VerticesLump);
break;
case LumpType.LUMP_VISIBILITY:
// TODO: Implement when added to Models
// Print(builder, model.VisibilityLump);
Print(builder, model.VisibilityLump);
break;
case LumpType.LUMP_NODES:
Print(builder, model.NodesLump);

View File

@@ -635,10 +635,10 @@ namespace SabreTools.Serialization.Printers
return;
}
for (int j = 0; j < lump.Leaves.Length; j++)
for (int i = 0; i < lump.Leaves.Length; i++)
{
var leaf = lump.Leaves[j];
builder.AppendLine($" Leaf {j}");
var leaf = lump.Leaves[i];
builder.AppendLine($" Leaf {i}");
builder.AppendLine($" Contents: {leaf.Contents} (0x{leaf.Contents:X})");
builder.AppendLine(leaf.Cluster, " Cluster");
builder.AppendLine(leaf.AreaFlags, " AreaFlags");
@@ -651,8 +651,22 @@ namespace SabreTools.Serialization.Printers
builder.AppendLine(leaf.LeafWaterDataID, " Leaf water data ID");
if (version == 0)
{
// TODO: Figure out how to print the colors array
builder.AppendLine(" Colors array skipped...");
if (leaf.AmbientLighting.Colors == null || leaf.AmbientLighting.Colors.Length == 0)
{
builder.AppendLine(" No ambient lighting colors");
}
else
{
for (int j = 0; j < leaf.AmbientLighting.Colors.Length; j++)
{
var color = leaf.AmbientLighting.Colors[j];
builder.AppendLine($" Ambient Lighting Color {j}");
builder.AppendLine(color.Red, " Red");
builder.AppendLine(color.Green, " Green");
builder.AppendLine(color.Blue, " Blue");
builder.AppendLine(color.Exponent, " Exponent");
}
}
}
else
{

View File

@@ -2,7 +2,6 @@
namespace SabreTools.Serialization.Wrappers
{
// TODO: Figure out extension properties
public partial class MoPaQ : WrapperBase<Models.MoPaQ.Archive>
{
#region Descriptive Properties

View File

@@ -2,7 +2,6 @@ using System.IO;
namespace SabreTools.Serialization.Wrappers
{
// TODO: Figure out extension properties
public class PIC : WrapperBase<Models.PIC.DiscInformation>
{
#region Descriptive Properties