diff --git a/SabreTools.Serialization.Test/SabreTools.Serialization.Test.csproj b/SabreTools.Serialization.Test/SabreTools.Serialization.Test.csproj
index 7ab48ba7..11f50911 100644
--- a/SabreTools.Serialization.Test/SabreTools.Serialization.Test.csproj
+++ b/SabreTools.Serialization.Test/SabreTools.Serialization.Test.csproj
@@ -26,8 +26,8 @@
runtime; build; native; contentfiles; analyzers; buildtransitive
all
-
-
+
+
runtime; build; native; contentfiles; analyzers; buildtransitive
diff --git a/SabreTools.Serialization/Deserializers/BSP.cs b/SabreTools.Serialization/Deserializers/BSP.cs
index 01c89f71..5a60912d 100644
--- a/SabreTools.Serialization/Deserializers/BSP.cs
+++ b/SabreTools.Serialization/Deserializers/BSP.cs
@@ -262,12 +262,13 @@ namespace SabreTools.Serialization.Deserializers
var lump = new VisibilityLump();
lump.NumClusters = data.ReadInt32();
- lump.ByteOffsets = new int[lump.NumClusters, 2];
+ lump.ByteOffsets = new int[lump.NumClusters][];
for (int i = 0; i < lump.NumClusters; i++)
{
+ lump.ByteOffsets[i] = new int[2];
for (int j = 0; j < 2; j++)
{
- lump.ByteOffsets[i, j] = data.ReadInt32();
+ lump.ByteOffsets[i][j] = data.ReadInt32();
}
}
@@ -336,14 +337,11 @@ namespace SabreTools.Serialization.Deserializers
private static LightmapLump? ParseLightmapLump(Stream data, int offset, int length)
{
var lump = new LightmapLump();
- lump.Lightmap = new byte[length / 3, 3];
+ lump.Lightmap = new byte[length / 3][];
for (int i = 0; i < length / 3; i++)
{
- for (int j = 0; j < 3; j++)
- {
- lump.Lightmap[i, j] = data.ReadByteValue();
- }
+ lump.Lightmap[i] = data.ReadBytes(3);
}
return lump;
diff --git a/SabreTools.Serialization/Deserializers/VBSP.cs b/SabreTools.Serialization/Deserializers/VBSP.cs
index 52309265..71c433b6 100644
--- a/SabreTools.Serialization/Deserializers/VBSP.cs
+++ b/SabreTools.Serialization/Deserializers/VBSP.cs
@@ -132,7 +132,7 @@ namespace SabreTools.Serialization.Deserializers
// TODO: Support LUMP_CLUSTERPORTALS / LUMP_UNUSED3 / LUMP_PROPTRIS [25] when in Models
break;
case LumpType.LUMP_DISPINFO:
- file.DispInfoLump = ParseDispInfosLump(data, lumpEntry.Offset, lumpEntry.Length);
+ file.DispInfosLump = ParseDispInfosLump(data, lumpEntry.Offset, lumpEntry.Length);
break;
case LumpType.LUMP_ORIGINALFACES:
file.OriginalFacesLump = ParseFacesLump(data, lumpEntry.Offset, lumpEntry.Length);
@@ -141,7 +141,7 @@ namespace SabreTools.Serialization.Deserializers
// TODO: Support LUMP_PHYSDISP [28] when in Models
break;
case LumpType.LUMP_PHYSCOLLIDE:
- // TODO: Support LUMP_PHYSCOLLIDE [29] when in Models
+ file.PhysCollideLump = ParsePhysCollideLump(data, lumpEntry.Offset, lumpEntry.Length);
break;
case LumpType.LUMP_VERTNORMALS:
// TODO: Support LUMP_VERTNORMALS [30] when in Models
@@ -153,7 +153,7 @@ namespace SabreTools.Serialization.Deserializers
// TODO: Support LUMP_DISP_LIGHTMAP_ALPHAS [32] when in Models
break;
case LumpType.LUMP_DISP_VERTS:
- file.DispVertLump = ParseDispVertsLump(data, lumpEntry.Offset, lumpEntry.Length);
+ file.DispVertsLump = ParseDispVertsLump(data, lumpEntry.Offset, lumpEntry.Length);
break;
case LumpType.LUMP_DISP_LIGHTMAP_SAMPLE_POSITIONS:
// TODO: Support LUMP_DISP_LIGHTMAP_SAMPLE_POSITIONS [34] when in Models
@@ -174,13 +174,13 @@ namespace SabreTools.Serialization.Deserializers
// TODO: Support LUMP_PRIMINDICES [39] when in Models
break;
case LumpType.LUMP_PAKFILE:
- // TODO: Support LUMP_PAKFILE [40] when in Models
+ file.PakfileLump = ParsePakfileLump(data, lumpEntry.Offset, lumpEntry.Length);
break;
case LumpType.LUMP_CLIPPORTALVERTS:
// TODO: Support LUMP_CLIPPORTALVERTS [41] when in Models
break;
case LumpType.LUMP_CUBEMAPS:
- file.CubemapLump = ParseCubemapsLump(data, lumpEntry.Offset, lumpEntry.Length);
+ file.CubemapsLump = ParseCubemapsLump(data, lumpEntry.Offset, lumpEntry.Length);
break;
case LumpType.LUMP_TEXDATA_STRING_DATA:
file.TexdataStringData = ParseTexdataStringData(data, lumpEntry.Offset, lumpEntry.Length);
@@ -216,7 +216,7 @@ namespace SabreTools.Serialization.Deserializers
// TODO: Support LUMP_LIGHTING_HDR [53] when in Models
break;
case LumpType.LUMP_WORLDLIGHTS_HDR:
- file.WorldLightsLump = ParseWorldLightsLump(data, lumpEntry.Offset, lumpEntry.Length);
+ file.HDRWorldLightsLump = ParseWorldLightsLump(data, lumpEntry.Offset, lumpEntry.Length);
break;
case LumpType.LUMP_LEAF_AMBIENT_LIGHTING_HDR:
file.HDRAmbientLightingLump = ParseAmbientLightingLump(data, lumpEntry.Offset, lumpEntry.Length);
@@ -378,12 +378,13 @@ namespace SabreTools.Serialization.Deserializers
var lump = new VisibilityLump();
lump.NumClusters = data.ReadInt32();
- lump.ByteOffsets = new int[lump.NumClusters, 2];
+ lump.ByteOffsets = new int[lump.NumClusters][];
for (int i = 0; i < lump.NumClusters; i++)
{
+ lump.ByteOffsets[i] = new int[2];
for (int j = 0; j < 2; j++)
{
- lump.ByteOffsets[i, j] = data.ReadInt32();
+ lump.ByteOffsets[i][j] = data.ReadInt32();
}
}
@@ -444,6 +445,64 @@ namespace SabreTools.Serialization.Deserializers
return new VbspFacesLump { Faces = [.. faces] };
}
+ /// <summary>
+ /// Parse a Stream into LUMP_PHYSCOLLIDE
+ /// </summary>
+ /// <param name="data">Stream to parse</param>
+ /// <returns>Filled LUMP_PHYSCOLLIDE on success, null on error</returns>
+ private static PhysCollideLump? ParsePhysCollideLump(Stream data, int offset, int length)
+ {
+ var models = new List<PhysModel>();
+ while (data.Position < offset + length)
+ {
+ var model = ParsePhysModel(data);
+ if (model != null)
+ models.Add(model);
+ }
+
+ return new PhysCollideLump { Models = [.. models] };
+ }
+
+ /// <summary>
+ /// Parse a Stream into PhysModel
+ /// </summary>
+ /// <param name="data">Stream to parse</param>
+ /// <returns>Filled PhysModel on success, null on error</returns>
+ private static PhysModel? ParsePhysModel(Stream data)
+ {
+ var model = new PhysModel();
+
+ model.ModelIndex = data.ReadInt32();
+ model.DataSize = data.ReadInt32();
+ model.KeydataSize = data.ReadInt32();
+ model.SolidCount = data.ReadInt32();
+ model.Solids = new PhysSolid[model.SolidCount];
+ for (int i = 0; i < model.Solids.Length; i++)
+ {
+ var solid = ParsePhysSolid(data);
+ if (solid != null)
+ model.Solids[i] = solid;
+ }
+
+ return model;
+ }
+
+ /// <summary>
+ /// Parse a Stream into PhysSolid
+ /// </summary>
+ /// <param name="data">Stream to parse</param>
+ /// <returns>Filled PhysSolid on success, null on error</returns>
+ private static PhysSolid? ParsePhysSolid(Stream data)
+ {
+ var solid = new PhysSolid();
+
+ solid.Size = data.ReadInt32();
+ if (solid.Size > 0)
+ solid.CollisionData = data.ReadBytes(solid.Size);
+
+ return solid;
+ }
+
/// <summary>
/// Parse a Stream into LUMP_LIGHTING
/// </summary>
@@ -452,14 +511,11 @@ namespace SabreTools.Serialization.Deserializers
private static LightmapLump? ParseLightmapLump(Stream data, int offset, int length)
{
var lump = new LightmapLump();
- lump.Lightmap = new byte[length / 3, 3];
+ lump.Lightmap = new byte[length / 3][];
for (int i = 0; i < length / 3; i++)
{
- for (int j = 0; j < 3; j++)
- {
- lump.Lightmap[i, j] = data.ReadByteValue();
- }
+ lump.Lightmap[i] = data.ReadBytes(3);
}
return lump;
@@ -491,10 +547,10 @@ namespace SabreTools.Serialization.Deserializers
lump.PolyData[i] = polyData;
}
lump.VertexIndexCount = data.ReadInt32();
- lump.VertexIndices = new int[lump.VertexIndexCount];
+ lump.VertexIndicies = new int[lump.VertexIndexCount];
for (int i = 0; i < lump.VertexIndexCount; i++)
{
- lump.VertexIndices[i] = data.ReadInt32();
+ lump.VertexIndicies[i] = data.ReadInt32();
}
return lump;
@@ -748,6 +804,20 @@ namespace SabreTools.Serialization.Deserializers
return new CubemapsLump { Cubemaps = [.. cubemaps] };
}
+ /// <summary>
+ /// Parse a Stream into LUMP_PAKFILE
+ /// </summary>
+ /// <param name="data">Stream to parse</param>
+ /// <returns>Filled LUMP_PAKFILE on success, null on error</returns>
+ private static PakfileLump? ParsePakfileLump(Stream data, int offset, int length)
+ {
+ var lump = new PakfileLump();
+
+ lump.Data = data.ReadBytes(length);
+
+ return lump;
+ }
+
/// <summary>
/// Parse a Stream into LUMP_TEXDATA_STRING_DATA
/// </summary>
diff --git a/SabreTools.Serialization/Deserializers/WAD3.cs b/SabreTools.Serialization/Deserializers/WAD3.cs
index a8181436..0dba9279 100644
--- a/SabreTools.Serialization/Deserializers/WAD3.cs
+++ b/SabreTools.Serialization/Deserializers/WAD3.cs
@@ -159,11 +159,10 @@ namespace SabreTools.Serialization.Deserializers
miptex.MipImages[i] = ParseMipMap(data, miptex.Width, miptex.Height);
}
miptex.ColorsUsed = data.ReadUInt16();
- miptex.Palette = new byte[miptex.ColorsUsed, 3];
+ miptex.Palette = new byte[miptex.ColorsUsed][];
for (int i = 0; i < miptex.ColorsUsed; i++)
- for (int j = 0; j < 3; j++)
{
- miptex.Palette[i, j] = data.ReadByteValue();
+ miptex.Palette[i] = data.ReadBytes(3);
}
return miptex;
@@ -178,11 +177,10 @@ namespace SabreTools.Serialization.Deserializers
{
var mipmap = new MipMap();
- mipmap.Data = new byte[width, height];
+ mipmap.Data = new byte[width][];
for (int i = 0; i < width; i++)
- for (int j = 0; j < height; j++)
{
- mipmap.Data[i, j] = data.ReadByteValue();
+ mipmap.Data[i] = data.ReadBytes((int)height);
}
return mipmap;
@@ -199,18 +197,16 @@ namespace SabreTools.Serialization.Deserializers
qpic.Width = data.ReadUInt32();
qpic.Height = data.ReadUInt32();
- qpic.Data = new byte[qpic.Height, qpic.Width];
+ qpic.Data = new byte[qpic.Height][];
for (int i = 0; i < qpic.Height; i++)
- for (int j = 0; j < qpic.Width; j++)
{
- qpic.Data[i, j] = data.ReadByteValue();
+ qpic.Data[i] = data.ReadBytes((int)qpic.Width);
}
qpic.ColorsUsed = data.ReadUInt16();
- qpic.Palette = new byte[qpic.ColorsUsed, 3];
+ qpic.Palette = new byte[qpic.ColorsUsed][];
for (int i = 0; i < qpic.ColorsUsed; i++)
- for (int j = 0; j < 3; j++)
{
- qpic.Palette[i, j] = data.ReadByteValue();
+ qpic.Palette[i] = data.ReadBytes(3);
}
return qpic;
@@ -234,18 +230,16 @@ namespace SabreTools.Serialization.Deserializers
{
font.FontInfo[i] = ParseCharInfo(data);
}
- font.Data = new byte[font.Height, font.Width];
+ font.Data = new byte[font.Height][];
for (int i = 0; i < font.Height; i++)
- for (int j = 0; j < font.Width; j++)
{
- font.Data[i, j] = data.ReadByteValue();
+ font.Data[i] = data.ReadBytes((int)font.Width);
}
font.ColorsUsed = data.ReadUInt16();
- font.Palette = new byte[font.ColorsUsed, 3];
+ font.Palette = new byte[font.ColorsUsed][];
for (int i = 0; i < font.ColorsUsed; i++)
- for (int j = 0; j < 3; j++)
{
- font.Palette[i, j] = data.ReadByteValue();
+ font.Palette[i] = data.ReadBytes(3);
}
return font;
diff --git a/SabreTools.Serialization/Printers/VBSP.cs b/SabreTools.Serialization/Printers/VBSP.cs
index 6e76d46d..36c15496 100644
--- a/SabreTools.Serialization/Printers/VBSP.cs
+++ b/SabreTools.Serialization/Printers/VBSP.cs
@@ -149,7 +149,7 @@ namespace SabreTools.Serialization.Printers
builder.AppendLine(" Data not parsed...");
break;
case LumpType.LUMP_DISPINFO:
- Print(builder, model.DispInfoLump);
+ Print(builder, model.DispInfosLump);
break;
case LumpType.LUMP_ORIGINALFACES:
Print(builder, model.OriginalFacesLump);
@@ -159,8 +159,7 @@ namespace SabreTools.Serialization.Printers
builder.AppendLine(" Data not parsed...");
break;
case LumpType.LUMP_PHYSCOLLIDE:
- // TODO: Support LUMP_PHYSCOLLIDE [29] when in Models
- builder.AppendLine(" Data not parsed...");
+ Print(builder, model.PhysCollideLump);
break;
case LumpType.LUMP_VERTNORMALS:
// TODO: Support LUMP_VERTNORMALS [30] when in Models
@@ -175,7 +174,7 @@ namespace SabreTools.Serialization.Printers
builder.AppendLine(" Data not parsed...");
break;
case LumpType.LUMP_DISP_VERTS:
- Print(builder, model.DispVertLump);
+ Print(builder, model.DispVertsLump);
break;
case LumpType.LUMP_DISP_LIGHTMAP_SAMPLE_POSITIONS:
// TODO: Support LUMP_DISP_LIGHTMAP_SAMPLE_POSITIONS [34] when in Models
@@ -201,15 +200,14 @@ namespace SabreTools.Serialization.Printers
builder.AppendLine(" Data not parsed...");
break;
case LumpType.LUMP_PAKFILE:
- // TODO: Support LUMP_PAKFILE [40] when in Models
- builder.AppendLine(" Data not parsed...");
+ Print(builder, model.PakfileLump);
break;
case LumpType.LUMP_CLIPPORTALVERTS:
// TODO: Support LUMP_CLIPPORTALVERTS [41] when in Models
builder.AppendLine(" Data not parsed...");
break;
case LumpType.LUMP_CUBEMAPS:
- Print(builder, model.CubemapLump);
+ Print(builder, model.CubemapsLump);
break;
case LumpType.LUMP_TEXDATA_STRING_DATA:
Print(builder, model.TexdataStringData);
@@ -250,7 +248,7 @@ namespace SabreTools.Serialization.Printers
builder.AppendLine(" Data not parsed...");
break;
case LumpType.LUMP_WORLDLIGHTS_HDR:
- Print(builder, model.WorldLightsLump);
+ Print(builder, model.HDRWorldLightsLump);
break;
case LumpType.LUMP_LEAF_AMBIENT_LIGHTING_HDR:
Print(builder, model.HDRAmbientLightingLump);
@@ -529,6 +527,40 @@ namespace SabreTools.Serialization.Printers
}
}
+ private static void Print(StringBuilder builder, PhysCollideLump? lump)
+ {
+ if (lump?.Models == null || lump.Models.Length == 0)
+ {
+ builder.AppendLine(" No data");
+ return;
+ }
+
+ for (int i = 0; i < lump.Models.Length; i++)
+ {
+ var model = lump.Models[i];
+ builder.AppendLine($" Model {i}");
+ builder.AppendLine(model.ModelIndex, " Model index");
+ builder.AppendLine(model.DataSize, " Data size");
+ builder.AppendLine(model.KeydataSize, " Keydata size");
+ builder.AppendLine(model.SolidCount, " Solid count");
+ if (model.Solids == null || model.Solids.Length == 0)
+ {
+ builder.AppendLine(" No solids");
+ }
+ else
+ {
+ for (int j = 0; j < model.Solids.Length; j++)
+ {
+ var solid = model.Solids[j];
+ builder.AppendLine($" Solid {j}");
+ builder.AppendLine(solid.Size, " Size");
+ builder.AppendLine(" Collision data skipped...");
+ }
+ }
+ builder.AppendLine(" Keydata skipped...");
+ }
+ }
+
private static void Print(StringBuilder builder, LightmapLump? lump)
{
if (lump?.Lightmap == null || lump.Lightmap.Length == 0)
@@ -581,15 +613,15 @@ namespace SabreTools.Serialization.Printers
}
}
builder.AppendLine(lump.VertexIndexCount, " Vertex index count");
- if (lump.VertexIndices == null || lump.VertexIndices.Length == 0)
+ if (lump.VertexIndicies == null || lump.VertexIndicies.Length == 0)
{
builder.AppendLine(" No vertex indicies");
}
else
{
- for (int j = 0; j < lump.VertexIndices.Length; j++)
+ for (int j = 0; j < lump.VertexIndicies.Length; j++)
{
- builder.AppendLine($" Vertex Index {j}: {lump.VertexIndices[j]}");
+ builder.AppendLine($" Vertex Index {j}: {lump.VertexIndicies[j]}");
}
}
}
@@ -806,7 +838,7 @@ namespace SabreTools.Serialization.Printers
{
var info = lump.Infos[i];
builder.AppendLine($" Disp Info {i}");
- builder.AppendLine($" Start position: ({info.startPosition.X}, {info.startPosition.Y}, {info.startPosition.Z})");
+ builder.AppendLine($" Start position: ({info.StartPosition.X}, {info.StartPosition.Y}, {info.StartPosition.Z})");
builder.AppendLine(info.DispVertStart, " Index into disp verts");
builder.AppendLine(info.DispTriStart, " Index into disp tris");
builder.AppendLine(info.Power, " Power");
@@ -910,6 +942,17 @@ namespace SabreTools.Serialization.Printers
}
}
+ private static void Print(StringBuilder builder, PakfileLump? lump)
+ {
+ if (lump?.Data == null || lump.Data.Length == 0)
+ {
+ builder.AppendLine(" No data");
+ return;
+ }
+
+ builder.AppendLine(" Data skipped...");
+ }
+
private static void Print(StringBuilder builder, CubemapsLump? lump)
{
if (lump?.Cubemaps == null || lump.Cubemaps.Length == 0)
diff --git a/SabreTools.Serialization/SabreTools.Serialization.csproj b/SabreTools.Serialization/SabreTools.Serialization.csproj
index 3ec28349..c396a05d 100644
--- a/SabreTools.Serialization/SabreTools.Serialization.csproj
+++ b/SabreTools.Serialization/SabreTools.Serialization.csproj
@@ -33,7 +33,7 @@
-
+
\ No newline at end of file