diff --git a/SabreTools.Serialization.Test/SabreTools.Serialization.Test.csproj b/SabreTools.Serialization.Test/SabreTools.Serialization.Test.csproj
index 37045b54..7ab48ba7 100644
--- a/SabreTools.Serialization.Test/SabreTools.Serialization.Test.csproj
+++ b/SabreTools.Serialization.Test/SabreTools.Serialization.Test.csproj
@@ -27,7 +27,7 @@
all
-
+
runtime; build; native; contentfiles; analyzers; buildtransitive
diff --git a/SabreTools.Serialization/Deserializers/BSP.cs b/SabreTools.Serialization/Deserializers/BSP.cs
index bc88e3fb..04cde230 100644
--- a/SabreTools.Serialization/Deserializers/BSP.cs
+++ b/SabreTools.Serialization/Deserializers/BSP.cs
@@ -1,3 +1,4 @@
+using System.Collections.Generic;
using System.IO;
using System.Text;
using SabreTools.IO.Extensions;
@@ -6,10 +7,10 @@ using static SabreTools.Models.BSP.Constants;
namespace SabreTools.Serialization.Deserializers
{
- public class BSP : BaseBinaryDeserializer<Models.BSP.File>
+ public class BSP : BaseBinaryDeserializer<BspFile>
{
///
- public override Models.BSP.File? Deserialize(Stream? data)
+ public override BspFile? Deserialize(Stream? data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
@@ -23,13 +24,13 @@ namespace SabreTools.Serialization.Deserializers
int initialOffset = (int)data.Position;
// Create a new Half-Life Level to fill
- var file = new Models.BSP.File();
+ var file = new BspFile();
#region Header
// Try to parse the header
var header = ParseHeader(data);
- if (header == null)
+ if (header?.Lumps == null)
return null;
// Set the level header
@@ -39,59 +40,326 @@ namespace SabreTools.Serialization.Deserializers
#region Lumps
- // Create the lump array
- file.Lumps = new Lump[HL_BSP_LUMP_COUNT];
-
- // Try to parse the lumps
- for (int i = 0; i < HL_BSP_LUMP_COUNT; i++)
+ // LUMP_ENTITIES [0]
+ var lumpEntry = header.Lumps[(int)LumpType.LUMP_ENTITIES];
+ if (lumpEntry != null && lumpEntry.Offset > 0 && lumpEntry.Length > 0)
{
- var lump = ParseLump(data);
- if (lump == null)
- return null;
+ // Seek to the lump offset
+ data.Seek(lumpEntry.Offset, SeekOrigin.Begin);
- file.Lumps[i] = lump;
+ // Read the lump data
+ var entities = new List<Entity>();
+ while (data.Position < lumpEntry.Offset + lumpEntry.Length)
+ {
+ // TODO: Read this into sets of key-value pairs
+ var sb = new StringBuilder();
+ char c = '\0';
+ do
+ {
+ c = (char)data.ReadByteValue();
+ sb.Append(c);
+ } while (c != '}');
+
+ var entity = new Entity();
+ entity.Attributes = new List<KeyValuePair<string, string>>
+ {
+ new("REPLACE", sb.ToString()),
+ };
+ entities.Add(entity);
+ }
+
+ var lump = new EntitiesLump();
+ lump.Entities = [.. entities];
+
+ file.Entities = lump;
}
- #endregion
-
- #region Texture header
-
- // Try to get the texture header lump
- var textureDataLump = file.Lumps[HL_BSP_LUMP_TEXTUREDATA];
- if (textureDataLump == null || textureDataLump.Offset == 0 || textureDataLump.Length == 0)
- return null;
-
- // Seek to the texture header
- data.Seek(textureDataLump.Offset, SeekOrigin.Begin);
-
- // Try to parse the texture header
- var textureHeader = ParseTextureHeader(data);
- if (textureHeader == null)
- return null;
-
- // Set the texture header
- file.TextureHeader = textureHeader;
-
- #endregion
-
- #region Textures
-
- // Create the texture array
- file.Textures = new Texture[textureHeader.TextureCount];
-
- // Try to parse the textures
- for (int i = 0; i < textureHeader.TextureCount; i++)
+ // LUMP_PLANES [1]
+ lumpEntry = header.Lumps[(int)LumpType.LUMP_PLANES];
+ if (lumpEntry != null && lumpEntry.Offset > 0 && lumpEntry.Length > 0)
{
- // Get the texture offset
- int offset = (int)(textureHeader.Offsets![i] + file.Lumps[HL_BSP_LUMP_TEXTUREDATA]!.Offset);
- if (offset < 0 || offset >= data.Length)
- continue;
+ // Seek to the lump offset
+ data.Seek(lumpEntry.Offset, SeekOrigin.Begin);
- // Seek to the texture
- data.Seek(offset, SeekOrigin.Begin);
+ // Read the lump data
+ var planes = new List();
+ while (data.Position < lumpEntry.Offset + lumpEntry.Length)
+ {
+ var plane = data.ReadType();
+ if (plane != null)
+ planes.Add(plane);
+ }
- var texture = ParseTexture(data);
- file.Textures[i] = texture;
+ var lump = new PlanesLump();
+ lump.Planes = [.. planes];
+
+ file.PlanesLump = lump;
+ }
+
+ // LUMP_TEXTURES [2]
+ lumpEntry = header.Lumps[(int)LumpType.LUMP_TEXTURES];
+ if (lumpEntry != null && lumpEntry.Offset > 0 && lumpEntry.Length > 0)
+ {
+ // Seek to the lump offset
+ data.Seek(lumpEntry.Offset, SeekOrigin.Begin);
+
+ // Read the header
+ var lump = new TextureLump();
+ lump.Header = ParseTextureHeader(data);
+
+ // Read the lump data
+ var textures = new List();
+ while (data.Position < lumpEntry.Offset + lumpEntry.Length)
+ {
+ var texture = data.ReadType();
+ if (texture != null)
+ textures.Add(texture);
+ }
+
+ lump.Textures = [.. textures];
+
+ file.TextureLump = lump;
+ }
+
+ // LUMP_VERTICES [3]
+ lumpEntry = header.Lumps[(int)LumpType.LUMP_VERTICES];
+ if (lumpEntry != null && lumpEntry.Offset > 0 && lumpEntry.Length > 0)
+ {
+ // Seek to the lump offset
+ data.Seek(lumpEntry.Offset, SeekOrigin.Begin);
+
+ // Read the lump data
+ var vertices = new List();
+ while (data.Position < lumpEntry.Offset + lumpEntry.Length)
+ {
+ vertices.Add(data.ReadType());
+ }
+
+ var lump = new VerticesLump();
+ lump.Vertices = [.. vertices];
+
+ file.VerticesLump = lump;
+ }
+
+ // LUMP_VISIBILITY [4]
+ lumpEntry = header.Lumps[(int)LumpType.LUMP_VISIBILITY];
+ if (lumpEntry != null && lumpEntry.Offset > 0 && lumpEntry.Length > 0)
+ {
+ // TODO: Parse LUMP_VISIBILITY when added to model
+ }
+
+ // LUMP_NODES [5]
+ lumpEntry = header.Lumps[(int)LumpType.LUMP_NODES];
+ if (lumpEntry != null && lumpEntry.Offset > 0 && lumpEntry.Length > 0)
+ {
+ // Seek to the lump offset
+ data.Seek(lumpEntry.Offset, SeekOrigin.Begin);
+
+ // Read the lump data
+ var nodes = new List();
+ while (data.Position < lumpEntry.Offset + lumpEntry.Length)
+ {
+ var node = data.ReadType();
+ if (node != null)
+ nodes.Add(node);
+ }
+
+ var lump = new BspNodesLump();
+ lump.Nodes = [.. nodes];
+
+ file.NodesLump = lump;
+ }
+
+ // LUMP_TEXINFO [6]
+ lumpEntry = header.Lumps[(int)LumpType.LUMP_TEXINFO];
+ if (lumpEntry != null && lumpEntry.Offset > 0 && lumpEntry.Length > 0)
+ {
+ // Seek to the lump offset
+ data.Seek(lumpEntry.Offset, SeekOrigin.Begin);
+
+ // Read the lump data
+ var texinfos = new List();
+ while (data.Position < lumpEntry.Offset + lumpEntry.Length)
+ {
+ var texinfo = data.ReadType();
+ if (texinfo != null)
+ texinfos.Add(texinfo);
+ }
+
+ var lump = new BspTexinfoLump();
+ lump.Texinfos = [.. texinfos];
+
+ file.TexinfoLump = lump;
+ }
+
+ // LUMP_FACES [7]
+ lumpEntry = header.Lumps[(int)LumpType.LUMP_FACES];
+ if (lumpEntry != null && lumpEntry.Offset > 0 && lumpEntry.Length > 0)
+ {
+ // Seek to the lump offset
+ data.Seek(lumpEntry.Offset, SeekOrigin.Begin);
+
+ // Read the lump data
+ var faces = new List();
+ while (data.Position < lumpEntry.Offset + lumpEntry.Length)
+ {
+ var face = data.ReadType();
+ if (face != null)
+ faces.Add(face);
+ }
+
+ var lump = new BspFacesLump();
+ lump.Faces = [.. faces];
+
+ file.FacesLump = lump;
+ }
+
+ // LUMP_LIGHTING [8]
+ lumpEntry = header.Lumps[(int)LumpType.LUMP_LIGHTING];
+ if (lumpEntry != null && lumpEntry.Offset > 0 && lumpEntry.Length > 0)
+ {
+ // Seek to the lump offset
+ data.Seek(lumpEntry.Offset, SeekOrigin.Begin);
+
+ // Read the lump data
+ var lump = new LightmapLump();
+ lump.Lightmap = new byte[lumpEntry.Length / 3, 3];
+
+ for (int i = 0; i < lumpEntry.Length / 3; i++)
+ for (int j = 0; j < 3; j++)
+ {
+ lump.Lightmap[i, j] = data.ReadByteValue();
+ }
+
+ file.LightmapLump = lump;
+ }
+
+ // LUMP_CLIPNODES [9]
+ lumpEntry = header.Lumps[(int)LumpType.LUMP_CLIPNODES];
+ if (lumpEntry != null && lumpEntry.Offset > 0 && lumpEntry.Length > 0)
+ {
+ // Seek to the lump offset
+ data.Seek(lumpEntry.Offset, SeekOrigin.Begin);
+
+ // Read the lump data
+ var clipnodes = new List();
+ while (data.Position < lumpEntry.Offset + lumpEntry.Length)
+ {
+ var clipnode = data.ReadType();
+ if (clipnode != null)
+ clipnodes.Add(clipnode);
+ }
+
+ var lump = new ClipnodesLump();
+ lump.Clipnodes = [.. clipnodes];
+
+ file.ClipnodesLump = lump;
+ }
+
+ // LUMP_LEAVES [10]
+ lumpEntry = header.Lumps[(int)LumpType.LUMP_LEAVES];
+ if (lumpEntry != null && lumpEntry.Offset > 0 && lumpEntry.Length > 0)
+ {
+ // Seek to the lump offset
+ data.Seek(lumpEntry.Offset, SeekOrigin.Begin);
+
+ // Read the lump data
+ var leaves = new List();
+ while (data.Position < lumpEntry.Offset + lumpEntry.Length)
+ {
+ var leaf = data.ReadType();
+ if (leaf != null)
+ leaves.Add(leaf);
+ }
+
+ var lump = new BspLeavesLump();
+ lump.Leaves = [.. leaves];
+
+ file.LeavesLump = lump;
+ }
+
+ // LUMP_MARKSURFACES [11]
+ lumpEntry = header.Lumps[(int)LumpType.LUMP_MARKSURFACES];
+ if (lumpEntry != null && lumpEntry.Offset > 0 && lumpEntry.Length > 0)
+ {
+ // Seek to the lump offset
+ data.Seek(lumpEntry.Offset, SeekOrigin.Begin);
+
+ // Read the lump data
+ var marksurfaces = new List();
+ while (data.Position < lumpEntry.Offset + lumpEntry.Length)
+ {
+ marksurfaces.Add(data.ReadUInt16());
+ }
+
+ var lump = new MarksurfacesLump();
+ lump.Marksurfaces = [.. marksurfaces];
+
+ file.MarksurfacesLump = lump;
+ }
+
+ // LUMP_EDGES [12]
+ lumpEntry = header.Lumps[(int)LumpType.LUMP_EDGES];
+ if (lumpEntry != null && lumpEntry.Offset > 0 && lumpEntry.Length > 0)
+ {
+ // Seek to the lump offset
+ data.Seek(lumpEntry.Offset, SeekOrigin.Begin);
+
+ // Read the lump data
+ var edges = new List();
+ while (data.Position < lumpEntry.Offset + lumpEntry.Length)
+ {
+ var edge = data.ReadType();
+ if (edge != null)
+ edges.Add(edge);
+ }
+
+ var lump = new EdgesLump();
+ lump.Edges = [.. edges];
+
+ file.EdgesLump = lump;
+ }
+
+ // LUMP_SURFEDGES [13]
+ lumpEntry = header.Lumps[(int)LumpType.LUMP_SURFEDGES];
+ if (lumpEntry != null && lumpEntry.Offset > 0 && lumpEntry.Length > 0)
+ {
+ // Seek to the lump offset
+ data.Seek(lumpEntry.Offset, SeekOrigin.Begin);
+
+ // Read the lump data
+ var surfedges = new List();
+ while (data.Position < lumpEntry.Offset + lumpEntry.Length)
+ {
+ surfedges.Add(data.ReadInt32());
+ }
+
+ var lump = new SurfedgesLump();
+ lump.Surfedges = [.. surfedges];
+
+ file.SurfedgesLump = lump;
+ }
+
+ // LUMP_MODELS [14]
+ lumpEntry = header.Lumps[(int)LumpType.LUMP_MODELS];
+ if (lumpEntry != null && lumpEntry.Offset > 0 && lumpEntry.Length > 0)
+ {
+ // Seek to the lump offset
+ data.Seek(lumpEntry.Offset, SeekOrigin.Begin);
+
+ // Read the lump data
+ var models = new List();
+ while (data.Position < lumpEntry.Offset + lumpEntry.Length)
+ {
+ var model = data.ReadType();
+ if (model != null)
+ models.Add(model);
+ }
+
+ var lump = new BspModelsLump();
+ lump.Models = [.. models];
+
+ file.ModelsLump = lump;
}
#endregion
@@ -105,28 +373,24 @@ namespace SabreTools.Serialization.Deserializers
/// Stream to parse
/// Filled Half-Life Level header on success, null on error
/// Only recognized versions are 29 and 30
- private static Header? ParseHeader(Stream data)
+ private static BspHeader? ParseHeader(Stream data)
{
- var header = data.ReadType();
+ // TODO: Use marshalling here later
+ var header = new BspHeader();
- if (header == null)
- return null;
- if (header.Version != 29 && header.Version != 30)
+ header.Version = data.ReadInt32();
+ if (header.Version < 29 || header.Version > 30)
return null;
+ header.Lumps = new BspLumpEntry[BSP_HEADER_LUMPS];
+ for (int i = 0; i < BSP_HEADER_LUMPS; i++)
+ {
+ header.Lumps[i] = data.ReadType<BspLumpEntry>()!;
+ }
+
return header;
}
- ///
- /// Parse a Stream into a lump
- ///
- /// Stream to parse
- /// Filled lump on success, null on error
- private static Lump? ParseLump(Stream data)
- {
- return data.ReadType();
- }
-
///
/// Parse a Stream into a Half-Life Level texture header
///
@@ -137,80 +401,14 @@ namespace SabreTools.Serialization.Deserializers
// TODO: Use marshalling here instead of building
var textureHeader = new TextureHeader();
- textureHeader.TextureCount = data.ReadUInt32();
-
- var offsets = new uint[textureHeader.TextureCount];
-
- for (int i = 0; i < textureHeader.TextureCount; i++)
+ textureHeader.MipTextureCount = data.ReadUInt32();
+ textureHeader.Offsets = new int[textureHeader.MipTextureCount];
+ for (int i = 0; i < textureHeader.Offsets.Length; i++)
{
- offsets[i] = data.ReadUInt32();
- if (data.Position >= data.Length)
- break;
+ textureHeader.Offsets[i] = data.ReadInt32();
}
- textureHeader.Offsets = offsets;
-
return textureHeader;
}
-
- ///
- /// Parse a Stream into a texture
- ///
- /// Stream to parse
- /// Mipmap level
- /// Filled texture on success, null on error
- private static Texture ParseTexture(Stream data, uint mipmap = 0)
- {
- // TODO: Use marshalling here instead of building
- var texture = new Texture();
-
- byte[]? name = data.ReadBytes(16);
- if (name != null)
- texture.Name = Encoding.ASCII.GetString(name).TrimEnd('\0');
- texture.Width = data.ReadUInt32();
- texture.Height = data.ReadUInt32();
- texture.Offsets = new uint[4];
- for (int i = 0; i < 4; i++)
- {
- texture.Offsets[i] = data.ReadUInt32();
- }
-
- // Get the size of the pixel data
- uint pixelSize = 0;
- for (int i = 0; i < HL_BSP_MIPMAP_COUNT; i++)
- {
- if (texture.Offsets[i] != 0)
- {
- pixelSize += (texture.Width >> i) * (texture.Height >> i);
- }
- }
-
- // If we have no pixel data
- if (pixelSize == 0)
- return texture;
-
- texture.TextureData = data.ReadBytes((int)pixelSize);
- texture.PaletteSize = data.ReadUInt16();
- texture.PaletteData = data.ReadBytes((int)(texture.PaletteSize * 3));
-
- // Adjust the dimensions based on mipmap level
- switch (mipmap)
- {
- case 1:
- texture.Width /= 2;
- texture.Height /= 2;
- break;
- case 2:
- texture.Width /= 4;
- texture.Height /= 4;
- break;
- case 3:
- texture.Width /= 8;
- texture.Height /= 8;
- break;
- }
-
- return texture;
- }
}
}
\ No newline at end of file
diff --git a/SabreTools.Serialization/Deserializers/VBSP.cs b/SabreTools.Serialization/Deserializers/VBSP.cs
index ef6f3568..5c67dc66 100644
--- a/SabreTools.Serialization/Deserializers/VBSP.cs
+++ b/SabreTools.Serialization/Deserializers/VBSP.cs
@@ -1,15 +1,17 @@
+using System;
+using System.Collections.Generic;
using System.IO;
using System.Text;
using SabreTools.IO.Extensions;
-using SabreTools.Models.VBSP;
-using static SabreTools.Models.VBSP.Constants;
+using SabreTools.Models.BSP;
+using static SabreTools.Models.BSP.Constants;
namespace SabreTools.Serialization.Deserializers
{
- public class VBSP : BaseBinaryDeserializer<Models.VBSP.File>
+ public class VBSP : BaseBinaryDeserializer<VbspFile>
{
///
- public override Models.VBSP.File? Deserialize(Stream? data)
+ public override VbspFile? Deserialize(Stream? data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
@@ -23,13 +25,13 @@ namespace SabreTools.Serialization.Deserializers
long initialOffset = data.Position;
// Create a new Half-Life 2 Level to fill
- var file = new Models.VBSP.File();
+ var file = new VbspFile();
#region Header
// Try to parse the header
var header = ParseHeader(data);
- if (header == null)
+ if (header?.Lumps == null)
return null;
// Set the package header
@@ -37,6 +39,808 @@ namespace SabreTools.Serialization.Deserializers
#endregion
+ #region Lumps
+
+ // LUMP_ENTITIES [0]
+ var lumpEntry = header.Lumps[(int)LumpType.LUMP_ENTITIES];
+ if (lumpEntry != null && lumpEntry.Offset > 0 && lumpEntry.Length > 0)
+ {
+ // Seek to the lump offset
+ data.Seek(lumpEntry.Offset, SeekOrigin.Begin);
+
+ // Read the lump data
+ var entities = new List<Entity>();
+ while (data.Position < lumpEntry.Offset + lumpEntry.Length)
+ {
+ // TODO: Read this into sets of key-value pairs
+ var sb = new StringBuilder();
+ char c = '\0';
+ do
+ {
+ c = (char)data.ReadByteValue();
+ sb.Append(c);
+ } while (c != '}');
+
+ var entity = new Entity();
+ entity.Attributes = new List<KeyValuePair<string, string>>
+ {
+ new("REPLACE", sb.ToString()),
+ };
+ entities.Add(entity);
+ }
+
+ var lump = new EntitiesLump();
+ lump.Entities = [.. entities];
+
+ file.Entities = lump;
+ }
+
+ // LUMP_PLANES [1]
+ lumpEntry = header.Lumps[(int)LumpType.LUMP_PLANES];
+ if (lumpEntry != null && lumpEntry.Offset > 0 && lumpEntry.Length > 0)
+ {
+ // Seek to the lump offset
+ data.Seek(lumpEntry.Offset, SeekOrigin.Begin);
+
+ // Read the lump data
+ var planes = new List();
+ while (data.Position < lumpEntry.Offset + lumpEntry.Length)
+ {
+ var plane = data.ReadType();
+ if (plane != null)
+ planes.Add(plane);
+ }
+
+ var lump = new PlanesLump();
+ lump.Planes = [.. planes];
+
+ file.PlanesLump = lump;
+ }
+
+ // LUMP_TEXDATA [2]
+ lumpEntry = header.Lumps[(int)LumpType.LUMP_TEXTURES];
+ if (lumpEntry != null && lumpEntry.Offset > 0 && lumpEntry.Length > 0)
+ {
+ // Seek to the lump offset
+ data.Seek(lumpEntry.Offset, SeekOrigin.Begin);
+
+ // Read the lump data
+ var texdatas = new List();
+ while (data.Position < lumpEntry.Offset + lumpEntry.Length)
+ {
+ var texdata = data.ReadType();
+ if (texdata != null)
+ texdatas.Add(texdata);
+ }
+
+ var lump = new TexdataLump();
+ lump.Texdatas = [.. texdatas];
+
+ file.TexdataLump = lump;
+ }
+
+ // LUMP_VERTEXES [3]
+ lumpEntry = header.Lumps[(int)LumpType.LUMP_VERTICES];
+ if (lumpEntry != null && lumpEntry.Offset > 0 && lumpEntry.Length > 0)
+ {
+ // Seek to the lump offset
+ data.Seek(lumpEntry.Offset, SeekOrigin.Begin);
+
+ // Read the lump data
+ var vertices = new List();
+ while (data.Position < lumpEntry.Offset + lumpEntry.Length)
+ {
+ vertices.Add(data.ReadType());
+ }
+
+ var lump = new VerticesLump();
+ lump.Vertices = [.. vertices];
+
+ file.VerticesLump = lump;
+ }
+
+ // LUMP_VISIBILITY [4]
+ lumpEntry = header.Lumps[(int)LumpType.LUMP_VISIBILITY];
+ if (lumpEntry != null && lumpEntry.Offset > 0 && lumpEntry.Length > 0)
+ {
+ // Seek to the lump offset
+ data.Seek(lumpEntry.Offset, SeekOrigin.Begin);
+
+ // Read the lump data
+ var lump = new VisibilityLump();
+
+ lump.NumClusters = data.ReadInt32();
+ lump.ByteOffsets = new int[lump.NumClusters, 2];
+ for (int i = 0; i < lump.NumClusters; i++)
+ for (int j = 0; j < 2; j++)
+ {
+ lump.ByteOffsets[i, j] = data.ReadInt32();
+ }
+
+ file.VisibilityLump = lump;
+ }
+
+ // LUMP_NODES [5]
+ lumpEntry = header.Lumps[(int)LumpType.LUMP_NODES];
+ if (lumpEntry != null && lumpEntry.Offset > 0 && lumpEntry.Length > 0)
+ {
+ // Seek to the lump offset
+ data.Seek(lumpEntry.Offset, SeekOrigin.Begin);
+
+ // Read the lump data
+ var nodes = new List();
+ while (data.Position < lumpEntry.Offset + lumpEntry.Length)
+ {
+ var node = data.ReadType();
+ if (node != null)
+ nodes.Add(node);
+ }
+
+ var lump = new VbspNodesLump();
+ lump.Nodes = [.. nodes];
+
+ file.NodesLump = lump;
+ }
+
+ // LUMP_TEXINFO [6]
+ lumpEntry = header.Lumps[(int)LumpType.LUMP_TEXINFO];
+ if (lumpEntry != null && lumpEntry.Offset > 0 && lumpEntry.Length > 0)
+ {
+ // Seek to the lump offset
+ data.Seek(lumpEntry.Offset, SeekOrigin.Begin);
+
+ // Read the lump data
+ var texinfos = new List();
+ while (data.Position < lumpEntry.Offset + lumpEntry.Length)
+ {
+ var texinfo = data.ReadType();
+ if (texinfo != null)
+ texinfos.Add(texinfo);
+ }
+
+ var lump = new VbspTexinfoLump();
+ lump.Texinfos = [.. texinfos];
+
+ file.TexinfoLump = lump;
+ }
+
+ // LUMP_FACES [7]
+ lumpEntry = header.Lumps[(int)LumpType.LUMP_FACES];
+ if (lumpEntry != null && lumpEntry.Offset > 0 && lumpEntry.Length > 0)
+ {
+ // Seek to the lump offset
+ data.Seek(lumpEntry.Offset, SeekOrigin.Begin);
+
+ // Read the lump data
+ var faces = new List();
+ while (data.Position < lumpEntry.Offset + lumpEntry.Length)
+ {
+ var face = data.ReadType();
+ if (face != null)
+ faces.Add(face);
+ }
+
+ var lump = new VbspFacesLump();
+ lump.Faces = [.. faces];
+
+ file.FacesLump = lump;
+ }
+
+ // LUMP_LIGHTING [8]
+ lumpEntry = header.Lumps[(int)LumpType.LUMP_LIGHTING];
+ if (lumpEntry != null && lumpEntry.Offset > 0 && lumpEntry.Length > 0)
+ {
+ // Seek to the lump offset
+ data.Seek(lumpEntry.Offset, SeekOrigin.Begin);
+
+ // Read the lump data
+ var lump = new LightmapLump();
+ lump.Lightmap = new byte[lumpEntry.Length / 3, 3];
+
+ for (int i = 0; i < lumpEntry.Length / 3; i++)
+ for (int j = 0; j < 3; j++)
+ {
+ lump.Lightmap[i, j] = data.ReadByteValue();
+ }
+
+ file.LightmapLump = lump;
+ }
+
+ // LUMP_OCCLUSION [9]
+ lumpEntry = header.Lumps[(int)LumpType.LUMP_CLIPNODES];
+ if (lumpEntry != null && lumpEntry.Offset > 0 && lumpEntry.Length > 0)
+ {
+ // Seek to the lump offset
+ data.Seek(lumpEntry.Offset, SeekOrigin.Begin);
+
+ // Read the lump data
+ var lump = new OcclusionLump();
+
+ lump.Count = data.ReadInt32();
+ lump.Data = new OccluderData[lump.Count];
+ for (int i = 0; i < lump.Count; i++)
+ {
+ var occluderData = data.ReadType();
+ if (occluderData != null)
+ lump.Data[i] = occluderData;
+ }
+ lump.PolyDataCount = data.ReadInt32();
+ lump.PolyData = new OccluderPolyData[lump.PolyDataCount];
+ for (int i = 0; i < lump.PolyDataCount; i++)
+ {
+ var polyData = data.ReadType();
+ if (polyData != null)
+ lump.PolyData[i] = polyData;
+ }
+ lump.VertexIndexCount = data.ReadInt32();
+ lump.VertexIndices = new int[lump.VertexIndexCount];
+ for (int i = 0; i < lump.VertexIndexCount; i++)
+ {
+ lump.VertexIndices[i] = data.ReadInt32();
+ }
+
+ file.OcclusionLump = lump;
+ }
+
+ // LUMP_LEAVES [10]
+ lumpEntry = header.Lumps[(int)LumpType.LUMP_LEAVES];
+ if (lumpEntry != null && lumpEntry.Offset > 0 && lumpEntry.Length > 0)
+ {
+ // Seek to the lump offset
+ data.Seek(lumpEntry.Offset, SeekOrigin.Begin);
+
+ // Read the lump data
+ var leaves = new List();
+ while (data.Position < lumpEntry.Offset + lumpEntry.Length)
+ {
+ // TODO: Fix parsing between V0 and V1+
+ var leaf = data.ReadType();
+ if (leaf != null)
+ leaves.Add(leaf);
+ }
+
+ var lump = new VbspLeavesLump();
+ lump.Leaves = [.. leaves];
+
+ file.LeavesLump = lump;
+ }
+
+ // LUMP_FACEIDS [11]
+ lumpEntry = header.Lumps[(int)LumpType.LUMP_MARKSURFACES];
+ if (lumpEntry != null && lumpEntry.Offset > 0 && lumpEntry.Length > 0)
+ {
+ // Seek to the lump offset
+ data.Seek(lumpEntry.Offset, SeekOrigin.Begin);
+
+ // Read the lump data
+ var marksurfaces = new List();
+ while (data.Position < lumpEntry.Offset + lumpEntry.Length)
+ {
+ marksurfaces.Add(data.ReadUInt16());
+ }
+
+ var lump = new MarksurfacesLump();
+ lump.Marksurfaces = [.. marksurfaces];
+
+ file.MarksurfacesLump = lump;
+ }
+
+ // LUMP_EDGES [12]
+ lumpEntry = header.Lumps[(int)LumpType.LUMP_EDGES];
+ if (lumpEntry != null && lumpEntry.Offset > 0 && lumpEntry.Length > 0)
+ {
+ // Seek to the lump offset
+ data.Seek(lumpEntry.Offset, SeekOrigin.Begin);
+
+ // Read the lump data
+ var edges = new List();
+ while (data.Position < lumpEntry.Offset + lumpEntry.Length)
+ {
+ var edge = data.ReadType();
+ if (edge != null)
+ edges.Add(edge);
+ }
+
+ var lump = new EdgesLump();
+ lump.Edges = [.. edges];
+
+ file.EdgesLump = lump;
+ }
+
+ // LUMP_SURFEDGES [13]
+ lumpEntry = header.Lumps[(int)LumpType.LUMP_SURFEDGES];
+ if (lumpEntry != null && lumpEntry.Offset > 0 && lumpEntry.Length > 0)
+ {
+ // Seek to the lump offset
+ data.Seek(lumpEntry.Offset, SeekOrigin.Begin);
+
+ // Read the lump data
+ var surfedges = new List();
+ while (data.Position < lumpEntry.Offset + lumpEntry.Length)
+ {
+ surfedges.Add(data.ReadInt32());
+ }
+
+ var lump = new SurfedgesLump();
+ lump.Surfedges = [.. surfedges];
+
+ file.SurfedgesLump = lump;
+ }
+
+ // LUMP_MODELS [14]
+ lumpEntry = header.Lumps[(int)LumpType.LUMP_MODELS];
+ if (lumpEntry != null && lumpEntry.Offset > 0 && lumpEntry.Length > 0)
+ {
+ // Seek to the lump offset
+ data.Seek(lumpEntry.Offset, SeekOrigin.Begin);
+
+ // Read the lump data
+ var models = new List();
+ while (data.Position < lumpEntry.Offset + lumpEntry.Length)
+ {
+ var model = data.ReadType();
+ if (model != null)
+ models.Add(model);
+ }
+
+ var lump = new VbspModelsLump();
+ lump.Models = [.. models];
+
+ file.ModelsLump = lump;
+ }
+
+ // LUMP_WORLDLIGHTS [15]
+ lumpEntry = header.Lumps[(int)LumpType.LUMP_WORLDLIGHTS];
+ if (lumpEntry != null && lumpEntry.Offset > 0 && lumpEntry.Length > 0)
+ {
+ // Seek to the lump offset
+ data.Seek(lumpEntry.Offset, SeekOrigin.Begin);
+
+ // Read the lump data
+ var worldLights = new List();
+ while (data.Position < lumpEntry.Offset + lumpEntry.Length)
+ {
+ var worldLight = data.ReadType();
+ if (worldLight != null)
+ worldLights.Add(worldLight);
+ }
+
+ var lump = new WorldLightsLump();
+ lump.WorldLights = [.. worldLights];
+
+ file.LDRWorldLightsLump = lump;
+ }
+
+ // LUMP_LEAFFACES [16]
+ lumpEntry = header.Lumps[(int)LumpType.LUMP_LEAFFACES];
+ if (lumpEntry != null && lumpEntry.Offset > 0 && lumpEntry.Length > 0)
+ {
+ // Seek to the lump offset
+ data.Seek(lumpEntry.Offset, SeekOrigin.Begin);
+
+ // Read the lump data
+ var map = new List();
+ while (data.Position < lumpEntry.Offset + lumpEntry.Length)
+ {
+ map.Add(data.ReadUInt16());
+ }
+
+ var lump = new LeafFacesLump();
+ lump.Map = [.. map];
+
+ file.LeafFacesLump = lump;
+ }
+
+ // LUMP_LEAFBRUSHES [17]
+ lumpEntry = header.Lumps[(int)LumpType.LUMP_LEAFBRUSHES];
+ if (lumpEntry != null && lumpEntry.Offset > 0 && lumpEntry.Length > 0)
+ {
+ // Seek to the lump offset
+ data.Seek(lumpEntry.Offset, SeekOrigin.Begin);
+
+ // Read the lump data
+ var map = new List();
+ while (data.Position < lumpEntry.Offset + lumpEntry.Length)
+ {
+ map.Add(data.ReadUInt16());
+ }
+
+ var lump = new LeafBrushesLump();
+ lump.Map = [.. map];
+
+ file.LeafBrushesLump = lump;
+ }
+
+ // LUMP_BRUSHES [18]
+ lumpEntry = header.Lumps[(int)LumpType.LUMP_BRUSHES];
+ if (lumpEntry != null && lumpEntry.Offset > 0 && lumpEntry.Length > 0)
+ {
+ // Seek to the lump offset
+ data.Seek(lumpEntry.Offset, SeekOrigin.Begin);
+
+ // Read the lump data
+ var brushes = new List();
+ while (data.Position < lumpEntry.Offset + lumpEntry.Length)
+ {
+ var brush = data.ReadType();
+ if (brush != null)
+ brushes.Add(brush);
+ }
+
+ var lump = new BrushesLump();
+ lump.Brushes = [.. brushes];
+
+ file.BrushesLump = lump;
+ }
+
+ // LUMP_BRUSHSIDES [19]
+ lumpEntry = header.Lumps[(int)LumpType.LUMP_BRUSHSIDES];
+ if (lumpEntry != null && lumpEntry.Offset > 0 && lumpEntry.Length > 0)
+ {
+ // Seek to the lump offset
+ data.Seek(lumpEntry.Offset, SeekOrigin.Begin);
+
+ // Read the lump data
+ var brushsides = new List();
+ while (data.Position < lumpEntry.Offset + lumpEntry.Length)
+ {
+ var brushside = data.ReadType();
+ if (brushside != null)
+ brushsides.Add(brushside);
+ }
+
+ var lump = new BrushsidesLump();
+ lump.Brushsides = [.. brushsides];
+
+ file.BrushsidesLump = lump;
+ }
+
+ // TODO: Support LUMP_AREAS [20] when in Models
+ // TODO: Support LUMP_AREAPORTALS [21] when in Models
+ // TODO: Support LUMP_PORTALS / LUMP_UNUSED0 / LUMP_PROPCOLLISION [22] when in Models
+ // TODO: Support LUMP_CLUSTERS / LUMP_UNUSED1 / LUMP_PROPHULLS [23] when in Models
+ // TODO: Support LUMP_PORTALVERTS / LUMP_UNUSED2 / LUMP_FAKEENTITIES / LUMP_PROPHULLVERTS [24] when in Models
+ // TODO: Support LUMP_CLUSTERPORTALS / LUMP_UNUSED3 / LUMP_PROPTRIS [25] when in Models
+
+ // LUMP_DISPINFO [26]
+ lumpEntry = header.Lumps[(int)LumpType.LUMP_DISPINFO];
+ if (lumpEntry != null && lumpEntry.Offset > 0 && lumpEntry.Length > 0)
+ {
+ // Seek to the lump offset
+ data.Seek(lumpEntry.Offset, SeekOrigin.Begin);
+
+ // Read the lump data
+ var dispInfos = new List();
+ while (data.Position < lumpEntry.Offset + lumpEntry.Length)
+ {
+ var dispInfo = data.ReadType();
+ if (dispInfo != null)
+ dispInfos.Add(dispInfo);
+ }
+
+ var lump = new DispInfosLump();
+ lump.Infos = [.. dispInfos];
+
+ file.DispInfoLump = lump;
+ }
+
+ // LUMP_ORIGINALFACES [27]
+ lumpEntry = header.Lumps[(int)LumpType.LUMP_ORIGINALFACES];
+ if (lumpEntry != null && lumpEntry.Offset > 0 && lumpEntry.Length > 0)
+ {
+ // Seek to the lump offset
+ data.Seek(lumpEntry.Offset, SeekOrigin.Begin);
+
+ // Read the lump data
+ var faces = new List();
+ while (data.Position < lumpEntry.Offset + lumpEntry.Length)
+ {
+ var face = data.ReadType();
+ if (face != null)
+ faces.Add(face);
+ }
+
+ var lump = new VbspFacesLump();
+ lump.Faces = [.. faces];
+
+ file.OriginalFacesLump = lump;
+ }
+
+ // TODO: Support LUMP_PHYSDISP [28] when in Models
+ // TODO: Support LUMP_PHYSCOLLIDE [29] when in Models
+ // TODO: Support LUMP_VERTNORMALS [30] when in Models
+ // TODO: Support LUMP_VERTNORMALINDICES [31] when in Models
+ // TODO: Support LUMP_DISP_LIGHTMAP_ALPHAS [32] when in Models
+
+ // LUMP_DISP_VERTS [33]
+ lumpEntry = header.Lumps[(int)LumpType.LUMP_DISP_VERTS];
+ if (lumpEntry != null && lumpEntry.Offset > 0 && lumpEntry.Length > 0)
+ {
+ // Seek to the lump offset
+ data.Seek(lumpEntry.Offset, SeekOrigin.Begin);
+
+ // Read the lump data
+ var verts = new List();
+ while (data.Position < lumpEntry.Offset + lumpEntry.Length)
+ {
+ var vert = data.ReadType();
+ if (vert != null)
+ verts.Add(vert);
+ }
+
+ var lump = new DispVertsLump();
+ lump.Verts = [.. verts];
+
+ file.DispVertLump = lump;
+ }
+
+ // TODO: Support LUMP_DISP_LIGHTMAP_SAMPLE_POSITIONS [34] when in Models
+
+ // LUMP_GAME_LUMP [35]
+ lumpEntry = header.Lumps[(int)LumpType.LUMP_GAME_LUMP];
+ if (lumpEntry != null && lumpEntry.Offset > 0 && lumpEntry.Length > 0)
+ {
+ // Seek to the lump offset
+ data.Seek(lumpEntry.Offset, SeekOrigin.Begin);
+
+ // Read the lump data
+ var lump = new GameLump();
+
+ lump.LumpCount = data.ReadInt32();
+ lump.Directories = new GameLumpDirectory[lump.LumpCount];
+ for (int i = 0; i < lump.LumpCount; i++)
+ {
+ var dir = data.ReadType();
+ if (dir != null)
+ lump.Directories[i] = dir;
+ }
+
+ file.GameLump = lump;
+ }
+
+ // TODO: Support LUMP_LEAFWATERDATA [36] when in Models
+ // TODO: Support LUMP_PRIMITIVES [37] when in Models
+ // TODO: Support LUMP_PRIMVERTS [38] when in Models
+ // TODO: Support LUMP_PRIMINDICES [39] when in Models
+ // TODO: Support LUMP_PAKFILE [40] when in Models
+ // TODO: Support LUMP_CLIPPORTALVERTS [41] when in Models
+
+ // LUMP_CUBEMAPS [42]
+ lumpEntry = header.Lumps[(int)LumpType.LUMP_CUBEMAPS];
+ if (lumpEntry != null && lumpEntry.Offset > 0 && lumpEntry.Length > 0)
+ {
+ // Seek to the lump offset
+ data.Seek(lumpEntry.Offset, SeekOrigin.Begin);
+
+ // Read the lump data
+ var cubemaps = new List();
+ while (data.Position < lumpEntry.Offset + lumpEntry.Length)
+ {
+ var cubemap = data.ReadType();
+ if (cubemap != null)
+ cubemaps.Add(cubemap);
+ }
+
+ var lump = new CubemapsLump();
+ lump.Cubemaps = [.. cubemaps];
+
+ file.CubemapLump = lump;
+ }
+
+ // LUMP_TEXDATA_STRING_DATA [43]
+ lumpEntry = header.Lumps[(int)LumpType.LUMP_TEXDATA_STRING_DATA];
+ if (lumpEntry != null && lumpEntry.Offset > 0 && lumpEntry.Length > 0)
+ {
+ // Seek to the lump offset
+ data.Seek(lumpEntry.Offset, SeekOrigin.Begin);
+
+ // Read the lump data
+ var strings = new List();
+ while (data.Position < lumpEntry.Offset + lumpEntry.Length)
+ {
+ var str = data.ReadNullTerminatedAnsiString();
+ if (str != null)
+ strings.Add(str);
+ }
+
+ var lump = new TexdataStringData();
+ lump.Strings = [.. strings];
+
+ file.TexdataStringData = lump;
+ }
+
+ // LUMP_TEXDATA_STRING_TABLE [44]
+ lumpEntry = header.Lumps[(int)LumpType.LUMP_TEXDATA_STRING_TABLE];
+ if (lumpEntry != null && lumpEntry.Offset > 0 && lumpEntry.Length > 0)
+ {
+ // Seek to the lump offset
+ data.Seek(lumpEntry.Offset, SeekOrigin.Begin);
+
+ // Read the lump data
+ var offsets = new List();
+ while (data.Position < lumpEntry.Offset + lumpEntry.Length)
+ {
+ offsets.Add(data.ReadInt32());
+ }
+
+ var lump = new TexdataStringTable();
+ lump.Offsets = [.. offsets];
+
+ file.TexdataStringTable = lump;
+ }
+
+ // LUMP_OVERLAYS [45]
+ lumpEntry = header.Lumps[(int)LumpType.LUMP_OVERLAYS];
+ if (lumpEntry != null && lumpEntry.Offset > 0 && lumpEntry.Length > 0)
+ {
+ // Seek to the lump offset
+ data.Seek(lumpEntry.Offset, SeekOrigin.Begin);
+
+ // Read the lump data
+ var overlays = new List();
+ while (data.Position < lumpEntry.Offset + lumpEntry.Length)
+ {
+ var overlay = data.ReadType();
+ if (overlay != null)
+ overlays.Add(overlay);
+ }
+
+ var lump = new OverlaysLump();
+ lump.Overlays = [.. overlays];
+
+ file.OverlaysLump = lump;
+ }
+
+ // TODO: Support LUMP_LEAFMINDISTTOWATER [46] when in Models
+ // TODO: Support LUMP_FACE_MACRO_TEXTURE_INFO [47] when in Models
+
+ // LUMP_DISP_TRIS [48]
+ lumpEntry = header.Lumps[(int)LumpType.LUMP_DISP_TRIS];
+ if (lumpEntry != null && lumpEntry.Offset > 0 && lumpEntry.Length > 0)
+ {
+ // Seek to the lump offset
+ data.Seek(lumpEntry.Offset, SeekOrigin.Begin);
+
+ // Read the lump data
+ var tris = new List();
+ while (data.Position < lumpEntry.Offset + lumpEntry.Length)
+ {
+ var tri = data.ReadType();
+ if (tri != null)
+ tris.Add(tri);
+ }
+
+ var lump = new DispTrisLump();
+ lump.Tris = [.. tris];
+
+ file.DispTrisLump = lump;
+ }
+
+ // TODO: Support LUMP_PHYSCOLLIDESURFACE / LUMP_PROP_BLOB [49] when in Models
+ // TODO: Support LUMP_WATEROVERLAYS [50] when in Models
+
+ // LUMP_LIGHTMAPPAGES / LUMP_LEAF_AMBIENT_INDEX_HDR [51]
+ lumpEntry = header.Lumps[(int)LumpType.LUMP_LIGHTMAPPAGES];
+ if (lumpEntry != null && lumpEntry.Offset > 0 && lumpEntry.Length > 0)
+ {
+ // Seek to the lump offset
+ data.Seek(lumpEntry.Offset, SeekOrigin.Begin);
+
+ // Read the lump data
+ var indicies = new List();
+ while (data.Position < lumpEntry.Offset + lumpEntry.Length)
+ {
+ var index = data.ReadType();
+ if (index != null)
+ indicies.Add(index);
+ }
+
+ var lump = new AmbientIndexLump();
+ lump.Indicies = [.. indicies];
+
+ file.HDRAmbientIndexLump = lump;
+ }
+
+ // LUMP_LIGHTMAPPAGEINFOS / LUMP_LEAF_AMBIENT_INDEX [52]
+ lumpEntry = header.Lumps[(int)LumpType.LUMP_LIGHTMAPPAGEINFOS];
+ if (lumpEntry != null && lumpEntry.Offset > 0 && lumpEntry.Length > 0)
+ {
+ // Seek to the lump offset
+ data.Seek(lumpEntry.Offset, SeekOrigin.Begin);
+
+ // Read the lump data
+ var indicies = new List();
+ while (data.Position < lumpEntry.Offset + lumpEntry.Length)
+ {
+ var index = data.ReadType();
+ if (index != null)
+ indicies.Add(index);
+ }
+
+ var lump = new AmbientIndexLump();
+ lump.Indicies = [.. indicies];
+
+ file.LDRAmbientIndexLump = lump;
+ }
+
+ // TODO: Support LUMP_LIGHTING_HDR [53] when in Models
+
+ // LUMP_WORLDLIGHTS_HDR [54]
+ lumpEntry = header.Lumps[(int)LumpType.LUMP_WORLDLIGHTS_HDR];
+ if (lumpEntry != null && lumpEntry.Offset > 0 && lumpEntry.Length > 0)
+ {
+ // Seek to the lump offset
+ data.Seek(lumpEntry.Offset, SeekOrigin.Begin);
+
+ // Read the lump data
+ var worldLights = new List();
+ while (data.Position < lumpEntry.Offset + lumpEntry.Length)
+ {
+ var worldLight = data.ReadType();
+ if (worldLight != null)
+ worldLights.Add(worldLight);
+ }
+
+ var lump = new WorldLightsLump();
+ lump.WorldLights = [.. worldLights];
+
+ file.WorldLightsLump = lump;
+ }
+
+ // LUMP_LEAF_AMBIENT_LIGHTING_HDR [55]
+ lumpEntry = header.Lumps[(int)LumpType.LUMP_LEAF_AMBIENT_LIGHTING_HDR];
+ if (lumpEntry != null && lumpEntry.Offset > 0 && lumpEntry.Length > 0)
+ {
+ // Seek to the lump offset
+ data.Seek(lumpEntry.Offset, SeekOrigin.Begin);
+
+ // Read the lump data
+ var lightings = new List();
+ while (data.Position < lumpEntry.Offset + lumpEntry.Length)
+ {
+ var lighting = data.ReadType();
+ if (lighting != null)
+ lightings.Add(lighting);
+ }
+
+ var lump = new AmbientLightingLump();
+ lump.Lightings = [.. lightings];
+
+ file.HDRAmbientLightingLump = lump;
+ }
+
+ // LUMP_LEAF_AMBIENT_LIGHTING [56]
+ lumpEntry = header.Lumps[(int)LumpType.LUMP_LEAF_AMBIENT_LIGHTING];
+ if (lumpEntry != null && lumpEntry.Offset > 0 && lumpEntry.Length > 0)
+ {
+ // Seek to the lump offset
+ data.Seek(lumpEntry.Offset, SeekOrigin.Begin);
+
+ // Read the lump data
+ var lightings = new List();
+ while (data.Position < lumpEntry.Offset + lumpEntry.Length)
+ {
+ var lighting = data.ReadType();
+ if (lighting != null)
+ lightings.Add(lighting);
+ }
+
+ var lump = new AmbientLightingLump();
+ lump.Lightings = [.. lightings];
+
+ file.LDRAmbientLightingLump = lump;
+ }
+
+ // TODO: Support LUMP_XZIPPAKFILE [57] when in Models
+ // TODO: Support LUMP_FACES_HDR [58] when in Models
+ // TODO: Support LUMP_MAP_FLAGS [59] when in Models
+ // TODO: Support LUMP_OVERLAY_FADES [60] when in Models
+ // TODO: Support LUMP_OVERLAY_SYSTEM_LEVELS [61] when in Models
+ // TODO: Support LUMP_PHYSLEVEL [62] when in Models
+ // TODO: Support LUMP_DISP_MULTIBLEND [63] when in Models
+
+ #endregion
+
return file;
}
@@ -45,61 +849,16 @@ namespace SabreTools.Serialization.Deserializers
///
/// Stream to parse
/// Filled Half-Life 2 Level header on success, null on error
-        private static Header? ParseHeader(Stream data)
+        private static VbspHeader? ParseHeader(Stream data)
         {
-            // TODO: Use marshalling here instead of building
-            var header = new Header();
+            var header = data.ReadType<VbspHeader>();
 
-            byte[]? signature = data.ReadBytes(4);
-            if (signature == null)
+            if (header?.Signature != SignatureString)
                 return null;
-
-            header.Signature = Encoding.ASCII.GetString(signature);
-            if (header.Signature != SignatureString)
+            // Reject any version outside the supported set. A relational/or
+            // pattern is used instead of Array.IndexOf so the check reads as
+            // "not one of the supported versions" (an IndexOf(...) > -1 test
+            // here would reject exactly the versions we support) and so no
+            // extra using directive is required.
+            if (header.Version is not (17 or 18 or 19 or 20 or 21 or 22 or 23 or 25 or 27 or 29 or 0x00040014))
                 return null;
 
-            header.Version = data.ReadInt32();
-            if ((header.Version < 19 || header.Version > 22) && header.Version != 0x00040014)
-                return null;
-
-            header.Lumps = new Lump[HL_VBSP_LUMP_COUNT];
-            for (int i = 0; i < HL_VBSP_LUMP_COUNT; i++)
-            {
-                var lump = ParseLump(data, header.Version);
-                if (lump == null)
-                    return null;
-
-                header.Lumps[i] = lump;
-            }
-
-            header.MapRevision = data.ReadInt32();
-
             return header;
         }
-
- ///
- /// Parse a Stream into a Half-Life 2 Level lump
- ///
- /// Stream to parse
- /// VBSP version
- /// Filled Half-Life 2 Level lump on success, null on error
- private static Lump? ParseLump(Stream data, int version)
- {
- return data.ReadType();
-
- // This block was commented out because test VBSPs with header
- // version 21 had the values in the "right" order already and
- // were causing decompression issues
-
- //if (version >= 21 && version != 0x00040014)
- //{
- // uint temp = lump.Version;
- // lump.Version = lump.Offset;
- // lump.Offset = lump.Length;
- // lump.Length = temp;
- //}
- //
- //return lump
- }
}
}
\ No newline at end of file
diff --git a/SabreTools.Serialization/Deserializers/VPK.cs b/SabreTools.Serialization/Deserializers/VPK.cs
index c27c5085..4a824953 100644
--- a/SabreTools.Serialization/Deserializers/VPK.cs
+++ b/SabreTools.Serialization/Deserializers/VPK.cs
@@ -40,7 +40,7 @@ namespace SabreTools.Serialization.Deserializers
#region Extended Header
- if (header?.Version == 2)
+ if (header.Version == 2)
{
// Try to parse the extended header
var extendedHeader = ParseExtendedHeader(data);
@@ -69,8 +69,8 @@ namespace SabreTools.Serialization.Deserializers
if (header?.Version == 2
&& file.ExtendedHeader != null
- && file.ExtendedHeader.ArchiveHashLength > 0
- && data.Position + file.ExtendedHeader.ArchiveHashLength <= data.Length)
+ && file.ExtendedHeader.ArchiveMD5SectionSize > 0
+ && data.Position + file.ExtendedHeader.ArchiveMD5SectionSize <= data.Length)
{
// Create the archive hashes list
var archiveHashes = new List();
@@ -79,7 +79,7 @@ namespace SabreTools.Serialization.Deserializers
initialOffset = data.Position;
// Try to parse the directory items
- while (data.Position < initialOffset + file.ExtendedHeader.ArchiveHashLength)
+ while (data.Position < initialOffset + file.ExtendedHeader.ArchiveMD5SectionSize)
{
var archiveHash = ParseArchiveHash(data);
if (archiveHash == null)
diff --git a/SabreTools.Serialization/Deserializers/WAD.cs b/SabreTools.Serialization/Deserializers/WAD.cs
deleted file mode 100644
index c0651e1c..00000000
--- a/SabreTools.Serialization/Deserializers/WAD.cs
+++ /dev/null
@@ -1,226 +0,0 @@
-using System.IO;
-using System.Text;
-using SabreTools.IO.Extensions;
-using SabreTools.Models.WAD;
-using static SabreTools.Models.WAD.Constants;
-
-namespace SabreTools.Serialization.Deserializers
-{
- public class WAD : BaseBinaryDeserializer
- {
- ///
- public override Models.WAD.File? Deserialize(Stream? data)
- {
- // If the data is invalid
- if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
- return null;
-
- // If the offset is out of bounds
- if (data.Position < 0 || data.Position >= data.Length)
- return null;
-
- // Cache the current offset
- long initialOffset = data.Position;
-
- // Create a new Half-Life Texture Package to fill
- var file = new Models.WAD.File();
-
- #region Header
-
- // Try to parse the header
- var header = ParseHeader(data);
- if (header == null)
- return null;
-
- // Set the package header
- file.Header = header;
-
- #endregion
-
- #region Lumps
-
- // Get the lump offset
- uint lumpOffset = header.LumpOffset;
- if (lumpOffset < 0 || lumpOffset >= data.Length)
- return null;
-
- // Seek to the lump offset
- data.Seek(lumpOffset, SeekOrigin.Begin);
-
- // Create the lump array
- file.Lumps = new Lump[header.LumpCount];
- for (int i = 0; i < header.LumpCount; i++)
- {
- var lump = ParseLump(data);
- if (lump == null)
- return null;
-
- file.Lumps[i] = lump;
- }
-
- #endregion
-
- #region Lump Infos
-
- // Create the lump info array
- file.LumpInfos = new LumpInfo?[header.LumpCount];
- for (int i = 0; i < header.LumpCount; i++)
- {
- var lump = file.Lumps[i];
- if (lump == null)
- {
- file.LumpInfos[i] = null;
- continue;
- }
-
- if (lump.Compression != 0)
- {
- file.LumpInfos[i] = null;
- continue;
- }
-
- // Get the lump info offset
- uint lumpInfoOffset = lump.Offset;
- if (lumpInfoOffset < 0 || lumpInfoOffset >= data.Length)
- {
- file.LumpInfos[i] = null;
- continue;
- }
-
- // Seek to the lump info offset
- data.Seek(lumpInfoOffset, SeekOrigin.Begin);
-
- // Try to parse the lump info -- TODO: Do we ever set the mipmap level?
- var lumpInfo = ParseLumpInfo(data, lump.Type);
- file.LumpInfos[i] = lumpInfo;
- }
-
- #endregion
-
- return file;
- }
-
- ///
- /// Parse a Stream into a Half-Life Texture Package header
- ///
- /// Stream to parse
- /// Filled Half-Life Texture Package header on success, null on error
- private static Header? ParseHeader(Stream data)
- {
- var header = data.ReadType();
-
- if (header == null)
- return null;
- if (header.Signature != SignatureString)
- return null;
-
- return header;
- }
-
- ///
- /// Parse a Stream into a Half-Life Texture Package lump
- ///
- /// Stream to parse
- /// Filled Half-Life Texture Package lump on success, null on error
- private static Lump? ParseLump(Stream data)
- {
- return data.ReadType();
- }
-
- ///
- /// Parse a Stream into a Half-Life Texture Package lump info
- ///
- /// Stream to parse
- /// Lump type
- /// Mipmap level
- /// Filled Half-Life Texture Package lump info on success, null on error
- private static LumpInfo? ParseLumpInfo(Stream data, byte type, uint mipmap = 0)
- {
- // TODO: Use marshalling here instead of building
- LumpInfo lumpInfo = new LumpInfo();
-
- // Cache the initial offset
- long initialOffset = data.Position;
-
- // Type 0x42 has no name, type 0x43 does. Are these flags?
- if (type == 0x42)
- {
- if (mipmap > 0)
- return null;
-
- lumpInfo.Width = data.ReadUInt32();
- lumpInfo.Height = data.ReadUInt32();
- lumpInfo.PixelData = data.ReadBytes((int)(lumpInfo.Width * lumpInfo.Height));
- lumpInfo.PaletteSize = data.ReadUInt16();
- }
- else if (type == 0x43)
- {
- if (mipmap > 3)
- return null;
-
- byte[]? name = data.ReadBytes(16);
- if (name != null)
- lumpInfo.Name = Encoding.ASCII.GetString(name);
- lumpInfo.Width = data.ReadUInt32();
- lumpInfo.Height = data.ReadUInt32();
- lumpInfo.PixelOffset = data.ReadUInt32();
- lumpInfo.UnknownData = data.ReadBytes(12);
-
- // Cache the current offset
- long currentOffset = data.Position;
-
- // Seek to the pixel data
- data.Seek(initialOffset + lumpInfo.PixelOffset, SeekOrigin.Begin);
-
- // Read the pixel data
- lumpInfo.PixelData = data.ReadBytes((int)(lumpInfo.Width * lumpInfo.Height));
-
- // Seek back to the offset
- data.Seek(currentOffset, SeekOrigin.Begin);
-
- uint pixelSize = lumpInfo.Width * lumpInfo.Height;
-
- // Mipmap data -- TODO: How do we determine this during initial parsing?
- switch (mipmap)
- {
- case 1: _ = data.ReadBytes((int)pixelSize); break;
- case 2: _ = data.ReadBytes((int)(pixelSize + (pixelSize / 4))); break;
- case 3: _ = data.ReadBytes((int)(pixelSize + (pixelSize / 4) + (pixelSize / 16))); break;
- default: return null;
- }
-
- _ = data.ReadBytes((int)(pixelSize + (pixelSize / 4) + (pixelSize / 16) + (pixelSize / 64))); // Pixel data
- lumpInfo.PaletteSize = data.ReadUInt16();
- lumpInfo.PaletteData = data.ReadBytes((int)lumpInfo.PaletteSize * 3);
- }
- else
- {
- return null;
- }
-
- // Adjust based on mipmap level
- switch (mipmap)
- {
- case 1:
- lumpInfo.Width /= 2;
- lumpInfo.Height /= 2;
- break;
-
- case 2:
- lumpInfo.Width /= 4;
- lumpInfo.Height /= 4;
- break;
-
- case 3:
- lumpInfo.Width /= 8;
- lumpInfo.Height /= 8;
- break;
-
- default:
- return null;
- }
-
- return lumpInfo;
- }
- }
-}
\ No newline at end of file
diff --git a/SabreTools.Serialization/Deserializers/WAD3.cs b/SabreTools.Serialization/Deserializers/WAD3.cs
new file mode 100644
index 00000000..6f3a4d73
--- /dev/null
+++ b/SabreTools.Serialization/Deserializers/WAD3.cs
@@ -0,0 +1,272 @@
+using System.IO;
+using System.Text;
+using SabreTools.IO.Extensions;
+using SabreTools.Models.WAD3;
+using static SabreTools.Models.WAD3.Constants;
+
+namespace SabreTools.Serialization.Deserializers
+{
+ public class WAD3 : BaseBinaryDeserializer
+ {
+ ///
+        /// <inheritdoc/>
+        public override Models.WAD3.File? Deserialize(Stream? data)
+        {
+            // If the data is invalid
+            if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
+                return null;
+
+            // If the offset is out of bounds
+            if (data.Position < 0 || data.Position >= data.Length)
+                return null;
+
+            // Cache the current offset
+            long initialOffset = data.Position;
+
+            // Create a new Half-Life Texture Package to fill
+            var file = new Models.WAD3.File();
+
+            #region Header
+
+            // Try to parse the header
+            var header = ParseHeader(data);
+            if (header == null)
+                return null;
+
+            // Set the package header
+            file.Header = header;
+
+            #endregion
+
+            #region Directory Entries
+
+            // Get the directory offset -- DirOffset is unsigned, so only the
+            // upper bound needs checking (a "< 0" test is always false on uint)
+            uint dirOffset = header.DirOffset;
+            if (dirOffset >= data.Length)
+                return null;
+
+            // Seek to the directory offset
+            data.Seek(dirOffset, SeekOrigin.Begin);
+
+            // Create the directory entry array
+            file.DirEntries = new DirEntry[header.NumDirs];
+            for (int i = 0; i < header.NumDirs; i++)
+            {
+                var dirEntry = ParseDirEntry(data);
+                if (dirEntry == null)
+                    return null;
+
+                file.DirEntries[i] = dirEntry;
+            }
+
+            #endregion
+
+            #region File Entries
+
+            // Create the file entry array
+            file.FileEntries = new FileEntry?[header.NumDirs];
+            for (int i = 0; i < header.NumDirs; i++)
+            {
+                var dirEntry = file.DirEntries[i];
+                if (dirEntry == null)
+                    continue;
+
+                // TODO: Handle compressed entries
+                if (dirEntry.Compression != 0)
+                    continue;
+
+                // Get the file entry offset -- Offset is unsigned, so only the
+                // upper bound needs checking
+                uint fileEntryOffset = dirEntry.Offset;
+                if (fileEntryOffset >= data.Length)
+                    continue;
+
+                // Seek to the file entry offset
+                data.Seek(fileEntryOffset, SeekOrigin.Begin);
+
+                // Try to parse the file entry
+                var fileEntry = ParseFileEntry(data, dirEntry.Type);
+                if (fileEntry != null)
+                    file.FileEntries[i] = fileEntry;
+            }
+
+            #endregion
+
+            return file;
+        }
+
+ ///
+ /// Parse a Stream into a Half-Life Texture Package header
+ ///
+ /// Stream to parse
+ /// Filled Half-Life Texture Package header on success, null on error
+ private static Header? ParseHeader(Stream data)
+ {
+ var header = data.ReadType();
+
+ if (header == null)
+ return null;
+ if (header.Signature != SignatureString)
+ return null;
+
+ return header;
+ }
+
+ ///
+ /// Parse a Stream into a Half-Life Texture Package directory entry
+ ///
+ /// Stream to parse
+ /// Filled Half-Life Texture Package directory entry on success, null on error
+ private static DirEntry? ParseDirEntry(Stream data)
+ {
+ return data.ReadType();
+ }
+
+ ///
+ /// Parse a Stream into a Half-Life Texture Package file entry
+ ///
+ /// Stream to parse
+ /// File entry type
+ /// Filled Half-Life Texture Package file entry on success, null on error
+ private static FileEntry? ParseFileEntry(Stream data, FileType type)
+ {
+ return type switch
+ {
+ FileType.Spraydecal
+ or FileType.Miptex => ParseMipTex(data),
+ FileType.Qpic => ParseQpicImage(data),
+ FileType.Font => ParseFont(data),
+ _ => null,
+ };
+ }
+
+ ///
+ /// Parse a Stream into a Half-Life Texture Package MipTex
+ ///
+ /// Stream to parse
+ /// Filled Half-Life Texture Package MipTex on success, null on error
+        private static MipTex ParseMipTex(Stream data)
+        {
+            var miptex = new MipTex();
+
+            // Name is a fixed 16-byte, null-padded ASCII field; guard against
+            // a short read at end-of-stream (ReadBytes may return null)
+            byte[]? nameBytes = data.ReadBytes(16);
+            miptex.Name = nameBytes != null
+                ? Encoding.ASCII.GetString(nameBytes).TrimEnd('\0')
+                : string.Empty;
+            miptex.Width = data.ReadUInt32();
+            miptex.Height = data.ReadUInt32();
+            miptex.MipOffsets = new uint[4];
+            for (int i = 0; i < miptex.MipOffsets.Length; i++)
+            {
+                miptex.MipOffsets[i] = data.ReadUInt32();
+            }
+            miptex.MipImages = new MipMap[4];
+            for (int i = 0; i < miptex.MipImages.Length; i++)
+            {
+                // Each successive mip level halves both dimensions
+                // (w*h, w/2*h/2, w/4*h/4, w/8*h/8 in the WAD3 miptex layout);
+                // reading all four at full size would over-read the stream
+                miptex.MipImages[i] = ParseMipMap(data, miptex.Width >> i, miptex.Height >> i);
+            }
+            miptex.ColorsUsed = data.ReadUInt16();
+            miptex.Palette = new byte[miptex.ColorsUsed, 3];
+            for (int i = 0; i < miptex.ColorsUsed; i++)
+            for (int j = 0; j < 3; j++)
+            {
+                miptex.Palette[i, j] = data.ReadByteValue();
+            }
+
+            return miptex;
+        }
+
+ ///
+ /// Parse a Stream into a Half-Life Texture Package MipMap
+ ///
+ /// Stream to parse
+ /// Filled Half-Life Texture Package MipMap on success, null on error
+ private static MipMap ParseMipMap(Stream data, uint width, uint height)
+ {
+ var mipmap = new MipMap();
+
+ mipmap.Data = new byte[width, height];
+ for (int i = 0; i < width; i++)
+ for (int j = 0; j < height; j++)
+ {
+ mipmap.Data[i, j] = data.ReadByteValue();
+ }
+
+ return mipmap;
+ }
+
+ ///
+ /// Parse a Stream into a Half-Life Texture Package Qpic image
+ ///
+ /// Stream to parse
+ /// Filled Half-Life Texture Package Qpic image on success, null on error
+ private static QpicImage ParseQpicImage(Stream data)
+ {
+ var qpic = new QpicImage();
+
+ qpic.Width = data.ReadUInt32();
+ qpic.Height = data.ReadUInt32();
+ qpic.Data = new byte[qpic.Height, qpic.Width];
+ for (int i = 0; i < qpic.Height; i++)
+ for (int j = 0; j < qpic.Width; j++)
+ {
+ qpic.Data[i, j] = data.ReadByteValue();
+ }
+ qpic.ColorsUsed = data.ReadUInt16();
+ qpic.Palette = new byte[qpic.ColorsUsed, 3];
+ for (int i = 0; i < qpic.ColorsUsed; i++)
+ for (int j = 0; j < 3; j++)
+ {
+ qpic.Palette[i, j] = data.ReadByteValue();
+ }
+
+ return qpic;
+ }
+
+ ///
+ /// Parse a Stream into a Half-Life Texture Package font
+ ///
+ /// Stream to parse
+ /// Filled Half-Life Texture Package font on success, null on error
+ private static Font ParseFont(Stream data)
+ {
+ var font = new Font();
+
+ font.Width = data.ReadUInt32();
+ font.Height = data.ReadUInt32();
+ font.RowCount = data.ReadUInt32();
+ font.RowHeight = data.ReadUInt32();
+ font.FontInfo = new CharInfo[256];
+ for (int i = 0; i < font.FontInfo.Length; i++)
+ {
+ font.FontInfo[i] = ParseCharInfo(data);
+ }
+ font.Data = new byte[font.Height, font.Width];
+ for (int i = 0; i < font.Height; i++)
+ for (int j = 0; j < font.Width; j++)
+ {
+ font.Data[i, j] = data.ReadByteValue();
+ }
+ font.ColorsUsed = data.ReadUInt16();
+ font.Palette = new byte[font.ColorsUsed, 3];
+ for (int i = 0; i < font.ColorsUsed; i++)
+ for (int j = 0; j < 3; j++)
+ {
+ font.Palette[i, j] = data.ReadByteValue();
+ }
+
+ return font;
+ }
+
+ ///
+ /// Parse a Stream into a Half-Life Texture Package CharInfo
+ ///
+ /// Stream to parse
+ /// Filled Half-Life Texture Package CharInfo on success, null on error
+ private static CharInfo ParseCharInfo(Stream data)
+ {
+ var charinfo = new CharInfo();
+
+ charinfo.StartOffset = data.ReadUInt16();
+ charinfo.CharWidth = data.ReadUInt16();
+
+ return charinfo;
+ }
+ }
+}
\ No newline at end of file
diff --git a/SabreTools.Serialization/Printer.cs b/SabreTools.Serialization/Printer.cs
index 6fea86e3..0a798ead 100644
--- a/SabreTools.Serialization/Printer.cs
+++ b/SabreTools.Serialization/Printer.cs
@@ -63,7 +63,7 @@ namespace SabreTools.Serialization
Wrapper.SGA item => item.PrettyPrint(),
Wrapper.VBSP item => item.PrettyPrint(),
Wrapper.VPK item => item.PrettyPrint(),
- Wrapper.WAD item => item.PrettyPrint(),
+ Wrapper.WAD3 item => item.PrettyPrint(),
Wrapper.XeMID item => item.PrettyPrint(),
Wrapper.XMID item => item.PrettyPrint(),
Wrapper.XZP item => item.PrettyPrint(),
@@ -108,7 +108,7 @@ namespace SabreTools.Serialization
Wrapper.SGA item => item.ExportJSON(),
Wrapper.VBSP item => item.ExportJSON(),
Wrapper.VPK item => item.ExportJSON(),
- Wrapper.WAD item => item.ExportJSON(),
+ Wrapper.WAD3 item => item.ExportJSON(),
Wrapper.XeMID item => item.ExportJSON(),
Wrapper.XMID item => item.ExportJSON(),
Wrapper.XZP item => item.ExportJSON(),
@@ -412,10 +412,10 @@ namespace SabreTools.Serialization
///
/// Export the item information as pretty-printed text
///
- private static StringBuilder PrettyPrint(this Wrapper.WAD item)
+ private static StringBuilder PrettyPrint(this Wrapper.WAD3 item)
{
var builder = new StringBuilder();
- WAD.Print(builder, item.Model);
+ WAD3.Print(builder, item.Model);
return builder;
}
diff --git a/SabreTools.Serialization/Printers/BSP.cs b/SabreTools.Serialization/Printers/BSP.cs
index efbb084d..5cf8e35b 100644
--- a/SabreTools.Serialization/Printers/BSP.cs
+++ b/SabreTools.Serialization/Printers/BSP.cs
@@ -1,29 +1,26 @@
using System.Text;
using SabreTools.Models.BSP;
using SabreTools.Serialization.Interfaces;
-using static SabreTools.Models.BSP.Constants;
namespace SabreTools.Serialization.Printers
{
- public class BSP : IPrinter
+ public class BSP : IPrinter
{
///
- public void PrintInformation(StringBuilder builder, File model)
+ public void PrintInformation(StringBuilder builder, BspFile model)
=> Print(builder, model);
- public static void Print(StringBuilder builder, File file)
+ public static void Print(StringBuilder builder, BspFile file)
{
builder.AppendLine("BSP Information:");
builder.AppendLine("-------------------------");
builder.AppendLine();
Print(builder, file.Header);
- Print(builder, file.Lumps);
- Print(builder, file.TextureHeader);
- Print(builder, file.Textures);
+ PrintLumps(builder, file);
}
- private static void Print(StringBuilder builder, Header? header)
+ private static void Print(StringBuilder builder, BspHeader? header)
{
builder.AppendLine(" Header Information:");
builder.AppendLine(" -------------------------");
@@ -38,30 +35,21 @@ namespace SabreTools.Serialization.Printers
builder.AppendLine();
}
- private static void Print(StringBuilder builder, Lump?[]? lumps)
+ private static void PrintLumps(StringBuilder builder, BspFile? model)
{
builder.AppendLine(" Lumps Information:");
builder.AppendLine(" -------------------------");
- if (lumps == null || lumps.Length == 0)
+ if (model?.Header?.Lumps == null || model.Header.Lumps.Length == 0)
{
builder.AppendLine(" No lumps");
builder.AppendLine();
return;
}
- for (int i = 0; i < lumps.Length; i++)
+ for (int i = 0; i < model.Header.Lumps.Length; i++)
{
- var lump = lumps[i];
- string specialLumpName = string.Empty;
- switch (i)
- {
- case HL_BSP_LUMP_ENTITIES:
- specialLumpName = " (entities)";
- break;
- case HL_BSP_LUMP_TEXTUREDATA:
- specialLumpName = " (texture data)";
- break;
- }
+ var lump = model.Header.Lumps[i];
+ string specialLumpName = GetLumpName(i);
builder.AppendLine($" Lump {i}{specialLumpName}");
if (lump == null)
@@ -72,79 +60,291 @@ namespace SabreTools.Serialization.Printers
builder.AppendLine(lump.Offset, " Offset");
builder.AppendLine(lump.Length, " Length");
+ switch ((LumpType)i)
+ {
+ case LumpType.LUMP_ENTITIES:
+ if (model.Entities?.Entities == null || model.Entities.Entities.Length == 0)
+ {
+ builder.AppendLine(" No data");
+ }
+ else
+ {
+ for (int j = 0; j < model.Entities.Entities.Length; j++)
+ {
+ // TODO: Implement entity printing
+ var entity = model.Entities.Entities[j];
+ builder.AppendLine($" Entity {j}");
+ builder.AppendLine(" Entity data is not parsed properly");
+ }
+ }
+ break;
+ case LumpType.LUMP_PLANES:
+ if (model.PlanesLump?.Planes == null || model.PlanesLump.Planes.Length == 0)
+ {
+ builder.AppendLine(" No data");
+ }
+ else
+ {
+ for (int j = 0; j < model.PlanesLump.Planes.Length; j++)
+ {
+ var plane = model.PlanesLump.Planes[j];
+ builder.AppendLine($" Plane {j}");
+ builder.AppendLine($" Normal vector: {plane.NormalVector.X}, {plane.NormalVector.Y}, {plane.NormalVector.Z}");
+ builder.AppendLine(plane.Distance, " Distance");
+ builder.AppendLine($" Plane type: {plane.PlaneType} (0x{plane.PlaneType:X})");
+ }
+ }
+ break;
+ case LumpType.LUMP_TEXTURES:
+ if (model.TextureLump?.Textures == null || model.TextureLump.Textures.Length == 0)
+ {
+ builder.AppendLine(" No data");
+ }
+ else
+ {
+ var header = model.TextureLump.Header;
+ if (header == null)
+ {
+ builder.AppendLine(" No texture header");
+ }
+ else
+ {
+ builder.AppendLine(" Texture Header:");
+ builder.AppendLine(header.MipTextureCount, " MipTexture count");
+ builder.AppendLine(header.Offsets, " Offsets");
+ }
+
+ builder.AppendLine(" Textures:");
+ for (int j = 0; j < model.TextureLump.Textures.Length; j++)
+ {
+ var texture = model.TextureLump.Textures[j];
+ builder.AppendLine($" Texture {j}");
+ builder.AppendLine(texture.Name, " Name");
+ builder.AppendLine(texture.Width, " Width");
+ builder.AppendLine(texture.Height, " Height");
+ builder.AppendLine(texture.Offsets, " Offsets");
+ }
+ }
+ break;
+ case LumpType.LUMP_VERTICES:
+ if (model.VerticesLump?.Vertices == null || model.VerticesLump.Vertices.Length == 0)
+ {
+ builder.AppendLine(" No data");
+ }
+ else
+ {
+ for (int j = 0; j < model.VerticesLump.Vertices.Length; j++)
+ {
+ var vertex = model.VerticesLump.Vertices[j];
+ builder.AppendLine($" Vertex {j}: {vertex.X}, {vertex.Y}, {vertex.Z}");
+ }
+ }
+ break;
+ case LumpType.LUMP_VISIBILITY:
+ // TODO: Implement when added to Models
+ // if (model.VisibilityLump == null)
+ // {
+ // builder.AppendLine(" No data");
+ // }
+ // else
+ // {
+ // builder.AppendLine(model.VisibilityLump.NumClusters, " Cluster count");
+ // builder.AppendLine(model.VisibilityLump.ByteOffsets, " Byte offsets");
+ // }
+ break;
+ case LumpType.LUMP_NODES:
+ if (model.NodesLump?.Nodes == null || model.NodesLump.Nodes.Length == 0)
+ {
+ builder.AppendLine(" No data");
+ }
+ else
+ {
+ for (int j = 0; j < model.NodesLump.Nodes.Length; j++)
+ {
+ var node = model.NodesLump.Nodes[j];
+ builder.AppendLine($" Node {j}");
+ builder.AppendLine(node.Children, " Children");
+ builder.AppendLine(node.Mins, " Mins");
+ builder.AppendLine(node.Maxs, " Maxs");
+ builder.AppendLine(node.FirstFace, " First face index");
+ builder.AppendLine(node.FaceCount, " Count of faces");
+ }
+ }
+ break;
+ case LumpType.LUMP_TEXINFO:
+ if (model.TexinfoLump?.Texinfos == null || model.TexinfoLump.Texinfos.Length == 0)
+ {
+ builder.AppendLine(" No data");
+ }
+ else
+ {
+ for (int j = 0; j < model.TexinfoLump.Texinfos.Length; j++)
+ {
+ var texinfo = model.TexinfoLump.Texinfos[j];
+ builder.AppendLine($" Texinfo {j}");
+ builder.AppendLine($" S-Vector: {texinfo.SVector.X}, {texinfo.SVector.Y}, {texinfo.SVector.Z}");
+ builder.AppendLine(texinfo.TextureSShift, " Texture shift in S direction");
+ builder.AppendLine($" T-Vector: {texinfo.TVector.X}, {texinfo.TVector.Y}, {texinfo.TVector.Z}");
+ builder.AppendLine(texinfo.TextureTShift, " Texture shift in T direction");
+ builder.AppendLine(texinfo.MiptexIndex, " Miptex index");
+ builder.AppendLine($" Flags: {texinfo.Flags} (0x{texinfo.Flags:X})");
+ }
+ }
+ break;
+ case LumpType.LUMP_FACES:
+ if (model.FacesLump?.Faces == null || model.FacesLump.Faces.Length == 0)
+ {
+ builder.AppendLine(" No data");
+ }
+ else
+ {
+ for (int j = 0; j < model.FacesLump.Faces.Length; j++)
+ {
+ var face = model.FacesLump.Faces[j];
+ builder.AppendLine($" Face {j}");
+ builder.AppendLine(face.PlaneIndex, " Plane index");
+ builder.AppendLine(face.PlaneSideCount, " Plane side count");
+ builder.AppendLine(face.FirstEdgeIndex, " First surfedge index");
+ builder.AppendLine(face.NumberOfEdges, " Surfedge count");
+ builder.AppendLine(face.TextureInfoIndex, " Texture info index");
+ builder.AppendLine(face.LightingStyles, " Lighting styles");
+ builder.AppendLine(face.LightmapOffset, " Lightmap offset");
+ }
+ }
+ break;
+ case LumpType.LUMP_LIGHTING:
+ if (model.LightmapLump?.Lightmap == null || model.LightmapLump.Lightmap.Length == 0)
+ builder.AppendLine(" No data");
+ else
+ builder.AppendLine(" Lightmap data skipped...");
+ break;
+ case LumpType.LUMP_CLIPNODES:
+ if (model.ClipnodesLump?.Clipnodes == null || model.ClipnodesLump.Clipnodes.Length == 0)
+ {
+ builder.AppendLine(" No data");
+ }
+ else
+ {
+ for (int j = 0; j < model.ClipnodesLump.Clipnodes.Length; j++)
+ {
+ var clipnode = model.ClipnodesLump.Clipnodes[j];
+ builder.AppendLine($" Clipnode {j}");
+ builder.AppendLine(clipnode.PlaneIndex, " Plane index");
+ builder.AppendLine(clipnode.ChildrenIndices, " Children indices");
+ }
+ }
+ break;
+ case LumpType.LUMP_LEAVES:
+ if (model.LeavesLump?.Leaves == null || model.LeavesLump.Leaves.Length == 0)
+ {
+ builder.AppendLine(" No data");
+ }
+ else
+ {
+ for (int j = 0; j < model.LeavesLump.Leaves.Length; j++)
+ {
+ var leaf = model.LeavesLump.Leaves[j];
+ builder.AppendLine($" Leaf {j}");
+ builder.AppendLine($" Contents: {leaf.Contents} (0x{leaf.Contents:X})");
+ builder.AppendLine(leaf.VisOffset, " Visibility offset");
+ builder.AppendLine(leaf.Mins, " Mins");
+ builder.AppendLine(leaf.Maxs, " Maxs");
+ builder.AppendLine(leaf.FirstMarkSurfaceIndex, " First marksurface index");
+ builder.AppendLine(leaf.MarkSurfacesCount, " Marksurfaces count");
+ builder.AppendLine(leaf.AmbientLevels, " Ambient sound levels");
+ }
+ }
+ break;
+ case LumpType.LUMP_MARKSURFACES:
+ if (model.MarksurfacesLump?.Marksurfaces == null || model.MarksurfacesLump.Marksurfaces.Length == 0)
+ {
+ builder.AppendLine(" No data");
+ }
+ else
+ {
+ for (int j = 0; j < model.MarksurfacesLump.Marksurfaces.Length; j++)
+ {
+ var marksurface = model.MarksurfacesLump.Marksurfaces[j];
+ builder.AppendLine($" Marksurface {j}: {marksurface} (0x{marksurface:X4})");
+ }
+ }
+ break;
+ case LumpType.LUMP_EDGES:
+ if (model.EdgesLump?.Edges == null || model.EdgesLump.Edges.Length == 0)
+ {
+ builder.AppendLine(" No data");
+ }
+ else
+ {
+ for (int j = 0; j < model.EdgesLump.Edges.Length; j++)
+ {
+ var edge = model.EdgesLump.Edges[j];
+ builder.AppendLine($" Edge {j}");
+ builder.AppendLine(edge.VertexIndices, " Vertex indices");
+ }
+ }
+ break;
+ case LumpType.LUMP_SURFEDGES:
+ if (model.SurfedgesLump?.Surfedges == null || model.SurfedgesLump.Surfedges.Length == 0)
+ {
+ builder.AppendLine(" No data");
+ }
+ else
+ {
+ for (int j = 0; j < model.SurfedgesLump.Surfedges.Length; j++)
+ {
+ var surfedge = model.SurfedgesLump.Surfedges[j];
+ builder.AppendLine($" Surfedge {j}: {surfedge} (0x{surfedge:X4})");
+ }
+ }
+ break;
+ case LumpType.LUMP_MODELS:
+ if (model.ModelsLump?.Models == null || model.ModelsLump.Models.Length == 0)
+ {
+ builder.AppendLine(" No data");
+ }
+ else
+ {
+ for (int j = 0; j < model.ModelsLump.Models.Length; j++)
+ {
+ var bmodel = model.ModelsLump.Models[j];
+ builder.AppendLine($" Model {j}");
+ builder.AppendLine($" Mins: {bmodel.Mins.X}, {bmodel.Mins.Y}, {bmodel.Mins.Z}");
+ builder.AppendLine($" Maxs: {bmodel.Maxs.X}, {bmodel.Maxs.Y}, {bmodel.Maxs.Z}");
+ builder.AppendLine($" Origin vector: {bmodel.OriginVector.X}, {bmodel.OriginVector.Y}, {bmodel.OriginVector.Z}");
+ builder.AppendLine(bmodel.HeadnodesIndex, " Headnodes index");
+ builder.AppendLine(bmodel.VisLeafsCount, " ??? (VisLeafsCount)");
+ builder.AppendLine(bmodel.FirstFaceIndex, " First face index");
+ builder.AppendLine(bmodel.FacesCount, " Faces count");
+ }
+ }
+ break;
+ }
}
builder.AppendLine();
}
- private static void Print(StringBuilder builder, TextureHeader? header)
+ private static string GetLumpName(int i)
{
- builder.AppendLine(" Texture Header Information:");
- builder.AppendLine(" -------------------------");
- if (header == null)
+ return (LumpType)i switch
{
- builder.AppendLine(" No texture header");
- builder.AppendLine();
- return;
- }
-
- builder.AppendLine(header.TextureCount, " Texture count");
- builder.AppendLine(" Offsets:");
- if (header.Offsets == null || header.Offsets.Length == 0)
- {
- builder.AppendLine(" No offsets");
- builder.AppendLine();
- return;
- }
-
- for (int i = 0; i < header.Offsets.Length; i++)
- {
- builder.AppendLine(header.Offsets[i], $" Offset {i}");
- }
- builder.AppendLine();
- }
-
- private static void Print(StringBuilder builder, Texture?[]? textures)
- {
- builder.AppendLine(" Textures Information:");
- builder.AppendLine(" -------------------------");
- if (textures == null || textures.Length == 0)
- {
- builder.AppendLine(" No textures");
- builder.AppendLine();
- return;
- }
-
- for (int i = 0; i < textures.Length; i++)
- {
- var texture = textures[i];
- builder.AppendLine($" Texture {i}");
- if (texture == null)
- {
- builder.AppendLine(" [NULL]");
- continue;
- }
-
- builder.AppendLine(texture.Name, " Name");
- builder.AppendLine(texture.Width, " Width");
- builder.AppendLine(texture.Height, " Height");
- builder.AppendLine(" Offsets:");
- if (texture.Offsets == null || texture.Offsets.Length == 0)
- {
- builder.AppendLine(" No offsets");
- continue;
- }
- else
- {
- for (int j = 0; j < texture.Offsets.Length; j++)
- {
- builder.AppendLine(texture.Offsets[i], $" Offset {j}");
- }
- }
- // Skip texture data
- builder.AppendLine(texture.PaletteSize, " Palette size");
- // Skip palette data
- }
- builder.AppendLine();
+ LumpType.LUMP_ENTITIES => " - LUMP_ENTITIES",
+ LumpType.LUMP_PLANES => " - LUMP_PLANES",
+ LumpType.LUMP_TEXTURES => " - LUMP_TEXTURES",
+ LumpType.LUMP_VERTICES => " - LUMP_VERTICES",
+ LumpType.LUMP_VISIBILITY => " - LUMP_VISIBILITY",
+ LumpType.LUMP_NODES => " - LUMP_NODES",
+ LumpType.LUMP_TEXINFO => " - LUMP_TEXINFO",
+ LumpType.LUMP_FACES => " - LUMP_FACES",
+ LumpType.LUMP_LIGHTING => " - LUMP_LIGHTING",
+ LumpType.LUMP_CLIPNODES => " - LUMP_CLIPNODES",
+ LumpType.LUMP_LEAVES => " - LUMP_LEAVES",
+ LumpType.LUMP_MARKSURFACES => " - LUMP_MARKSURFACES",
+ LumpType.LUMP_EDGES => " - LUMP_EDGES",
+ LumpType.LUMP_SURFEDGES => " - LUMP_SURFEDGES",
+ LumpType.LUMP_MODELS => " - LUMP_MODELS",
+ _ => string.Empty,
+ };
}
}
}
\ No newline at end of file
diff --git a/SabreTools.Serialization/Printers/VBSP.cs b/SabreTools.Serialization/Printers/VBSP.cs
index 84673caa..00660533 100644
--- a/SabreTools.Serialization/Printers/VBSP.cs
+++ b/SabreTools.Serialization/Printers/VBSP.cs
@@ -1,26 +1,26 @@
using System.Text;
-using SabreTools.Models.VBSP;
+using SabreTools.Models.BSP;
using SabreTools.Serialization.Interfaces;
-using static SabreTools.Models.VBSP.Constants;
namespace SabreTools.Serialization.Printers
{
- public class VBSP : IPrinter
+ public class VBSP : IPrinter
{
///
- public void PrintInformation(StringBuilder builder, File model)
+ public void PrintInformation(StringBuilder builder, VbspFile model)
=> Print(builder, model);
- public static void Print(StringBuilder builder, File file)
+ public static void Print(StringBuilder builder, VbspFile file)
{
- builder.AppendLine("VBSP Information:");
+ builder.AppendLine("VBSP Information:");
builder.AppendLine("-------------------------");
builder.AppendLine();
Print(builder, file.Header);
+ PrintLumps(builder, file);
}
- private static void Print(StringBuilder builder, Header? header)
+ private static void Print(StringBuilder builder, VbspHeader? header)
{
builder.AppendLine(" Header Information:");
builder.AppendLine(" -------------------------");
@@ -35,34 +35,23 @@ namespace SabreTools.Serialization.Printers
builder.AppendLine(header.Version, " Version");
builder.AppendLine(header.MapRevision, " Map revision");
builder.AppendLine();
-
- Print(builder, header.Lumps);
}
- private static void Print(StringBuilder builder, Lump?[]? lumps)
+ private static void PrintLumps(StringBuilder builder, VbspFile? model)
{
builder.AppendLine(" Lumps Information:");
builder.AppendLine(" -------------------------");
- if (lumps == null || lumps.Length == 0)
+ if (model?.Header?.Lumps == null || model.Header.Lumps.Length == 0)
{
builder.AppendLine(" No lumps");
builder.AppendLine();
return;
}
- for (int i = 0; i < lumps.Length; i++)
+ for (int i = 0; i < model.Header.Lumps.Length; i++)
{
- var lump = lumps[i];
- string specialLumpName = string.Empty;
- switch (i)
- {
- case HL_VBSP_LUMP_ENTITIES:
- specialLumpName = " (entities)";
- break;
- case HL_VBSP_LUMP_PAKFILE:
- specialLumpName = " (pakfile)";
- break;
- }
+ var lump = model.Header.Lumps[i];
+ string specialLumpName = GetLumpName(i);
builder.AppendLine($" Lump {i}{specialLumpName}");
if (lump == null)
@@ -73,10 +62,396 @@ namespace SabreTools.Serialization.Printers
builder.AppendLine(lump.Offset, " Offset");
builder.AppendLine(lump.Length, " Length");
- builder.AppendLine(lump.Version, " Version");
- builder.AppendLine(lump.FourCC, " 4CC");
+ switch ((LumpType)i)
+ {
+ case LumpType.LUMP_ENTITIES:
+ if (model.Entities?.Entities == null || model.Entities.Entities.Length == 0)
+ {
+ builder.AppendLine(" No data");
+ }
+ else
+ {
+ for (int j = 0; j < model.Entities.Entities.Length; j++)
+ {
+ // TODO: Implement entity printing
+ var entity = model.Entities.Entities[j];
+ builder.AppendLine($" Entity {j}");
+ builder.AppendLine(" Entity data is not parsed properly");
+ }
+ }
+ break;
+ case LumpType.LUMP_PLANES:
+ if (model.PlanesLump?.Planes == null || model.PlanesLump.Planes.Length == 0)
+ {
+ builder.AppendLine(" No data");
+ }
+ else
+ {
+ for (int j = 0; j < model.PlanesLump.Planes.Length; j++)
+ {
+ var plane = model.PlanesLump.Planes[j];
+ builder.AppendLine($" Plane {j}");
+ builder.AppendLine($" Normal vector: {plane.NormalVector.X}, {plane.NormalVector.Y}, {plane.NormalVector.Z}");
+ builder.AppendLine(plane.Distance, " Distance");
+ builder.AppendLine($" Plane type: {plane.PlaneType} (0x{plane.PlaneType:X})");
+ }
+ }
+ break;
+ case LumpType.LUMP_TEXTURES:
+ if (model.TexdataLump?.Texdatas == null || model.TexdataLump.Texdatas.Length == 0)
+ {
+ builder.AppendLine(" No data");
+ }
+ else
+ {
+ for (int j = 0; j < model.TexdataLump.Texdatas.Length; j++)
+ {
+ var texdata = model.TexdataLump.Texdatas[j];
+ builder.AppendLine($" Texture {j}");
+ builder.AppendLine($" Reflectivity: {texdata.Reflectivity.X}, {texdata.Reflectivity.Y}, {texdata.Reflectivity.Z}");
+ builder.AppendLine(texdata.NameStringTableID, " Name string table ID");
+ builder.AppendLine(texdata.Width, " Width");
+ builder.AppendLine(texdata.Height, " Height");
+ builder.AppendLine(texdata.ViewWidth, " View width");
+ builder.AppendLine(texdata.ViewHeight, " View height");
+ }
+ }
+ break;
+ case LumpType.LUMP_VERTICES:
+ if (model.VerticesLump?.Vertices == null || model.VerticesLump.Vertices.Length == 0)
+ {
+ builder.AppendLine(" No data");
+ }
+ else
+ {
+ for (int j = 0; j < model.VerticesLump.Vertices.Length; j++)
+ {
+ var vertex = model.VerticesLump.Vertices[j];
+ builder.AppendLine($" Vertex {j}: {vertex.X}, {vertex.Y}, {vertex.Z}");
+ }
+ }
+ break;
+ case LumpType.LUMP_VISIBILITY:
+ if (model.VisibilityLump == null)
+ {
+ builder.AppendLine(" No data");
+ }
+ else
+ {
+ builder.AppendLine(model.VisibilityLump.NumClusters, " Cluster count");
+ builder.AppendLine(" Byte offsets skipped...");
+ }
+ break;
+ case LumpType.LUMP_NODES:
+ if (model.NodesLump?.Nodes == null || model.NodesLump.Nodes.Length == 0)
+ {
+ builder.AppendLine(" No data");
+ }
+ else
+ {
+ for (int j = 0; j < model.NodesLump.Nodes.Length; j++)
+ {
+ var node = model.NodesLump.Nodes[j];
+ builder.AppendLine($" Node {j}");
+ builder.AppendLine(node.Children, " Children");
+ builder.AppendLine(node.Mins, " Mins");
+ builder.AppendLine(node.Maxs, " Maxs");
+ builder.AppendLine(node.FirstFace, " First face index");
+ builder.AppendLine(node.FaceCount, " Count of faces");
+ builder.AppendLine(node.Area, " Area");
+ builder.AppendLine(node.Padding, " Padding");
+ }
+ }
+ break;
+ case LumpType.LUMP_TEXINFO:
+ if (model.TexinfoLump?.Texinfos == null || model.TexinfoLump.Texinfos.Length == 0)
+ {
+ builder.AppendLine(" No data");
+ }
+ else
+ {
+ for (int j = 0; j < model.TexinfoLump.Texinfos.Length; j++)
+ {
+ var texinfo = model.TexinfoLump.Texinfos[j];
+ builder.AppendLine($" Texinfo {j}");
+ builder.AppendLine($" Texture S-Vector: {texinfo.TextureSVector.X}, {texinfo.TextureSVector.Y}, {texinfo.TextureSVector.Z}");
+ builder.AppendLine(texinfo.TextureSShift, " Texture shift in S direction");
+ builder.AppendLine($" Texture T-Vector: {texinfo.TextureTVector.X}, {texinfo.TextureTVector.Y}, {texinfo.TextureTVector.Z}");
+ builder.AppendLine(texinfo.TextureTShift, " Texture shift in T direction");
+ builder.AppendLine($" Lightmap S-Vector: {texinfo.LightmapSVector.X}, {texinfo.LightmapSVector.Y}, {texinfo.LightmapSVector.Z}");
+ builder.AppendLine(texinfo.LightmapSShift, " Lightmap shift in S direction");
+ builder.AppendLine($" Lightmap T-Vector: {texinfo.LightmapTVector.X}, {texinfo.LightmapTVector.Y}, {texinfo.LightmapTVector.Z}");
+ builder.AppendLine(texinfo.LightmapTShift, " Lightmap shift in T direction");
+ builder.AppendLine($" Flags: {texinfo.Flags} (0x{texinfo.Flags:X})");
+ builder.AppendLine(texinfo.TexData, " Pointer to texdata");
+ }
+ }
+ break;
+ case LumpType.LUMP_FACES:
+ if (model.FacesLump?.Faces == null || model.FacesLump.Faces.Length == 0)
+ {
+ builder.AppendLine(" No data");
+ }
+ else
+ {
+ for (int j = 0; j < model.FacesLump.Faces.Length; j++)
+ {
+ var face = model.FacesLump.Faces[j];
+ builder.AppendLine($" Face {j}");
+ builder.AppendLine(face.PlaneNum, " Plane number");
+ builder.AppendLine(face.Side, " Side");
+ builder.AppendLine(face.OnNode, " On node");
+ builder.AppendLine(face.FirstEdgeIndex, " First surfedge index");
+ builder.AppendLine(face.NumberOfEdges, " Surfedge count");
+ builder.AppendLine(face.TextureInfoIndex, " Texture info index");
+ builder.AppendLine(face.DisplacementInfoIndex, " Displacement info index");
+ builder.AppendLine(face.SurfaceFogVolumeID, " Surface fog volume ID");
+ builder.AppendLine(face.Styles, " Styles");
+ builder.AppendLine(face.LightmapOffset, " Lightmap offset");
+ builder.AppendLine(face.Area, " Area");
+ builder.AppendLine(face.LightmapTextureMinsInLuxels, " Lightmap texture mins in Luxels");
+ builder.AppendLine(face.LightmapTextureSizeInLuxels, " Lightmap texture size in Luxels");
+ builder.AppendLine(face.OrigFace, " Original face index");
+ builder.AppendLine(face.PrimitiveCount, " Primitive count");
+ builder.AppendLine(face.FirstPrimitiveID, " First primitive ID");
+ builder.AppendLine(face.SmoothingGroups, " Smoothing groups");
+ }
+ }
+ break;
+ case LumpType.LUMP_LIGHTING:
+ if (model.LightmapLump?.Lightmap == null || model.LightmapLump.Lightmap.Length == 0)
+ builder.AppendLine(" No data");
+ else
+ builder.AppendLine(" Lightmap data skipped...");
+ break;
+ case LumpType.LUMP_CLIPNODES:
+ if (model.OcclusionLump == null)
+ {
+ builder.AppendLine(" No data");
+ }
+ else
+ {
+ builder.AppendLine(model.OcclusionLump.Count, " Count");
+ if (model.OcclusionLump.Data == null || model.OcclusionLump.Data.Length == 0)
+ {
+ builder.AppendLine(" No occluder data");
+ }
+ else
+ {
+ for (int j = 0; j < model.OcclusionLump.Data.Length; j++)
+ {
+ var data = model.OcclusionLump.Data[j];
+ builder.AppendLine($" Occluder Data {j}");
+ builder.AppendLine(data.Flags, " Flags");
+ builder.AppendLine(data.FirstPoly, " First poly");
+ builder.AppendLine(data.PolyCount, " Poly count");
+ builder.AppendLine($" Mins: {data.Mins.X}, {data.Mins.Y}, {data.Mins.Z}");
+ builder.AppendLine($" Maxs: {data.Maxs.X}, {data.Maxs.Y}, {data.Maxs.Z}");
+ builder.AppendLine(data.Area, " Area");
+ }
+ }
+ builder.AppendLine(model.OcclusionLump.PolyDataCount, " Polydata count");
+ if (model.OcclusionLump.PolyData == null || model.OcclusionLump.PolyData.Length == 0)
+ {
+ builder.AppendLine(" No occluder polydata");
+ }
+ else
+ {
+ for (int j = 0; j < model.OcclusionLump.PolyData.Length; j++)
+ {
+ var polydata = model.OcclusionLump.PolyData[j];
+ builder.AppendLine($" Occluder Polydata {j}");
+ builder.AppendLine(polydata.FirstVertexIndex, " First vertex index");
+ builder.AppendLine(polydata.VertexCount, " Vertex count");
+ builder.AppendLine(polydata.PlanEnum, " Plan enum");
+ }
+ }
+ builder.AppendLine(model.OcclusionLump.VertexIndexCount, " Vertex index count");
+ if (model.OcclusionLump.VertexIndices == null || model.OcclusionLump.VertexIndices.Length == 0)
+ {
+ builder.AppendLine(" No vertex indices");
+ }
+ else
+ {
+ for (int j = 0; j < model.OcclusionLump.VertexIndices.Length; j++)
+ {
+ builder.AppendLine($" Vertex Index {j}: {model.OcclusionLump.VertexIndices[j]}");
+ }
+ }
+ }
+ break;
+ case LumpType.LUMP_LEAVES:
+ if (model.LeavesLump?.Leaves == null || model.LeavesLump.Leaves.Length == 0)
+ {
+ builder.AppendLine(" No data");
+ }
+ else
+ {
+ for (int j = 0; j < model.LeavesLump.Leaves.Length; j++)
+ {
+ var leaf = model.LeavesLump.Leaves[j];
+ builder.AppendLine($" Leaf {j}");
+ builder.AppendLine($" Contents: {leaf.Contents} (0x{leaf.Contents:X})");
+ builder.AppendLine(leaf.Cluster, " Cluster");
+ builder.AppendLine(leaf.AreaFlags, " AreaFlags");
+ builder.AppendLine(leaf.Mins, " Mins");
+ builder.AppendLine(leaf.Maxs, " Maxs");
+ builder.AppendLine(leaf.FirstLeafFace, " First leaf face");
+ builder.AppendLine(leaf.NumLeafFaces, " Leaf faces count");
+ builder.AppendLine(leaf.FirstLeafBrush, " First leaf brush");
+ builder.AppendLine(leaf.NumLeafBrushes, " Leaf brushes count");
+ builder.AppendLine(leaf.LeafWaterDataID, " Leaf water data ID");
+ if (lump.Version == 0)
+ {
+ // TODO: Figure out how to print the colors array
+ }
+ else
+ {
+ builder.AppendLine(leaf.Padding, " Padding");
+ }
+ }
+ }
+ break;
+ case LumpType.LUMP_MARKSURFACES:
+ if (model.MarksurfacesLump?.Marksurfaces == null || model.MarksurfacesLump.Marksurfaces.Length == 0)
+ {
+ builder.AppendLine(" No data");
+ }
+ else
+ {
+ for (int j = 0; j < model.MarksurfacesLump.Marksurfaces.Length; j++)
+ {
+ var marksurface = model.MarksurfacesLump.Marksurfaces[j];
+ builder.AppendLine($" Marksurface {j}: {marksurface} (0x{marksurface:X4})");
+ }
+ }
+ break;
+ case LumpType.LUMP_EDGES:
+ if (model.EdgesLump?.Edges == null || model.EdgesLump.Edges.Length == 0)
+ {
+ builder.AppendLine(" No data");
+ }
+ else
+ {
+ for (int j = 0; j < model.EdgesLump.Edges.Length; j++)
+ {
+ var edge = model.EdgesLump.Edges[j];
+ builder.AppendLine($" Edge {j}");
+ builder.AppendLine(edge.VertexIndices, " Vertex indices");
+ }
+ }
+ break;
+ case LumpType.LUMP_SURFEDGES:
+ if (model.SurfedgesLump?.Surfedges == null || model.SurfedgesLump.Surfedges.Length == 0)
+ {
+ builder.AppendLine(" No data");
+ }
+ else
+ {
+ for (int j = 0; j < model.SurfedgesLump.Surfedges.Length; j++)
+ {
+ var surfedge = model.SurfedgesLump.Surfedges[j];
+ builder.AppendLine($" Surfedge {j}: {surfedge} (0x{surfedge:X4})");
+ }
+ }
+ break;
+ case LumpType.LUMP_MODELS:
+ if (model.ModelsLump?.Models == null || model.ModelsLump.Models.Length == 0)
+ {
+ builder.AppendLine(" No data");
+ }
+ else
+ {
+ for (int j = 0; j < model.ModelsLump.Models.Length; j++)
+ {
+ var bmodel = model.ModelsLump.Models[j];
+ builder.AppendLine($" Model {j}");
+ builder.AppendLine($" Mins: {bmodel.Mins.X}, {bmodel.Mins.Y}, {bmodel.Mins.Z}");
+ builder.AppendLine($" Maxs: {bmodel.Maxs.X}, {bmodel.Maxs.Y}, {bmodel.Maxs.Z}");
+ builder.AppendLine($" Origin vector: {bmodel.OriginVector.X}, {bmodel.OriginVector.Y}, {bmodel.OriginVector.Z}");
+ builder.AppendLine(bmodel.HeadNode, " Headnode index");
+ builder.AppendLine(bmodel.FirstFaceIndex, " First face index");
+ builder.AppendLine(bmodel.FacesCount, " Faces count");
+ }
+ }
+ break;
+
+ // TODO: Implement remaining printed lump types
+ }
}
builder.AppendLine();
}
+
+ private static string GetLumpName(int i)
+ {
+ return (LumpType)i switch
+ {
+ LumpType.LUMP_ENTITIES => " - LUMP_ENTITIES",
+ LumpType.LUMP_PLANES => " - LUMP_PLANES",
+ LumpType.LUMP_TEXTURES => " - LUMP_TEXDATA",
+ LumpType.LUMP_VERTICES => " - LUMP_VERTEXES",
+ LumpType.LUMP_VISIBILITY => " - LUMP_VISIBILITY",
+ LumpType.LUMP_NODES => " - LUMP_NODES",
+ LumpType.LUMP_TEXINFO => " - LUMP_TEXINFO",
+ LumpType.LUMP_FACES => " - LUMP_FACES",
+ LumpType.LUMP_LIGHTING => " - LUMP_LIGHTING",
+ LumpType.LUMP_CLIPNODES => " - LUMP_OCCLUSION",
+ LumpType.LUMP_LEAVES => " - LUMP_LEAVES",
+ LumpType.LUMP_MARKSURFACES => " - LUMP_FACEIDS",
+ LumpType.LUMP_EDGES => " - LUMP_EDGES",
+ LumpType.LUMP_SURFEDGES => " - LUMP_SURFEDGES",
+ LumpType.LUMP_MODELS => " - LUMP_MODELS",
+ LumpType.LUMP_WORLDLIGHTS => " - LUMP_WORLDLIGHTS",
+ LumpType.LUMP_LEAFFACES => " - LUMP_LEAFFACES",
+ LumpType.LUMP_LEAFBRUSHES => " - LUMP_LEAFBRUSHES",
+ LumpType.LUMP_BRUSHES => " - LUMP_BRUSHES",
+ LumpType.LUMP_BRUSHSIDES => " - LUMP_BRUSHSIDES",
+ LumpType.LUMP_AREAS => " - LUMP_AREAS",
+ LumpType.LUMP_AREAPORTALS => " - LUMP_AREAPORTALS",
+ LumpType.LUMP_PORTALS => " - LUMP_PORTALS / LUMP_UNUSED0 / LUMP_PROPCOLLISION",
+ LumpType.LUMP_CLUSTERS => " - LUMP_CLUSTERS / LUMP_UNUSED1 / LUMP_PROPHULLS",
+ LumpType.LUMP_PORTALVERTS => " - LUMP_PORTALVERTS / LUMP_UNUSED2 / LUMP_FAKEENTITIES / LUMP_PROPHULLVERTS",
+ LumpType.LUMP_CLUSTERPORTALS => " - LUMP_CLUSTERPORTALS / LUMP_UNUSED3 / LUMP_PROPTRIS",
+ LumpType.LUMP_DISPINFO => " - LUMP_DISPINFO",
+ LumpType.LUMP_ORIGINALFACES => " - LUMP_ORIGINALFACES",
+ LumpType.LUMP_PHYSDISP => " - LUMP_PHYSDISP",
+ LumpType.LUMP_PHYSCOLLIDE => " - LUMP_PHYSCOLLIDE",
+ LumpType.LUMP_VERTNORMALS => " - LUMP_VERTNORMALS",
+ LumpType.LUMP_VERTNORMALINDICES => " - LUMP_VERTNORMALINDICES",
+ LumpType.LUMP_DISP_LIGHTMAP_ALPHAS => " - LUMP_DISP_LIGHTMAP_ALPHAS",
+ LumpType.LUMP_DISP_VERTS => " - LUMP_DISP_VERTS",
+ LumpType.LUMP_DISP_LIGHTMAP_SAMPLE_POSITIONS => " - LUMP_DISP_LIGHTMAP_SAMPLE_POSITIONS",
+ LumpType.LUMP_GAME_LUMP => " - LUMP_GAME_LUMP",
+ LumpType.LUMP_LEAFWATERDATA => " - LUMP_LEAFWATERDATA",
+ LumpType.LUMP_PRIMITIVES => " - LUMP_PRIMITIVES",
+ LumpType.LUMP_PRIMVERTS => " - LUMP_PRIMVERTS",
+ LumpType.LUMP_PRIMINDICES => " - LUMP_PRIMINDICES",
+ LumpType.LUMP_PAKFILE => " - LUMP_PAKFILE",
+ LumpType.LUMP_CLIPPORTALVERTS => " - LUMP_CLIPPORTALVERTS",
+ LumpType.LUMP_CUBEMAPS => " - LUMP_CUBEMAPS",
+ LumpType.LUMP_TEXDATA_STRING_DATA => " - LUMP_TEXDATA_STRING_DATA",
+ LumpType.LUMP_TEXDATA_STRING_TABLE => " - LUMP_TEXDATA_STRING_TABLE",
+ LumpType.LUMP_OVERLAYS => " - LUMP_OVERLAYS",
+ LumpType.LUMP_LEAFMINDISTTOWATER => " - LUMP_LEAFMINDISTTOWATER",
+ LumpType.LUMP_FACE_MACRO_TEXTURE_INFO => " - LUMP_FACE_MACRO_TEXTURE_INFO",
+ LumpType.LUMP_DISP_TRIS => " - LUMP_DISP_TRIS",
+ LumpType.LUMP_PHYSCOLLIDESURFACE => " - LUMP_PHYSCOLLIDESURFACE / LUMP_PROP_BLOB",
+ LumpType.LUMP_WATEROVERLAYS => " - LUMP_WATEROVERLAYS",
+ LumpType.LUMP_LIGHTMAPPAGES => " - LUMP_LIGHTMAPPAGES / LUMP_LEAF_AMBIENT_INDEX_HDR",
+ LumpType.LUMP_LIGHTMAPPAGEINFOS => " - LUMP_LIGHTMAPPAGEINFOS / LUMP_LEAF_AMBIENT_INDEX",
+ LumpType.LUMP_LIGHTING_HDR => " - LUMP_LIGHTING_HDR",
+ LumpType.LUMP_WORLDLIGHTS_HDR => " - LUMP_WORLDLIGHTS_HDR",
+ LumpType.LUMP_LEAF_AMBIENT_LIGHTING_HDR => " - LUMP_LEAF_AMBIENT_LIGHTING_HDR",
+ LumpType.LUMP_LEAF_AMBIENT_LIGHTING => " - LUMP_LEAF_AMBIENT_LIGHTING",
+ LumpType.LUMP_XZIPPAKFILE => " - LUMP_XZIPPAKFILE",
+ LumpType.LUMP_FACES_HDR => " - LUMP_FACES_HDR",
+ LumpType.LUMP_MAP_FLAGS => " - LUMP_MAP_FLAGS",
+ LumpType.LUMP_OVERLAY_FADES => " - LUMP_OVERLAY_FADES",
+ LumpType.LUMP_OVERLAY_SYSTEM_LEVELS => " - LUMP_OVERLAY_SYSTEM_LEVELS",
+ LumpType.LUMP_PHYSLEVEL => " - LUMP_PHYSLEVEL",
+ LumpType.LUMP_DISP_MULTIBLEND => " - LUMP_DISP_MULTIBLEND",
+ _ => string.Empty,
+ };
+ }
}
}
\ No newline at end of file
diff --git a/SabreTools.Serialization/Printers/VPK.cs b/SabreTools.Serialization/Printers/VPK.cs
index b5585526..3e1fac13 100644
--- a/SabreTools.Serialization/Printers/VPK.cs
+++ b/SabreTools.Serialization/Printers/VPK.cs
@@ -35,7 +35,7 @@ namespace SabreTools.Serialization.Printers
builder.AppendLine(header.Signature, " Signature");
builder.AppendLine(header.Version, " Version");
- builder.AppendLine(header.DirectoryLength, " Directory length");
+ builder.AppendLine(header.TreeSize, " Tree size");
builder.AppendLine();
}
@@ -50,10 +50,10 @@ namespace SabreTools.Serialization.Printers
return;
}
- builder.AppendLine(header.Dummy0, " Dummy 0");
- builder.AppendLine(header.ArchiveHashLength, " Archive hash length");
- builder.AppendLine(header.ExtraLength, " Extra length");
- builder.AppendLine(header.Dummy1, " Dummy 1");
+ builder.AppendLine(header.FileDataSectionSize, " File data section size");
+ builder.AppendLine(header.ArchiveMD5SectionSize, " Archive MD5 section size");
+ builder.AppendLine(header.OtherMD5SectionSize, " Other MD5 section size");
+ builder.AppendLine(header.SignatureSectionSize, " Signature section size");
builder.AppendLine();
}
diff --git a/SabreTools.Serialization/Printers/WAD.cs b/SabreTools.Serialization/Printers/WAD.cs
deleted file mode 100644
index 5730944e..00000000
--- a/SabreTools.Serialization/Printers/WAD.cs
+++ /dev/null
@@ -1,107 +0,0 @@
-using System.Text;
-using SabreTools.Models.WAD;
-using SabreTools.Serialization.Interfaces;
-
-namespace SabreTools.Serialization.Printers
-{
- public class WAD : IPrinter
- {
- ///
- public void PrintInformation(StringBuilder builder, File model)
- => Print(builder, model);
-
- public static void Print(StringBuilder builder, File file)
- {
- builder.AppendLine("WAD Information:");
- builder.AppendLine("-------------------------");
- builder.AppendLine();
-
- Print(builder, file.Header);
- Print(builder, file.Lumps);
- Print(builder, file.LumpInfos);
- }
-
- private static void Print(StringBuilder builder, Header? header)
- {
- builder.AppendLine(" Header Information:");
- builder.AppendLine(" -------------------------");
- if (header == null)
- {
- builder.AppendLine(" No header");
- builder.AppendLine();
- return;
- }
-
- builder.AppendLine(header.Signature, " Signature");
- builder.AppendLine(header.LumpCount, " Lump count");
- builder.AppendLine(header.LumpOffset, " Lump offset");
- builder.AppendLine();
- }
-
- private static void Print(StringBuilder builder, Lump?[]? entries)
- {
- builder.AppendLine(" Lumps Information:");
- builder.AppendLine(" -------------------------");
- if (entries == null || entries.Length == 0)
- {
- builder.AppendLine(" No lumps");
- builder.AppendLine();
- return;
- }
-
- for (int i = 0; i < entries.Length; i++)
- {
- var entry = entries[i];
- builder.AppendLine($" Lump {i}");
- if (entry == null)
- {
- builder.AppendLine(" [NULL]");
- continue;
- }
-
- builder.AppendLine(entry.Offset, " Offset");
- builder.AppendLine(entry.DiskLength, " Disk length");
- builder.AppendLine(entry.Length, " Length");
- builder.AppendLine(entry.Type, " Type");
- builder.AppendLine(entry.Compression, " Compression");
- builder.AppendLine(entry.Padding0, " Padding 0");
- builder.AppendLine(entry.Padding1, " Padding 1");
- builder.AppendLine(entry.Name, " Name");
- }
- builder.AppendLine();
- }
-
- private static void Print(StringBuilder builder, LumpInfo?[]? entries)
- {
- builder.AppendLine(" Lump Infos Information:");
- builder.AppendLine(" -------------------------");
- if (entries == null || entries.Length == 0)
- {
- builder.AppendLine(" No lump infos");
- builder.AppendLine();
- return;
- }
-
- for (int i = 0; i < entries.Length; i++)
- {
- var entry = entries[i];
- builder.AppendLine($" Lump Info {i}");
- if (entry == null)
- {
- builder.AppendLine(" Lump is compressed");
- continue;
- }
-
- builder.AppendLine(entry.Name, " Name");
- builder.AppendLine(entry.Width, " Width");
- builder.AppendLine(entry.Height, " Height");
- builder.AppendLine(entry.PixelOffset, " Pixel offset");
- // TODO: Print unknown data?
- // TODO: Print pixel data?
- builder.AppendLine(entry.PaletteSize, " Palette size");
- // TODO: Print palette data?
- }
- builder.AppendLine();
- }
- }
-}
\ No newline at end of file
diff --git a/SabreTools.Serialization/Printers/WAD3.cs b/SabreTools.Serialization/Printers/WAD3.cs
new file mode 100644
index 00000000..da2867f4
--- /dev/null
+++ b/SabreTools.Serialization/Printers/WAD3.cs
@@ -0,0 +1,131 @@
+using System.Text;
+using SabreTools.Models.WAD3;
+using SabreTools.Serialization.Interfaces;
+
+namespace SabreTools.Serialization.Printers
+{
+ public class WAD3 : IPrinter
+ {
+ ///
+ public void PrintInformation(StringBuilder builder, File model)
+ => Print(builder, model);
+
+ public static void Print(StringBuilder builder, File file)
+ {
+ builder.AppendLine("WAD3 Information:");
+ builder.AppendLine("-------------------------");
+ builder.AppendLine();
+
+ Print(builder, file.Header);
+ Print(builder, file.DirEntries);
+ Print(builder, file.FileEntries);
+ }
+
+ private static void Print(StringBuilder builder, Header? header)
+ {
+ builder.AppendLine(" Header Information:");
+ builder.AppendLine(" -------------------------");
+ if (header == null)
+ {
+ builder.AppendLine(" No header");
+ builder.AppendLine();
+ return;
+ }
+
+ builder.AppendLine(header.Signature, " Signature");
+ builder.AppendLine(header.NumDirs, " Number of directory entries");
+ builder.AppendLine(header.DirOffset, " Offset to first directory entry");
+ builder.AppendLine();
+ }
+
+ private static void Print(StringBuilder builder, DirEntry?[]? entries)
+ {
+ builder.AppendLine(" Directory Entries Information:");
+ builder.AppendLine(" -------------------------");
+ if (entries == null || entries.Length == 0)
+ {
+ builder.AppendLine(" No directory entries");
+ builder.AppendLine();
+ return;
+ }
+
+ for (int i = 0; i < entries.Length; i++)
+ {
+ var entry = entries[i];
+ builder.AppendLine($" Directory Entry {i}");
+ if (entry == null)
+ {
+ builder.AppendLine(" [NULL]");
+ continue;
+ }
+
+ builder.AppendLine(entry.Offset, " Offset");
+ builder.AppendLine(entry.DiskLength, " Disk length");
+ builder.AppendLine(entry.Length, " Length");
+ builder.AppendLine($" File type: {entry.Type} (0x{entry.Type:X})");
+ builder.AppendLine(entry.Compression, " Compression");
+ builder.AppendLine(entry.Padding, " Padding");
+ builder.AppendLine(entry.Name, " Name");
+ }
+ builder.AppendLine();
+ }
+
+ private static void Print(StringBuilder builder, FileEntry?[]? entries)
+ {
+ builder.AppendLine(" File Entries Information:");
+ builder.AppendLine(" -------------------------");
+ if (entries == null || entries.Length == 0)
+ {
+ builder.AppendLine(" No file entries");
+ builder.AppendLine();
+ return;
+ }
+
+ for (int i = 0; i < entries.Length; i++)
+ {
+ var entry = entries[i];
+ builder.AppendLine($" File Entry {i}");
+ if (entry == null)
+ {
+ builder.AppendLine(" [NULL]");
+ continue;
+ }
+
+ if (entry is MipTex mipTex)
+ {
+ builder.AppendLine(mipTex.Name, " Name");
+ builder.AppendLine(mipTex.Width, " Width");
+ builder.AppendLine(mipTex.Height, " Height");
+ builder.AppendLine(mipTex.MipOffsets, " Mipmap Offsets");
+ builder.AppendLine(" Mipmap Images skipped...");
+ builder.AppendLine(mipTex.ColorsUsed, " Colors used");
+ builder.AppendLine(" Palette skipped...");
+ }
+ else if (entry is QpicImage qpic)
+ {
+ builder.AppendLine(qpic.Width, " Width");
+ builder.AppendLine(qpic.Height, " Height");
+ builder.AppendLine(" Image data skipped...");
+ builder.AppendLine(qpic.ColorsUsed, " Colors used");
+ builder.AppendLine(" Palette skipped...");
+ }
+ else if (entry is Font font)
+ {
+ builder.AppendLine(font.Width, " Width");
+ builder.AppendLine(font.Height, " Height");
+ builder.AppendLine(font.RowCount, " Row count");
+ builder.AppendLine(font.RowHeight, " Row height");
+ builder.AppendLine(" Font info skipped...");
+ builder.AppendLine(" Image data skipped...");
+ builder.AppendLine(font.ColorsUsed, " Colors used");
+ builder.AppendLine(" Palette skipped...");
+ }
+ else
+ {
+ builder.AppendLine(" Unrecognized entry type");
+ }
+ }
+ builder.AppendLine();
+ }
+ }
+}
\ No newline at end of file
diff --git a/SabreTools.Serialization/SabreTools.Serialization.csproj b/SabreTools.Serialization/SabreTools.Serialization.csproj
index 5956151d..3ec28349 100644
--- a/SabreTools.Serialization/SabreTools.Serialization.csproj
+++ b/SabreTools.Serialization/SabreTools.Serialization.csproj
@@ -33,7 +33,7 @@
-
+
\ No newline at end of file
diff --git a/SabreTools.Serialization/StringBuilderExtensions.cs b/SabreTools.Serialization/StringBuilderExtensions.cs
index 931bf68a..1ea9bec6 100644
--- a/SabreTools.Serialization/StringBuilderExtensions.cs
+++ b/SabreTools.Serialization/StringBuilderExtensions.cs
@@ -84,6 +84,16 @@ namespace SabreTools.Serialization
return sb.AppendLine($"{prefixString}: {valueString}");
}
+ ///
+ /// Append a line containing a Single to a StringBuilder
+ ///
+ public static StringBuilder AppendLine(this StringBuilder sb, float? value, string prefixString)
+ {
+ value ??= 0;
+ string valueString = $"{value} (0x{BitConverter.ToInt32(BitConverter.GetBytes(value.Value), 0):X8})";
+ return sb.AppendLine($"{prefixString}: {valueString}");
+ }
+
///
/// Append a line containing a Int64 to a StringBuilder
///
@@ -104,6 +114,16 @@ namespace SabreTools.Serialization
return sb.AppendLine($"{prefixString}: {valueString}");
}
+ ///
+ /// Append a line containing a Double to a StringBuilder
+ ///
+ public static StringBuilder AppendLine(this StringBuilder sb, double? value, string prefixString)
+ {
+ value ??= 0;
+ string valueString = $"{value} (0x{BitConverter.ToInt64(BitConverter.GetBytes(value.Value), 0):X16})";
+ return sb.AppendLine($"{prefixString}: {valueString}");
+ }
+
///
/// Append a line containing a string to a StringBuilder
///
diff --git a/SabreTools.Serialization/Wrappers/BSP.cs b/SabreTools.Serialization/Wrappers/BSP.cs
index 1f12639f..7179a790 100644
--- a/SabreTools.Serialization/Wrappers/BSP.cs
+++ b/SabreTools.Serialization/Wrappers/BSP.cs
@@ -2,7 +2,7 @@ using System.IO;
namespace SabreTools.Serialization.Wrappers
{
- public class BSP : WrapperBase
+ public class BSP : WrapperBase
{
#region Descriptive Properties
@@ -14,14 +14,14 @@ namespace SabreTools.Serialization.Wrappers
#region Constructors
///
- public BSP(Models.BSP.File? model, byte[]? data, int offset)
+ public BSP(Models.BSP.BspFile? model, byte[]? data, int offset)
: base(model, data, offset)
{
// All logic is handled by the base class
}
///
- public BSP(Models.BSP.File? model, Stream? data)
+ public BSP(Models.BSP.BspFile? model, Stream? data)
: base(model, data)
{
// All logic is handled by the base class
diff --git a/SabreTools.Serialization/Wrappers/VBSP.cs b/SabreTools.Serialization/Wrappers/VBSP.cs
index 7cc42337..634f369d 100644
--- a/SabreTools.Serialization/Wrappers/VBSP.cs
+++ b/SabreTools.Serialization/Wrappers/VBSP.cs
@@ -2,7 +2,7 @@ using System.IO;
namespace SabreTools.Serialization.Wrappers
{
- public class VBSP : WrapperBase
+ public class VBSP : WrapperBase
{
#region Descriptive Properties
@@ -14,14 +14,14 @@ namespace SabreTools.Serialization.Wrappers
#region Constructors
///
- public VBSP(Models.VBSP.File? model, byte[]? data, int offset)
+ public VBSP(Models.BSP.VbspFile? model, byte[]? data, int offset)
: base(model, data, offset)
{
// All logic is handled by the base class
}
///
- public VBSP(Models.VBSP.File? model, Stream? data)
+ public VBSP(Models.BSP.VbspFile? model, Stream? data)
: base(model, data)
{
// All logic is handled by the base class
diff --git a/SabreTools.Serialization/Wrappers/WAD.cs b/SabreTools.Serialization/Wrappers/WAD3.cs
similarity index 64%
rename from SabreTools.Serialization/Wrappers/WAD.cs
rename to SabreTools.Serialization/Wrappers/WAD3.cs
index 5221b476..0badbde1 100644
--- a/SabreTools.Serialization/Wrappers/WAD.cs
+++ b/SabreTools.Serialization/Wrappers/WAD3.cs
@@ -2,38 +2,38 @@ using System.IO;
namespace SabreTools.Serialization.Wrappers
{
- public class WAD : WrapperBase
+ public class WAD3 : WrapperBase
{
#region Descriptive Properties
///
- public override string DescriptionString => "Half-Life Texture Package File (WAD)";
+ public override string DescriptionString => "Half-Life Texture Package File (WAD3)";
#endregion
#region Constructors
///
- public WAD(Models.WAD.File? model, byte[]? data, int offset)
+ public WAD3(Models.WAD3.File? model, byte[]? data, int offset)
: base(model, data, offset)
{
// All logic is handled by the base class
}
///
- public WAD(Models.WAD.File? model, Stream? data)
+ public WAD3(Models.WAD3.File? model, Stream? data)
: base(model, data)
{
// All logic is handled by the base class
}
///
- /// Create a WAD from a byte array and offset
+ /// Create a WAD3 from a byte array and offset
///
- /// Byte array representing the WAD
+ /// Byte array representing the WAD3
/// Offset within the array to parse
- /// A WAD wrapper on success, null on failure
- public static WAD? Create(byte[]? data, int offset)
+ /// A WAD3 wrapper on success, null on failure
+ public static WAD3? Create(byte[]? data, int offset)
{
// If the data is invalid
if (data == null)
@@ -49,23 +49,23 @@ namespace SabreTools.Serialization.Wrappers
}
///
- /// Create a WAD from a Stream
+ /// Create a WAD3 from a Stream
///
- /// Stream representing the WAD
- /// An WAD wrapper on success, null on failure
- public static WAD? Create(Stream? data)
+ /// Stream representing the WAD3
+ /// An WAD3 wrapper on success, null on failure
+ public static WAD3? Create(Stream? data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
return null;
- var file = Deserializers.WAD.DeserializeStream(data);
+ var file = Deserializers.WAD3.DeserializeStream(data);
if (file == null)
return null;
try
{
- return new WAD(file, data);
+ return new WAD3(file, data);
}
catch
{
diff --git a/SabreTools.Serialization/Wrappers/WrapperFactory.cs b/SabreTools.Serialization/Wrappers/WrapperFactory.cs
index 5ff9b358..89880d61 100644
--- a/SabreTools.Serialization/Wrappers/WrapperFactory.cs
+++ b/SabreTools.Serialization/Wrappers/WrapperFactory.cs
@@ -54,7 +54,7 @@ namespace SabreTools.Serialization.Wrappers
WrapperType.Textfile => null,// TODO: Implement wrapper
WrapperType.VBSP => VBSP.Create(data),
WrapperType.VPK => VPK.Create(data),
- WrapperType.WAD => WAD.Create(data),
+ WrapperType.WAD => WAD3.Create(data),
WrapperType.XZ => null,// TODO: Implement wrapper
WrapperType.XZP => XZP.Create(data),
_ => null,