diff --git a/BurnOutSharp.Builders/BSP.cs b/BurnOutSharp.Builders/BSP.cs
new file mode 100644
index 00000000..8b06dd34
--- /dev/null
+++ b/BurnOutSharp.Builders/BSP.cs
@@ -0,0 +1,190 @@
+using System.IO;
+using BurnOutSharp.Models.BSP;
+using BurnOutSharp.Utilities;
+
+namespace BurnOutSharp.Builders
+{
+ public static class BSP
+ {
+ #region Constants
+
+ /// <summary>
+ /// Number of lumps in a BSP
+ /// </summary>
+ private const int HL_BSP_LUMP_COUNT = 15;
+
+ /// <summary>
+ /// Index for the entities lump
+ /// </summary>
+ public const int HL_BSP_LUMP_ENTITIES = 0;
+
+ /// <summary>
+ /// Index for the texture data lump
+ /// </summary>
+ public const int HL_BSP_LUMP_TEXTUREDATA = 2;
+
+ /// <summary>
+ /// Number of valid mipmap levels
+ /// </summary>
+ public const int HL_BSP_MIPMAP_COUNT = 4;
+
+ #endregion
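+
+ // Illustrative sketch (not part of this change): the lump index constants
+ // select entries from a parsed level's Lumps array, so locating the entities
+ // lump would mirror the texture lump handling in ParseFile below:
+ //
+ // var entitiesLump = level.Lumps[HL_BSP_LUMP_ENTITIES];
+ // data.Seek(entitiesLump.Offset, SeekOrigin.Begin);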
+
+ #region Byte Data
+
+ /// <summary>
+ /// Parse a byte array into a Half-Life Level
+ /// </summary>
+ /// <param name="data">Byte array to parse</param>
+ /// <param name="offset">Offset into the byte array</param>
+ /// <returns>Filled Half-Life Level on success, null on error</returns>
+ public static Models.BSP.File ParseFile(byte[] data, int offset)
+ {
+ // If the data is invalid
+ if (data == null)
+ return null;
+
+ // If the offset is out of bounds
+ if (offset < 0 || offset >= data.Length)
+ return null;
+
+ // Create a memory stream and parse that
+ MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
+ return ParseFile(dataStream);
+ }
+
+ #endregion
+
+ #region Stream Data
+
+ /// <summary>
+ /// Parse a Stream into a Half-Life Level
+ /// </summary>
+ /// <param name="data">Stream to parse</param>
+ /// <returns>Filled Half-Life Level on success, null on error</returns>
+ public static Models.BSP.File ParseFile(Stream data)
+ {
+ // If the data is invalid
+ if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
+ return null;
+
+ // If the offset is out of bounds
+ if (data.Position < 0 || data.Position >= data.Length)
+ return null;
+
+ // Cache the current offset
+ long initialOffset = data.Position;
+
+ // Create a new Half-Life Level to fill
+ var file = new Models.BSP.File();
+
+ #region Header
+
+ // Try to parse the header
+ var header = ParseHeader(data);
+ if (header == null)
+ return null;
+
+ // Set the level header
+ file.Header = header;
+
+ #endregion
+
+ #region Lumps
+
+ // Create the lump array
+ file.Lumps = new Lump[HL_BSP_LUMP_COUNT];
+
+ // Try to parse the lumps
+ for (int i = 0; i < HL_BSP_LUMP_COUNT; i++)
+ {
+ var lump = ParseLump(data);
+ file.Lumps[i] = lump;
+ }
+
+ #endregion
+
+ #region Texture header
+
+ // Try to get the texture header lump
+ var textureDataLump = file.Lumps[HL_BSP_LUMP_TEXTUREDATA];
+ if (textureDataLump.Offset == 0 || textureDataLump.Length == 0)
+ return null;
+
+ // Seek to the texture header
+ data.Seek(textureDataLump.Offset, SeekOrigin.Begin);
+
+ // Try to parse the texture header
+ var textureHeader = ParseTextureHeader(data);
+ if (textureHeader == null)
+ return null;
+
+ // Set the texture header
+ file.TextureHeader = textureHeader;
+
+ #endregion
+
+ return file;
+ }
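+
+ // Illustrative usage sketch (assumptions: a hypothetical local "level.bsp"
+ // path, and that the stream stays open while parsing):
+ //
+ // using (var stream = System.IO.File.OpenRead("level.bsp"))
+ // {
+ //     var level = BSP.ParseFile(stream);
+ //     if (level != null)
+ //         System.Console.WriteLine($"BSP version {level.Header.Version} with {level.TextureHeader.TextureCount} textures");
+ // }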
+
+ /// <summary>
+ /// Parse a Stream into a Half-Life Level header
+ /// </summary>
+ /// <param name="data">Stream to parse</param>
+ /// <returns>Filled Half-Life Level header on success, null on error</returns>
+ private static Header ParseHeader(Stream data)
+ {
+ // TODO: Use marshalling here instead of building
+ Header header = new Header();
+
+ // Only recognized versions are 29 and 30
+ header.Version = data.ReadUInt32();
+ if (header.Version != 29 && header.Version != 30)
+ return null;
+
+ return header;
+ }
+
+ /// <summary>
+ /// Parse a Stream into a lump
+ /// </summary>
+ /// <param name="data">Stream to parse</param>
+ /// <returns>Filled lump on success, null on error</returns>
+ private static Lump ParseLump(Stream data)
+ {
+ // TODO: Use marshalling here instead of building
+ Lump lump = new Lump();
+
+ lump.Offset = data.ReadUInt32();
+ lump.Length = data.ReadUInt32();
+
+ return lump;
+ }
+
+ /// <summary>
+ /// Parse a Stream into a Half-Life Level texture header
+ /// </summary>
+ /// <param name="data">Stream to parse</param>
+ /// <returns>Filled Half-Life Level texture header on success, null on error</returns>
+ private static TextureHeader ParseTextureHeader(Stream data)
+ {
+ // TODO: Use marshalling here instead of building
+ TextureHeader textureHeader = new TextureHeader();
+
+ textureHeader.TextureCount = data.ReadUInt32();
+
+ var offsets = new uint[textureHeader.TextureCount];
+
+ for (int i = 0; i < textureHeader.TextureCount; i++)
+ {
+ offsets[i] = data.ReadUInt32();
+ }
+
+ textureHeader.Offsets = offsets;
+
+ return textureHeader;
+ }
+
+ #endregion
+ }
+}
diff --git a/BurnOutSharp.Builders/GCF.cs b/BurnOutSharp.Builders/GCF.cs
new file mode 100644
index 00000000..ac5225ed
--- /dev/null
+++ b/BurnOutSharp.Builders/GCF.cs
@@ -0,0 +1,773 @@
+using System.IO;
+using System.Text;
+using BurnOutSharp.Models.GCF;
+using BurnOutSharp.Utilities;
+
+namespace BurnOutSharp.Builders
+{
+ public static class GCF
+ {
+ #region Constants
+
+ /// <summary>
+ /// The item is a file.
+ /// </summary>
+ private const int HL_GCF_FLAG_FILE = 0x00004000;
+
+ /// <summary>
+ /// The item is encrypted.
+ /// </summary>
+ private const int HL_GCF_FLAG_ENCRYPTED = 0x00000100;
+
+ /// <summary>
+ /// Backup the item before overwriting it.
+ /// </summary>
+ private const int HL_GCF_FLAG_BACKUP_LOCAL = 0x00000040;
+
+ /// <summary>
+ /// The item is to be copied to the disk.
+ /// </summary>
+ private const int HL_GCF_FLAG_COPY_LOCAL = 0x0000000A;
+
+ /// <summary>
+ /// Don't overwrite the item if copying it to the disk and the item already exists.
+ /// </summary>
+ private const int HL_GCF_FLAG_COPY_LOCAL_NO_OVERWRITE = 0x00000001;
+
+ /// <summary>
+ /// The maximum data allowed in a 32 bit checksum.
+ /// </summary>
+ private const int HL_GCF_CHECKSUM_LENGTH = 0x00008000;
+
+ #endregion
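+
+ // Illustrative sketch: since each checksum covers at most
+ // HL_GCF_CHECKSUM_LENGTH (0x8000) bytes, the number of checksum entries a
+ // file of a given size would need is (assumption drawn from the constant's
+ // description):
+ //
+ // long checksumsNeeded = ((long)fileSize + HL_GCF_CHECKSUM_LENGTH - 1) / HL_GCF_CHECKSUM_LENGTH;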
+
+ #region Byte Data
+
+ /// <summary>
+ /// Parse a byte array into a Half-Life Game Cache
+ /// </summary>
+ /// <param name="data">Byte array to parse</param>
+ /// <param name="offset">Offset into the byte array</param>
+ /// <returns>Filled Half-Life Game Cache on success, null on error</returns>
+ public static Models.GCF.File ParseFile(byte[] data, int offset)
+ {
+ // If the data is invalid
+ if (data == null)
+ return null;
+
+ // If the offset is out of bounds
+ if (offset < 0 || offset >= data.Length)
+ return null;
+
+ // Create a memory stream and parse that
+ MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
+ return ParseFile(dataStream);
+ }
+
+ #endregion
+
+ #region Stream Data
+
+ /// <summary>
+ /// Parse a Stream into a Half-Life Game Cache
+ /// </summary>
+ /// <param name="data">Stream to parse</param>
+ /// <returns>Filled Half-Life Game Cache on success, null on error</returns>
+ public static Models.GCF.File ParseFile(Stream data)
+ {
+ // If the data is invalid
+ if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
+ return null;
+
+ // If the offset is out of bounds
+ if (data.Position < 0 || data.Position >= data.Length)
+ return null;
+
+ // Cache the current offset
+ long initialOffset = data.Position;
+
+ // Create a new Half-Life Game Cache to fill
+ var file = new Models.GCF.File();
+
+ #region Header
+
+ // Try to parse the header
+ var header = ParseHeader(data);
+ if (header == null)
+ return null;
+
+ // Set the game cache header
+ file.Header = header;
+
+ #endregion
+
+ #region Block Entry Header
+
+ // Try to parse the block entry header
+ var blockEntryHeader = ParseBlockEntryHeader(data);
+ if (blockEntryHeader == null)
+ return null;
+
+ // Set the game cache block entry header
+ file.BlockEntryHeader = blockEntryHeader;
+
+ #endregion
+
+ #region Block Entries
+
+ // Create the block entry array
+ file.BlockEntries = new BlockEntry[blockEntryHeader.BlockCount];
+
+ // Try to parse the block entries
+ for (int i = 0; i < blockEntryHeader.BlockCount; i++)
+ {
+ var blockEntry = ParseBlockEntry(data);
+ file.BlockEntries[i] = blockEntry;
+ }
+
+ #endregion
+
+ #region Fragmentation Map Header
+
+ // Try to parse the fragmentation map header
+ var fragmentationMapHeader = ParseFragmentationMapHeader(data);
+ if (fragmentationMapHeader == null)
+ return null;
+
+ // Set the game cache fragmentation map header
+ file.FragmentationMapHeader = fragmentationMapHeader;
+
+ #endregion
+
+ #region Fragmentation Maps
+
+ // Create the fragmentation map array
+ file.FragmentationMaps = new FragmentationMap[fragmentationMapHeader.BlockCount];
+
+ // Try to parse the fragmentation maps
+ for (int i = 0; i < fragmentationMapHeader.BlockCount; i++)
+ {
+ var fragmentationMap = ParseFragmentationMap(data);
+ file.FragmentationMaps[i] = fragmentationMap;
+ }
+
+ #endregion
+
+ #region Block Entry Map Header
+
+ if (header.MinorVersion < 6)
+ {
+ // Try to parse the block entry map header
+ var blockEntryMapHeader = ParseBlockEntryMapHeader(data);
+ if (blockEntryMapHeader == null)
+ return null;
+
+ // Set the game cache block entry map header
+ file.BlockEntryMapHeader = blockEntryMapHeader;
+ }
+
+ #endregion
+
+ #region Block Entry Maps
+
+ if (header.MinorVersion < 6)
+ {
+ // Create the block entry map array
+ file.BlockEntryMaps = new BlockEntryMap[file.BlockEntryMapHeader.BlockCount];
+
+ // Try to parse the block entry maps
+ for (int i = 0; i < file.BlockEntryMapHeader.BlockCount; i++)
+ {
+ var blockEntryMap = ParseBlockEntryMap(data);
+ file.BlockEntryMaps[i] = blockEntryMap;
+ }
+ }
+
+ #endregion
+
+ // Cache the current offset
+ initialOffset = data.Position;
+
+ #region Directory Header
+
+ // Try to parse the directory header
+ var directoryHeader = ParseDirectoryHeader(data);
+ if (directoryHeader == null)
+ return null;
+
+ // Set the game cache directory header
+ file.DirectoryHeader = directoryHeader;
+
+ #endregion
+
+ #region Directory Entries
+
+ // Create the directory entry array
+ file.DirectoryEntries = new DirectoryEntry[directoryHeader.ItemCount];
+
+ // Try to parse the directory entries
+ for (int i = 0; i < directoryHeader.ItemCount; i++)
+ {
+ var directoryEntry = ParseDirectoryEntry(data);
+ file.DirectoryEntries[i] = directoryEntry;
+ }
+
+ #endregion
+
+ #region Directory Names
+
+ // Read the directory names as a single string
+ byte[] directoryNames = data.ReadBytes((int)directoryHeader.NameSize);
+ file.DirectoryNames = Encoding.ASCII.GetString(directoryNames);
+
+ #endregion
+
+ #region Directory Info 1 Entries
+
+ // Create the directory info 1 entry array
+ file.DirectoryInfo1Entries = new DirectoryInfo1Entry[directoryHeader.Info1Count];
+
+ // Try to parse the directory info 1 entries
+ for (int i = 0; i < directoryHeader.Info1Count; i++)
+ {
+ var directoryInfo1Entry = ParseDirectoryInfo1Entry(data);
+ file.DirectoryInfo1Entries[i] = directoryInfo1Entry;
+ }
+
+ #endregion
+
+ #region Directory Info 2 Entries
+
+ // Create the directory info 2 entry array
+ file.DirectoryInfo2Entries = new DirectoryInfo2Entry[directoryHeader.ItemCount];
+
+ // Try to parse the directory info 2 entries
+ for (int i = 0; i < directoryHeader.ItemCount; i++)
+ {
+ var directoryInfo2Entry = ParseDirectoryInfo2Entry(data);
+ file.DirectoryInfo2Entries[i] = directoryInfo2Entry;
+ }
+
+ #endregion
+
+ #region Directory Copy Entries
+
+ // Create the directory copy entry array
+ file.DirectoryCopyEntries = new DirectoryCopyEntry[directoryHeader.CopyCount];
+
+ // Try to parse the directory copy entries
+ for (int i = 0; i < directoryHeader.CopyCount; i++)
+ {
+ var directoryCopyEntry = ParseDirectoryCopyEntry(data);
+ file.DirectoryCopyEntries[i] = directoryCopyEntry;
+ }
+
+ #endregion
+
+ #region Directory Local Entries
+
+ // Create the directory local entry array
+ file.DirectoryLocalEntries = new DirectoryLocalEntry[directoryHeader.LocalCount];
+
+ // Try to parse the directory local entries
+ for (int i = 0; i < directoryHeader.LocalCount; i++)
+ {
+ var directoryLocalEntry = ParseDirectoryLocalEntry(data);
+ file.DirectoryLocalEntries[i] = directoryLocalEntry;
+ }
+
+ #endregion
+
+ // Seek to end of directory section, just in case
+ data.Seek(initialOffset + directoryHeader.DirectorySize, SeekOrigin.Begin);
+
+ #region Directory Map Header
+
+ if (header.MinorVersion >= 5)
+ {
+ // Try to parse the directory map header
+ var directoryMapHeader = ParseDirectoryMapHeader(data);
+ if (directoryMapHeader == null)
+ return null;
+
+ // Set the game cache directory map header
+ file.DirectoryMapHeader = directoryMapHeader;
+ }
+
+ #endregion
+
+ #region Directory Map Entries
+
+ // Create the directory map entry array
+ file.DirectoryMapEntries = new DirectoryMapEntry[directoryHeader.ItemCount];
+
+ // Try to parse the directory map entries
+ for (int i = 0; i < directoryHeader.ItemCount; i++)
+ {
+ var directoryMapEntry = ParseDirectoryMapEntry(data);
+ file.DirectoryMapEntries[i] = directoryMapEntry;
+ }
+
+ #endregion
+
+ #region Checksum Header
+
+ // Try to parse the checksum header
+ var checksumHeader = ParseChecksumHeader(data);
+ if (checksumHeader == null)
+ return null;
+
+ // Set the game cache checksum header
+ file.ChecksumHeader = checksumHeader;
+
+ #endregion
+
+ // Cache the current offset
+ initialOffset = data.Position;
+
+ #region Checksum Map Header
+
+ // Try to parse the checksum map header
+ var checksumMapHeader = ParseChecksumMapHeader(data);
+ if (checksumMapHeader == null)
+ return null;
+
+ // Set the game cache checksum map header
+ file.ChecksumMapHeader = checksumMapHeader;
+
+ #endregion
+
+ #region Checksum Map Entries
+
+ // Create the checksum map entry array
+ file.ChecksumMapEntries = new ChecksumMapEntry[checksumMapHeader.ItemCount];
+
+ // Try to parse the checksum map entries
+ for (int i = 0; i < checksumMapHeader.ItemCount; i++)
+ {
+ var checksumMapEntry = ParseChecksumMapEntry(data);
+ file.ChecksumMapEntries[i] = checksumMapEntry;
+ }
+
+ #endregion
+
+ #region Checksum Entries
+
+ // Create the checksum entry array
+ file.ChecksumEntries = new ChecksumEntry[checksumMapHeader.ChecksumCount];
+
+ // Try to parse the checksum entries
+ for (int i = 0; i < checksumMapHeader.ChecksumCount; i++)
+ {
+ var checksumEntry = ParseChecksumEntry(data);
+ file.ChecksumEntries[i] = checksumEntry;
+ }
+
+ #endregion
+
+ // Seek to end of checksum section, just in case
+ data.Seek(initialOffset + checksumHeader.ChecksumSize, SeekOrigin.Begin);
+
+ #region Data Block Header
+
+ // Try to parse the data block header
+ var dataBlockHeader = ParseDataBlockHeader(data, header.MinorVersion);
+ if (dataBlockHeader == null)
+ return null;
+
+ // Set the game cache data block header
+ file.DataBlockHeader = dataBlockHeader;
+
+ #endregion
+
+ return file;
+ }
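+
+ // Illustrative sketch (assumption about the name table layout): a parsed
+ // directory entry's NameOffset indexes into the DirectoryNames blob, with
+ // each name ending at the next null terminator; "entry" is hypothetical:
+ //
+ // int start = (int)entry.NameOffset;
+ // int end = file.DirectoryNames.IndexOf('\0', start);
+ // string name = file.DirectoryNames.Substring(start, end - start);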
+
+ /// <summary>
+ /// Parse a Stream into a Half-Life Game Cache header
+ /// </summary>
+ /// <param name="data">Stream to parse</param>
+ /// <returns>Filled Half-Life Game Cache header on success, null on error</returns>
+ private static Header ParseHeader(Stream data)
+ {
+ // TODO: Use marshalling here instead of building
+ Header header = new Header();
+
+ header.Dummy0 = data.ReadUInt32();
+ if (header.Dummy0 != 0x00000001)
+ return null;
+
+ header.MajorVersion = data.ReadUInt32();
+ if (header.MajorVersion != 0x00000001)
+ return null;
+
+ header.MinorVersion = data.ReadUInt32();
+ if (header.MinorVersion != 3 && header.MinorVersion != 5 && header.MinorVersion != 6)
+ return null;
+
+ header.CacheID = data.ReadUInt32();
+ header.LastVersionPlayed = data.ReadUInt32();
+ header.Dummy1 = data.ReadUInt32();
+ header.Dummy2 = data.ReadUInt32();
+ header.FileSize = data.ReadUInt32();
+ header.BlockSize = data.ReadUInt32();
+ header.BlockCount = data.ReadUInt32();
+ header.Dummy3 = data.ReadUInt32();
+
+ return header;
+ }
+
+ /// <summary>
+ /// Parse a Stream into a Half-Life Game Cache block entry header
+ /// </summary>
+ /// <param name="data">Stream to parse</param>
+ /// <returns>Filled Half-Life Game Cache block entry header on success, null on error</returns>
+ private static BlockEntryHeader ParseBlockEntryHeader(Stream data)
+ {
+ // TODO: Use marshalling here instead of building
+ BlockEntryHeader blockEntryHeader = new BlockEntryHeader();
+
+ blockEntryHeader.BlockCount = data.ReadUInt32();
+ blockEntryHeader.BlocksUsed = data.ReadUInt32();
+ blockEntryHeader.Dummy0 = data.ReadUInt32();
+ blockEntryHeader.Dummy1 = data.ReadUInt32();
+ blockEntryHeader.Dummy2 = data.ReadUInt32();
+ blockEntryHeader.Dummy3 = data.ReadUInt32();
+ blockEntryHeader.Dummy4 = data.ReadUInt32();
+ blockEntryHeader.Checksum = data.ReadUInt32();
+
+ return blockEntryHeader;
+ }
+
+ /// <summary>
+ /// Parse a Stream into a Half-Life Game Cache block entry
+ /// </summary>
+ /// <param name="data">Stream to parse</param>
+ /// <returns>Filled Half-Life Game Cache block entry on success, null on error</returns>
+ private static BlockEntry ParseBlockEntry(Stream data)
+ {
+ // TODO: Use marshalling here instead of building
+ BlockEntry blockEntry = new BlockEntry();
+
+ blockEntry.EntryFlags = data.ReadUInt32();
+ blockEntry.FileDataOffset = data.ReadUInt32();
+ blockEntry.FileDataSize = data.ReadUInt32();
+ blockEntry.FirstDataBlockIndex = data.ReadUInt32();
+ blockEntry.NextBlockEntryIndex = data.ReadUInt32();
+ blockEntry.PreviousBlockEntryIndex = data.ReadUInt32();
+ blockEntry.DirectoryIndex = data.ReadUInt32();
+
+ return blockEntry;
+ }
+
+ /// <summary>
+ /// Parse a Stream into a Half-Life Game Cache fragmentation map header
+ /// </summary>
+ /// <param name="data">Stream to parse</param>
+ /// <returns>Filled Half-Life Game Cache fragmentation map header on success, null on error</returns>
+ private static FragmentationMapHeader ParseFragmentationMapHeader(Stream data)
+ {
+ // TODO: Use marshalling here instead of building
+ FragmentationMapHeader fragmentationMapHeader = new FragmentationMapHeader();
+
+ fragmentationMapHeader.BlockCount = data.ReadUInt32();
+ fragmentationMapHeader.FirstUnusedEntry = data.ReadUInt32();
+ fragmentationMapHeader.Terminator = data.ReadUInt32();
+ fragmentationMapHeader.Checksum = data.ReadUInt32();
+
+ return fragmentationMapHeader;
+ }
+
+ /// <summary>
+ /// Parse a Stream into a Half-Life Game Cache fragmentation map
+ /// </summary>
+ /// <param name="data">Stream to parse</param>
+ /// <returns>Filled Half-Life Game Cache fragmentation map on success, null on error</returns>
+ private static FragmentationMap ParseFragmentationMap(Stream data)
+ {
+ // TODO: Use marshalling here instead of building
+ FragmentationMap fragmentationMap = new FragmentationMap();
+
+ fragmentationMap.NextDataBlockIndex = data.ReadUInt32();
+
+ return fragmentationMap;
+ }
+
+ /// <summary>
+ /// Parse a Stream into a Half-Life Game Cache block entry map header
+ /// </summary>
+ /// <param name="data">Stream to parse</param>
+ /// <returns>Filled Half-Life Game Cache block entry map header on success, null on error</returns>
+ private static BlockEntryMapHeader ParseBlockEntryMapHeader(Stream data)
+ {
+ // TODO: Use marshalling here instead of building
+ BlockEntryMapHeader blockEntryMapHeader = new BlockEntryMapHeader();
+
+ blockEntryMapHeader.BlockCount = data.ReadUInt32();
+ blockEntryMapHeader.FirstBlockEntryIndex = data.ReadUInt32();
+ blockEntryMapHeader.LastBlockEntryIndex = data.ReadUInt32();
+ blockEntryMapHeader.Dummy0 = data.ReadUInt32();
+ blockEntryMapHeader.Checksum = data.ReadUInt32();
+
+ return blockEntryMapHeader;
+ }
+
+ /// <summary>
+ /// Parse a Stream into a Half-Life Game Cache block entry map
+ /// </summary>
+ /// <param name="data">Stream to parse</param>
+ /// <returns>Filled Half-Life Game Cache block entry map on success, null on error</returns>
+ private static BlockEntryMap ParseBlockEntryMap(Stream data)
+ {
+ // TODO: Use marshalling here instead of building
+ BlockEntryMap blockEntryMap = new BlockEntryMap();
+
+ blockEntryMap.PreviousBlockEntryIndex = data.ReadUInt32();
+ blockEntryMap.NextBlockEntryIndex = data.ReadUInt32();
+
+ return blockEntryMap;
+ }
+
+ /// <summary>
+ /// Parse a Stream into a Half-Life Game Cache directory header
+ /// </summary>
+ /// <param name="data">Stream to parse</param>
+ /// <returns>Filled Half-Life Game Cache directory header on success, null on error</returns>
+ private static DirectoryHeader ParseDirectoryHeader(Stream data)
+ {
+ // TODO: Use marshalling here instead of building
+ DirectoryHeader directoryHeader = new DirectoryHeader();
+
+ directoryHeader.Dummy0 = data.ReadUInt32();
+ directoryHeader.CacheID = data.ReadUInt32();
+ directoryHeader.LastVersionPlayed = data.ReadUInt32();
+ directoryHeader.ItemCount = data.ReadUInt32();
+ directoryHeader.FileCount = data.ReadUInt32();
+ directoryHeader.Dummy1 = data.ReadUInt32();
+ directoryHeader.DirectorySize = data.ReadUInt32();
+ directoryHeader.NameSize = data.ReadUInt32();
+ directoryHeader.Info1Count = data.ReadUInt32();
+ directoryHeader.CopyCount = data.ReadUInt32();
+ directoryHeader.LocalCount = data.ReadUInt32();
+ directoryHeader.Dummy2 = data.ReadUInt32();
+ directoryHeader.Dummy3 = data.ReadUInt32();
+ directoryHeader.Checksum = data.ReadUInt32();
+
+ return directoryHeader;
+ }
+
+ /// <summary>
+ /// Parse a Stream into a Half-Life Game Cache directory entry
+ /// </summary>
+ /// <param name="data">Stream to parse</param>
+ /// <returns>Filled Half-Life Game Cache directory entry on success, null on error</returns>
+ private static DirectoryEntry ParseDirectoryEntry(Stream data)
+ {
+ // TODO: Use marshalling here instead of building
+ DirectoryEntry directoryEntry = new DirectoryEntry();
+
+ directoryEntry.NameOffset = data.ReadUInt32();
+ directoryEntry.ItemSize = data.ReadUInt32();
+ directoryEntry.ChecksumIndex = data.ReadUInt32();
+ directoryEntry.DirectoryFlags = data.ReadUInt32();
+ directoryEntry.ParentIndex = data.ReadUInt32();
+ directoryEntry.NextIndex = data.ReadUInt32();
+ directoryEntry.FirstIndex = data.ReadUInt32();
+
+ return directoryEntry;
+ }
+
+ /// <summary>
+ /// Parse a Stream into a Half-Life Game Cache directory info 1 entry
+ /// </summary>
+ /// <param name="data">Stream to parse</param>
+ /// <returns>Filled Half-Life Game Cache directory info 1 entry on success, null on error</returns>
+ private static DirectoryInfo1Entry ParseDirectoryInfo1Entry(Stream data)
+ {
+ // TODO: Use marshalling here instead of building
+ DirectoryInfo1Entry directoryInfo1Entry = new DirectoryInfo1Entry();
+
+ directoryInfo1Entry.Dummy0 = data.ReadUInt32();
+
+ return directoryInfo1Entry;
+ }
+
+ /// <summary>
+ /// Parse a Stream into a Half-Life Game Cache directory info 2 entry
+ /// </summary>
+ /// <param name="data">Stream to parse</param>
+ /// <returns>Filled Half-Life Game Cache directory info 2 entry on success, null on error</returns>
+ private static DirectoryInfo2Entry ParseDirectoryInfo2Entry(Stream data)
+ {
+ // TODO: Use marshalling here instead of building
+ DirectoryInfo2Entry directoryInfo2Entry = new DirectoryInfo2Entry();
+
+ directoryInfo2Entry.Dummy0 = data.ReadUInt32();
+
+ return directoryInfo2Entry;
+ }
+
+ /// <summary>
+ /// Parse a Stream into a Half-Life Game Cache directory copy entry
+ /// </summary>
+ /// <param name="data">Stream to parse</param>
+ /// <returns>Filled Half-Life Game Cache directory copy entry on success, null on error</returns>
+ private static DirectoryCopyEntry ParseDirectoryCopyEntry(Stream data)
+ {
+ // TODO: Use marshalling here instead of building
+ DirectoryCopyEntry directoryCopyEntry = new DirectoryCopyEntry();
+
+ directoryCopyEntry.DirectoryIndex = data.ReadUInt32();
+
+ return directoryCopyEntry;
+ }
+
+ /// <summary>
+ /// Parse a Stream into a Half-Life Game Cache directory local entry
+ /// </summary>
+ /// <param name="data">Stream to parse</param>
+ /// <returns>Filled Half-Life Game Cache directory local entry on success, null on error</returns>
+ private static DirectoryLocalEntry ParseDirectoryLocalEntry(Stream data)
+ {
+ // TODO: Use marshalling here instead of building
+ DirectoryLocalEntry directoryLocalEntry = new DirectoryLocalEntry();
+
+ directoryLocalEntry.DirectoryIndex = data.ReadUInt32();
+
+ return directoryLocalEntry;
+ }
+
+ /// <summary>
+ /// Parse a Stream into a Half-Life Game Cache directory map header
+ /// </summary>
+ /// <param name="data">Stream to parse</param>
+ /// <returns>Filled Half-Life Game Cache directory map header on success, null on error</returns>
+ private static DirectoryMapHeader ParseDirectoryMapHeader(Stream data)
+ {
+ // TODO: Use marshalling here instead of building
+ DirectoryMapHeader directoryMapHeader = new DirectoryMapHeader();
+
+ directoryMapHeader.Dummy0 = data.ReadUInt32();
+ if (directoryMapHeader.Dummy0 != 0x00000001)
+ return null;
+
+ directoryMapHeader.Dummy1 = data.ReadUInt32();
+ if (directoryMapHeader.Dummy1 != 0x00000000)
+ return null;
+
+ return directoryMapHeader;
+ }
+
+ /// <summary>
+ /// Parse a Stream into a Half-Life Game Cache directory map entry
+ /// </summary>
+ /// <param name="data">Stream to parse</param>
+ /// <returns>Filled Half-Life Game Cache directory map entry on success, null on error</returns>
+ private static DirectoryMapEntry ParseDirectoryMapEntry(Stream data)
+ {
+ // TODO: Use marshalling here instead of building
+ DirectoryMapEntry directoryMapEntry = new DirectoryMapEntry();
+
+ directoryMapEntry.FirstBlockIndex = data.ReadUInt32();
+
+ return directoryMapEntry;
+ }
+
+ /// <summary>
+ /// Parse a Stream into a Half-Life Game Cache checksum header
+ /// </summary>
+ /// <param name="data">Stream to parse</param>
+ /// <returns>Filled Half-Life Game Cache checksum header on success, null on error</returns>
+ private static ChecksumHeader ParseChecksumHeader(Stream data)
+ {
+ // TODO: Use marshalling here instead of building
+ ChecksumHeader checksumHeader = new ChecksumHeader();
+
+ checksumHeader.Dummy0 = data.ReadUInt32();
+ checksumHeader.ChecksumSize = data.ReadUInt32();
+
+ return checksumHeader;
+ }
+
+ /// <summary>
+ /// Parse a Stream into a Half-Life Game Cache checksum map header
+ /// </summary>
+ /// <param name="data">Stream to parse</param>
+ /// <returns>Filled Half-Life Game Cache checksum map header on success, null on error</returns>
+ private static ChecksumMapHeader ParseChecksumMapHeader(Stream data)
+ {
+ // TODO: Use marshalling here instead of building
+ ChecksumMapHeader checksumMapHeader = new ChecksumMapHeader();
+
+ checksumMapHeader.Dummy0 = data.ReadUInt32();
+ if (checksumMapHeader.Dummy0 != 0x14893721)
+ return null;
+
+ checksumMapHeader.Dummy1 = data.ReadUInt32();
+ if (checksumMapHeader.Dummy1 != 0x00000001)
+ return null;
+
+ checksumMapHeader.ItemCount = data.ReadUInt32();
+ checksumMapHeader.ChecksumCount = data.ReadUInt32();
+
+ return checksumMapHeader;
+ }
+
+ /// <summary>
+ /// Parse a Stream into a Half-Life Game Cache checksum map entry
+ /// </summary>
+ /// <param name="data">Stream to parse</param>
+ /// <returns>Filled Half-Life Game Cache checksum map entry on success, null on error</returns>
+ private static ChecksumMapEntry ParseChecksumMapEntry(Stream data)
+ {
+ // TODO: Use marshalling here instead of building
+ ChecksumMapEntry checksumMapEntry = new ChecksumMapEntry();
+
+ checksumMapEntry.ChecksumCount = data.ReadUInt32();
+ checksumMapEntry.FirstChecksumIndex = data.ReadUInt32();
+
+ return checksumMapEntry;
+ }
+
+ /// <summary>
+ /// Parse a Stream into a Half-Life Game Cache checksum entry
+ /// </summary>
+ /// <param name="data">Stream to parse</param>
+ /// <returns>Filled Half-Life Game Cache checksum entry on success, null on error</returns>
+ private static ChecksumEntry ParseChecksumEntry(Stream data)
+ {
+ // TODO: Use marshalling here instead of building
+ ChecksumEntry checksumEntry = new ChecksumEntry();
+
+ checksumEntry.Checksum = data.ReadUInt32();
+
+ return checksumEntry;
+ }
+
+ /// <summary>
+ /// Parse a Stream into a Half-Life Game Cache data block header
+ /// </summary>
+ /// <param name="data">Stream to parse</param>
+ /// <param name="minorVersion">Minor version field from the header</param>
+ /// <returns>Filled Half-Life Game Cache data block header on success, null on error</returns>
+ private static DataBlockHeader ParseDataBlockHeader(Stream data, uint minorVersion)
+ {
+ // TODO: Use marshalling here instead of building
+ DataBlockHeader dataBlockHeader = new DataBlockHeader();
+
+ // In version 3 the DataBlockHeader is missing the LastVersionPlayed field.
+ if (minorVersion >= 5)
+ dataBlockHeader.LastVersionPlayed = data.ReadUInt32();
+
+ dataBlockHeader.BlockCount = data.ReadUInt32();
+ dataBlockHeader.BlockSize = data.ReadUInt32();
+ dataBlockHeader.FirstBlockOffset = data.ReadUInt32();
+ dataBlockHeader.BlocksUsed = data.ReadUInt32();
+ dataBlockHeader.Checksum = data.ReadUInt32();
+
+ return dataBlockHeader;
+ }
+
+ #endregion
+ }
+}
diff --git a/BurnOutSharp.Builders/NCF.cs b/BurnOutSharp.Builders/NCF.cs
new file mode 100644
index 00000000..2f00ccc3
--- /dev/null
+++ b/BurnOutSharp.Builders/NCF.cs
@@ -0,0 +1,524 @@
+using System.IO;
+using System.Text;
+using BurnOutSharp.Models.NCF;
+using BurnOutSharp.Utilities;
+
+namespace BurnOutSharp.Builders
+{
+ public static class NCF
+ {
+ #region Constants
+
+ /// <summary>
+ /// The item is a file.
+ /// </summary>
+ public const int HL_NCF_FLAG_FILE = 0x00004000;
+
+ /// <summary>
+ /// The item is encrypted.
+ /// </summary>
+ public const int HL_NCF_FLAG_ENCRYPTED = 0x00000100;
+
+ /// <summary>
+ /// Backup the item before overwriting it.
+ /// </summary>
+ public const int HL_NCF_FLAG_BACKUP_LOCAL = 0x00000040;
+
+ /// <summary>
+ /// The item is to be copied to the disk.
+ /// </summary>
+ public const int HL_NCF_FLAG_COPY_LOCAL = 0x0000000a;
+
+ /// <summary>
+ /// Don't overwrite the item if copying it to the disk and the item already exists.
+ /// </summary>
+ public const int HL_NCF_FLAG_COPY_LOCAL_NO_OVERWRITE = 0x00000001;
+
+ #endregion
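+
+ // Illustrative sketch: these public flag constants are bitmasks against
+ // DirectoryEntry.DirectoryFlags, so callers can classify parsed entries
+ // (assumption based on the flag descriptions; "entry" is hypothetical):
+ //
+ // bool isFile = (entry.DirectoryFlags & HL_NCF_FLAG_FILE) != 0;
+ // bool isEncrypted = (entry.DirectoryFlags & HL_NCF_FLAG_ENCRYPTED) != 0;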
+
+ #region Byte Data
+
+ /// <summary>
+ /// Parse a byte array into a Half-Life No Cache
+ /// </summary>
+ /// <param name="data">Byte array to parse</param>
+ /// <param name="offset">Offset into the byte array</param>
+ /// <returns>Filled Half-Life No Cache on success, null on error</returns>
+ public static Models.NCF.File ParseFile(byte[] data, int offset)
+ {
+ // If the data is invalid
+ if (data == null)
+ return null;
+
+ // If the offset is out of bounds
+ if (offset < 0 || offset >= data.Length)
+ return null;
+
+ // Create a memory stream and parse that
+ MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
+ return ParseFile(dataStream);
+ }
+
+ #endregion
+
+ #region Stream Data
+
+ /// <summary>
+ /// Parse a Stream into a Half-Life No Cache
+ /// </summary>
+ /// <param name="data">Stream to parse</param>
+ /// <returns>Filled Half-Life No Cache on success, null on error</returns>
+ public static Models.NCF.File ParseFile(Stream data)
+ {
+ // If the data is invalid
+ if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
+ return null;
+
+ // If the offset is out of bounds
+ if (data.Position < 0 || data.Position >= data.Length)
+ return null;
+
+ // Cache the current offset
+ long initialOffset = data.Position;
+
+ // Create a new Half-Life No Cache to fill
+ var file = new Models.NCF.File();
+
+ #region Header
+
+ // Try to parse the header
+ var header = ParseHeader(data);
+ if (header == null)
+ return null;
+
+ // Set the no cache header
+ file.Header = header;
+
+ #endregion
+
+ // Cache the current offset
+ initialOffset = data.Position;
+
+ #region Directory Header
+
+ // Try to parse the directory header
+ var directoryHeader = ParseDirectoryHeader(data);
+ if (directoryHeader == null)
+ return null;
+
+ // Set the no cache directory header
+ file.DirectoryHeader = directoryHeader;
+
+ #endregion
+
+ #region Directory Entries
+
+ // Create the directory entry array
+ file.DirectoryEntries = new DirectoryEntry[directoryHeader.ItemCount];
+
+ // Try to parse the directory entries
+ for (int i = 0; i < directoryHeader.ItemCount; i++)
+ {
+ var directoryEntry = ParseDirectoryEntry(data);
+ file.DirectoryEntries[i] = directoryEntry;
+ }
+
+ #endregion
+
+ #region Directory Names
+
+ // Read the directory names as a single string
+ byte[] directoryNames = data.ReadBytes((int)directoryHeader.NameSize);
+ file.DirectoryNames = Encoding.ASCII.GetString(directoryNames);
+
+ #endregion
+
+ #region Directory Info 1 Entries
+
+ // Create the directory info 1 entry array
+ file.DirectoryInfo1Entries = new DirectoryInfo1Entry[directoryHeader.Info1Count];
+
+ // Try to parse the directory info 1 entries
+ for (int i = 0; i < directoryHeader.Info1Count; i++)
+ {
+ var directoryInfo1Entry = ParseDirectoryInfo1Entry(data);
+ file.DirectoryInfo1Entries[i] = directoryInfo1Entry;
+ }
+
+ #endregion
+
+ #region Directory Info 2 Entries
+
+ // Create the directory info 2 entry array
+ file.DirectoryInfo2Entries = new DirectoryInfo2Entry[directoryHeader.ItemCount];
+
+ // Try to parse the directory info 2 entries
+ for (int i = 0; i < directoryHeader.ItemCount; i++)
+ {
+ var directoryInfo2Entry = ParseDirectoryInfo2Entry(data);
+ file.DirectoryInfo2Entries[i] = directoryInfo2Entry;
+ }
+
+ #endregion
+
+ #region Directory Copy Entries
+
+ // Create the directory copy entry array
+ file.DirectoryCopyEntries = new DirectoryCopyEntry[directoryHeader.CopyCount];
+
+ // Try to parse the directory copy entries
+ for (int i = 0; i < directoryHeader.CopyCount; i++)
+ {
+ var directoryCopyEntry = ParseDirectoryCopyEntry(data);
+ file.DirectoryCopyEntries[i] = directoryCopyEntry;
+ }
+
+ #endregion
+
+ #region Directory Local Entries
+
+ // Create the directory local entry array
+ file.DirectoryLocalEntries = new DirectoryLocalEntry[directoryHeader.LocalCount];
+
+ // Try to parse the directory local entries
+ for (int i = 0; i < directoryHeader.LocalCount; i++)
+ {
+ var directoryLocalEntry = ParseDirectoryLocalEntry(data);
+ file.DirectoryLocalEntries[i] = directoryLocalEntry;
+ }
+
+ #endregion
+
+ // Seek to end of directory section, just in case
+ data.Seek(initialOffset + directoryHeader.DirectorySize, SeekOrigin.Begin);
+
+ #region Unknown Header
+
+ // Try to parse the unknown header
+ var unknownHeader = ParseUnknownHeader(data);
+ if (unknownHeader == null)
+ return null;
+
+ // Set the no cache unknown header
+ file.UnknownHeader = unknownHeader;
+
+ #endregion
+
+ #region Unknown Entries
+
+ // Create the unknown entry array
+ file.UnknownEntries = new UnknownEntry[directoryHeader.ItemCount];
+
+ // Try to parse the unknown entries
+ for (int i = 0; i < directoryHeader.ItemCount; i++)
+ {
+ var unknownEntry = ParseUnknownEntry(data);
+ file.UnknownEntries[i] = unknownEntry;
+ }
+
+ #endregion
+
+ #region Checksum Header
+
+ // Try to parse the checksum header
+ var checksumHeader = ParseChecksumHeader(data);
+ if (checksumHeader == null)
+ return null;
+
+ // Set the no cache checksum header
+ file.ChecksumHeader = checksumHeader;
+
+ #endregion
+
+ // Cache the current offset
+ initialOffset = data.Position;
+
+ #region Checksum Map Header
+
+ // Try to parse the checksum map header
+ var checksumMapHeader = ParseChecksumMapHeader(data);
+ if (checksumMapHeader == null)
+ return null;
+
+ // Set the no cache checksum map header
+ file.ChecksumMapHeader = checksumMapHeader;
+
+ #endregion
+
+ #region Checksum Map Entries
+
+ // Create the checksum map entry array
+ file.ChecksumMapEntries = new ChecksumMapEntry[checksumMapHeader.ItemCount];
+
+ // Try to parse the checksum map entries
+ for (int i = 0; i < checksumMapHeader.ItemCount; i++)
+ {
+ var checksumMapEntry = ParseChecksumMapEntry(data);
+ file.ChecksumMapEntries[i] = checksumMapEntry;
+ }
+
+ #endregion
+
+ #region Checksum Entries
+
+ // Create the checksum entry array
+ file.ChecksumEntries = new ChecksumEntry[checksumMapHeader.ChecksumCount];
+
+ // Try to parse the checksum entries
+ for (int i = 0; i < checksumMapHeader.ChecksumCount; i++)
+ {
+ var checksumEntry = ParseChecksumEntry(data);
+ file.ChecksumEntries[i] = checksumEntry;
+ }
+
+ #endregion
+
+ // Seek to end of checksum section, just in case
+ data.Seek(initialOffset + checksumHeader.ChecksumSize, SeekOrigin.Begin);
+
+ return file;
+ }
+
+ /// <summary>
+ /// Parse a Stream into a Half-Life No Cache header
+ /// </summary>
+ /// <param name="data">Stream to parse</param>
+ /// <returns>Filled Half-Life No Cache header on success, null on error</returns>
+ private static Header ParseHeader(Stream data)
+ {
+ // TODO: Use marshalling here instead of building
+ Header header = new Header();
+
+ header.Dummy0 = data.ReadUInt32();
+ header.MajorVersion = data.ReadUInt32();
+ if (header.MajorVersion != 2)
+ return null;
+
+ header.MinorVersion = data.ReadUInt32();
+ if (header.MinorVersion != 1)
+ return null;
+
+ header.CacheID = data.ReadUInt32();
+ header.LastVersionPlayed = data.ReadUInt32();
+ header.Dummy3 = data.ReadUInt32();
+ header.Dummy4 = data.ReadUInt32();
+ header.FileSize = data.ReadUInt32();
+ header.BlockSize = data.ReadUInt32();
+ header.BlockCount = data.ReadUInt32();
+ header.Dummy5 = data.ReadUInt32();
+
+ return header;
+ }
+
+ /// <summary>
+ /// Parse a Stream into a Half-Life No Cache directory header
+ /// </summary>
+ /// <param name="data">Stream to parse</param>
+ /// <returns>Filled Half-Life No Cache directory header on success, null on error</returns>
+ private static DirectoryHeader ParseDirectoryHeader(Stream data)
+ {
+ // TODO: Use marshalling here instead of building
+ DirectoryHeader directoryHeader = new DirectoryHeader();
+
+ directoryHeader.Dummy0 = data.ReadUInt32();
+ directoryHeader.CacheID = data.ReadUInt32();
+ directoryHeader.LastVersionPlayed = data.ReadUInt32();
+ directoryHeader.ItemCount = data.ReadUInt32();
+ directoryHeader.FileCount = data.ReadUInt32();
+ directoryHeader.Dummy1 = data.ReadUInt32();
+ directoryHeader.DirectorySize = data.ReadUInt32();
+ directoryHeader.NameSize = data.ReadUInt32();
+ directoryHeader.Info1Count = data.ReadUInt32();
+ directoryHeader.CopyCount = data.ReadUInt32();
+ directoryHeader.LocalCount = data.ReadUInt32();
+ directoryHeader.Dummy2 = data.ReadUInt32();
+ directoryHeader.Checksum = data.ReadUInt32();
+
+ return directoryHeader;
+ }
+
+ /// <summary>
+ /// Parse a Stream into a Half-Life No Cache directory entry
+ /// </summary>
+ /// <param name="data">Stream to parse</param>
+ /// <returns>Filled Half-Life No Cache directory entry on success, null on error</returns>
+ private static DirectoryEntry ParseDirectoryEntry(Stream data)
+ {
+ // TODO: Use marshalling here instead of building
+ DirectoryEntry directoryEntry = new DirectoryEntry();
+
+ directoryEntry.NameOffset = data.ReadUInt32();
+ directoryEntry.ItemSize = data.ReadUInt32();
+ directoryEntry.ChecksumIndex = data.ReadUInt32();
+ directoryEntry.DirectoryFlags = data.ReadUInt32();
+ directoryEntry.ParentIndex = data.ReadUInt32();
+ directoryEntry.NextIndex = data.ReadUInt32();
+ directoryEntry.FirstIndex = data.ReadUInt32();
+
+ return directoryEntry;
+ }
+
+ /// <summary>
+ /// Parse a Stream into a Half-Life No Cache directory info 1 entry
+ /// </summary>
+ /// <param name="data">Stream to parse</param>
+ /// <returns>Filled Half-Life No Cache directory info 1 entry on success, null on error</returns>
+ private static DirectoryInfo1Entry ParseDirectoryInfo1Entry(Stream data)
+ {
+ // TODO: Use marshalling here instead of building
+ DirectoryInfo1Entry directoryInfo1Entry = new DirectoryInfo1Entry();
+
+ directoryInfo1Entry.Dummy0 = data.ReadUInt32();
+
+ return directoryInfo1Entry;
+ }
+
+ /// <summary>
+ /// Parse a Stream into a Half-Life No Cache directory info 2 entry
+ /// </summary>
+ /// <param name="data">Stream to parse</param>
+ /// <returns>Filled Half-Life No Cache directory info 2 entry on success, null on error</returns>
+ private static DirectoryInfo2Entry ParseDirectoryInfo2Entry(Stream data)
+ {
+ // TODO: Use marshalling here instead of building
+ DirectoryInfo2Entry directoryInfo2Entry = new DirectoryInfo2Entry();
+
+ directoryInfo2Entry.Dummy0 = data.ReadUInt32();
+
+ return directoryInfo2Entry;
+ }
+
+ /// <summary>
+ /// Parse a Stream into a Half-Life No Cache directory copy entry
+ /// </summary>
+ /// <param name="data">Stream to parse</param>
+ /// <returns>Filled Half-Life No Cache directory copy entry on success, null on error</returns>
+ private static DirectoryCopyEntry ParseDirectoryCopyEntry(Stream data)
+ {
+ // TODO: Use marshalling here instead of building
+ DirectoryCopyEntry directoryCopyEntry = new DirectoryCopyEntry();
+
+ directoryCopyEntry.DirectoryIndex = data.ReadUInt32();
+
+ return directoryCopyEntry;
+ }
+
+ /// <summary>
+ /// Parse a Stream into a Half-Life No Cache directory local entry
+ /// </summary>
+ /// <param name="data">Stream to parse</param>
+ /// <returns>Filled Half-Life No Cache directory local entry on success, null on error</returns>
+ private static DirectoryLocalEntry ParseDirectoryLocalEntry(Stream data)
+ {
+ // TODO: Use marshalling here instead of building
+ DirectoryLocalEntry directoryLocalEntry = new DirectoryLocalEntry();
+
+ directoryLocalEntry.DirectoryIndex = data.ReadUInt32();
+
+ return directoryLocalEntry;
+ }
+
+ /// <summary>
+ /// Parse a Stream into a Half-Life No Cache unknown header
+ /// </summary>
+ /// <param name="data">Stream to parse</param>
+ /// <returns>Filled Half-Life No Cache unknown header on success, null on error</returns>
+ private static UnknownHeader ParseUnknownHeader(Stream data)
+ {
+ // TODO: Use marshalling here instead of building
+ UnknownHeader unknownHeader = new UnknownHeader();
+
+ unknownHeader.Dummy0 = data.ReadUInt32();
+ if (unknownHeader.Dummy0 != 0x00000001)
+ return null;
+
+ unknownHeader.Dummy1 = data.ReadUInt32();
+ if (unknownHeader.Dummy1 != 0x00000000)
+ return null;
+
+ return unknownHeader;
+ }
+
+ /// <summary>
+ /// Parse a Stream into a Half-Life No Cache unknown entry
+ /// </summary>
+ /// <param name="data">Stream to parse</param>
+ /// <returns>Filled Half-Life No Cache unknown entry on success, null on error</returns>
+ private static UnknownEntry ParseUnknownEntry(Stream data)
+ {
+ // TODO: Use marshalling here instead of building
+ UnknownEntry unknownEntry = new UnknownEntry();
+
+ unknownEntry.Dummy0 = data.ReadUInt32();
+
+ return unknownEntry;
+ }
+
+ /// <summary>
+ /// Parse a Stream into a Half-Life No Cache checksum header
+ /// </summary>
+ /// <param name="data">Stream to parse</param>
+ /// <returns>Filled Half-Life No Cache checksum header on success, null on error</returns>
+ private static ChecksumHeader ParseChecksumHeader(Stream data)
+ {
+ // TODO: Use marshalling here instead of building
+ ChecksumHeader checksumHeader = new ChecksumHeader();
+
+ checksumHeader.Dummy0 = data.ReadUInt32();
+ checksumHeader.ChecksumSize = data.ReadUInt32();
+
+ return checksumHeader;
+ }
+
+ /// <summary>
+ /// Parse a Stream into a Half-Life No Cache checksum map header
+ /// </summary>
+ /// <param name="data">Stream to parse</param>
+ /// <returns>Filled Half-Life No Cache checksum map header on success, null on error</returns>
+ private static ChecksumMapHeader ParseChecksumMapHeader(Stream data)
+ {
+ // TODO: Use marshalling here instead of building
+ ChecksumMapHeader checksumMapHeader = new ChecksumMapHeader();
+
+ checksumMapHeader.Dummy0 = data.ReadUInt32();
+ checksumMapHeader.Dummy1 = data.ReadUInt32();
+ checksumMapHeader.ItemCount = data.ReadUInt32();
+ checksumMapHeader.ChecksumCount = data.ReadUInt32();
+
+ return checksumMapHeader;
+ }
+
+ /// <summary>
+ /// Parse a Stream into a Half-Life No Cache checksum map entry
+ /// </summary>
+ /// <param name="data">Stream to parse</param>
+ /// <returns>Filled Half-Life No Cache checksum map entry on success, null on error</returns>
+ private static ChecksumMapEntry ParseChecksumMapEntry(Stream data)
+ {
+ // TODO: Use marshalling here instead of building
+ ChecksumMapEntry checksumMapEntry = new ChecksumMapEntry();
+
+ checksumMapEntry.ChecksumCount = data.ReadUInt32();
+ checksumMapEntry.FirstChecksumIndex = data.ReadUInt32();
+
+ return checksumMapEntry;
+ }
+
+ /// <summary>
+ /// Parse a Stream into a Half-Life No Cache checksum entry
+ /// </summary>
+ /// <param name="data">Stream to parse</param>
+ /// <returns>Filled Half-Life No Cache checksum entry on success, null on error</returns>
+ private static ChecksumEntry ParseChecksumEntry(Stream data)
+ {
+ // TODO: Use marshalling here instead of building
+ ChecksumEntry checksumEntry = new ChecksumEntry();
+
+ checksumEntry.Checksum = data.ReadUInt32();
+
+ return checksumEntry;
+ }
+
+ #endregion
+ }
+}
diff --git a/BurnOutSharp.Builders/PAK.cs b/BurnOutSharp.Builders/PAK.cs
new file mode 100644
index 00000000..0d86799e
--- /dev/null
+++ b/BurnOutSharp.Builders/PAK.cs
@@ -0,0 +1,133 @@
+using System.IO;
+using System.Text;
+using BurnOutSharp.Models.PAK;
+using BurnOutSharp.Utilities;
+
+namespace BurnOutSharp.Builders
+{
+ public static class PAK
+ {
+ #region Byte Data
+
+ /// <summary>
+ /// Parse a byte array into a Half-Life Package
+ /// </summary>
+ /// <param name="data">Byte array to parse</param>
+ /// <param name="offset">Offset into the byte array</param>
+ /// <returns>Filled Half-Life Package on success, null on error</returns>
+ public static Models.PAK.File ParseFile(byte[] data, int offset)
+ {
+ // If the data is invalid
+ if (data == null)
+ return null;
+
+ // If the offset is out of bounds
+ if (offset < 0 || offset >= data.Length)
+ return null;
+
+ // Create a memory stream and parse that
+ MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
+ return ParseFile(dataStream);
+ }
+
+ #endregion
+
+ #region Stream Data
+
+ /// <summary>
+ /// Parse a Stream into a Half-Life Package
+ /// </summary>
+ /// <param name="data">Stream to parse</param>
+ /// <returns>Filled Half-Life Package on success, null on error</returns>
+ public static Models.PAK.File ParseFile(Stream data)
+ {
+ // If the data is invalid
+ if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
+ return null;
+
+ // If the offset is out of bounds
+ if (data.Position < 0 || data.Position >= data.Length)
+ return null;
+
+ // Cache the current offset
+ long initialOffset = data.Position;
+
+ // Create a new Half-Life Package to fill
+ var file = new Models.PAK.File();
+
+ #region Header
+
+ // Try to parse the header
+ var header = ParseHeader(data);
+ if (header == null)
+ return null;
+
+ // Set the package header
+ file.Header = header;
+
+ #endregion
+
+ #region Directory Items
+
+ // Get the directory items offset
+ uint directoryItemsOffset = header.DirectoryOffset;
+ if (directoryItemsOffset < 0 || directoryItemsOffset >= data.Length)
+ return null;
+
+ // Seek to the directory items
+ data.Seek(directoryItemsOffset, SeekOrigin.Begin);
+
+ // Create the directory item array
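+ // Each directory item is 64 bytes: a 56-byte name plus two 4-byte fields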
+ file.DirectoryItems = new DirectoryItem[header.DirectoryLength / 64];
+
+ // Try to parse the directory items
+ for (int i = 0; i < file.DirectoryItems.Length; i++)
+ {
+ var directoryItem = ParseDirectoryItem(data);
+ file.DirectoryItems[i] = directoryItem;
+ }
+
+ #endregion
+
+ return file;
+ }
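+
+ // Illustrative sketch (assumption, not part of this change): ItemOffset and
+ // ItemLength locate an item's raw data within the same stream, so extraction
+ // would look roughly like ("item" is hypothetical):
+ //
+ // data.Seek(item.ItemOffset, SeekOrigin.Begin);
+ // byte[] itemData = data.ReadBytes((int)item.ItemLength);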
+
+ /// <summary>
+ /// Parse a Stream into a Half-Life Package header
+ /// </summary>
+ /// <param name="data">Stream to parse</param>
+ /// <returns>Filled Half-Life Package header on success, null on error</returns>
+ private static Header ParseHeader(Stream data)
+ {
+ // TODO: Use marshalling here instead of building
+ Header header = new Header();
+
+ byte[] signature = data.ReadBytes(4);
+ header.Signature = Encoding.ASCII.GetString(signature);
+ header.DirectoryOffset = data.ReadUInt32();
+ header.DirectoryLength = data.ReadUInt32();
+
+ return header;
+ }
+
+ /// <summary>
+ /// Parse a Stream into a Half-Life Package directory item
+ /// </summary>
+ /// <param name="data">Stream to parse</param>
+ /// <returns>Filled Half-Life Package directory item on success, null on error</returns>
+ private static DirectoryItem ParseDirectoryItem(Stream data)
+ {
+ // TODO: Use marshalling here instead of building
+ DirectoryItem directoryItem = new DirectoryItem();
+
+ byte[] itemName = data.ReadBytes(56);
+ directoryItem.ItemName = Encoding.ASCII.GetString(itemName);
+ directoryItem.ItemOffset = data.ReadUInt32();
+ directoryItem.ItemLength = data.ReadUInt32();
+
+ return directoryItem;
+ }
+
+ #endregion
+ }
+}
diff --git a/BurnOutSharp.Builders/SGA.cs b/BurnOutSharp.Builders/SGA.cs
new file mode 100644
index 00000000..476e415c
--- /dev/null
+++ b/BurnOutSharp.Builders/SGA.cs
@@ -0,0 +1,688 @@
+using System.Collections.Generic;
+using System.IO;
+using System.Text;
+using BurnOutSharp.Models.SGA;
+using BurnOutSharp.Utilities;
+
+namespace BurnOutSharp.Builders
+{
+ public static class SGA
+ {
+ #region Constants
+
+ /// <summary>
+ /// Length of an SGA checksum in bytes
+ /// </summary>
+ public const int HL_SGA_CHECKSUM_LENGTH = 0x00008000;
+
+ #endregion
+
+ #region Byte Data
+
+ /// <summary>
+ /// Parse a byte array into an SGA
+ /// </summary>
+ /// <param name="data">Byte array to parse</param>
+ /// <param name="offset">Offset into the byte array</param>
+ /// <returns>Filled SGA on success, null on error</returns>
+ public static Models.SGA.File ParseFile(byte[] data, int offset)
+ {
+ // If the data is invalid
+ if (data == null)
+ return null;
+
+ // If the offset is out of bounds
+ if (offset < 0 || offset >= data.Length)
+ return null;
+
+ // Create a memory stream and parse that
+ MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
+ return ParseFile(dataStream);
+ }
+
+ #endregion
+
+ #region Stream Data
+
+ /// <summary>
+ /// Parse a Stream into an SGA
+ /// </summary>
+ /// <param name="data">Stream to parse</param>
+ /// <returns>Filled SGA on success, null on error</returns>
+ public static Models.SGA.File ParseFile(Stream data)
+ {
+ // If the data is invalid
+ if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
+ return null;
+
+ // If the offset is out of bounds
+ if (data.Position < 0 || data.Position >= data.Length)
+ return null;
+
+ // Cache the current offset
+ long initialOffset = data.Position;
+
+ // Create a new SGA to fill
+ var file = new Models.SGA.File();
+
+ #region Header
+
+ // Try to parse the header
+ var header = ParseHeader(data);
+ if (header == null)
+ return null;
+
+ // Set the SGA header
+ file.Header = header;
+
+ #endregion
+
+ #region Directory
+
+ // Try to parse the directory
+ var directory = ParseDirectory(data, header.MajorVersion);
+ if (directory == null)
+ return null;
+
+ // Set the SGA directory
+ file.Directory = directory;
+
+ #endregion
+
+ return file;
+ }
+
+ /// <summary>
+ /// Parse a Stream into an SGA header
+ /// </summary>
+ /// <param name="data">Stream to parse</param>
+ /// <returns>Filled SGA header on success, null on error</returns>
+ private static Header ParseHeader(Stream data)
+ {
+ // TODO: Use marshalling here instead of building
+ byte[] signatureBytes = data.ReadBytes(8);
+ string signature = Encoding.ASCII.GetString(signatureBytes);
+ if (signature != "_ARCHIVE")
+ return null;
+
+ ushort majorVersion = data.ReadUInt16();
+ ushort minorVersion = data.ReadUInt16();
+ if (minorVersion != 0)
+ return null;
+
+ switch (majorVersion)
+ {
+ // Versions 4 and 5 share the same header
+ case 4:
+ case 5:
+ Header4 header4 = new Header4();
+
+ header4.Signature = signature;
+ header4.MajorVersion = majorVersion;
+ header4.MinorVersion = minorVersion;
+ header4.FileMD5 = data.ReadBytes(0x10);
+ byte[] header4Name = data.ReadBytes(64);
+ header4.Name = Encoding.ASCII.GetString(header4Name);
+ header4.HeaderMD5 = data.ReadBytes(0x10);
+ header4.HeaderLength = data.ReadUInt32();
+ header4.FileDataOffset = data.ReadUInt32();
+ header4.Dummy0 = data.ReadUInt32();
+
+ return header4;
+
+ // Versions 6 and 7 share the same header
+ case 6:
+ case 7:
+ Header6 header6 = new Header6();
+
+ header6.Signature = signature;
+ header6.MajorVersion = majorVersion;
+ header6.MinorVersion = minorVersion;
+ byte[] header6Name = data.ReadBytes(64);
+ header6.Name = Encoding.ASCII.GetString(header6Name);
+ header6.HeaderLength = data.ReadUInt32();
+ header6.FileDataOffset = data.ReadUInt32();
+ header6.Dummy0 = data.ReadUInt32();
+
+ return header6;
+
+ // No other major versions are recognized
+ default:
+ return null;
+ }
+ }
+
+ /// <summary>
+ /// Parse a Stream into an SGA directory
+ /// </summary>
+ /// <param name="data">Stream to parse</param>
+ /// <param name="majorVersion">SGA major version</param>
+ /// <returns>Filled SGA directory on success, null on error</returns>
+ private static Models.SGA.Directory ParseDirectory(Stream data, ushort majorVersion)
+ {
+ #region Directory
+
+ // Create the appropriate type of directory
+ Models.SGA.Directory directory;
+ switch (majorVersion)
+ {
+ case 4: directory = new Directory4(); break;
+ case 5: directory = new Directory5(); break;
+ case 6: directory = new Directory6(); break;
+ case 7: directory = new Directory7(); break;
+ default: return null;
+ }
+
+ #endregion
+
+ #region Directory Header
+
+ // Try to parse the directory header
+ var directoryHeader = ParseDirectoryHeader(data, majorVersion);
+ if (directoryHeader == null)
+ return null;
+
+ // Set the directory header
+ switch (majorVersion)
+ {
+ case 4: (directory as Directory4).DirectoryHeader = directoryHeader as DirectoryHeader4; break;
+ case 5: (directory as Directory5).DirectoryHeader = directoryHeader as DirectoryHeader5; break;
+ case 6: (directory as Directory6).DirectoryHeader = directoryHeader as DirectoryHeader5; break;
+ case 7: (directory as Directory7).DirectoryHeader = directoryHeader as DirectoryHeader7; break;
+ default: return null;
+ }
+
+ #endregion
+
+ #region Sections
+
+ // Get the sections offset
+ uint sectionOffset;
+ switch (majorVersion)
+ {
+ case 4: sectionOffset = (directoryHeader as DirectoryHeader4).SectionOffset; break;
+ case 5: sectionOffset = (directoryHeader as DirectoryHeader5).SectionOffset; break;
+ case 6: sectionOffset = (directoryHeader as DirectoryHeader5).SectionOffset; break;
+ case 7: sectionOffset = (directoryHeader as DirectoryHeader7).SectionOffset; break;
+ default: return null;
+ }
+
+ // Validate the offset
+ if (sectionOffset < 0 || sectionOffset >= data.Length)
+ return null;
+
+ // Seek to the sections
+ data.Seek(sectionOffset, SeekOrigin.Begin);
+
+ // Get the section count
+ uint sectionCount;
+ switch (majorVersion)
+ {
+ case 4: sectionCount = (directoryHeader as DirectoryHeader4).SectionCount; break;
+ case 5: sectionCount = (directoryHeader as DirectoryHeader5).SectionCount; break;
+ case 6: sectionCount = (directoryHeader as DirectoryHeader5).SectionCount; break;
+ case 7: sectionCount = (directoryHeader as DirectoryHeader7).SectionCount; break;
+ default: return null;
+ }
+
+ // Create the sections array
+ object[] sections;
+ switch (majorVersion)
+ {
+ case 4: sections = new Section4[sectionCount]; break;
+ case 5: sections = new Section5[sectionCount]; break;
+ case 6: sections = new Section5[sectionCount]; break;
+ case 7: sections = new Section5[sectionCount]; break;
+ default: return null;
+ }
+
+ // Try to parse the sections
+ for (int i = 0; i < sections.Length; i++)
+ {
+ switch (majorVersion)
+ {
+ case 4: sections[i] = ParseSection4(data); break;
+ case 5: sections[i] = ParseSection5(data); break;
+ case 6: sections[i] = ParseSection5(data); break;
+ case 7: sections[i] = ParseSection5(data); break;
+ default: return null;
+ }
+ }
+
+ // Assign the sections
+ switch (majorVersion)
+ {
+ case 4: (directory as Directory4).Sections = sections as Section4[]; break;
+ case 5: (directory as Directory5).Sections = sections as Section5[]; break;
+ case 6: (directory as Directory6).Sections = sections as Section5[]; break;
+ case 7: (directory as Directory7).Sections = sections as Section5[]; break;
+ default: return null;
+ }
+
+ #endregion
+
+ #region Folders
+
+ // Get the folders offset
+ uint folderOffset;
+ switch (majorVersion)
+ {
+ case 4: folderOffset = (directoryHeader as DirectoryHeader4).FolderOffset; break;
+ case 5: folderOffset = (directoryHeader as DirectoryHeader5).FolderOffset; break;
+ case 6: folderOffset = (directoryHeader as DirectoryHeader5).FolderOffset; break;
+ case 7: folderOffset = (directoryHeader as DirectoryHeader7).FolderOffset; break;
+ default: return null;
+ }
+
+ // Validate the offset
+ if (folderOffset < 0 || folderOffset >= data.Length)
+ return null;
+
+ // Seek to the folders
+ data.Seek(folderOffset, SeekOrigin.Begin);
+
+ // Get the folder count
+ uint folderCount;
+ switch (majorVersion)
+ {
+ case 4: folderCount = (directoryHeader as DirectoryHeader4).FolderCount; break;
+ case 5: folderCount = (directoryHeader as DirectoryHeader5).FolderCount; break;
+ case 6: folderCount = (directoryHeader as DirectoryHeader5).FolderCount; break;
+ case 7: folderCount = (directoryHeader as DirectoryHeader7).FolderCount; break;
+ default: return null;
+ }
+
+ // Create the folders array
+ object[] folders;
+ switch (majorVersion)
+ {
+ case 4: folders = new Folder4[folderCount]; break;
+ case 5: folders = new Folder5[folderCount]; break;
+ case 6: folders = new Folder5[folderCount]; break;
+ case 7: folders = new Folder5[folderCount]; break;
+ default: return null;
+ }
+
+ // Try to parse the folders
+ for (int i = 0; i < folders.Length; i++)
+ {
+ switch (majorVersion)
+ {
+ case 4: folders[i] = ParseFolder4(data); break;
+ case 5: folders[i] = ParseFolder5(data); break;
+ case 6: folders[i] = ParseFolder5(data); break;
+ case 7: folders[i] = ParseFolder5(data); break;
+ default: return null;
+ }
+ }
+
+ // Assign the folders
+ switch (majorVersion)
+ {
+ case 4: (directory as Directory4).Folders = folders as Folder4[]; break;
+ case 5: (directory as Directory5).Folders = folders as Folder5[]; break;
+ case 6: (directory as Directory6).Folders = folders as Folder5[]; break;
+ case 7: (directory as Directory7).Folders = folders as Folder5[]; break;
+ default: return null;
+ }
+
+ #endregion
+
+ #region Files
+
+ // Get the files offset
+ uint fileOffset;
+ switch (majorVersion)
+ {
+ case 4: fileOffset = (directoryHeader as DirectoryHeader4).FileOffset; break;
+ case 5: fileOffset = (directoryHeader as DirectoryHeader5).FileOffset; break;
+ case 6: fileOffset = (directoryHeader as DirectoryHeader5).FileOffset; break;
+ case 7: fileOffset = (directoryHeader as DirectoryHeader7).FileOffset; break;
+ default: return null;
+ }
+
+ // Validate the offset
+ if (fileOffset < 0 || fileOffset >= data.Length)
+ return null;
+
+ // Seek to the files
+ data.Seek(fileOffset, SeekOrigin.Begin);
+
+ // Get the file count
+ uint fileCount;
+ switch (majorVersion)
+ {
+ case 4: fileCount = (directoryHeader as DirectoryHeader4).FileCount; break;
+ case 5: fileCount = (directoryHeader as DirectoryHeader5).FileCount; break;
+ case 6: fileCount = (directoryHeader as DirectoryHeader5).FileCount; break;
+ case 7: fileCount = (directoryHeader as DirectoryHeader7).FileCount; break;
+ default: return null;
+ }
+
+ // Create the files array
+ object[] files;
+ switch (majorVersion)
+ {
+ case 4: files = new File4[fileCount]; break;
+ case 5: files = new File4[fileCount]; break;
+ case 6: files = new File6[fileCount]; break;
+ case 7: files = new File7[fileCount]; break;
+ default: return null;
+ }
+
+ // Try to parse the files
+ for (int i = 0; i < files.Length; i++)
+ {
+ switch (majorVersion)
+ {
+ case 4: files[i] = ParseFile4(data); break;
+ case 5: files[i] = ParseFile4(data); break;
+ case 6: files[i] = ParseFile6(data); break;
+ case 7: files[i] = ParseFile7(data); break;
+ default: return null;
+ }
+ }
+
+ // Assign the files
+ switch (majorVersion)
+ {
+ case 4: (directory as Directory4).Files = files as File4[]; break;
+ case 5: (directory as Directory5).Files = files as File4[]; break;
+ case 6: (directory as Directory6).Files = files as File6[]; break;
+ case 7: (directory as Directory7).Files = files as File7[]; break;
+ default: return null;
+ }
+
+ #endregion
+
+ #region String Table
+
+ // Get the string table offset
+ uint stringTableOffset;
+ switch (majorVersion)
+ {
+ case 4: stringTableOffset = (directoryHeader as DirectoryHeader4).StringTableOffset; break;
+ case 5: stringTableOffset = (directoryHeader as DirectoryHeader5).StringTableOffset; break;
+ case 6: stringTableOffset = (directoryHeader as DirectoryHeader5).StringTableOffset; break;
+ case 7: stringTableOffset = (directoryHeader as DirectoryHeader7).StringTableOffset; break;
+ default: return null;
+ }
+
+ // Validate the offset
+ if (stringTableOffset >= data.Length)
+ return null;
+
+ // Seek to the string table
+ data.Seek(stringTableOffset, SeekOrigin.Begin);
+
+ // Get the string table count
+ uint stringCount;
+ switch (majorVersion)
+ {
+ case 4: stringCount = (directoryHeader as DirectoryHeader4).StringTableCount; break;
+ case 5: stringCount = (directoryHeader as DirectoryHeader5).StringTableCount; break;
+ case 6: stringCount = (directoryHeader as DirectoryHeader5).StringTableCount; break;
+ case 7: stringCount = (directoryHeader as DirectoryHeader7).StringTableCount; break;
+ default: return null;
+ }
+
+ // Create the strings dictionary
+ Dictionary<long, string> strings = new Dictionary<long, string>((int)stringCount);
+
+ // Try to parse the strings
+ for (int i = 0; i < stringCount; i++)
+ {
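+ // Each string is keyed by the stream position where it was read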
+ long currentPosition = data.Position;
+ strings[currentPosition] = data.ReadString(Encoding.ASCII);
+ }
+
+ // Assign the string table
+ switch (majorVersion)
+ {
+ case 4: (directory as Directory4).StringTable = strings; break;
+ case 5: (directory as Directory5).StringTable = strings; break;
+ case 6: (directory as Directory6).StringTable = strings; break;
+ case 7: (directory as Directory7).StringTable = strings; break;
+ default: return null;
+ }
+
+ #endregion
+
+ return directory;
+ }
+
+ ///
+ /// Parse a Stream into an SGA directory header
+ ///
+ /// Stream to parse
+ /// SGA major version
+ /// Filled SGA directory header on success, null on error
+ private static object ParseDirectoryHeader(Stream data, ushort majorVersion)
+ {
+ switch (majorVersion)
+ {
+ case 4: return ParseDirectory4Header(data);
+ case 5: return ParseDirectory5Header(data);
+ case 6: return ParseDirectory5Header(data);
+ case 7: return ParseDirectory7Header(data);
+ default: return null;
+ }
+ }
+
+ ///
+ /// Parse a Stream into an SGA directory header version 4
+ ///
+ /// Stream to parse
+ /// Filled SGA directory header version 4 on success, null on error
+ private static DirectoryHeader4 ParseDirectory4Header(Stream data)
+ {
+ DirectoryHeader4 directoryHeader4 = new DirectoryHeader4();
+
+ directoryHeader4.SectionOffset = data.ReadUInt32();
+ directoryHeader4.SectionCount = data.ReadUInt16();
+ directoryHeader4.FolderOffset = data.ReadUInt32();
+ directoryHeader4.FolderCount = data.ReadUInt16();
+ directoryHeader4.FileOffset = data.ReadUInt32();
+ directoryHeader4.FileCount = data.ReadUInt16();
+ directoryHeader4.StringTableOffset = data.ReadUInt32();
+ directoryHeader4.StringTableCount = data.ReadUInt16();
+
+ return directoryHeader4;
+ }
+
+ ///
+ /// Parse a Stream into an SGA directory header version 5
+ ///
+ /// Stream to parse
+ /// Filled SGA directory header version 5 on success, null on error
+ private static DirectoryHeader5 ParseDirectory5Header(Stream data)
+ {
+ DirectoryHeader5 directoryHeader5 = new DirectoryHeader5();
+
+ directoryHeader5.SectionOffset = data.ReadUInt32();
+ directoryHeader5.SectionCount = data.ReadUInt32();
+ directoryHeader5.FolderOffset = data.ReadUInt32();
+ directoryHeader5.FolderCount = data.ReadUInt32();
+ directoryHeader5.FileOffset = data.ReadUInt32();
+ directoryHeader5.FileCount = data.ReadUInt32();
+ directoryHeader5.StringTableOffset = data.ReadUInt32();
+ directoryHeader5.StringTableCount = data.ReadUInt32();
+
+ return directoryHeader5;
+ }
+
+ ///
+ /// Parse a Stream into an SGA directory header version 7
+ ///
+ /// Stream to parse
+ /// Filled SGA directory header version 7 on success, null on error
+ private static DirectoryHeader7 ParseDirectory7Header(Stream data)
+ {
+ DirectoryHeader7 directoryHeader7 = new DirectoryHeader7();
+
+ directoryHeader7.SectionOffset = data.ReadUInt32();
+ directoryHeader7.SectionCount = data.ReadUInt32();
+ directoryHeader7.FolderOffset = data.ReadUInt32();
+ directoryHeader7.FolderCount = data.ReadUInt32();
+ directoryHeader7.FileOffset = data.ReadUInt32();
+ directoryHeader7.FileCount = data.ReadUInt32();
+ directoryHeader7.StringTableOffset = data.ReadUInt32();
+ directoryHeader7.StringTableCount = data.ReadUInt32();
+ directoryHeader7.HashTableOffset = data.ReadUInt32();
+ directoryHeader7.BlockSize = data.ReadUInt32();
+
+ return directoryHeader7;
+ }
+
+ ///
+ /// Parse a Stream into an SGA section version 4
+ ///
+ /// Stream to parse
+ /// Filled SGA section version 4 on success, null on error
+ private static Section4 ParseSection4(Stream data)
+ {
+ Section4 section4 = new Section4();
+
+ byte[] section4Alias = data.ReadBytes(64);
+ section4.Alias = Encoding.ASCII.GetString(section4Alias);
+ byte[] section4Name = data.ReadBytes(64);
+ section4.Name = Encoding.ASCII.GetString(section4Name);
+ section4.FolderStartIndex = data.ReadUInt16();
+ section4.FolderEndIndex = data.ReadUInt16();
+ section4.FileStartIndex = data.ReadUInt16();
+ section4.FileEndIndex = data.ReadUInt16();
+ section4.FolderRootIndex = data.ReadUInt16();
+
+ return section4;
+ }
+
+ ///
+ /// Parse a Stream into an SGA section version 5
+ ///
+ /// Stream to parse
+ /// Filled SGA section version 5 on success, null on error
+ private static Section5 ParseSection5(Stream data)
+ {
+ Section5 section5 = new Section5();
+
+ byte[] section5Alias = data.ReadBytes(64);
+ section5.Alias = Encoding.ASCII.GetString(section5Alias);
+ byte[] section5Name = data.ReadBytes(64);
+ section5.Name = Encoding.ASCII.GetString(section5Name);
+ section5.FolderStartIndex = data.ReadUInt32();
+ section5.FolderEndIndex = data.ReadUInt32();
+ section5.FileStartIndex = data.ReadUInt32();
+ section5.FileEndIndex = data.ReadUInt32();
+ section5.FolderRootIndex = data.ReadUInt32();
+
+ return section5;
+ }
+
+ ///
+ /// Parse a Stream into an SGA folder version 4
+ ///
+ /// Stream to parse
+ /// Filled SGA folder version 4 on success, null on error
+ private static Folder4 ParseFolder4(Stream data)
+ {
+ Folder4 folder4 = new Folder4();
+
+ folder4.NameOffset = data.ReadUInt32();
+ folder4.FolderStartIndex = data.ReadUInt16();
+ folder4.FolderEndIndex = data.ReadUInt16();
+ folder4.FileStartIndex = data.ReadUInt16();
+ folder4.FileEndIndex = data.ReadUInt16();
+
+ return folder4;
+ }
+
+ ///
+ /// Parse a Stream into an SGA folder version 5
+ ///
+ /// Stream to parse
+ /// Filled SGA folder version 5 on success, null on error
+ private static Folder5 ParseFolder5(Stream data)
+ {
+ Folder5 folder5 = new Folder5();
+
+ folder5.NameOffset = data.ReadUInt32();
+ folder5.FolderStartIndex = data.ReadUInt32();
+ folder5.FolderEndIndex = data.ReadUInt32();
+ folder5.FileStartIndex = data.ReadUInt32();
+ folder5.FileEndIndex = data.ReadUInt32();
+
+ return folder5;
+ }
+
+ ///
+ /// Parse a Stream into an SGA file version 4
+ ///
+ /// Stream to parse
+ /// Filled SGA file version 4 on success, null on error
+ private static File4 ParseFile4(Stream data)
+ {
+ File4 file4 = new File4();
+
+ file4.NameOffset = data.ReadUInt32();
+ file4.Offset = data.ReadUInt32();
+ file4.SizeOnDisk = data.ReadUInt32();
+ file4.Size = data.ReadUInt32();
+ file4.TimeModified = data.ReadUInt32();
+ file4.Dummy0 = data.ReadByteValue();
+ file4.Type = data.ReadByteValue();
+
+ return file4;
+ }
+
+ ///
+ /// Parse a Stream into an SGA file version 6
+ ///
+ /// Stream to parse
+ /// Filled SGA file version 6 on success, null on error
+ private static File6 ParseFile6(Stream data)
+ {
+ File6 file6 = new File6();
+
+ file6.NameOffset = data.ReadUInt32();
+ file6.Offset = data.ReadUInt32();
+ file6.SizeOnDisk = data.ReadUInt32();
+ file6.Size = data.ReadUInt32();
+ file6.TimeModified = data.ReadUInt32();
+ file6.Dummy0 = data.ReadByteValue();
+ file6.Type = data.ReadByteValue();
+ file6.CRC32 = data.ReadUInt32();
+
+ return file6;
+ }
+
+ ///
+ /// Parse a Stream into an SGA file version 7
+ ///
+ /// Stream to parse
+ /// Filled SGA file version 7 on success, null on error
+ private static File7 ParseFile7(Stream data)
+ {
+ File7 file7 = new File7();
+
+ file7.NameOffset = data.ReadUInt32();
+ file7.Offset = data.ReadUInt32();
+ file7.SizeOnDisk = data.ReadUInt32();
+ file7.Size = data.ReadUInt32();
+ file7.TimeModified = data.ReadUInt32();
+ file7.Dummy0 = data.ReadByteValue();
+ file7.Type = data.ReadByteValue();
+ file7.CRC32 = data.ReadUInt32();
+ file7.HashOffset = data.ReadUInt32();
+
+ return file7;
+ }
+
+ #endregion
+ }
+}
diff --git a/BurnOutSharp.Builders/VBSP.cs b/BurnOutSharp.Builders/VBSP.cs
new file mode 100644
index 00000000..5bcf1114
--- /dev/null
+++ b/BurnOutSharp.Builders/VBSP.cs
@@ -0,0 +1,179 @@
+using System.IO;
+using System.Text;
+using BurnOutSharp.Models.VBSP;
+using BurnOutSharp.Utilities;
+
+namespace BurnOutSharp.Builders
+{
+ public static class VBSP
+ {
+ #region Constants
+
+ ///
+ /// Total number of lumps in the package
+ ///
+ public const int HL_VBSP_LUMP_COUNT = 64;
+
+ ///
+ /// Index for the entities lump
+ ///
+ public const int HL_VBSP_LUMP_ENTITIES = 0;
+
+ ///
+ /// Index for the pakfile lump
+ ///
+ public const int HL_VBSP_LUMP_PAKFILE = 40;
+
+ ///
+ /// Zip local file header signature as an integer
+ ///
+ public const int HL_VBSP_ZIP_LOCAL_FILE_HEADER_SIGNATURE = 0x04034b50;
+
+ ///
+ /// Zip file header signature as an integer
+ ///
+ public const int HL_VBSP_ZIP_FILE_HEADER_SIGNATURE = 0x02014b50;
+
+ ///
+ /// Zip end of central directory record signature as an integer
+ ///
+ public const int HL_VBSP_ZIP_END_OF_CENTRAL_DIRECTORY_RECORD_SIGNATURE = 0x06054b50;
+
+ ///
+ /// Length of a ZIP checksum in bytes
+ ///
+ public const int HL_VBSP_ZIP_CHECKSUM_LENGTH = 0x00008000;
+
+ #endregion
+
+ #region Byte Data
+
+ ///
+ /// Parse a byte array into a Half-Life 2 Level
+ ///
+ /// Byte array to parse
+ /// Offset into the byte array
+ /// Filled Half-Life 2 Level on success, null on error
+ public static Models.VBSP.File ParseFile(byte[] data, int offset)
+ {
+ // If the data is invalid
+ if (data == null)
+ return null;
+
+ // If the offset is out of bounds
+ if (offset < 0 || offset >= data.Length)
+ return null;
+
+ // Create a memory stream and parse that
+ MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
+ return ParseFile(dataStream);
+ }
+
+ #endregion
+
+ #region Stream Data
+
+ ///
+ /// Parse a Stream into a Half-Life 2 Level
+ ///
+ /// Stream to parse
+ /// Filled Half-Life 2 Level on success, null on error
+ public static Models.VBSP.File ParseFile(Stream data)
+ {
+ // If the data is invalid
+ if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
+ return null;
+
+ // If the offset is out of bounds
+ if (data.Position < 0 || data.Position >= data.Length)
+ return null;
+
+ // Cache the current offset
+ long initialOffset = data.Position;
+
+ // Create a new Half-Life 2 Level to fill
+ var file = new Models.VBSP.File();
+
+ #region Header
+
+ // Try to parse the header
+ var header = ParseHeader(data);
+ if (header == null)
+ return null;
+
+ // Set the package header
+ file.Header = header;
+
+ #endregion
+
+ return file;
+ }
+
+ ///
+ /// Parse a Stream into a Half-Life 2 Level header
+ ///
+ /// Stream to parse
+ /// Filled Half-Life 2 Level header on success, null on error
+ private static Header ParseHeader(Stream data)
+ {
+ // TODO: Use marshalling here instead of building
+ Header header = new Header();
+
+ byte[] signature = data.ReadBytes(4);
+ header.Signature = Encoding.ASCII.GetString(signature);
+ if (header.Signature != "VBSP")
+ return null;
+
+ header.Version = data.ReadInt32();
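+ // 0x00040014 packs two 16-bit version values (20 and 4); reportedly the variant used by Dark Messiah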
+ if ((header.Version < 19 || header.Version > 22) && header.Version != 0x00040014)
+ return null;
+
+ header.Lumps = new Lump[HL_VBSP_LUMP_COUNT];
+ for (int i = 0; i < HL_VBSP_LUMP_COUNT; i++)
+ {
+ header.Lumps[i] = ParseLump(data, header.Version);
+ }
+
+ header.MapRevision = data.ReadInt32();
+
+ return header;
+ }
+
+ ///
+ /// Parse a Stream into a Half-Life 2 Level lump
+ ///
+ /// Stream to parse
+ /// VBSP version
+ /// Filled Half-Life 2 Level lump on success, null on error
+ private static Lump ParseLump(Stream data, int version)
+ {
+ // TODO: Use marshalling here instead of building
+ Lump lump = new Lump();
+
+ lump.Offset = data.ReadUInt32();
+ lump.Length = data.ReadUInt32();
+ lump.Version = data.ReadUInt32();
+ lump.FourCC = new char[4];
+ for (int i = 0; i < 4; i++)
+ {
+ lump.FourCC[i] = (char)data.ReadByte();
+ }
+
+ // This block is commented out because test VBSPs with header
+ // version 21 already had these values in the "right" order,
+ // and swapping them was causing decompression issues
+
+ //if (version >= 21 && version != 0x00040014)
+ //{
+ // uint temp = lump.Version;
+ // lump.Version = lump.Offset;
+ // lump.Offset = lump.Length;
+ // lump.Length = temp;
+ //}
+
+ return lump;
+ }
+
+ #endregion
+ }
+}
diff --git a/BurnOutSharp.Builders/VPK.cs b/BurnOutSharp.Builders/VPK.cs
new file mode 100644
index 00000000..7914cdd2
--- /dev/null
+++ b/BurnOutSharp.Builders/VPK.cs
@@ -0,0 +1,318 @@
+using System.Collections.Generic;
+using System.IO;
+using System.Text;
+using BurnOutSharp.Models.VPK;
+using BurnOutSharp.Utilities;
+
+namespace BurnOutSharp.Builders
+{
+ public static class VPK
+ {
+ #region Constants
+
+ ///
+ /// VPK header signature as an integer
+ ///
+ public const int HL_VPK_SIGNATURE = 0x55aa1234;
+
+ ///
+ /// Index indicating that there is no archive
+ ///
+ public const int HL_VPK_NO_ARCHIVE = 0x7fff;
+
+ ///
+ /// Length of a VPK checksum in bytes
+ ///
+ public const int HL_VPK_CHECKSUM_LENGTH = 0x00008000;
+
+ #endregion
+
+ #region Byte Data
+
+ ///
+ /// Parse a byte array into a Valve Package
+ ///
+ /// Byte array to parse
+ /// Offset into the byte array
+ /// Filled Valve Package on success, null on error
+ public static Models.VPK.File ParseFile(byte[] data, int offset)
+ {
+ // If the data is invalid
+ if (data == null)
+ return null;
+
+ // If the offset is out of bounds
+ if (offset < 0 || offset >= data.Length)
+ return null;
+
+ // Create a memory stream and parse that
+ MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
+ return ParseFile(dataStream);
+ }
+
+ #endregion
+
+ #region Stream Data
+
+ ///
+ /// Parse a Stream into a Valve Package
+ ///
+ /// Stream to parse
+ /// Filled Valve Package on success, null on error
+ public static Models.VPK.File ParseFile(Stream data)
+ {
+ // If the data is invalid
+ if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
+ return null;
+
+ // If the offset is out of bounds
+ if (data.Position < 0 || data.Position >= data.Length)
+ return null;
+
+ // Cache the current offset
+ long initialOffset = data.Position;
+
+ // Create a new Valve Package to fill
+ var file = new Models.VPK.File();
+
+ #region Header
+
+ // Try to parse the header
+ // The original version had no signature.
+ var header = ParseHeader(data);
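+ // A null header is tolerated here so that signatureless packages are not rejected outright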
+
+ // Set the package header
+ file.Header = header;
+
+ #endregion
+
+ #region Extended Header
+
+ if (header?.Version == 2)
+ {
+ // Try to parse the extended header
+ var extendedHeader = ParseExtendedHeader(data);
+ if (extendedHeader == null)
+ return null;
+
+ // Set the package extended header
+ file.ExtendedHeader = extendedHeader;
+ }
+
+ #endregion
+
+ #region Archive Hashes
+
+ if (header?.Version == 2 && file.ExtendedHeader != null && file.ExtendedHeader.ArchiveHashLength > 0)
+ {
+ // Create the archive hashes list
+ var archiveHashes = new List<ArchiveHash>();
+
+ // Cache the current offset
+ initialOffset = data.Position;
+
+ // Try to parse the directory items
+ while (data.Position < initialOffset + file.ExtendedHeader.ArchiveHashLength)
+ {
+ var archiveHash = ParseArchiveHash(data);
+ archiveHashes.Add(archiveHash);
+ }
+
+ file.ArchiveHashes = archiveHashes.ToArray();
+ }
+
+ #endregion
+
+ #region Directory Items
+
+ // Create the directory items tree
+ var directoryItems = ParseDirectoryItemTree(data);
+
+ // Set the directory items
+ file.DirectoryItems = directoryItems;
+
+ #endregion
+
+ return file;
+ }
+
+ ///
+ /// Parse a Stream into a Valve Package header
+ ///
+ /// Stream to parse
+ /// Filled Valve Package header on success, null on error
+ private static Header ParseHeader(Stream data)
+ {
+ // TODO: Use marshalling here instead of building
+ Header header = new Header();
+
+ header.Signature = data.ReadUInt32();
+ if (header.Signature != HL_VPK_SIGNATURE)
+ return null;
+
+ header.Version = data.ReadUInt32();
+ if (header.Version > 2)
+ return null;
+
+ header.DirectoryLength = data.ReadUInt32();
+
+ return header;
+ }
+
+ ///
+ /// Parse a Stream into a Valve Package extended header
+ ///
+ /// Stream to parse
+ /// Filled Valve Package extended header on success, null on error
+ private static ExtendedHeader ParseExtendedHeader(Stream data)
+ {
+ // TODO: Use marshalling here instead of building
+ ExtendedHeader extendedHeader = new ExtendedHeader();
+
+ extendedHeader.Dummy0 = data.ReadUInt32();
+ extendedHeader.ArchiveHashLength = data.ReadUInt32();
+ extendedHeader.ExtraLength = data.ReadUInt32();
+ extendedHeader.Dummy1 = data.ReadUInt32();
+
+ return extendedHeader;
+ }
+
+ ///
+ /// Parse a Stream into a Valve Package archive hash
+ ///
+ /// Stream to parse
+ /// Filled Valve Package archive hash on success, null on error
+ private static ArchiveHash ParseArchiveHash(Stream data)
+ {
+ // TODO: Use marshalling here instead of building
+ ArchiveHash archiveHash = new ArchiveHash();
+
+ archiveHash.ArchiveIndex = data.ReadUInt32();
+ archiveHash.ArchiveOffset = data.ReadUInt32();
+ archiveHash.Length = data.ReadUInt32();
+ archiveHash.Hash = data.ReadBytes(0x10);
+
+ return archiveHash;
+ }
+
+ ///
+ /// Parse a Stream into a Valve Package directory item tree
+ ///
+ /// Stream to parse
+ /// Filled Valve Package directory item tree on success, null on error
+ private static DirectoryItem[] ParseDirectoryItemTree(Stream data)
+ {
+ // Create the directory items list
+ var directoryItems = new List<DirectoryItem>();
+
+ while (true)
+ {
+ // Get the extension
+ string extensionString = data.ReadString(Encoding.ASCII);
+ if (string.IsNullOrEmpty(extensionString))
+ break;
+
+ while (true)
+ {
+ // Get the path
+ string pathString = data.ReadString(Encoding.ASCII);
+ if (string.IsNullOrEmpty(pathString))
+ break;
+
+ while (true)
+ {
+ // Get the name
+ string nameString = data.ReadString(Encoding.ASCII);
+ if (string.IsNullOrEmpty(nameString))
+ break;
+
+ // Get the directory item
+ var directoryItem = ParseDirectoryItem(data, extensionString, pathString, nameString);
+
+ // Add the directory item
+ directoryItems.Add(directoryItem);
+ }
+ }
+ }
+
+ return directoryItems.ToArray();
+ }
+
+ ///
+ /// Parse a Stream into a Valve Package directory item
+ ///
+ /// Stream to parse
+ /// Filled Valve Package directory item on success, null on error
+ private static DirectoryItem ParseDirectoryItem(Stream data, string extension, string path, string name)
+ {
+ DirectoryItem directoryItem = new DirectoryItem();
+
+ directoryItem.Extension = extension;
+ directoryItem.Path = path;
+ directoryItem.Name = name;
+
+ // Get the directory entry
+ var directoryEntry = ParseDirectoryEntry(data);
+
+ // Set the directory entry
+ directoryItem.DirectoryEntry = directoryEntry;
+
+ // Get the preload data pointer
+ long preloadDataPointer = -1;
+ int preloadDataLength = -1;
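+ // HL_VPK_NO_ARCHIVE indicates the data is stored in the directory file itself rather than in a numbered archive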
+ if (directoryEntry.ArchiveIndex == HL_VPK_NO_ARCHIVE && directoryEntry.EntryLength > 0)
+ {
+ preloadDataPointer = directoryEntry.EntryOffset;
+ preloadDataLength = (int)directoryEntry.EntryLength;
+ }
+ else if (directoryEntry.PreloadBytes > 0)
+ {
+ preloadDataPointer = data.Position;
+ preloadDataLength = directoryEntry.PreloadBytes;
+ }
+
+ // If we had a valid preload data pointer
+ byte[] preloadData = null;
+ if (preloadDataPointer >= 0 && preloadDataLength > 0)
+ {
+ // Cache the current offset
+ long initialOffset = data.Position;
+
+ // Seek to the preload data offset
+ data.Seek(preloadDataPointer, SeekOrigin.Begin);
+
+ // Read the preload data
+ preloadData = data.ReadBytes(preloadDataLength);
+
+ // Seek back to the original offset
+ data.Seek(initialOffset, SeekOrigin.Begin);
+ }
+
+ // Set the preload data
+ directoryItem.PreloadData = preloadData;
+
+ return directoryItem;
+ }
+
+ ///
+ /// Parse a Stream into a Valve Package directory entry
+ ///
+ /// Stream to parse
+ /// Filled Valve Package directory entry on success, null on error
+ private static DirectoryEntry ParseDirectoryEntry(Stream data)
+ {
+ // TODO: Use marshalling here instead of building
+ DirectoryEntry directoryEntry = new DirectoryEntry();
+
+ directoryEntry.CRC = data.ReadUInt32();
+ directoryEntry.PreloadBytes = data.ReadUInt16();
+ directoryEntry.ArchiveIndex = data.ReadUInt16();
+ directoryEntry.EntryOffset = data.ReadUInt32();
+ directoryEntry.EntryLength = data.ReadUInt32();
+ directoryEntry.Dummy0 = data.ReadUInt16();
+
+ return directoryEntry;
+ }
+
+ #endregion
+ }
+}
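For reference, a minimal consumer of the VPK builder above might look like the sketch below (hypothetical usage: the file name is illustrative, and the package is assumed small enough to read fully into memory):

    byte[] contents = System.IO.File.ReadAllBytes("pak01_dir.vpk");
    var package = BurnOutSharp.Builders.VPK.ParseFile(contents, 0);
    if (package?.DirectoryItems != null)
    {
        // List the full path of every item recorded in the package directory
        foreach (var item in package.DirectoryItems)
            System.Console.WriteLine($"{item.Path}/{item.Name}.{item.Extension}");
    }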
diff --git a/BurnOutSharp.Builders/WAD.cs b/BurnOutSharp.Builders/WAD.cs
new file mode 100644
index 00000000..1a8234e0
--- /dev/null
+++ b/BurnOutSharp.Builders/WAD.cs
@@ -0,0 +1,265 @@
+using System.IO;
+using System.Text;
+using BurnOutSharp.Models.WAD;
+using BurnOutSharp.Utilities;
+
+namespace BurnOutSharp.Builders
+{
+ public static class WAD
+ {
+ #region Byte Data
+
+ ///
+ /// Parse a byte array into a Half-Life Texture Package
+ ///
+ /// Byte array to parse
+ /// Offset into the byte array
+ /// Filled Half-Life Texture Package on success, null on error
+ public static Models.WAD.File ParseFile(byte[] data, int offset)
+ {
+ // If the data is invalid
+ if (data == null)
+ return null;
+
+ // If the offset is out of bounds
+ if (offset < 0 || offset >= data.Length)
+ return null;
+
+ // Create a memory stream and parse that
+ MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
+ return ParseFile(dataStream);
+ }
+
+ #endregion
+
+ #region Stream Data
+
+ ///
+ /// Parse a Stream into a Half-Life Texture Package
+ ///
+ /// Stream to parse
+ /// Filled Half-Life Texture Package on success, null on error
+ public static Models.WAD.File ParseFile(Stream data)
+ {
+ // If the data is invalid
+ if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
+ return null;
+
+ // If the offset is out of bounds
+ if (data.Position < 0 || data.Position >= data.Length)
+ return null;
+
+ // Cache the current offset
+ long initialOffset = data.Position;
+
+ // Create a new Half-Life Texture Package to fill
+ var file = new Models.WAD.File();
+
+ #region Header
+
+ // Try to parse the header
+ var header = ParseHeader(data);
+ if (header == null)
+ return null;
+
+ // Set the package header
+ file.Header = header;
+
+ #endregion
+
+ #region Lumps
+
+ // Get the lump offset
+ uint lumpOffset = header.LumpOffset;
+ if (lumpOffset >= data.Length)
+ return null;
+
+ // Seek to the lump offset
+ data.Seek(lumpOffset, SeekOrigin.Begin);
+
+ // Create the lump array
+ file.Lumps = new Lump[header.LumpCount];
+ for (int i = 0; i < header.LumpCount; i++)
+ {
+ var lump = ParseLump(data);
+ file.Lumps[i] = lump;
+ }
+
+ #endregion
+
+ #region Lump Infos
+
+ // Create the lump info array
+ file.LumpInfos = new LumpInfo[header.LumpCount];
+ for (int i = 0; i < header.LumpCount; i++)
+ {
+ var lump = file.Lumps[i];
+ if (lump.Compression != 0)
+ {
+ file.LumpInfos[i] = null;
+ continue;
+ }
+
+ // Get the lump info offset
+ uint lumpInfoOffset = lump.Offset;
+ if (lumpInfoOffset >= data.Length)
+ {
+ file.LumpInfos[i] = null;
+ continue;
+ }
+
+ // Seek to the lump info offset
+ data.Seek(lumpInfoOffset, SeekOrigin.Begin);
+
+ // Try to parse the lump info -- TODO: Do we ever set the mipmap level?
+ var lumpInfo = ParseLumpInfo(data, lump.Type);
+ file.LumpInfos[i] = lumpInfo;
+ }
+
+ #endregion
+
+ return file;
+ }
+
+ ///
+ /// Parse a Stream into a Half-Life Texture Package header
+ ///
+ /// Stream to parse
+ /// Filled Half-Life Texture Package header on success, null on error
+ private static Header ParseHeader(Stream data)
+ {
+ // TODO: Use marshalling here instead of building
+ Header header = new Header();
+
+ byte[] signature = data.ReadBytes(4);
+ header.Signature = Encoding.ASCII.GetString(signature);
+ if (header.Signature != "WAD3")
+ return null;
+
+ header.LumpCount = data.ReadUInt32();
+ header.LumpOffset = data.ReadUInt32();
+
+ return header;
+ }
+
+ ///
+ /// Parse a Stream into a Half-Life Texture Package lump
+ ///
+ /// Stream to parse
+ /// Filled Half-Life Texture Package lump on success, null on error
+ private static Lump ParseLump(Stream data)
+ {
+ // TODO: Use marshalling here instead of building
+ Lump lump = new Lump();
+
+ lump.Offset = data.ReadUInt32();
+ lump.DiskLength = data.ReadUInt32();
+ lump.Length = data.ReadUInt32();
+ lump.Type = data.ReadByteValue();
+ lump.Compression = data.ReadByteValue();
+ lump.Padding0 = data.ReadByteValue();
+ lump.Padding1 = data.ReadByteValue();
+ byte[] name = data.ReadBytes(16);
+ lump.Name = Encoding.ASCII.GetString(name);
+
+ return lump;
+ }
+
+ ///
+ /// Parse a Stream into a Half-Life Texture Package lump info
+ ///
+ /// Stream to parse
+ /// Lump type
+ /// Mipmap level
+ /// Filled Half-Life Texture Package lump info on success, null on error
+ private static LumpInfo ParseLumpInfo(Stream data, byte type, uint mipmap = 0)
+ {
+ // TODO: Use marshalling here instead of building
+ LumpInfo lumpInfo = new LumpInfo();
+
+ // Cache the initial offset
+ long initialOffset = data.Position;
+
+ // Type 0x42 has no name, type 0x43 does. Are these flags?
+ if (type == 0x42)
+ {
+ if (mipmap > 0)
+ return null;
+
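+ // Type 0x42 appears to be a simple qpic-style image: dimensions, raw 8bpp pixels, then a palette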
+ lumpInfo.Width = data.ReadUInt32();
+ lumpInfo.Height = data.ReadUInt32();
+ lumpInfo.PixelData = data.ReadBytes((int)(lumpInfo.Width * lumpInfo.Height));
+ lumpInfo.PaletteSize = data.ReadUInt16();
+ }
+ else if (type == 0x43)
+ {
+ if (mipmap > 3)
+ return null;
+
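+ // Type 0x43 appears to be a miptex-style texture: a 16-byte name, dimensions, then an offset to the pixel data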
+ byte[] name = data.ReadBytes(16);
+ lumpInfo.Name = Encoding.ASCII.GetString(name);
+ lumpInfo.Width = data.ReadUInt32();
+ lumpInfo.Height = data.ReadUInt32();
+ lumpInfo.PixelOffset = data.ReadUInt32();
+ _ = data.ReadBytes(12); // Unknown data
+
+ // Cache the current offset
+ long currentOffset = data.Position;
+
+ // Seek to the pixel data
+ data.Seek(initialOffset + lumpInfo.PixelOffset, SeekOrigin.Begin);
+
+ // Read the pixel data
+ lumpInfo.PixelData = data.ReadBytes((int)(lumpInfo.Width * lumpInfo.Height));
+
+ // Seek back to the offset
+ data.Seek(currentOffset, SeekOrigin.Begin);
+
+ uint pixelSize = lumpInfo.Width * lumpInfo.Height;
+
+ // Mipmap data -- TODO: How do we determine this during initial parsing?
+ switch (mipmap)
+ {
+ case 0: break; // No mipmap data to skip at the base level
+ case 1: _ = data.ReadBytes((int)pixelSize); break;
+ case 2: _ = data.ReadBytes((int)(pixelSize + (pixelSize / 4))); break;
+ case 3: _ = data.ReadBytes((int)(pixelSize + (pixelSize / 4) + (pixelSize / 16))); break;
+ default: return null;
+ }
+
+ _ = data.ReadBytes((int)(pixelSize + (pixelSize / 4) + (pixelSize / 16) + (pixelSize / 64))); // Pixel data
+ lumpInfo.PaletteSize = data.ReadUInt16();
+ lumpInfo.PaletteData = data.ReadBytes((int)lumpInfo.PaletteSize * 3);
+ }
+ else
+ {
+ return null;
+ }
+
+ // Adjust based on mipmap level; level 0 needs no adjustment
+ switch (mipmap)
+ {
+ case 0:
+ break;
+
+ case 1:
+ lumpInfo.Width /= 2;
+ lumpInfo.Height /= 2;
+ break;
+
+ case 2:
+ lumpInfo.Width /= 4;
+ lumpInfo.Height /= 4;
+ break;
+
+ case 3:
+ lumpInfo.Width /= 8;
+ lumpInfo.Height /= 8;
+ break;
+
+ default:
+ return null;
+ }
+
+ return lumpInfo;
+ }
+
+ #endregion
+ }
+}
diff --git a/BurnOutSharp.Builders/XZP.cs b/BurnOutSharp.Builders/XZP.cs
new file mode 100644
index 00000000..feaeb1f6
--- /dev/null
+++ b/BurnOutSharp.Builders/XZP.cs
@@ -0,0 +1,261 @@
+using System.IO;
+using System.Text;
+using BurnOutSharp.Models.XZP;
+using BurnOutSharp.Utilities;
+
+namespace BurnOutSharp.Builders
+{
+ public static class XZP
+ {
+ #region Byte Data
+
+ ///
+ /// Parse a byte array into an XBox Package File
+ ///
+ /// Byte array to parse
+ /// Offset into the byte array
+ /// Filled XBox Package File on success, null on error
+ public static Models.XZP.File ParseFile(byte[] data, int offset)
+ {
+ // If the data is invalid
+ if (data == null)
+ return null;
+
+ // If the offset is out of bounds
+ if (offset < 0 || offset >= data.Length)
+ return null;
+
+ // Create a memory stream and parse that
+ MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
+ return ParseFile(dataStream);
+ }
+
+ #endregion
+
+ #region Stream Data
+
+ ///
+ /// Parse a Stream into an XBox Package File
+ ///
+ /// Stream to parse
+ /// Filled XBox Package File on success, null on error
+ public static Models.XZP.File ParseFile(Stream data)
+ {
+ // If the data is invalid
+ if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
+ return null;
+
+ // If the offset is out of bounds
+ if (data.Position < 0 || data.Position >= data.Length)
+ return null;
+
+ // Cache the current offset
+ long initialOffset = data.Position;
+
+ // Create a new XBox Package File to fill
+ var file = new Models.XZP.File();
+
+ #region Header
+
+ // Try to parse the header
+ var header = ParseHeader(data);
+ if (header == null)
+ return null;
+
+ // Set the package header
+ file.Header = header;
+
+ #endregion
+
+ #region Directory Entries
+
+ // Create the directory entry array
+ file.DirectoryEntries = new DirectoryEntry[header.DirectoryEntryCount];
+
+ // Try to parse the directory entries
+ for (int i = 0; i < header.DirectoryEntryCount; i++)
+ {
+ var directoryEntry = ParseDirectoryEntry(data);
+ file.DirectoryEntries[i] = directoryEntry;
+ }
+
+ #endregion
+
+ #region Preload Directory Entries
+
+ if (header.PreloadBytes > 0)
+ {
+ // Create the preload directory entry array
+ file.PreloadDirectoryEntries = new DirectoryEntry[header.PreloadDirectoryEntryCount];
+
+ // Try to parse the preload directory entries
+ for (int i = 0; i < header.PreloadDirectoryEntryCount; i++)
+ {
+ var directoryEntry = ParseDirectoryEntry(data);
+ file.PreloadDirectoryEntries[i] = directoryEntry;
+ }
+ }
+
+ #endregion
+
+ #region Preload Directory Mappings
+
+ if (header.PreloadBytes > 0)
+ {
+ // Create the preload directory mapping array
+ file.PreloadDirectoryMappings = new DirectoryMapping[header.PreloadDirectoryEntryCount];
+
+ // Try to parse the preload directory mappings
+ for (int i = 0; i < header.PreloadDirectoryEntryCount; i++)
+ {
+ var directoryMapping = ParseDirectoryMapping(data);
+ file.PreloadDirectoryMappings[i] = directoryMapping;
+ }
+ }
+
+ #endregion
+
+ #region Directory Items
+
+ if (header.DirectoryItemCount > 0)
+ {
+ // Get the directory item offset
+ uint directoryItemOffset = header.DirectoryItemOffset;
+ if (directoryItemOffset >= data.Length)
+ return null;
+
+ // Seek to the directory items
+ data.Seek(directoryItemOffset, SeekOrigin.Begin);
+
+ // Create the directory item array
+ file.DirectoryItems = new DirectoryItem[header.DirectoryItemCount];
+
+ // Try to parse the directory items
+ for (int i = 0; i < header.DirectoryItemCount; i++)
+ {
+ var directoryItem = ParseDirectoryItem(data);
+ file.DirectoryItems[i] = directoryItem;
+ }
+ }
+
+ #endregion
+
+ #region Footer
+
+ // Seek to the footer
+ data.Seek(-8, SeekOrigin.End);
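+ // The footer is the final 8 bytes: a 4-byte file length followed by the 4-byte "tFzX" signature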
+
+ // Try to parse the footer
+ var footer = ParseFooter(data);
+ if (footer == null)
+ return null;
+
+ // Set the package footer
+ file.Footer = footer;
+
+ #endregion
+
+ return file;
+ }
+
+ ///
+ /// Parse a Stream into an XBox Package File header
+ ///
+ /// Stream to parse
+ /// Filled XBox Package File header on success, null on error
+ private static Header ParseHeader(Stream data)
+ {
+ // TODO: Use marshalling here instead of building
+ Header header = new Header();
+
+ byte[] signature = data.ReadBytes(4);
+ header.Signature = Encoding.ASCII.GetString(signature);
+ if (header.Signature != "piZx")
+ return null;
+
+ header.Version = data.ReadUInt32();
+ if (header.Version != 6)
+ return null;
+
+ header.PreloadDirectoryEntryCount = data.ReadUInt32();
+ header.DirectoryEntryCount = data.ReadUInt32();
+ header.PreloadBytes = data.ReadUInt32();
+ header.HeaderLength = data.ReadUInt32();
+ header.DirectoryItemCount = data.ReadUInt32();
+ header.DirectoryItemOffset = data.ReadUInt32();
+ header.DirectoryItemLength = data.ReadUInt32();
+
+ return header;
+ }
+
+ ///
+ /// Parse a Stream into an XBox Package File directory entry
+ ///
+ /// Stream to parse
+ /// Filled XBox Package File directory entry on success, null on error
+ private static DirectoryEntry ParseDirectoryEntry(Stream data)
+ {
+ // TODO: Use marshalling here instead of building
+ DirectoryEntry directoryEntry = new DirectoryEntry();
+
+ directoryEntry.FileNameCRC = data.ReadUInt32();
+ directoryEntry.EntryLength = data.ReadUInt32();
+ directoryEntry.EntryOffset = data.ReadUInt32();
+
+ return directoryEntry;
+ }
+
+ ///
+ /// Parse a Stream into an XBox Package File directory mapping
+ ///
+ /// Stream to parse
+ /// Filled XBox Package File directory mapping on success, null on error
+ private static DirectoryMapping ParseDirectoryMapping(Stream data)
+ {
+ // TODO: Use marshalling here instead of building
+ DirectoryMapping directoryMapping = new DirectoryMapping();
+
+ directoryMapping.PreloadDirectoryEntryIndex = data.ReadUInt16();
+
+ return directoryMapping;
+ }
+
+ ///
+ /// Parse a Stream into an XBox Package File directory item
+ ///
+ /// Stream to parse
+ /// Filled XBox Package File directory item on success, null on error
+ private static DirectoryItem ParseDirectoryItem(Stream data)
+ {
+ // TODO: Use marshalling here instead of building
+ DirectoryItem directoryItem = new DirectoryItem();
+
+ directoryItem.FileNameCRC = data.ReadUInt32();
+ directoryItem.NameOffset = data.ReadUInt32();
+ directoryItem.TimeCreated = data.ReadUInt32();
+
+ return directoryItem;
+ }
+
+ ///
+ /// Parse a Stream into an XBox Package File footer
+ ///
+ /// Stream to parse
+ /// Filled XBox Package File footer on success, null on error
+ private static Footer ParseFooter(Stream data)
+ {
+ // TODO: Use marshalling here instead of building
+ Footer footer = new Footer();
+
+ footer.FileLength = data.ReadUInt32();
+ byte[] signature = data.ReadBytes(4);
+ footer.Signature = Encoding.ASCII.GetString(signature);
+ if (footer.Signature != "tFzX")
+ return null;
+
+ return footer;
+ }
+
+ #endregion
+ }
+}
diff --git a/BurnOutSharp.Models/BMP/BITMAPFILEHEADER.cs b/BurnOutSharp.Models/BMP/BITMAPFILEHEADER.cs
new file mode 100644
index 00000000..1b4ee91d
--- /dev/null
+++ b/BurnOutSharp.Models/BMP/BITMAPFILEHEADER.cs
@@ -0,0 +1,35 @@
+namespace BurnOutSharp.Models.BMP
+{
+ ///
+ /// The BITMAPFILEHEADER structure contains information about the type, size,
+ /// and layout of a file that contains a DIB.
+ ///
+ ///
+ public sealed class BITMAPFILEHEADER
+ {
+ ///
+ /// The file type; must be BM.
+ ///
+ public ushort Type;
+
+ ///
+ /// The size, in bytes, of the bitmap file.
+ ///
+ public uint Size;
+
+ ///
+ /// Reserved; must be zero.
+ ///
+ public ushort Reserved1;
+
+ ///
+ /// Reserved; must be zero.
+ ///
+ public ushort Reserved2;
+
+ ///
+ /// The offset, in bytes, from the beginning of the BITMAPFILEHEADER structure to the bitmap bits.
+ ///
+ public uint OffBits;
+ }
+}
diff --git a/BurnOutSharp.Models/BMP/BITMAPINFOHEADER.cs b/BurnOutSharp.Models/BMP/BITMAPINFOHEADER.cs
new file mode 100644
index 00000000..083c645c
--- /dev/null
+++ b/BurnOutSharp.Models/BMP/BITMAPINFOHEADER.cs
@@ -0,0 +1,94 @@
+namespace BurnOutSharp.Models.BMP
+{
+ ///
+ /// The BITMAPINFOHEADER structure contains information about the dimensions and
+ /// color format of a device-independent bitmap (DIB).
+ ///
+ public sealed class BITMAPINFOHEADER
+ {
+ ///
+ /// Specifies the number of bytes required by the structure. This value does
+ /// not include the size of the color table or the size of the color masks,
+ /// if they are appended to the end of structure.
+ ///
+ public uint Size;
+
+ ///
+ /// Specifies the width of the bitmap, in pixels.
+ ///
+ public int Width;
+
+ ///
+ /// Specifies the height of the bitmap, in pixels.
+ /// - For uncompressed RGB bitmaps, if biHeight is positive, the bitmap is a
+ /// bottom-up DIB with the origin at the lower left corner. If biHeight is
+ /// negative, the bitmap is a top-down DIB with the origin at the upper left
+ /// corner.
+ /// - For YUV bitmaps, the bitmap is always top-down, regardless of the sign of
+ /// biHeight. Decoders should offer YUV formats with positive biHeight, but for
+ /// backward compatibility they should accept YUV formats with either positive
+ /// or negative biHeight.
+ /// - For compressed formats, biHeight must be positive, regardless of image orientation.
+ ///
+ public int Height;
+
+ ///
+ /// Specifies the number of planes for the target device. This value must be set to 1.
+ ///
+ public ushort Planes;
+
+ ///
+ /// Specifies the number of bits per pixel (bpp). For uncompressed formats, this value
+ /// is the average number of bits per pixel. For compressed formats, this value is the
+ /// implied bit depth of the uncompressed image, after the image has been decoded.
+ ///
+ public ushort BitCount;
+
+ ///
+ /// For compressed video and YUV formats, this member is a FOURCC code, specified as a
+ /// DWORD in little-endian order. For example, YUYV video has the FOURCC 'VYUY' or
+ /// 0x56595559. For more information, see FOURCC Codes.
+ ///
+ /// For uncompressed RGB formats, the following values are possible:
+ /// - BI_RGB: Uncompressed RGB.
+ /// - BI_BITFIELDS: Uncompressed RGB with color masks. Valid for 16-bpp and 32-bpp bitmaps.
+ ///
+ /// Note that BI_JPG and BI_PNG are not valid video formats.
+ ///
+ /// For 16-bpp bitmaps, if biCompression equals BI_RGB, the format is always RGB 555.
+ /// If biCompression equals BI_BITFIELDS, the format is either RGB 555 or RGB 565. Use
+ /// the subtype GUID in the AM_MEDIA_TYPE structure to determine the specific RGB type.
+ ///
+ public uint Compression;
+
+ ///
+ /// Specifies the size, in bytes, of the image. This can be set to 0 for uncompressed
+ /// RGB bitmaps.
+ ///
+ public uint SizeImage;
+
+ ///
+ /// Specifies the horizontal resolution, in pixels per meter, of the target device for
+ /// the bitmap.
+ ///
+ public int XPelsPerMeter;
+
+ ///
+ /// Specifies the vertical resolution, in pixels per meter, of the target device for
+ /// the bitmap.
+ ///
+ public int YPelsPerMeter;
+
+ ///
+ /// Specifies the number of color indices in the color table that are actually used by
+ /// the bitmap.
+ ///
+ public uint ClrUsed;
+
+ ///
+ /// Specifies the number of color indices that are considered important for displaying
+ /// the bitmap. If this value is zero, all colors are important.
+ ///
+ public uint ClrImportant;
+ }
+}
diff --git a/BurnOutSharp.Models/BSP/File.cs b/BurnOutSharp.Models/BSP/File.cs
new file mode 100644
index 00000000..337fa2cd
--- /dev/null
+++ b/BurnOutSharp.Models/BSP/File.cs
@@ -0,0 +1,24 @@
+namespace BurnOutSharp.Models.BSP
+{
+ ///
+ /// Half-Life Level
+ ///
+ ///
+ public sealed class File
+ {
+ ///
+ /// Header data
+ ///
+ public Header Header { get; set; }
+
+ ///
+ /// Lumps
+ ///
+ public Lump[] Lumps { get; set; }
+
+ ///
+ /// Texture header data
+ ///
+ public TextureHeader TextureHeader { get; set; }
+ }
+}
\ No newline at end of file
diff --git a/BurnOutSharp.Models/BSP/Header.cs b/BurnOutSharp.Models/BSP/Header.cs
new file mode 100644
index 00000000..41c3715b
--- /dev/null
+++ b/BurnOutSharp.Models/BSP/Header.cs
@@ -0,0 +1,11 @@
+namespace BurnOutSharp.Models.BSP
+{
+ ///
+ public sealed class Header
+ {
+ ///
+ /// Version
+ ///
+ public uint Version;
+ }
+}
\ No newline at end of file
diff --git a/BurnOutSharp.Models/BSP/Lump.cs b/BurnOutSharp.Models/BSP/Lump.cs
new file mode 100644
index 00000000..987080f3
--- /dev/null
+++ b/BurnOutSharp.Models/BSP/Lump.cs
@@ -0,0 +1,16 @@
+namespace BurnOutSharp.Models.BSP
+{
+ ///
+ public sealed class Lump
+ {
+ ///
+ /// Offset
+ ///
+ public uint Offset;
+
+ ///
+ /// Length
+ ///
+ public uint Length;
+ }
+}
\ No newline at end of file
diff --git a/BurnOutSharp.Models/BSP/Texture.cs b/BurnOutSharp.Models/BSP/Texture.cs
new file mode 100644
index 00000000..974a364a
--- /dev/null
+++ b/BurnOutSharp.Models/BSP/Texture.cs
@@ -0,0 +1,26 @@
+namespace BurnOutSharp.Models.BSP
+{
+ ///
+ public sealed class Texture
+ {
+ ///
+ /// Name
+ ///
+ public string Name;
+
+ ///
+ /// Width
+ ///
+ public uint Width;
+
+ ///
+ /// Height
+ ///
+ public uint Height;
+
+ ///
+ /// Offsets
+ ///
+ public uint[] Offsets;
+ }
+}
\ No newline at end of file
diff --git a/BurnOutSharp.Models/BSP/TextureHeader.cs b/BurnOutSharp.Models/BSP/TextureHeader.cs
new file mode 100644
index 00000000..c9a2e96e
--- /dev/null
+++ b/BurnOutSharp.Models/BSP/TextureHeader.cs
@@ -0,0 +1,16 @@
+namespace BurnOutSharp.Models.BSP
+{
+ ///
+ public sealed class TextureHeader
+ {
+ ///
+ /// Texture count
+ ///
+ public uint TextureCount;
+
+ ///
+ /// Offsets
+ ///
+ public uint[] Offsets;
+ }
+}
\ No newline at end of file
diff --git a/BurnOutSharp.Models/GCF/BlockEntry.cs b/BurnOutSharp.Models/GCF/BlockEntry.cs
new file mode 100644
index 00000000..e407b4f0
--- /dev/null
+++ b/BurnOutSharp.Models/GCF/BlockEntry.cs
@@ -0,0 +1,41 @@
+namespace BurnOutSharp.Models.GCF
+{
+ ///
+ public sealed class BlockEntry
+ {
+ ///
+ /// Flags for the block entry. 0x200F0000 == Not used.
+ ///
+ public uint EntryFlags;
+
+ ///
+ /// The offset for the data contained in this block entry in the file.
+ ///
+ public uint FileDataOffset;
+
+ ///
+ /// The length of the data in this block entry.
+ ///
+ public uint FileDataSize;
+
+ ///
+ /// The offset to the first data block of this block entry's data.
+ ///
+ public uint FirstDataBlockIndex;
+
+ ///
+ /// The next block entry in the series. (N/A if == BlockCount.)
+ ///
+ public uint NextBlockEntryIndex;
+
+ ///
+ /// The previous block entry in the series. (N/A if == BlockCount.)
+ ///
+ public uint PreviousBlockEntryIndex;
+
+ ///
+ /// The offset of the block entry in the directory.
+ ///
+ public uint DirectoryIndex;
+ }
+}
\ No newline at end of file
diff --git a/BurnOutSharp.Models/GCF/BlockEntryHeader.cs b/BurnOutSharp.Models/GCF/BlockEntryHeader.cs
new file mode 100644
index 00000000..56068498
--- /dev/null
+++ b/BurnOutSharp.Models/GCF/BlockEntryHeader.cs
@@ -0,0 +1,46 @@
+namespace BurnOutSharp.Models.GCF
+{
+ ///
+ public sealed class BlockEntryHeader
+ {
+ ///
+ /// Number of data blocks.
+ ///
+ public uint BlockCount;
+
+ ///
+ /// Number of data blocks that point to data.
+ ///
+ public uint BlocksUsed;
+
+ ///
+ /// Reserved
+ ///
+ public uint Dummy0;
+
+ ///
+ /// Reserved
+ ///
+ public uint Dummy1;
+
+ ///
+ /// Reserved
+ ///
+ public uint Dummy2;
+
+ ///
+ /// Reserved
+ ///
+ public uint Dummy3;
+
+ ///
+ /// Reserved
+ ///
+ public uint Dummy4;
+
+ ///
+ /// Header checksum.
+ ///
+ public uint Checksum;
+ }
+}
\ No newline at end of file
diff --git a/BurnOutSharp.Models/GCF/BlockEntryMap.cs b/BurnOutSharp.Models/GCF/BlockEntryMap.cs
new file mode 100644
index 00000000..ba406454
--- /dev/null
+++ b/BurnOutSharp.Models/GCF/BlockEntryMap.cs
@@ -0,0 +1,19 @@
+namespace BurnOutSharp.Models.GCF
+{
+ ///
+ /// Part of version 5 but not version 6.
+ ///
+ ///
+ public sealed class BlockEntryMap
+ {
+ ///
+ /// The previous block entry. (N/A if == BlockCount.)
+ ///
+ public uint PreviousBlockEntryIndex;
+
+ ///
+ /// The next block entry. (N/A if == BlockCount.)
+ ///
+ public uint NextBlockEntryIndex;
+ }
+}
\ No newline at end of file
diff --git a/BurnOutSharp.Models/GCF/BlockEntryMapHeader.cs b/BurnOutSharp.Models/GCF/BlockEntryMapHeader.cs
new file mode 100644
index 00000000..355b9174
--- /dev/null
+++ b/BurnOutSharp.Models/GCF/BlockEntryMapHeader.cs
@@ -0,0 +1,34 @@
+namespace BurnOutSharp.Models.GCF
+{
+ ///
+ /// Part of version 5 but not version 6.
+ ///
+ ///
+ public sealed class BlockEntryMapHeader
+ {
+ ///
+ /// Number of data blocks.
+ ///
+ public uint BlockCount;
+
+ ///
+ /// Index of the first block entry.
+ ///
+ public uint FirstBlockEntryIndex;
+
+ ///
+ /// Index of the last block entry.
+ ///
+ public uint LastBlockEntryIndex;
+
+ ///
+ /// Reserved
+ ///
+ public uint Dummy0;
+
+ ///
+ /// Header checksum.
+ ///
+ public uint Checksum;
+ }
+}
\ No newline at end of file
diff --git a/BurnOutSharp.Models/GCF/ChecksumEntry.cs b/BurnOutSharp.Models/GCF/ChecksumEntry.cs
new file mode 100644
index 00000000..81681730
--- /dev/null
+++ b/BurnOutSharp.Models/GCF/ChecksumEntry.cs
@@ -0,0 +1,11 @@
+namespace BurnOutSharp.Models.GCF
+{
+ ///
+ public sealed class ChecksumEntry
+ {
+ ///
+ /// Checksum.
+ ///
+ public uint Checksum;
+ }
+}
\ No newline at end of file
diff --git a/BurnOutSharp.Models/GCF/ChecksumHeader.cs b/BurnOutSharp.Models/GCF/ChecksumHeader.cs
new file mode 100644
index 00000000..d4c7a442
--- /dev/null
+++ b/BurnOutSharp.Models/GCF/ChecksumHeader.cs
@@ -0,0 +1,16 @@
+namespace BurnOutSharp.Models.GCF
+{
+ ///
+ public sealed class ChecksumHeader
+ {
+ ///
+ /// Always 0x00000001
+ ///
+ public uint Dummy0;
+
+ ///
+ /// Size of LPGCFCHECKSUMHEADER & LPGCFCHECKSUMMAPHEADER in bytes.
+ ///
+ public uint ChecksumSize;
+ }
+}
\ No newline at end of file
diff --git a/BurnOutSharp.Models/GCF/ChecksumMapEntry.cs b/BurnOutSharp.Models/GCF/ChecksumMapEntry.cs
new file mode 100644
index 00000000..3d1e9c67
--- /dev/null
+++ b/BurnOutSharp.Models/GCF/ChecksumMapEntry.cs
@@ -0,0 +1,16 @@
+namespace BurnOutSharp.Models.GCF
+{
+ ///
+ public sealed class ChecksumMapEntry
+ {
+ ///
+ /// Number of checksums.
+ ///
+ public uint ChecksumCount;
+
+ ///
+ /// Index of first checksum.
+ ///
+ public uint FirstChecksumIndex;
+ }
+}
\ No newline at end of file
diff --git a/BurnOutSharp.Models/GCF/ChecksumMapHeader.cs b/BurnOutSharp.Models/GCF/ChecksumMapHeader.cs
new file mode 100644
index 00000000..80d6bcf8
--- /dev/null
+++ b/BurnOutSharp.Models/GCF/ChecksumMapHeader.cs
@@ -0,0 +1,26 @@
+namespace BurnOutSharp.Models.GCF
+{
+ ///
+ public sealed class ChecksumMapHeader
+ {
+ ///
+ /// Always 0x14893721
+ ///
+ public uint Dummy0;
+
+ ///
+ /// Always 0x00000001
+ ///
+ public uint Dummy1;
+
+ ///
+ /// Number of items.
+ ///
+ public uint ItemCount;
+
+ ///
+ /// Number of checksums.
+ ///
+ public uint ChecksumCount;
+ }
+}
\ No newline at end of file
diff --git a/BurnOutSharp.Models/GCF/DataBlockHeader.cs b/BurnOutSharp.Models/GCF/DataBlockHeader.cs
new file mode 100644
index 00000000..20589f9f
--- /dev/null
+++ b/BurnOutSharp.Models/GCF/DataBlockHeader.cs
@@ -0,0 +1,36 @@
+namespace BurnOutSharp.Models.GCF
+{
+ ///
+ public sealed class DataBlockHeader
+ {
+ ///
+ /// GCF file version. This field is not part of all file versions.
+ ///
+ public uint LastVersionPlayed;
+
+ ///
+ /// Number of data blocks.
+ ///
+ public uint BlockCount;
+
+ ///
+ /// Size of each data block in bytes.
+ ///
+ public uint BlockSize;
+
+ ///
+ /// Offset to first data block.
+ ///
+ public uint FirstBlockOffset;
+
+ ///
+ /// Number of data blocks that contain data.
+ ///
+ public uint BlocksUsed;
+
+ ///
+ /// Header checksum.
+ ///
+ public uint Checksum;
+ }
+}
\ No newline at end of file
diff --git a/BurnOutSharp.Models/GCF/DirectoryCopyEntry.cs b/BurnOutSharp.Models/GCF/DirectoryCopyEntry.cs
new file mode 100644
index 00000000..6ebc99e5
--- /dev/null
+++ b/BurnOutSharp.Models/GCF/DirectoryCopyEntry.cs
@@ -0,0 +1,11 @@
+namespace BurnOutSharp.Models.GCF
+{
+ ///
+ public sealed class DirectoryCopyEntry
+ {
+ ///
+ /// Index of the directory item.
+ ///
+ public uint DirectoryIndex;
+ }
+}
\ No newline at end of file
diff --git a/BurnOutSharp.Models/GCF/DirectoryEntry.cs b/BurnOutSharp.Models/GCF/DirectoryEntry.cs
new file mode 100644
index 00000000..9106ac18
--- /dev/null
+++ b/BurnOutSharp.Models/GCF/DirectoryEntry.cs
@@ -0,0 +1,41 @@
+namespace BurnOutSharp.Models.GCF
+{
+ ///
+ public sealed class DirectoryEntry
+ {
+ ///
+ /// Offset to the directory item name from the end of the directory items.
+ ///
+ public uint NameOffset;
+
+ ///
+ /// Size of the item. (If file, file size. If folder, num items.)
+ ///
+ public uint ItemSize;
+
+ ///
+ /// Checksum index. (0xFFFFFFFF == None).
+ ///
+ public uint ChecksumIndex;
+
+ ///
+ /// Flags for the directory item. (0x00000000 == Folder).
+ ///
+ public uint DirectoryFlags;
+
+ ///
+ /// Index of the parent directory item. (0xFFFFFFFF == None).
+ ///
+ public uint ParentIndex;
+
+ ///
+ /// Index of the next directory item. (0x00000000 == None).
+ ///
+ public uint NextIndex;
+
+ ///
+ /// Index of the first directory item. (0x00000000 == None).
+ ///
+ public uint FirstIndex;
+ }
+}
\ No newline at end of file
diff --git a/BurnOutSharp.Models/GCF/DirectoryHeader.cs b/BurnOutSharp.Models/GCF/DirectoryHeader.cs
new file mode 100644
index 00000000..52cc05a7
--- /dev/null
+++ b/BurnOutSharp.Models/GCF/DirectoryHeader.cs
@@ -0,0 +1,76 @@
+namespace BurnOutSharp.Models.GCF
+{
+ ///
+ public sealed class DirectoryHeader
+ {
+ ///
+ /// Always 0x00000004
+ ///
+ public uint Dummy0;
+
+ ///
+ /// Cache ID.
+ ///
+ public uint CacheID;
+
+ ///
+ /// GCF file version.
+ ///
+ public uint LastVersionPlayed;
+
+ ///
+ /// Number of items in the directory.
+ ///
+ public uint ItemCount;
+
+ ///
+ /// Number of files in the directory.
+ ///
+ public uint FileCount;
+
+ ///
+ /// Always 0x00008000. Data per checksum?
+ ///
+ public uint Dummy1;
+
+ ///
+ /// Size of lpGCFDirectoryEntries & lpGCFDirectoryNames & lpGCFDirectoryInfo1Entries & lpGCFDirectoryInfo2Entries & lpGCFDirectoryCopyEntries & lpGCFDirectoryLocalEntries in bytes.
+ ///
+ public uint DirectorySize;
+
+ ///
+ /// Size of the directory names in bytes.
+ ///
+ public uint NameSize;
+
+ ///
+ /// Number of Info1 entries.
+ ///
+ public uint Info1Count;
+
+ ///
+ /// Number of files to copy.
+ ///
+ public uint CopyCount;
+
+ ///
+ /// Number of files to keep local.
+ ///
+ public uint LocalCount;
+
+ ///
+ /// Reserved
+ ///
+ public uint Dummy2;
+
+ ///
+ /// Reserved
+ ///
+ public uint Dummy3;
+
+ ///
+ /// Header checksum.
+ ///
+ public uint Checksum;
+ }
+}
\ No newline at end of file
diff --git a/BurnOutSharp.Models/GCF/DirectoryInfo1Entry.cs b/BurnOutSharp.Models/GCF/DirectoryInfo1Entry.cs
new file mode 100644
index 00000000..a92535ef
--- /dev/null
+++ b/BurnOutSharp.Models/GCF/DirectoryInfo1Entry.cs
@@ -0,0 +1,11 @@
+namespace BurnOutSharp.Models.GCF
+{
+ ///
+ public sealed class DirectoryInfo1Entry
+ {
+ ///
+ /// Reserved
+ ///
+ public uint Dummy0;
+ }
+}
\ No newline at end of file
diff --git a/BurnOutSharp.Models/GCF/DirectoryInfo2Entry.cs b/BurnOutSharp.Models/GCF/DirectoryInfo2Entry.cs
new file mode 100644
index 00000000..fa8b2a70
--- /dev/null
+++ b/BurnOutSharp.Models/GCF/DirectoryInfo2Entry.cs
@@ -0,0 +1,11 @@
+namespace BurnOutSharp.Models.GCF
+{
+ ///
+ public sealed class DirectoryInfo2Entry
+ {
+ ///
+ /// Reserved
+ ///
+ public uint Dummy0;
+ }
+}
\ No newline at end of file
diff --git a/BurnOutSharp.Models/GCF/DirectoryLocalEntry.cs b/BurnOutSharp.Models/GCF/DirectoryLocalEntry.cs
new file mode 100644
index 00000000..2349a5b9
--- /dev/null
+++ b/BurnOutSharp.Models/GCF/DirectoryLocalEntry.cs
@@ -0,0 +1,11 @@
+namespace BurnOutSharp.Models.GCF
+{
+ ///
+ public sealed class DirectoryLocalEntry
+ {
+ ///
+ /// Index of the directory item.
+ ///
+ public uint DirectoryIndex;
+ }
+}
\ No newline at end of file
diff --git a/BurnOutSharp.Models/GCF/DirectoryMapEntry.cs b/BurnOutSharp.Models/GCF/DirectoryMapEntry.cs
new file mode 100644
index 00000000..74e59e9e
--- /dev/null
+++ b/BurnOutSharp.Models/GCF/DirectoryMapEntry.cs
@@ -0,0 +1,11 @@
+namespace BurnOutSharp.Models.GCF
+{
+ ///
+ public sealed class DirectoryMapEntry
+ {
+ ///
+ /// Index of the first data block. (N/A if == BlockCount.)
+ ///
+ public uint FirstBlockIndex;
+ }
+}
\ No newline at end of file
diff --git a/BurnOutSharp.Models/GCF/DirectoryMapHeader.cs b/BurnOutSharp.Models/GCF/DirectoryMapHeader.cs
new file mode 100644
index 00000000..a88ea6cf
--- /dev/null
+++ b/BurnOutSharp.Models/GCF/DirectoryMapHeader.cs
@@ -0,0 +1,19 @@
+namespace BurnOutSharp.Models.GCF
+{
+ ///
+ /// Added in version 4 or version 5.
+ ///
+ ///
+ public sealed class DirectoryMapHeader
+ {
+ ///
+ /// Always 0x00000001
+ ///
+ public uint Dummy0;
+
+ ///
+ /// Always 0x00000000
+ ///
+ public uint Dummy1;
+ }
+}
\ No newline at end of file
diff --git a/BurnOutSharp.Models/GCF/File.cs b/BurnOutSharp.Models/GCF/File.cs
new file mode 100644
index 00000000..e736dbf4
--- /dev/null
+++ b/BurnOutSharp.Models/GCF/File.cs
@@ -0,0 +1,116 @@
+namespace BurnOutSharp.Models.GCF
+{
+ ///
+ /// Half-Life Game Cache File
+ ///
+ ///
+ public sealed class File
+ {
+ ///
+ /// Header data
+ ///
+ public Header Header { get; set; }
+
+ ///
+ /// Block entry header data
+ ///
+ public BlockEntryHeader BlockEntryHeader { get; set; }
+
+ ///
+ /// Block entries data
+ ///
+ public BlockEntry[] BlockEntries { get; set; }
+
+ ///
+ /// Fragmentation map header data
+ ///
+ public FragmentationMapHeader FragmentationMapHeader { get; set; }
+
+ ///
+ /// Fragmentation map data
+ ///
+ public FragmentationMap[] FragmentationMaps { get; set; }
+
+ ///
+ /// Block entry map header data
+ ///
+ /// Part of version 5 but not version 6.
+ public BlockEntryMapHeader BlockEntryMapHeader { get; set; }
+
+ ///
+ /// Block entry map data
+ ///
+ /// Part of version 5 but not version 6.
+ public BlockEntryMap[] BlockEntryMaps { get; set; }
+
+ ///
+ /// Directory header data
+ ///
+ public DirectoryHeader DirectoryHeader { get; set; }
+
+ ///
+ /// Directory entries data
+ ///
+ public DirectoryEntry[] DirectoryEntries { get; set; }
+
+ ///
+ /// Directory names data
+ ///
+ public string DirectoryNames { get; set; }
+
+ ///
+ /// Directory info 1 entries data
+ ///
+ public DirectoryInfo1Entry[] DirectoryInfo1Entries { get; set; }
+
+ ///
+ /// Directory info 2 entries data
+ ///
+ public DirectoryInfo2Entry[] DirectoryInfo2Entries { get; set; }
+
+ ///
+ /// Directory copy entries data
+ ///
+ public DirectoryCopyEntry[] DirectoryCopyEntries { get; set; }
+
+ ///
+ /// Directory local entries data
+ ///
+ public DirectoryLocalEntry[] DirectoryLocalEntries { get; set; }
+
+ ///
+ /// Directory map header data
+ ///
+ public DirectoryMapHeader DirectoryMapHeader { get; set; }
+
+ ///
+ /// Directory map entries data
+ ///
+ public DirectoryMapEntry[] DirectoryMapEntries { get; set; }
+
+ ///
+ /// Checksum header data
+ ///
+ public ChecksumHeader ChecksumHeader { get; set; }
+
+ ///
+ /// Checksum map header data
+ ///
+ public ChecksumMapHeader ChecksumMapHeader { get; set; }
+
+ ///
+ /// Checksum map entries data
+ ///
+ public ChecksumMapEntry[] ChecksumMapEntries { get; set; }
+
+ ///
+ /// Checksum entries data
+ ///
+ public ChecksumEntry[] ChecksumEntries { get; set; }
+
+ ///
+ /// Data block header data
+ ///
+ public DataBlockHeader DataBlockHeader { get; set; }
+ }
+}
\ No newline at end of file
diff --git a/BurnOutSharp.Models/GCF/FragmentationMap.cs b/BurnOutSharp.Models/GCF/FragmentationMap.cs
new file mode 100644
index 00000000..4e2d7b24
--- /dev/null
+++ b/BurnOutSharp.Models/GCF/FragmentationMap.cs
@@ -0,0 +1,11 @@
+namespace BurnOutSharp.Models.GCF
+{
+ ///
+ public sealed class FragmentationMap
+ {
+ ///
+ /// The index of the next data block.
+ ///
+ public uint NextDataBlockIndex;
+ }
+}
\ No newline at end of file
diff --git a/BurnOutSharp.Models/GCF/FragmentationMapHeader.cs b/BurnOutSharp.Models/GCF/FragmentationMapHeader.cs
new file mode 100644
index 00000000..ac1752b4
--- /dev/null
+++ b/BurnOutSharp.Models/GCF/FragmentationMapHeader.cs
@@ -0,0 +1,26 @@
+namespace BurnOutSharp.Models.GCF
+{
+ ///
+ public sealed class FragmentationMapHeader
+ {
+ ///
+ /// Number of data blocks.
+ ///
+ public uint BlockCount;
+
+ ///
+ /// The index of the first unused fragmentation map entry.
+ ///
+ public uint FirstUnusedEntry;
+
+ ///
+ /// The block entry terminator; 0 = 0x0000ffff or 1 = 0xffffffff.
+ ///
+ public uint Terminator;
+
+ ///
+ /// Header checksum.
+ ///
+ public uint Checksum;
+ }
+}
\ No newline at end of file
diff --git a/BurnOutSharp.Models/GCF/Header.cs b/BurnOutSharp.Models/GCF/Header.cs
new file mode 100644
index 00000000..65fff9d6
--- /dev/null
+++ b/BurnOutSharp.Models/GCF/Header.cs
@@ -0,0 +1,61 @@
+namespace BurnOutSharp.Models.GCF
+{
+ ///
+ public sealed class Header
+ {
+ ///
+ /// Always 0x00000001
+ ///
+ public uint Dummy0;
+
+ ///
+ /// Always 0x00000001
+ ///
+ public uint MajorVersion;
+
+ ///
+ /// GCF version number.
+ ///
+ public uint MinorVersion;
+
+ ///
+ /// Cache ID
+ ///
+ public uint CacheID;
+
+ ///
+ /// Last version played
+ ///
+ public uint LastVersionPlayed;
+
+ ///
+ /// Reserved
+ ///
+ public uint Dummy1;
+
+ ///
+ /// Reserved
+ ///
+ public uint Dummy2;
+
+ ///
+ /// Total size of GCF file in bytes.
+ ///
+ public uint FileSize;
+
+ ///
+ /// Size of each data block in bytes.
+ ///
+ public uint BlockSize;
+
+ ///
+ /// Number of data blocks.
+ ///
+ public uint BlockCount;
+
+ ///
+ /// Reserved
+ ///
+ public uint Dummy3;
+ }
+}
\ No newline at end of file
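For illustration, a hedged sketch of reading these header fields in declaration order with a BinaryReader. It mirrors the field layout above but is not the library's own deserializer.

using System.IO;
using BurnOutSharp.Models.GCF;

public static class GCFHeaderReader
{
    public static Header Deserialize(Stream stream)
    {
        var reader = new BinaryReader(stream);
        var header = new Header
        {
            Dummy0 = reader.ReadUInt32(),       // Always 0x00000001
            MajorVersion = reader.ReadUInt32(), // Always 0x00000001
            MinorVersion = reader.ReadUInt32(),
            CacheID = reader.ReadUInt32(),
            LastVersionPlayed = reader.ReadUInt32(),
            Dummy1 = reader.ReadUInt32(),
            Dummy2 = reader.ReadUInt32(),
            FileSize = reader.ReadUInt32(),
            BlockSize = reader.ReadUInt32(),
            BlockCount = reader.ReadUInt32(),
            Dummy3 = reader.ReadUInt32(),
        };

        // Sanity checks based on the documented constants
        if (header.Dummy0 != 1 || header.MajorVersion != 1)
            return null;

        return header;
    }
}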
diff --git a/BurnOutSharp.Models/NCF/ChecksumEntry.cs b/BurnOutSharp.Models/NCF/ChecksumEntry.cs
new file mode 100644
index 00000000..68e23a70
--- /dev/null
+++ b/BurnOutSharp.Models/NCF/ChecksumEntry.cs
@@ -0,0 +1,11 @@
+namespace BurnOutSharp.Models.NCF
+{
+ ///
+ public sealed class ChecksumEntry
+ {
+ ///
+ /// Checksum.
+ ///
+ public uint Checksum;
+ }
+}
diff --git a/BurnOutSharp.Models/NCF/ChecksumHeader.cs b/BurnOutSharp.Models/NCF/ChecksumHeader.cs
new file mode 100644
index 00000000..b89507da
--- /dev/null
+++ b/BurnOutSharp.Models/NCF/ChecksumHeader.cs
@@ -0,0 +1,16 @@
+namespace BurnOutSharp.Models.NCF
+{
+ ///
+ public sealed class ChecksumHeader
+ {
+ ///
+ /// Always 0x00000001
+ ///
+ public uint Dummy0 { get; set; }
+
+ ///
+ /// Size of LPNCFCHECKSUMHEADER & LPNCFCHECKSUMMAPHEADER in bytes.
+ ///
+ public uint ChecksumSize { get; set; }
+ }
+}
diff --git a/BurnOutSharp.Models/NCF/ChecksumMapEntry.cs b/BurnOutSharp.Models/NCF/ChecksumMapEntry.cs
new file mode 100644
index 00000000..ead22d38
--- /dev/null
+++ b/BurnOutSharp.Models/NCF/ChecksumMapEntry.cs
@@ -0,0 +1,16 @@
+namespace BurnOutSharp.Models.NCF
+{
+ ///
+ public sealed class ChecksumMapEntry
+ {
+ ///
+ /// Number of checksums.
+ ///
+ public uint ChecksumCount;
+
+ ///
+ /// Index of first checksum.
+ ///
+ public uint FirstChecksumIndex;
+ }
+}
diff --git a/BurnOutSharp.Models/NCF/ChecksumMapHeader.cs b/BurnOutSharp.Models/NCF/ChecksumMapHeader.cs
new file mode 100644
index 00000000..8b34581c
--- /dev/null
+++ b/BurnOutSharp.Models/NCF/ChecksumMapHeader.cs
@@ -0,0 +1,26 @@
+namespace BurnOutSharp.Models.NCF
+{
+ ///
+ public sealed class ChecksumMapHeader
+ {
+ ///
+ /// Always 0x14893721
+ ///
+ public uint Dummy0;
+
+ ///
+ /// Always 0x00000001
+ ///
+ public uint Dummy1;
+
+ ///
+ /// Number of items.
+ ///
+ public uint ItemCount;
+
+ ///
+ /// Number of checksums.
+ ///
+ public uint ChecksumCount;
+ }
+}
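A small sketch of how one map entry resolves to its run of checksums, assuming the entry arrays were filled elsewhere; the helper is hypothetical.

using BurnOutSharp.Models.NCF;

public static class ChecksumLookup
{
    public static uint[] GetChecksums(ChecksumMapEntry mapEntry, ChecksumEntry[] checksumEntries)
    {
        var checksums = new uint[mapEntry.ChecksumCount];
        for (uint i = 0; i < mapEntry.ChecksumCount; i++)
        {
            // Each map entry points at a contiguous run of checksum entries
            checksums[i] = checksumEntries[mapEntry.FirstChecksumIndex + i].Checksum;
        }
        return checksums;
    }
}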
diff --git a/BurnOutSharp.Models/NCF/DirectoryCopyEntry.cs b/BurnOutSharp.Models/NCF/DirectoryCopyEntry.cs
new file mode 100644
index 00000000..1fb3bb28
--- /dev/null
+++ b/BurnOutSharp.Models/NCF/DirectoryCopyEntry.cs
@@ -0,0 +1,11 @@
+namespace BurnOutSharp.Models.NCF
+{
+ ///
+ public sealed class DirectoryCopyEntry
+ {
+ ///
+ /// Index of the directory item.
+ ///
+ public uint DirectoryIndex;
+ }
+}
diff --git a/BurnOutSharp.Models/NCF/DirectoryEntry.cs b/BurnOutSharp.Models/NCF/DirectoryEntry.cs
new file mode 100644
index 00000000..5bb4232d
--- /dev/null
+++ b/BurnOutSharp.Models/NCF/DirectoryEntry.cs
@@ -0,0 +1,41 @@
+namespace BurnOutSharp.Models.NCF
+{
+ ///
+ public sealed class DirectoryEntry
+ {
+ ///
+ /// Offset to the directory item name from the end of the directory items.
+ ///
+ public uint NameOffset;
+
+ ///
+ /// Size of the item. (If file, file size. If folder, number of items.)
+ ///
+ public uint ItemSize;
+
+ ///
+ /// Checksum index. (0xFFFFFFFF == None).
+ ///
+ public uint ChecksumIndex;
+
+ ///
+ /// Flags for the directory item. (0x00000000 == Folder).
+ ///
+ public uint DirectoryFlags;
+
+ ///
+ /// Index of the parent directory item. (0xFFFFFFFF == None).
+ ///
+ public uint ParentIndex;
+
+ ///
+ /// Index of the next directory item. (0x00000000 == None).
+ ///
+ public uint NextIndex;
+
+ ///
+ /// Index of the first directory item. (0x00000000 == None).
+ ///
+ public uint FirstIndex;
+ }
+}
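The index fields above form an intrusive tree: FirstIndex descends into a folder's children and NextIndex chains siblings, with 0 meaning none. A hedged traversal sketch, assuming NameOffset indexes into the null-delimited DirectoryNames blob:

using System;
using BurnOutSharp.Models.NCF;

public static class DirectoryWalker
{
    public static void PrintTree(DirectoryEntry[] entries, string names, uint index, int depth)
    {
        DirectoryEntry entry = entries[index];

        // NameOffset points into the null-delimited directory names blob (assumption)
        int start = (int)entry.NameOffset;
        int end = names.IndexOf('\0', start);
        if (end < 0) end = names.Length;
        string name = names.Substring(start, end - start);

        // DirectoryFlags == 0x00000000 marks a folder, per the model comment
        bool isFolder = entry.DirectoryFlags == 0;
        Console.WriteLine($"{new string(' ', depth * 2)}{name}{(isFolder ? "/" : string.Empty)}");

        // Descend into children, then continue with siblings; 0 means "none"
        if (isFolder && entry.FirstIndex != 0)
            PrintTree(entries, names, entry.FirstIndex, depth + 1);
        if (entry.NextIndex != 0)
            PrintTree(entries, names, entry.NextIndex, depth);
    }
}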
diff --git a/BurnOutSharp.Models/NCF/DirectoryHeader.cs b/BurnOutSharp.Models/NCF/DirectoryHeader.cs
new file mode 100644
index 00000000..fb9a52bb
--- /dev/null
+++ b/BurnOutSharp.Models/NCF/DirectoryHeader.cs
@@ -0,0 +1,76 @@
+namespace BurnOutSharp.Models.NCF
+{
+ ///
+ public sealed class DirectoryHeader
+ {
+ ///
+ /// Always 0x00000004
+ ///
+ public uint Dummy0;
+
+ ///
+ /// Cache ID.
+ ///
+ public uint CacheID;
+
+ ///
+ /// NCF file version.
+ ///
+ public uint LastVersionPlayed;
+
+ ///
+ /// Number of items in the directory.
+ ///
+ public uint ItemCount;
+
+ ///
+ /// Number of files in the directory.
+ ///
+ public uint FileCount;
+
+ ///
+ /// Always 0x00008000. Data per checksum?
+ ///
+ public uint ChecksumDataLength;
+
+ ///
+ /// Size of lpNCFDirectoryEntries & lpNCFDirectoryNames & lpNCFDirectoryInfo1Entries & lpNCFDirectoryInfo2Entries & lpNCFDirectoryCopyEntries & lpNCFDirectoryLocalEntries in bytes.
+ ///
+ public uint DirectorySize;
+
+ ///
+ /// Size of the directory names in bytes.
+ ///
+ public uint NameSize;
+
+ ///
+ /// Number of Info1 entries.
+ ///
+ public uint Info1Count;
+
+ ///
+ /// Number of files to copy.
+ ///
+ public uint CopyCount;
+
+ ///
+ /// Number of files to keep local.
+ ///
+ public uint LocalCount;
+
+ ///
+ /// Reserved
+ ///
+ public uint Dummy1;
+
+ ///
+ /// Reserved
+ ///
+ public uint Dummy2;
+
+ ///
+ /// Header checksum.
+ ///
+ public uint Checksum;
+ }
+}
diff --git a/BurnOutSharp.Models/NCF/DirectoryInfo1Entry.cs b/BurnOutSharp.Models/NCF/DirectoryInfo1Entry.cs
new file mode 100644
index 00000000..d7705e15
--- /dev/null
+++ b/BurnOutSharp.Models/NCF/DirectoryInfo1Entry.cs
@@ -0,0 +1,11 @@
+namespace BurnOutSharp.Models.NCF
+{
+ ///
+ public sealed class DirectoryInfo1Entry
+ {
+ ///
+ /// Reserved
+ ///
+ public uint Dummy0;
+ }
+}
diff --git a/BurnOutSharp.Models/NCF/DirectoryInfo2Entry.cs b/BurnOutSharp.Models/NCF/DirectoryInfo2Entry.cs
new file mode 100644
index 00000000..a07ce9af
--- /dev/null
+++ b/BurnOutSharp.Models/NCF/DirectoryInfo2Entry.cs
@@ -0,0 +1,11 @@
+namespace BurnOutSharp.Models.NCF
+{
+ ///
+ public sealed class DirectoryInfo2Entry
+ {
+ ///
+ /// Reserved
+ ///
+ public uint Dummy0 { get; set; }
+ }
+}
diff --git a/BurnOutSharp.Models/NCF/DirectoryLocalEntry.cs b/BurnOutSharp.Models/NCF/DirectoryLocalEntry.cs
new file mode 100644
index 00000000..029d49ad
--- /dev/null
+++ b/BurnOutSharp.Models/NCF/DirectoryLocalEntry.cs
@@ -0,0 +1,11 @@
+namespace BurnOutSharp.Models.NCF
+{
+ ///
+ public sealed class DirectoryLocalEntry
+ {
+ ///
+ /// Index of the directory item.
+ ///
+ public uint DirectoryIndex;
+ }
+}
diff --git a/BurnOutSharp.Models/NCF/File.cs b/BurnOutSharp.Models/NCF/File.cs
new file mode 100644
index 00000000..552e6f18
--- /dev/null
+++ b/BurnOutSharp.Models/NCF/File.cs
@@ -0,0 +1,79 @@
+namespace BurnOutSharp.Models.NCF
+{
+ ///
+ /// Half-Life No Cache File
+ ///
+ ///
+ public sealed class File
+ {
+ ///
+ /// Header data
+ ///
+ public Header Header { get; set; }
+
+ ///
+ /// Directory header data
+ ///
+ public DirectoryHeader DirectoryHeader { get; set; }
+
+ ///
+ /// Directory entries data
+ ///
+ public DirectoryEntry[] DirectoryEntries { get; set; }
+
+ ///
+ /// Directory names data
+ ///
+ public string DirectoryNames { get; set; }
+
+ ///
+ /// Directory info 1 entries data
+ ///
+ public DirectoryInfo1Entry[] DirectoryInfo1Entries { get; set; }
+
+ ///
+ /// Directory info 2 entries data
+ ///
+ public DirectoryInfo2Entry[] DirectoryInfo2Entries { get; set; }
+
+ ///
+ /// Directory copy entries data
+ ///
+ public DirectoryCopyEntry[] DirectoryCopyEntries { get; set; }
+
+ ///
+ /// Directory local entries data
+ ///
+ public DirectoryLocalEntry[] DirectoryLocalEntries { get; set; }
+
+ ///
+ /// Unknown header data
+ ///
+ public UnknownHeader UnknownHeader { get; set; }
+
+ ///
+ /// Unknown entries data
+ ///
+ public UnknownEntry[] UnknownEntries { get; set; }
+
+ ///
+ /// Checksum header data
+ ///
+ public ChecksumHeader ChecksumHeader { get; set; }
+
+ ///
+ /// Checksum map header data
+ ///
+ public ChecksumMapHeader ChecksumMapHeader { get; set; }
+
+ ///
+ /// Checksum map entries data
+ ///
+ public ChecksumMapEntry[] ChecksumMapEntries { get; set; }
+
+ ///
+ /// Checksum entries data
+ ///
+ public ChecksumEntry[] ChecksumEntries { get; set; }
+ }
+}
diff --git a/BurnOutSharp.Models/NCF/Header.cs b/BurnOutSharp.Models/NCF/Header.cs
new file mode 100644
index 00000000..2f795c3f
--- /dev/null
+++ b/BurnOutSharp.Models/NCF/Header.cs
@@ -0,0 +1,61 @@
+namespace BurnOutSharp.Models.NCF
+{
+ ///
+ public sealed class Header
+ {
+ ///
+ /// Always 0x00000001
+ ///
+ public uint Dummy0;
+
+ ///
+ /// Always 0x00000002
+ ///
+ public uint MajorVersion;
+
+ ///
+ /// NCF version number.
+ ///
+ public uint MinorVersion;
+
+ ///
+ /// Cache ID
+ ///
+ public uint CacheID;
+
+ ///
+ /// Last version played
+ ///
+ public uint LastVersionPlayed;
+
+ ///
+ /// Reserved
+ ///
+ public uint Dummy3;
+
+ ///
+ /// Reserved
+ ///
+ public uint Dummy4;
+
+ ///
+ /// Total size of NCF file in bytes.
+ ///
+ public uint FileSize;
+
+ ///
+ /// Size of each data block in bytes.
+ ///
+ public uint BlockSize;
+
+ ///
+ /// Number of data blocks.
+ ///
+ public uint BlockCount;
+
+ ///
+ /// Reserved
+ ///
+ public uint Dummy5;
+ }
+}
diff --git a/BurnOutSharp.Models/NCF/UnknownEntry.cs b/BurnOutSharp.Models/NCF/UnknownEntry.cs
new file mode 100644
index 00000000..d27f2548
--- /dev/null
+++ b/BurnOutSharp.Models/NCF/UnknownEntry.cs
@@ -0,0 +1,11 @@
+namespace BurnOutSharp.Models.NCF
+{
+ ///
+ public sealed class UnknownEntry
+ {
+ ///
+ /// Reserved
+ ///
+ public uint Dummy0;
+ }
+}
diff --git a/BurnOutSharp.Models/NCF/UnknownHeader.cs b/BurnOutSharp.Models/NCF/UnknownHeader.cs
new file mode 100644
index 00000000..2f9784f4
--- /dev/null
+++ b/BurnOutSharp.Models/NCF/UnknownHeader.cs
@@ -0,0 +1,16 @@
+namespace BurnOutSharp.Models.NCF
+{
+ ///
+ public sealed class UnknownHeader
+ {
+ ///
+ /// Always 0x00000001
+ ///
+ public uint Dummy0;
+
+ ///
+ /// Always 0x00000000
+ ///
+ public uint Dummy1;
+ }
+}
diff --git a/BurnOutSharp.Models/PAK/DirectoryItem.cs b/BurnOutSharp.Models/PAK/DirectoryItem.cs
new file mode 100644
index 00000000..1f8172b7
--- /dev/null
+++ b/BurnOutSharp.Models/PAK/DirectoryItem.cs
@@ -0,0 +1,21 @@
+namespace BurnOutSharp.Models.PAK
+{
+ ///
+ public sealed class DirectoryItem
+ {
+ ///
+ /// Item Name
+ ///
+ public string ItemName;
+
+ ///
+ /// Item Offset
+ ///
+ public uint ItemOffset;
+
+ ///
+ /// Item Length
+ ///
+ public uint ItemLength;
+ }
+}
diff --git a/BurnOutSharp.Models/PAK/File.cs b/BurnOutSharp.Models/PAK/File.cs
new file mode 100644
index 00000000..24d48ea3
--- /dev/null
+++ b/BurnOutSharp.Models/PAK/File.cs
@@ -0,0 +1,19 @@
+namespace BurnOutSharp.Models.PAK
+{
+ ///
+ /// Half-Life Package File
+ ///
+ ///
+ public sealed class File
+ {
+ ///
+ /// Deserialized directory header data
+ ///
+ public Header Header { get; set; }
+
+ ///
+ /// Deserialized directory items data
+ ///
+ public DirectoryItem[] DirectoryItems { get; set; }
+ }
+}
diff --git a/BurnOutSharp.Models/PAK/Header.cs b/BurnOutSharp.Models/PAK/Header.cs
new file mode 100644
index 00000000..923071bf
--- /dev/null
+++ b/BurnOutSharp.Models/PAK/Header.cs
@@ -0,0 +1,21 @@
+namespace BurnOutSharp.Models.PAK
+{
+ ///
+ public sealed class Header
+ {
+ ///
+ /// Signature
+ ///
+ public string Signature;
+
+ ///
+ /// Directory Offset
+ ///
+ public uint DirectoryOffset;
+
+ ///
+ /// Directory Length
+ ///
+ public uint DirectoryLength;
+ }
+}
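A minimal sketch of reading the directory: parse the header, seek to the directory, and read fixed 64-byte records. The 56-byte name width comes from the classic PAK layout and is an assumption here, not something the models above encode.

using System.Collections.Generic;
using System.IO;
using System.Text;
using BurnOutSharp.Models.PAK;

public static class PAKReader
{
    public static DirectoryItem[] ReadDirectory(Stream stream)
    {
        var reader = new BinaryReader(stream);

        string signature = Encoding.ASCII.GetString(reader.ReadBytes(4));
        if (signature != "PACK")
            return null;

        uint directoryOffset = reader.ReadUInt32();
        uint directoryLength = reader.ReadUInt32();

        // Each directory record is 56 name bytes + two uints (assumption)
        stream.Seek(directoryOffset, SeekOrigin.Begin);
        var items = new List<DirectoryItem>();
        for (long i = 0; i < directoryLength / 64; i++)
        {
            items.Add(new DirectoryItem
            {
                ItemName = Encoding.ASCII.GetString(reader.ReadBytes(56)).TrimEnd('\0'),
                ItemOffset = reader.ReadUInt32(),
                ItemLength = reader.ReadUInt32(),
            });
        }

        return items.ToArray();
    }
}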
diff --git a/BurnOutSharp.Models/SGA/Directory.cs b/BurnOutSharp.Models/SGA/Directory.cs
new file mode 100644
index 00000000..f0e598b4
--- /dev/null
+++ b/BurnOutSharp.Models/SGA/Directory.cs
@@ -0,0 +1,5 @@
+namespace BurnOutSharp.Models.SGA
+{
+ ///
+ public abstract class Directory { }
+}
diff --git a/BurnOutSharp.Models/SGA/Directory4.cs b/BurnOutSharp.Models/SGA/Directory4.cs
new file mode 100644
index 00000000..43e45e7b
--- /dev/null
+++ b/BurnOutSharp.Models/SGA/Directory4.cs
@@ -0,0 +1,5 @@
+namespace BurnOutSharp.Models.SGA
+{
+ ///
+ public class Directory4 : SpecializedDirectory<Header4, DirectoryHeader4, Section4, Folder4, File4> { }
+}
diff --git a/BurnOutSharp.Models/SGA/Directory5.cs b/BurnOutSharp.Models/SGA/Directory5.cs
new file mode 100644
index 00000000..930c58b4
--- /dev/null
+++ b/BurnOutSharp.Models/SGA/Directory5.cs
@@ -0,0 +1,5 @@
+namespace BurnOutSharp.Models.SGA
+{
+ ///
+ public class Directory5 : SpecializedDirectory<Header4, DirectoryHeader5, Section5, Folder5, File4> { }
+}
diff --git a/BurnOutSharp.Models/SGA/Directory6.cs b/BurnOutSharp.Models/SGA/Directory6.cs
new file mode 100644
index 00000000..3b1b7697
--- /dev/null
+++ b/BurnOutSharp.Models/SGA/Directory6.cs
@@ -0,0 +1,5 @@
+namespace BurnOutSharp.Models.SGA
+{
+ ///
+ public class Directory6 : SpecializedDirectory<Header6, DirectoryHeader5, Section5, Folder5, File6> { }
+}
diff --git a/BurnOutSharp.Models/SGA/Directory7.cs b/BurnOutSharp.Models/SGA/Directory7.cs
new file mode 100644
index 00000000..d7907c54
--- /dev/null
+++ b/BurnOutSharp.Models/SGA/Directory7.cs
@@ -0,0 +1,5 @@
+namespace BurnOutSharp.Models.SGA
+{
+ ///
+ public class Directory7 : SpecializedDirectory<Header6, DirectoryHeader7, Section5, Folder5, File7> { }
+}
diff --git a/BurnOutSharp.Models/SGA/DirectoryHeader.cs b/BurnOutSharp.Models/SGA/DirectoryHeader.cs
new file mode 100644
index 00000000..01106392
--- /dev/null
+++ b/BurnOutSharp.Models/SGA/DirectoryHeader.cs
@@ -0,0 +1,22 @@
+namespace BurnOutSharp.Models.SGA
+{
+ ///
+ public class DirectoryHeader<T>
+ {
+ public uint SectionOffset;
+
+ public T SectionCount;
+
+ public uint FolderOffset;
+
+ public T FolderCount;
+
+ public uint FileOffset;
+
+ public T FileCount;
+
+ public uint StringTableOffset;
+
+ public T StringTableCount;
+ }
+}
diff --git a/BurnOutSharp.Models/SGA/DirectoryHeader4.cs b/BurnOutSharp.Models/SGA/DirectoryHeader4.cs
new file mode 100644
index 00000000..65a60b3a
--- /dev/null
+++ b/BurnOutSharp.Models/SGA/DirectoryHeader4.cs
@@ -0,0 +1,5 @@
+namespace BurnOutSharp.Models.SGA
+{
+ ///
+ public sealed class DirectoryHeader4 : DirectoryHeader<ushort> { }
+}
diff --git a/BurnOutSharp.Models/SGA/DirectoryHeader5.cs b/BurnOutSharp.Models/SGA/DirectoryHeader5.cs
new file mode 100644
index 00000000..a1e01d86
--- /dev/null
+++ b/BurnOutSharp.Models/SGA/DirectoryHeader5.cs
@@ -0,0 +1,5 @@
+namespace BurnOutSharp.Models.SGA
+{
+ ///
+ public class DirectoryHeader5 : DirectoryHeader<uint> { }
+}
diff --git a/BurnOutSharp.Models/SGA/DirectoryHeader7.cs b/BurnOutSharp.Models/SGA/DirectoryHeader7.cs
new file mode 100644
index 00000000..93e75802
--- /dev/null
+++ b/BurnOutSharp.Models/SGA/DirectoryHeader7.cs
@@ -0,0 +1,10 @@
+namespace BurnOutSharp.Models.SGA
+{
+ ///
+ public sealed class DirectoryHeader7 : DirectoryHeader5
+ {
+ public uint HashTableOffset;
+
+ public uint BlockSize;
+ }
+}
diff --git a/BurnOutSharp.Models/SGA/File.cs b/BurnOutSharp.Models/SGA/File.cs
new file mode 100644
index 00000000..51a56004
--- /dev/null
+++ b/BurnOutSharp.Models/SGA/File.cs
@@ -0,0 +1,16 @@
+namespace BurnOutSharp.Models.SGA
+{
+ ///
+ public class File
+ {
+ ///
+ /// Header data
+ ///
+ public Header Header { get; set; }
+
+ ///
+ /// Directory data
+ ///
+ public Directory Directory { get; set; }
+ }
+}
diff --git a/BurnOutSharp.Models/SGA/File4.cs b/BurnOutSharp.Models/SGA/File4.cs
new file mode 100644
index 00000000..e4a3d193
--- /dev/null
+++ b/BurnOutSharp.Models/SGA/File4.cs
@@ -0,0 +1,20 @@
+namespace BurnOutSharp.Models.SGA
+{
+ ///
+ public class File4
+ {
+ public uint NameOffset;
+
+ public uint Offset;
+
+ public uint SizeOnDisk;
+
+ public uint Size;
+
+ public uint TimeModified;
+
+ public byte Dummy0;
+
+ public byte Type;
+ }
+}
diff --git a/BurnOutSharp.Models/SGA/File6.cs b/BurnOutSharp.Models/SGA/File6.cs
new file mode 100644
index 00000000..0e3a91d6
--- /dev/null
+++ b/BurnOutSharp.Models/SGA/File6.cs
@@ -0,0 +1,8 @@
+namespace BurnOutSharp.Models.SGA
+{
+ ///
+ public class File6 : File4
+ {
+ public uint CRC32;
+ }
+}
diff --git a/BurnOutSharp.Models/SGA/File7.cs b/BurnOutSharp.Models/SGA/File7.cs
new file mode 100644
index 00000000..58402fb6
--- /dev/null
+++ b/BurnOutSharp.Models/SGA/File7.cs
@@ -0,0 +1,8 @@
+namespace BurnOutSharp.Models.SGA
+{
+ ///
+ public sealed class File7 : File6
+ {
+ public uint HashOffset;
+ }
+}
diff --git a/BurnOutSharp.Models/SGA/FileHeader.cs b/BurnOutSharp.Models/SGA/FileHeader.cs
new file mode 100644
index 00000000..9bceac71
--- /dev/null
+++ b/BurnOutSharp.Models/SGA/FileHeader.cs
@@ -0,0 +1,10 @@
+namespace BurnOutSharp.Models.SGA
+{
+ ///
+ public sealed class FileHeader
+ {
+ public string Name;
+
+ public uint CRC32;
+ }
+}
diff --git a/BurnOutSharp.Models/SGA/Folder.cs b/BurnOutSharp.Models/SGA/Folder.cs
new file mode 100644
index 00000000..3f475e87
--- /dev/null
+++ b/BurnOutSharp.Models/SGA/Folder.cs
@@ -0,0 +1,16 @@
+namespace BurnOutSharp.Models.SGA
+{
+ ///
+ public class Folder<T>
+ {
+ public uint NameOffset;
+
+ public T FolderStartIndex;
+
+ public T FolderEndIndex;
+
+ public T FileStartIndex;
+
+ public T FileEndIndex;
+ }
+}
diff --git a/BurnOutSharp.Models/SGA/Folder4.cs b/BurnOutSharp.Models/SGA/Folder4.cs
new file mode 100644
index 00000000..a83191fb
--- /dev/null
+++ b/BurnOutSharp.Models/SGA/Folder4.cs
@@ -0,0 +1,5 @@
+namespace BurnOutSharp.Models.SGA
+{
+ ///
+ public sealed class Folder4 : Folder<ushort> { }
+}
diff --git a/BurnOutSharp.Models/SGA/Folder5.cs b/BurnOutSharp.Models/SGA/Folder5.cs
new file mode 100644
index 00000000..dd8ccb25
--- /dev/null
+++ b/BurnOutSharp.Models/SGA/Folder5.cs
@@ -0,0 +1,5 @@
+namespace BurnOutSharp.Models.SGA
+{
+ ///
+ public sealed class Folder5 : Folder<uint> { }
+}
diff --git a/BurnOutSharp.Models/SGA/Header.cs b/BurnOutSharp.Models/SGA/Header.cs
new file mode 100644
index 00000000..9ed0541b
--- /dev/null
+++ b/BurnOutSharp.Models/SGA/Header.cs
@@ -0,0 +1,12 @@
+namespace BurnOutSharp.Models.SGA
+{
+ ///
+ public class Header
+ {
+ public string Signature;
+
+ public ushort MajorVersion;
+
+ public ushort MinorVersion;
+ }
+}
diff --git a/BurnOutSharp.Models/SGA/Header4.cs b/BurnOutSharp.Models/SGA/Header4.cs
new file mode 100644
index 00000000..b524f2d6
--- /dev/null
+++ b/BurnOutSharp.Models/SGA/Header4.cs
@@ -0,0 +1,18 @@
+namespace BurnOutSharp.Models.SGA
+{
+ ///
+ public sealed class Header4 : Header
+ {
+ public byte[] FileMD5;
+
+ public string Name;
+
+ public byte[] HeaderMD5;
+
+ public uint HeaderLength;
+
+ public uint FileDataOffset;
+
+ public uint Dummy0;
+ }
+}
diff --git a/BurnOutSharp.Models/SGA/Header6.cs b/BurnOutSharp.Models/SGA/Header6.cs
new file mode 100644
index 00000000..3d7dbe50
--- /dev/null
+++ b/BurnOutSharp.Models/SGA/Header6.cs
@@ -0,0 +1,14 @@
+namespace BurnOutSharp.Models.SGA
+{
+ ///
+ public sealed class Header6 : Header
+ {
+ public string Name;
+
+ public uint HeaderLength;
+
+ public uint FileDataOffset;
+
+ public uint Dummy0;
+ }
+}
diff --git a/BurnOutSharp.Models/SGA/Section.cs b/BurnOutSharp.Models/SGA/Section.cs
new file mode 100644
index 00000000..78e2f1e3
--- /dev/null
+++ b/BurnOutSharp.Models/SGA/Section.cs
@@ -0,0 +1,20 @@
+namespace BurnOutSharp.Models.SGA
+{
+ ///
+ public class Section<T>
+ {
+ public string Alias;
+
+ public string Name;
+
+ public T FolderStartIndex;
+
+ public T FolderEndIndex;
+
+ public T FileStartIndex;
+
+ public T FileEndIndex;
+
+ public T FolderRootIndex;
+ }
+}
diff --git a/BurnOutSharp.Models/SGA/Section4.cs b/BurnOutSharp.Models/SGA/Section4.cs
new file mode 100644
index 00000000..088fcd0e
--- /dev/null
+++ b/BurnOutSharp.Models/SGA/Section4.cs
@@ -0,0 +1,5 @@
+namespace BurnOutSharp.Models.SGA
+{
+ ///
+ public sealed class Section4 : Section<ushort> { }
+}
diff --git a/BurnOutSharp.Models/SGA/Section5.cs b/BurnOutSharp.Models/SGA/Section5.cs
new file mode 100644
index 00000000..b37d7552
--- /dev/null
+++ b/BurnOutSharp.Models/SGA/Section5.cs
@@ -0,0 +1,5 @@
+namespace BurnOutSharp.Models.SGA
+{
+ ///
+ public sealed class Section5 : Section<uint> { }
+}
diff --git a/BurnOutSharp.Models/SGA/SpecializedDirectory.cs b/BurnOutSharp.Models/SGA/SpecializedDirectory.cs
new file mode 100644
index 00000000..68159d4c
--- /dev/null
+++ b/BurnOutSharp.Models/SGA/SpecializedDirectory.cs
@@ -0,0 +1,46 @@
+using System.Collections.Generic;
+
+namespace BurnOutSharp.Models.SGA
+{
+ ///
+ /// Specialization for File7 and up, where the CRC moved to the header, the CRC covers the compressed data, and there are stronger hashes.
+ ///
+ ///
+ public class SpecializedDirectory<THeader, TDirectoryHeader, TSection, TFolder, TFile> : Directory
+ where THeader : Header
+ where TDirectoryHeader : DirectoryHeader
+ where TSection : Section
+ where TFolder : Folder
+ where TFile : File4
+ {
+ ///
+ /// Source SGA file
+ ///
+ public File File { get; set; }
+
+ ///
+ /// Directory header data
+ ///
+ public TDirectoryHeader DirectoryHeader { get; set; }
+
+ ///
+ /// Sections data
+ ///
+ public TSection[] Sections { get; set; }
+
+ ///
+ /// Folders data
+ ///
+ public TFolder[] Folders { get; set; }
+
+ ///
+ /// Files data
+ ///
+ public TFile[] Files { get; set; }
+
+ ///
+ /// String table data
+ ///
+ public Dictionary<long, string> StringTable { get; set; }
+ }
+}
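A usage sketch over the version-5 types, assuming Folder5 closes the generics over 32-bit indices, that the [FileStartIndex, FileEndIndex) range is half-open as in HLLib, and that the string table is keyed by name offset:

using System;
using BurnOutSharp.Models.SGA;

public static class SGAFolderPrinter
{
    public static void PrintFolder(Directory5 directory, Folder5 folder)
    {
        // Half-open range [FileStartIndex, FileEndIndex), following HLLib
        for (uint i = folder.FileStartIndex; i < folder.FileEndIndex; i++)
        {
            File4 file = directory.Files[i];

            // Names live in the string table, keyed by offset (assumption)
            directory.StringTable.TryGetValue(file.NameOffset, out string name);
            Console.WriteLine($"{name}: {file.Size} bytes ({file.SizeOnDisk} on disk)");
        }
    }
}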
diff --git a/BurnOutSharp.Models/VBSP/File.cs b/BurnOutSharp.Models/VBSP/File.cs
new file mode 100644
index 00000000..b0fd8bbd
--- /dev/null
+++ b/BurnOutSharp.Models/VBSP/File.cs
@@ -0,0 +1,14 @@
+namespace BurnOutSharp.Models.VBSP
+{
+ ///
+ /// Half-Life 2 Level
+ ///
+ ///
+ public sealed class File
+ {
+ ///
+ /// Directory header data
+ ///
+ public Header Header { get; set; }
+ }
+}
\ No newline at end of file
diff --git a/BurnOutSharp.Models/VBSP/Header.cs b/BurnOutSharp.Models/VBSP/Header.cs
new file mode 100644
index 00000000..f086142b
--- /dev/null
+++ b/BurnOutSharp.Models/VBSP/Header.cs
@@ -0,0 +1,31 @@
+namespace BurnOutSharp.Models.VBSP
+{
+ ///
+ public sealed class Header
+ {
+ ///
+ /// BSP file signature.
+ ///
+ public string Signature;
+
+ ///
+ /// BSP file version.
+ ///
+ ///
+ /// 19-20: Source
+ /// 21: Source - The lump version property was moved to the start of the struct.
+ /// 0x00040014: Dark Messiah - Looks like the 32-bit version has been split into two 16-bit fields.
+ ///
+ public int Version;
+
+ ///
+ /// Lumps.
+ ///
+ public Lump[] Lumps;
+
+ ///
+ /// The map's revision (iteration, version) number.
+ ///
+ public int MapRevision;
+ }
+}
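A hedged sketch of normalizing the version field per the remarks above: for Dark Messiah the low 16 bits appear to carry the comparable version number.

using BurnOutSharp.Models.VBSP;

public static class VBSPVersion
{
    public static int GetEffectiveVersion(Header header)
    {
        // Dark Messiah: 0x00040014 -> the low 16 bits hold the real version (20)
        if ((header.Version >> 16) != 0)
            return header.Version & 0xFFFF;

        return header.Version;
    }
}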
diff --git a/BurnOutSharp.Models/VBSP/Lump.cs b/BurnOutSharp.Models/VBSP/Lump.cs
new file mode 100644
index 00000000..b3576a9c
--- /dev/null
+++ b/BurnOutSharp.Models/VBSP/Lump.cs
@@ -0,0 +1,20 @@
+namespace BurnOutSharp.Models.VBSP
+{
+ ///
+ public sealed class Lump
+ {
+ public uint Offset;
+
+ public uint Length;
+
+ ///
+ /// Default to zero.
+ ///
+ public uint Version;
+
+ ///
+ /// Default to (char)0, (char)0, (char)0, (char)0.
+ ///
+ public char[] FourCC;
+ }
+}
diff --git a/BurnOutSharp.Models/VBSP/LumpHeader.cs b/BurnOutSharp.Models/VBSP/LumpHeader.cs
new file mode 100644
index 00000000..498230d0
--- /dev/null
+++ b/BurnOutSharp.Models/VBSP/LumpHeader.cs
@@ -0,0 +1,16 @@
+namespace BurnOutSharp.Models.VBSP
+{
+ ///
+ public sealed class LumpHeader
+ {
+ public int LumpOffset;
+
+ public int LumpID;
+
+ public int LumpVersion;
+
+ public int LumpLength;
+
+ public int MapRevision;
+ }
+}
diff --git a/BurnOutSharp.Models/VPK/ArchiveHash.cs b/BurnOutSharp.Models/VPK/ArchiveHash.cs
new file mode 100644
index 00000000..03084b1b
--- /dev/null
+++ b/BurnOutSharp.Models/VPK/ArchiveHash.cs
@@ -0,0 +1,17 @@
+namespace BurnOutSharp.Models.VPK
+{
+ ///
+ public sealed class ArchiveHash
+ {
+ public uint ArchiveIndex;
+
+ public uint ArchiveOffset;
+
+ public uint Length;
+
+ ///
+ /// MD5
+ ///
+ public byte[] Hash;
+ }
+}
diff --git a/BurnOutSharp.Models/VPK/DirectoryEntry.cs b/BurnOutSharp.Models/VPK/DirectoryEntry.cs
new file mode 100644
index 00000000..320bde86
--- /dev/null
+++ b/BurnOutSharp.Models/VPK/DirectoryEntry.cs
@@ -0,0 +1,21 @@
+namespace BurnOutSharp.Models.VPK
+{
+ ///
+ public sealed class DirectoryEntry
+ {
+ public uint CRC;
+
+ public ushort PreloadBytes;
+
+ public ushort ArchiveIndex;
+
+ public uint EntryOffset;
+
+ public uint EntryLength;
+
+ ///
+ /// Always 0xffff.
+ ///
+ public ushort Dummy0;
+ }
+}
diff --git a/BurnOutSharp.Models/VPK/DirectoryItem.cs b/BurnOutSharp.Models/VPK/DirectoryItem.cs
new file mode 100644
index 00000000..de51549d
--- /dev/null
+++ b/BurnOutSharp.Models/VPK/DirectoryItem.cs
@@ -0,0 +1,16 @@
+namespace BurnOutSharp.Models.VPK
+{
+ ///
+ public sealed class DirectoryItem
+ {
+ public string Extension;
+
+ public string Path;
+
+ public string Name;
+
+ public DirectoryEntry DirectoryEntry;
+
+ public byte[] PreloadData;
+ }
+}
diff --git a/BurnOutSharp.Models/VPK/ExtendedHeader.cs b/BurnOutSharp.Models/VPK/ExtendedHeader.cs
new file mode 100644
index 00000000..043ef4e7
--- /dev/null
+++ b/BurnOutSharp.Models/VPK/ExtendedHeader.cs
@@ -0,0 +1,29 @@
+namespace BurnOutSharp.Models.VPK
+{
+ ///
+ /// Added in version 2.
+ ///
+ ///
+ public sealed class ExtendedHeader
+ {
+ ///
+ /// Reserved
+ ///
+ public uint Dummy0;
+
+ ///
+ /// Archive hash length
+ ///
+ public uint ArchiveHashLength;
+
+ ///
+ /// Looks like some more MD5 hashes.
+ ///
+ public uint ExtraLength;
+
+ ///
+ /// Reserved
+ ///
+ public uint Dummy1;
+ }
+}
diff --git a/BurnOutSharp.Models/VPK/File.cs b/BurnOutSharp.Models/VPK/File.cs
new file mode 100644
index 00000000..e67acf67
--- /dev/null
+++ b/BurnOutSharp.Models/VPK/File.cs
@@ -0,0 +1,29 @@
+namespace BurnOutSharp.Models.VPK
+{
+ ///
+ /// Valve Package File
+ ///
+ ///
+ public sealed class File
+ {
+ ///
+ /// Header data
+ ///
+ public Header Header { get; set; }
+
+ ///
+ /// Extended header data
+ ///
+ public ExtendedHeader ExtendedHeader { get; set; }
+
+ ///
+ /// Archive hashes data
+ ///
+ public ArchiveHash[] ArchiveHashes { get; set; }
+
+ ///
+ /// Directory items data
+ ///
+ public DirectoryItem[] DirectoryItems { get; set; }
+ }
+}
diff --git a/BurnOutSharp.Models/VPK/Header.cs b/BurnOutSharp.Models/VPK/Header.cs
new file mode 100644
index 00000000..20a1e021
--- /dev/null
+++ b/BurnOutSharp.Models/VPK/Header.cs
@@ -0,0 +1,15 @@
+namespace BurnOutSharp.Models.VPK
+{
+ ///
+ public sealed class Header
+ {
+ ///
+ /// Always 0x55aa1234.
+ ///
+ public uint Signature;
+
+ public uint Version;
+
+ public uint DirectoryLength;
+ }
+}
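A quick validation sketch based on the documented constant; versions 1 and 2 are the layouts the models above describe, with version 2 adding the ExtendedHeader.

using BurnOutSharp.Models.VPK;

public static class VPKValidator
{
    public static bool IsValid(Header header)
    {
        // Always 0x55aa1234, per the model comment
        if (header.Signature != 0x55aa1234)
            return false;

        // Version 1 has no extended header; version 2 adds ExtendedHeader and hashes
        return header.Version == 1 || header.Version == 2;
    }
}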
diff --git a/BurnOutSharp.Models/WAD/File.cs b/BurnOutSharp.Models/WAD/File.cs
new file mode 100644
index 00000000..029bac7d
--- /dev/null
+++ b/BurnOutSharp.Models/WAD/File.cs
@@ -0,0 +1,24 @@
+namespace BurnOutSharp.Models.WAD
+{
+ ///
+ /// Half-Life Texture Package File
+ ///
+ ///
+ public sealed class File
+ {
+ ///
+ /// Deserialized header data
+ ///
+ public Header Header { get; set; }
+
+ ///
+ /// Deserialized lumps data
+ ///
+ public Lump[] Lumps { get; set; }
+
+ ///
+ /// Deserialized lump infos data
+ ///
+ public LumpInfo[] LumpInfos { get; set; }
+ }
+}
diff --git a/BurnOutSharp.Models/WAD/Header.cs b/BurnOutSharp.Models/WAD/Header.cs
new file mode 100644
index 00000000..4677b0c1
--- /dev/null
+++ b/BurnOutSharp.Models/WAD/Header.cs
@@ -0,0 +1,12 @@
+namespace BurnOutSharp.Models.WAD
+{
+ ///
+ public sealed class Header
+ {
+ public string Signature;
+
+ public uint LumpCount;
+
+ public uint LumpOffset;
+ }
+}
diff --git a/BurnOutSharp.Models/WAD/Lump.cs b/BurnOutSharp.Models/WAD/Lump.cs
new file mode 100644
index 00000000..7e30fcfb
--- /dev/null
+++ b/BurnOutSharp.Models/WAD/Lump.cs
@@ -0,0 +1,22 @@
+namespace BurnOutSharp.Models.WAD
+{
+ ///
+ public sealed class Lump
+ {
+ public uint Offset;
+
+ public uint DiskLength;
+
+ public uint Length;
+
+ public byte Type;
+
+ public byte Compression;
+
+ public byte Padding0;
+
+ public byte Padding1;
+
+ public string Name;
+ }
+}
diff --git a/BurnOutSharp.Models/WAD/LumpInfo.cs b/BurnOutSharp.Models/WAD/LumpInfo.cs
new file mode 100644
index 00000000..4daec113
--- /dev/null
+++ b/BurnOutSharp.Models/WAD/LumpInfo.cs
@@ -0,0 +1,22 @@
+namespace BurnOutSharp.Models.WAD
+{
+ ///
+ public sealed class LumpInfo
+ {
+ public string Name;
+
+ public uint Width;
+
+ public uint Height;
+
+ public uint PixelOffset;
+
+ // 12 bytes of unknown data
+
+ public byte[] PixelData;
+
+ public uint PaletteSize;
+
+ public byte[] PaletteData;
+ }
+}
diff --git a/BurnOutSharp.Models/XZP/DirectoryEntry.cs b/BurnOutSharp.Models/XZP/DirectoryEntry.cs
new file mode 100644
index 00000000..03d3c7ac
--- /dev/null
+++ b/BurnOutSharp.Models/XZP/DirectoryEntry.cs
@@ -0,0 +1,12 @@
+namespace BurnOutSharp.Models.XZP
+{
+ ///
+ public sealed class DirectoryEntry
+ {
+ public uint FileNameCRC;
+
+ public uint EntryLength;
+
+ public uint EntryOffset;
+ }
+}
diff --git a/BurnOutSharp.Models/XZP/DirectoryItem.cs b/BurnOutSharp.Models/XZP/DirectoryItem.cs
new file mode 100644
index 00000000..6ccdcdc6
--- /dev/null
+++ b/BurnOutSharp.Models/XZP/DirectoryItem.cs
@@ -0,0 +1,12 @@
+namespace BurnOutSharp.Models.XZP
+{
+ ///
+ public sealed class DirectoryItem
+ {
+ public uint FileNameCRC;
+
+ public uint NameOffset;
+
+ public uint TimeCreated;
+ }
+}
diff --git a/BurnOutSharp.Models/XZP/DirectoryMapping.cs b/BurnOutSharp.Models/XZP/DirectoryMapping.cs
new file mode 100644
index 00000000..1c0ba8d2
--- /dev/null
+++ b/BurnOutSharp.Models/XZP/DirectoryMapping.cs
@@ -0,0 +1,8 @@
+namespace BurnOutSharp.Models.XZP
+{
+ ///
+ public sealed class DirectoryMapping
+ {
+ public ushort PreloadDirectoryEntryIndex;
+ }
+}
diff --git a/BurnOutSharp.Models/XZP/File.cs b/BurnOutSharp.Models/XZP/File.cs
new file mode 100644
index 00000000..e262d3d5
--- /dev/null
+++ b/BurnOutSharp.Models/XZP/File.cs
@@ -0,0 +1,39 @@
+namespace BurnOutSharp.Models.XZP
+{
+ ///
+ /// XBox Package File
+ ///
+ ///
+ public class File
+ {
+ ///
+ /// Header data
+ ///
+ public Header Header { get; set; }
+
+ ///
+ /// Directory entries data
+ ///
+ public DirectoryEntry[] DirectoryEntries { get; set; }
+
+ ///
+ /// Preload directory entries data
+ ///
+ public DirectoryEntry[] PreloadDirectoryEntries { get; set; }
+
+ ///
+ /// Preload directory mappings data
+ ///
+ public DirectoryMapping[] PreloadDirectoryMappings { get; set; }
+
+ ///
+ /// Directory items data
+ ///
+ public DirectoryItem[] DirectoryItems { get; set; }
+
+ ///
+ /// Footer data
+ ///
+ public Footer Footer { get; set; }
+ }
+}
diff --git a/BurnOutSharp.Models/XZP/Footer.cs b/BurnOutSharp.Models/XZP/Footer.cs
new file mode 100644
index 00000000..a2c04673
--- /dev/null
+++ b/BurnOutSharp.Models/XZP/Footer.cs
@@ -0,0 +1,10 @@
+namespace BurnOutSharp.Models.XZP
+{
+ ///
+ public sealed class Footer
+ {
+ public uint FileLength;
+
+ public string Signature;
+ }
+}
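The footer sits at the end of the file. A sketch, assuming it occupies the final 8 bytes and that FileLength records the whole file's size (an assumption used here as an integrity check):

using System.IO;
using System.Text;
using BurnOutSharp.Models.XZP;

public static class XZPFooterReader
{
    public static Footer ReadFooter(Stream stream)
    {
        // Assumed layout: uint length followed by a 4-character signature
        stream.Seek(-8, SeekOrigin.End);
        var reader = new BinaryReader(stream);

        var footer = new Footer
        {
            FileLength = reader.ReadUInt32(),
            Signature = Encoding.ASCII.GetString(reader.ReadBytes(4)),
        };

        // The recorded length should match the actual file size
        if (footer.FileLength != stream.Length)
            return null;

        return footer;
    }
}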
diff --git a/BurnOutSharp.Models/XZP/Header.cs b/BurnOutSharp.Models/XZP/Header.cs
new file mode 100644
index 00000000..61970da5
--- /dev/null
+++ b/BurnOutSharp.Models/XZP/Header.cs
@@ -0,0 +1,24 @@
+namespace BurnOutSharp.Models.XZP
+{
+ ///
+ public sealed class Header
+ {
+ public string Signature;
+
+ public uint Version;
+
+ public uint PreloadDirectoryEntryCount;
+
+ public uint DirectoryEntryCount;
+
+ public uint PreloadBytes;
+
+ public uint HeaderLength;
+
+ public uint DirectoryItemCount;
+
+ public uint DirectoryItemOffset;
+
+ public uint DirectoryItemLength;
+ }
+}
diff --git a/BurnOutSharp/Enums.cs b/BurnOutSharp/Enums.cs
index d799bc09..313542f9 100644
--- a/BurnOutSharp/Enums.cs
+++ b/BurnOutSharp/Enums.cs
@@ -15,6 +15,11 @@
///
BFPK,
+ ///
+ /// Half-Life Level
+ ///
+ BSP,
+
///
/// bzip2 archive
///
@@ -25,6 +30,11 @@
///
Executable,
+ ///
+ /// Half-Life Game Cache File
+ ///
+ GCF,
+
///
/// gzip archive
///
@@ -60,6 +70,16 @@
///
MSI,
+ ///
+ /// Half-Life No Cache File
+ ///
+ NCF,
+
+ ///
+ /// Half-Life Package File
+ ///
+ PAK,
+
///
/// PKWARE ZIP archive and derivatives
///
@@ -85,6 +105,11 @@
///
SFFS,
+ ///
+ /// SGA
+ ///
+ SGA,
+
///
/// Tape archive
///
@@ -96,13 +121,28 @@
Textfile,
///
- /// Various Valve archive formats
+ /// Half-Life 2 Level
///
- Valve,
+ VBSP,
+
+ ///
+ /// Valve Package File
+ ///
+ VPK,
+
+ ///
+ /// Half-Life Texture Package File
+ ///
+ WAD,
///
/// xz archive
///
XZ,
+
+ ///
+ /// XBox Package File
+ ///
+ XZP,
}
}
diff --git a/BurnOutSharp/Tools/Utilities.cs b/BurnOutSharp/Tools/Utilities.cs
index f1a00f9f..16150151 100644
--- a/BurnOutSharp/Tools/Utilities.cs
+++ b/BurnOutSharp/Tools/Utilities.cs
@@ -31,7 +31,7 @@ namespace BurnOutSharp.Tools
#region BSP
if (magic.StartsWith(new byte?[] { 0x1e, 0x00, 0x00, 0x00 }))
- return SupportedFileType.Valve;
+ return SupportedFileType.BSP;
#endregion
@@ -81,7 +81,7 @@ namespace BurnOutSharp.Tools
#region GCF
if (magic.StartsWith(new byte?[] { 0x01, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00 }))
- return SupportedFileType.Valve;
+ return SupportedFileType.GCF;
#endregion
@@ -139,14 +139,14 @@ namespace BurnOutSharp.Tools
#region NCF
if (magic.StartsWith(new byte?[] { 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00 }))
- return SupportedFileType.Valve;
+ return SupportedFileType.NCF;
#endregion
#region PAK
if (magic.StartsWith(new byte?[] { 0x50, 0x41, 0x43, 0x4B }))
- return SupportedFileType.Valve;
+ return SupportedFileType.PAK;
#endregion
@@ -208,7 +208,7 @@ namespace BurnOutSharp.Tools
#region SGA
if (magic.StartsWith(new byte?[] { 0x5F, 0x41, 0x52, 0x43, 0x48, 0x49, 0x56, 0x45 }))
- return SupportedFileType.Valve;
+ return SupportedFileType.SGA;
#endregion
@@ -255,21 +255,21 @@ namespace BurnOutSharp.Tools
#region VBSP
if (magic.StartsWith(new byte?[] { 0x56, 0x42, 0x53, 0x50 }))
- return SupportedFileType.Valve;
+ return SupportedFileType.VBSP;
#endregion
#region VPK
if (magic.StartsWith(new byte?[] { 0x34, 0x12, 0x55, 0xaa }))
- return SupportedFileType.Valve;
+ return SupportedFileType.VPK;
#endregion
#region WAD
if (magic.StartsWith(new byte?[] { 0x57, 0x41, 0x44, 0x33 }))
- return SupportedFileType.Valve;
+ return SupportedFileType.WAD;
#endregion
@@ -283,7 +283,7 @@ namespace BurnOutSharp.Tools
#region XZP
if (magic.StartsWith(new byte?[] { 0x70, 0x69, 0x5A, 0x78 }))
- return SupportedFileType.Valve;
+ return SupportedFileType.XZP;
#endregion
@@ -313,7 +313,7 @@ namespace BurnOutSharp.Tools
#region BSP
if (extension.Equals("bsp", StringComparison.OrdinalIgnoreCase))
- return SupportedFileType.Valve;
+ return SupportedFileType.BSP;
#endregion
@@ -339,7 +339,7 @@ namespace BurnOutSharp.Tools
#region GCF
if (extension.Equals("gcf", StringComparison.OrdinalIgnoreCase))
- return SupportedFileType.Valve;
+ return SupportedFileType.GCF;
#endregion
@@ -395,14 +395,14 @@ namespace BurnOutSharp.Tools
#region NCF
if (extension.Equals("ncf", StringComparison.OrdinalIgnoreCase))
- return SupportedFileType.Valve;
+ return SupportedFileType.NCF;
#endregion
#region PAK
if (extension.Equals("pak", StringComparison.OrdinalIgnoreCase))
- return SupportedFileType.Valve;
+ return SupportedFileType.PAK;
#endregion
@@ -519,7 +519,7 @@ namespace BurnOutSharp.Tools
#region SGA
if (extension.Equals("sga", StringComparison.OrdinalIgnoreCase))
- return SupportedFileType.Valve;
+ return SupportedFileType.SGA;
#endregion
@@ -575,21 +575,21 @@ namespace BurnOutSharp.Tools
#region VBSP
if (extension.Equals("bsp", StringComparison.OrdinalIgnoreCase))
- return SupportedFileType.Valve;
+ return SupportedFileType.VBSP;
#endregion
#region VPK
if (extension.Equals("vpk", StringComparison.OrdinalIgnoreCase))
- return SupportedFileType.Valve;
+ return SupportedFileType.VPK;
#endregion
#region WAD
if (extension.Equals("wad", StringComparison.OrdinalIgnoreCase))
- return SupportedFileType.Valve;
+ return SupportedFileType.WAD;
#endregion
@@ -603,7 +603,7 @@ namespace BurnOutSharp.Tools
#region XZP
if (extension.Equals("xzp", StringComparison.OrdinalIgnoreCase))
- return SupportedFileType.Valve;
+ return SupportedFileType.XZP;
#endregion
@@ -619,8 +619,10 @@ namespace BurnOutSharp.Tools
switch (fileType)
{
case SupportedFileType.BFPK: return new FileType.BFPK();
+ case SupportedFileType.BSP: return new FileType.Valve();
case SupportedFileType.BZip2: return new FileType.BZip2();
case SupportedFileType.Executable: return new FileType.Executable();
+ case SupportedFileType.GCF: return new FileType.Valve();
case SupportedFileType.GZIP: return new FileType.GZIP();
//case FileTypes.IniFile: return new FileType.IniFile();
case SupportedFileType.InstallShieldArchiveV3: return new FileType.InstallShieldArchiveV3();
@@ -628,15 +630,21 @@ namespace BurnOutSharp.Tools
case SupportedFileType.MicrosoftCAB: return new FileType.MicrosoftCAB();
case SupportedFileType.MPQ: return new FileType.MPQ();
case SupportedFileType.MSI: return new FileType.MSI();
+ case SupportedFileType.NCF: return new FileType.Valve();
+ case SupportedFileType.PAK: return new FileType.Valve();
case SupportedFileType.PKZIP: return new FileType.PKZIP();
case SupportedFileType.PLJ: return new FileType.PLJ();
case SupportedFileType.RAR: return new FileType.RAR();
case SupportedFileType.SevenZip: return new FileType.SevenZip();
case SupportedFileType.SFFS: return new FileType.SFFS();
+ case SupportedFileType.SGA: return new FileType.Valve();
case SupportedFileType.TapeArchive: return new FileType.TapeArchive();
case SupportedFileType.Textfile: return new FileType.Textfile();
- case SupportedFileType.Valve: return new FileType.Valve();
+ case SupportedFileType.VBSP: return new FileType.Valve();
+ case SupportedFileType.VPK: return new FileType.Valve();
+ case SupportedFileType.WAD: return new FileType.Valve();
case SupportedFileType.XZ: return new FileType.XZ();
+ case SupportedFileType.XZP: return new FileType.Valve();
default: return null;
}
}
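The detection above relies on a prefix match over nullable bytes. A self-contained stand-in for that extension, where a null entry acts as a wildcard (BurnOutSharp ships its own implementation; this one is for illustration only):

public static class MagicMatcher
{
    public static bool StartsWith(byte[] data, byte?[] prefix)
    {
        if (data == null || prefix == null || data.Length < prefix.Length)
            return false;

        for (int i = 0; i < prefix.Length; i++)
        {
            // A null prefix entry matches any byte at that position
            if (prefix[i] != null && data[i] != prefix[i].Value)
                return false;
        }

        return true;
    }
}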
diff --git a/HLLibSharp b/HLLibSharp
index 6e8ff528..90ba55ea 160000
--- a/HLLibSharp
+++ b/HLLibSharp
@@ -1 +1 @@
-Subproject commit 6e8ff528514b887c03ced893466985423a0b71bc
+Subproject commit 90ba55ea59c00516a2217a22e5fa871144da44a3