diff --git a/SabreTools.FileTypes/CHD/CHDFile.cs b/SabreTools.FileTypes/CHD/CHDFile.cs
index fbc5e857..f0ff75f1 100644
--- a/SabreTools.FileTypes/CHD/CHDFile.cs
+++ b/SabreTools.FileTypes/CHD/CHDFile.cs
@@ -13,22 +13,17 @@ namespace SabreTools.FileTypes.CHD
{
#region Private instance variables
- // Common header fields
- protected char[] tag = new char[8]; // 'MComprHD'
- protected uint length; // length of header (including tag and length fields)
- protected uint version; // drive format version
+ protected const string Signature = "MComprHD";
+
+ /// <summary>
+ /// Model representing the correct CHD header
+ /// </summary>
+ protected Models.CHD.Header? _header;
#endregion
#region Constructors
- /// <summary>
- /// Empty constructor
- /// </summary>
- public CHDFile()
- {
- }
-
/// <summary>
/// Create a new CHDFile from an input file
/// </summary>
@@ -47,13 +42,9 @@ namespace SabreTools.FileTypes.CHD
{
try
{
- // Read the standard CHD headers
- (char[] tag, uint length, uint version) = GetHeaderValues(chdstream);
- chdstream.SeekIfPossible(); // Seek back to start
-
// Validate that this is actually a valid CHD
- uint validatedVersion = ValidateHeader(tag, length, version);
- if (validatedVersion == 0)
+ uint version = ValidateHeader(chdstream);
+ if (version == 0)
return null;
// Read and return the current CHD
@@ -78,40 +69,26 @@ namespace SabreTools.FileTypes.CHD
#region Header Parsing
- /// <summary>
- /// Get the generic header values of a CHD, if possible
- /// </summary>
- /// <param name="stream"></param>
- /// <returns></returns>
- private static (char[] tag, uint length, uint version) GetHeaderValues(Stream stream)
- {
- char[] parsedTag = new char[8];
- uint parsedLength = 0;
- uint parsedVersion = 0;
-
-#if NET20 || NET35 || NET40
- using (BinaryReader br = new(stream, Encoding.Default))
-#else
- using (BinaryReader br = new(stream, Encoding.Default, true))
-#endif
- {
- parsedTag = br.ReadChars(8);
- parsedLength = br.ReadUInt32BigEndian();
- parsedVersion = br.ReadUInt32BigEndian();
- }
-
- return (parsedTag, parsedLength, parsedVersion);
- }
-
/// <summary>
/// Validate the header values
/// </summary>
/// <returns>Matching version, 0 if none</returns>
- private static uint ValidateHeader(char[] tag, uint length, uint version)
+ private static uint ValidateHeader(Stream stream)
{
- if (!string.Equals(new string(tag), "MComprHD", StringComparison.Ordinal))
+ // Read the header values
+ byte[] tagBytes = stream.ReadBytes(8);
+ string tag = Encoding.ASCII.GetString(tagBytes);
+ uint length = stream.ReadUInt32BigEndian();
+ uint version = stream.ReadUInt32BigEndian();
+
+ // Seek back to start
+ stream.SeekIfPossible();
+
+ // Check the signature
+ if (!string.Equals(tag, Signature, StringComparison.Ordinal))
return 0;
+ // Match the version to header length
return version switch
{
1 => length == CHDFileV1.HeaderSize ? version : 0,
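For reference, the switch above pairs each supported CHD version with the fixed header length declared by the matching subclass. A minimal sketch of the full mapping, using only the `HeaderSize` constants visible in the files below (the helper name is hypothetical):

```csharp
// Sketch: expected fixed header length per CHD version; values match
// the HeaderSize constants in CHDFileV1 through CHDFileV5.
private static uint? ExpectedHeaderSize(uint version) => version switch
{
    1 => 76,   // CHDFileV1.HeaderSize
    2 => 80,   // CHDFileV2.HeaderSize
    3 => 120,  // CHDFileV3.HeaderSize
    4 => 108,  // CHDFileV4.HeaderSize
    5 => 124,  // CHDFileV5.HeaderSize
    _ => null, // unknown or unsupported version
};
```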
diff --git a/SabreTools.FileTypes/CHD/CHDFileV1.cs b/SabreTools.FileTypes/CHD/CHDFileV1.cs
index 2f1bd20a..196a199e 100644
--- a/SabreTools.FileTypes/CHD/CHDFileV1.cs
+++ b/SabreTools.FileTypes/CHD/CHDFileV1.cs
@@ -1,7 +1,7 @@
-using System;
-using System.IO;
+using System.IO;
using System.Text;
using SabreTools.IO.Extensions;
+using SabreTools.Models.CHD;
namespace SabreTools.FileTypes.CHD
{
@@ -10,78 +10,39 @@ namespace SabreTools.FileTypes.CHD
/// </summary>
public class CHDFileV1 : CHDFile
{
- /// <summary>
- /// CHD flags
- /// </summary>
- [Flags]
- public enum Flags : uint
- {
- DriveHasParent = 0x00000001,
- DriveAllowsWrites = 0x00000002,
- }
-
- /// <summary>
- /// Compression being used in CHD
- /// </summary>
- public enum Compression : uint
- {
- CHDCOMPRESSION_NONE = 0,
- CHDCOMPRESSION_ZLIB = 1,
- }
-
- /// <summary>
- /// Map format
- /// </summary>
- public class Map
- {
- public ulong offset; // 44; starting offset within the file
- public ulong length; // 20; length of data; if == hunksize, data is uncompressed
- }
-
public const int HeaderSize = 76;
- public const uint Version = 1;
-
- // V1-specific header values
- public Flags flags; // flags (see above)
- public Compression compression; // compression type
- public uint hunksize; // 512-byte sectors per hunk
- public uint totalhunks; // total # of hunks represented
- public uint cylinders; // number of cylinders on hard disk
- public uint heads; // number of heads on hard disk
- public uint sectors; // number of sectors on hard disk
- public byte[] md5 = new byte[16]; // MD5 checksum of raw data
- public byte[] parentmd5 = new byte[16]; // MD5 checksum of parent file
/// <summary>
/// Parse and validate the header as if it's V1
/// </summary>
- public static CHDFileV1 Deserialize(Stream stream)
+ public static CHDFileV1? Deserialize(Stream stream)
{
- CHDFileV1 chd = new();
+ var header = new HeaderV1();
-#if NET20 || NET35 || NET40
- using (BinaryReader br = new(stream, Encoding.Default))
-#else
- using (BinaryReader br = new(stream, Encoding.Default, true))
-#endif
- {
- chd.tag = br.ReadChars(8);
- chd.length = br.ReadUInt32BigEndian();
- chd.version = br.ReadUInt32BigEndian();
- chd.flags = (Flags)br.ReadUInt32BigEndian();
- chd.compression = (Compression)br.ReadUInt32BigEndian();
- chd.hunksize = br.ReadUInt32BigEndian();
- chd.totalhunks = br.ReadUInt32BigEndian();
- chd.cylinders = br.ReadUInt32BigEndian();
- chd.heads = br.ReadUInt32BigEndian();
- chd.sectors = br.ReadUInt32BigEndian();
- chd.md5 = br.ReadBytes(16);
- chd.parentmd5 = br.ReadBytes(16);
+ byte[] tagBytes = stream.ReadBytes(8);
+ header.Tag = Encoding.ASCII.GetString(tagBytes);
+ if (header.Tag != Signature)
+ return null;
- chd.MD5 = chd.md5;
- }
+ header.Length = stream.ReadUInt32BigEndian();
+ if (header.Length != HeaderSize)
+ return null;
- return chd;
+ header.Version = stream.ReadUInt32BigEndian();
+ header.Flags = (Flags)stream.ReadUInt32BigEndian();
+ header.Compression = (CompressionType)stream.ReadUInt32BigEndian();
+ if (header.Compression > CompressionType.CHDCOMPRESSION_ZLIB)
+ return null;
+
+ header.HunkSize = stream.ReadUInt32BigEndian();
+ header.TotalHunks = stream.ReadUInt32BigEndian();
+ header.Cylinders = stream.ReadUInt32BigEndian();
+ header.Heads = stream.ReadUInt32BigEndian();
+ header.Sectors = stream.ReadUInt32BigEndian();
+ header.MD5 = stream.ReadBytes(16);
+ header.ParentMD5 = stream.ReadBytes(16);
+
+ return new CHDFileV1 { _header = header, MD5 = header.MD5 };
}
/// <summary>
@@ -89,7 +50,7 @@ namespace SabreTools.FileTypes.CHD
/// </summary>
public override byte[] GetHash()
{
- return md5;
+ return (_header as HeaderV1)?.MD5 ?? [];
}
}
}
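The Deserialize methods lean on SabreTools.IO.Extensions helpers such as `ReadUInt32BigEndian`; CHD headers store all multi-byte integers big-endian. As a hedged stand-in for what such a helper is assumed to do (not the library's actual implementation):

```csharp
using System.IO;

// Hypothetical stand-in for a big-endian uint read; the real
// SabreTools.IO.Extensions implementation may differ.
static uint ReadUInt32BE(Stream stream)
{
    byte[] buf = new byte[4];
    if (stream.Read(buf, 0, 4) != 4)
        throw new EndOfStreamException();

    // Most-significant byte first, per the CHD header layout
    return ((uint)buf[0] << 24) | ((uint)buf[1] << 16)
         | ((uint)buf[2] << 8) | buf[3];
}
```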
diff --git a/SabreTools.FileTypes/CHD/CHDFileV2.cs b/SabreTools.FileTypes/CHD/CHDFileV2.cs
index b1dea7ca..e8ab88f3 100644
--- a/SabreTools.FileTypes/CHD/CHDFileV2.cs
+++ b/SabreTools.FileTypes/CHD/CHDFileV2.cs
@@ -1,7 +1,7 @@
-using System;
-using System.IO;
+using System.IO;
using System.Text;
using SabreTools.IO.Extensions;
+using SabreTools.Models.CHD;
namespace SabreTools.FileTypes.CHD
{
@@ -10,80 +10,40 @@ namespace SabreTools.FileTypes.CHD
/// </summary>
public class CHDFileV2 : CHDFile
{
- /// <summary>
- /// CHD flags
- /// </summary>
- [Flags]
- public enum Flags : uint
- {
- DriveHasParent = 0x00000001,
- DriveAllowsWrites = 0x00000002,
- }
-
- /// <summary>
- /// Compression being used in CHD
- /// </summary>
- public enum Compression : uint
- {
- CHDCOMPRESSION_NONE = 0,
- CHDCOMPRESSION_ZLIB = 1,
- }
-
- /// <summary>
- /// Map format
- /// </summary>
- public class Map
- {
- public ulong offset; // 44; starting offset within the file
- public ulong length; // 20; length of data; if == hunksize, data is uncompressed
- }
-
public const int HeaderSize = 80;
- public const uint Version = 2;
-
- // V2-specific header values
- public Flags flags; // flags (see above)
- public Compression compression; // compression type
- public uint hunksize; // 512-byte sectors per hunk
- public uint totalhunks; // total # of hunks represented
- public uint cylinders; // number of cylinders on hard disk
- public uint heads; // number of heads on hard disk
- public uint sectors; // number of sectors on hard disk
- public byte[] md5 = new byte[16]; // MD5 checksum of raw data
- public byte[] parentmd5 = new byte[16]; // MD5 checksum of parent file
- public uint seclen; // number of bytes per sector
/// <summary>
/// Parse and validate the header as if it's V2
/// </summary>
- public static CHDFileV2 Deserialize(Stream stream)
+ public static CHDFileV2? Deserialize(Stream stream)
{
- CHDFileV2 chd = new();
+ var header = new HeaderV2();
-#if NET20 || NET35 || NET40
- using (var br = new BinaryReader(stream, Encoding.Default))
-#else
- using (var br = new BinaryReader(stream, Encoding.Default, true))
-#endif
- {
- chd.tag = br.ReadChars(8);
- chd.length = br.ReadUInt32BigEndian();
- chd.version = br.ReadUInt32BigEndian();
- chd.flags = (Flags)br.ReadUInt32BigEndian();
- chd.compression = (Compression)br.ReadUInt32BigEndian();
- chd.hunksize = br.ReadUInt32BigEndian();
- chd.totalhunks = br.ReadUInt32BigEndian();
- chd.cylinders = br.ReadUInt32BigEndian();
- chd.heads = br.ReadUInt32BigEndian();
- chd.sectors = br.ReadUInt32BigEndian();
- chd.md5 = br.ReadBytes(16);
- chd.parentmd5 = br.ReadBytes(16);
- chd.seclen = br.ReadUInt32BigEndian();
+ byte[] tagBytes = stream.ReadBytes(8);
+ header.Tag = Encoding.ASCII.GetString(tagBytes);
+ if (header.Tag != Signature)
+ return null;
- chd.MD5 = chd.md5;
- }
+ header.Length = stream.ReadUInt32BigEndian();
+ if (header.Length != HeaderSize)
+ return null;
- return chd;
+ header.Version = stream.ReadUInt32BigEndian();
+ header.Flags = (Flags)stream.ReadUInt32BigEndian();
+ header.Compression = (CompressionType)stream.ReadUInt32BigEndian();
+ if (header.Compression > CompressionType.CHDCOMPRESSION_ZLIB)
+ return null;
+
+ header.HunkSize = stream.ReadUInt32BigEndian();
+ header.TotalHunks = stream.ReadUInt32BigEndian();
+ header.Cylinders = stream.ReadUInt32BigEndian();
+ header.Heads = stream.ReadUInt32BigEndian();
+ header.Sectors = stream.ReadUInt32BigEndian();
+ header.MD5 = stream.ReadBytes(16);
+ header.ParentMD5 = stream.ReadBytes(16);
+ header.BytesPerSector = stream.ReadUInt32BigEndian();
+
+ return new CHDFileV2 { _header = header, MD5 = header.MD5 };
}
/// <summary>
@@ -91,7 +51,7 @@ namespace SabreTools.FileTypes.CHD
/// </summary>
public override byte[] GetHash()
{
- return md5;
+ return (_header as HeaderV2)?.MD5 ?? [];
}
}
}
diff --git a/SabreTools.FileTypes/CHD/CHDFileV3.cs b/SabreTools.FileTypes/CHD/CHDFileV3.cs
index 499d505d..3448095e 100644
--- a/SabreTools.FileTypes/CHD/CHDFileV3.cs
+++ b/SabreTools.FileTypes/CHD/CHDFileV3.cs
@@ -1,7 +1,7 @@
-using System;
-using System.IO;
+using System.IO;
using System.Text;
using SabreTools.IO.Extensions;
+using SabreTools.Models.CHD;
namespace SabreTools.FileTypes.CHD
{
@@ -10,85 +10,40 @@ namespace SabreTools.FileTypes.CHD
/// </summary>
public class CHDFileV3 : CHDFile
{
- /// <summary>
- /// CHD flags
- /// </summary>
- [Flags]
- public enum Flags : uint
- {
- DriveHasParent = 0x00000001,
- DriveAllowsWrites = 0x00000002,
- }
-
- /// <summary>
- /// Compression being used in CHD
- /// </summary>
- public enum Compression : uint
- {
- CHDCOMPRESSION_NONE = 0,
- CHDCOMPRESSION_ZLIB = 1,
- CHDCOMPRESSION_ZLIB_PLUS = 2,
- }
-
- /// <summary>
- /// Map format
- /// </summary>
- public class Map
- {
- public ulong offset; // starting offset within the file
- public uint crc32; // 32-bit CRC of the uncompressed data
- public ushort length_lo; // lower 16 bits of length
- public byte length_hi; // upper 8 bits of length
- public byte flags; // flags, indicating compression info
- }
-
public const int HeaderSize = 120;
- public const uint Version = 3;
-
- // V3-specific header values
- public Flags flags; // flags (see above)
- public Compression compression; // compression type
- public uint totalhunks; // total # of hunks represented
- public ulong logicalbytes; // logical size of the data (in bytes)
- public ulong metaoffset; // offset to the first blob of metadata
- public byte[] md5 = new byte[16]; // MD5 checksum of raw data
- public byte[] parentmd5 = new byte[16]; // MD5 checksum of parent file
- public uint hunkbytes; // number of bytes per hunk
- public byte[] sha1 = new byte[20]; // SHA1 checksum of raw data
- public byte[] parentsha1 = new byte[20]; // SHA1 checksum of parent file
/// <summary>
/// Parse and validate the header as if it's V3
/// </summary>
- public static CHDFileV3 Deserialize(Stream stream)
+ public static CHDFileV3? Deserialize(Stream stream)
{
- CHDFileV3 chd = new();
+ var header = new HeaderV3();
-#if NET20 || NET35 || NET40
- using (var br = new BinaryReader(stream, Encoding.Default))
-#else
- using (var br = new BinaryReader(stream, Encoding.Default, true))
-#endif
- {
- chd.tag = br.ReadChars(8);
- chd.length = br.ReadUInt32BigEndian();
- chd.version = br.ReadUInt32BigEndian();
- chd.flags = (Flags)br.ReadUInt32BigEndian();
- chd.compression = (Compression)br.ReadUInt32BigEndian();
- chd.totalhunks = br.ReadUInt32BigEndian();
- chd.logicalbytes = br.ReadUInt64BigEndian();
- chd.metaoffset = br.ReadUInt64BigEndian();
- chd.md5 = br.ReadBytes(16);
- chd.parentmd5 = br.ReadBytes(16);
- chd.hunkbytes = br.ReadUInt32BigEndian();
- chd.sha1 = br.ReadBytes(20);
- chd.parentsha1 = br.ReadBytes(20);
+ byte[] tagBytes = stream.ReadBytes(8);
+ header.Tag = Encoding.ASCII.GetString(tagBytes);
+ if (header.Tag != Signature)
+ return null;
- chd.MD5 = chd.md5;
- chd.SHA1 = chd.sha1;
- }
+ header.Length = stream.ReadUInt32BigEndian();
+ if (header.Length != HeaderSize)
+ return null;
- return chd;
+ header.Version = stream.ReadUInt32BigEndian();
+ header.Flags = (Flags)stream.ReadUInt32BigEndian();
+ header.Compression = (CompressionType)stream.ReadUInt32BigEndian();
+ if (header.Compression > CompressionType.CHDCOMPRESSION_ZLIB_PLUS)
+ return null;
+
+ header.TotalHunks = stream.ReadUInt32BigEndian();
+ header.LogicalBytes = stream.ReadUInt64BigEndian();
+ header.MetaOffset = stream.ReadUInt64BigEndian();
+ header.MD5 = stream.ReadBytes(16);
+ header.ParentMD5 = stream.ReadBytes(16);
+ header.HunkBytes = stream.ReadUInt32BigEndian();
+ header.SHA1 = stream.ReadBytes(20);
+ header.ParentSHA1 = stream.ReadBytes(20);
+
+ return new CHDFileV3 { _header = header, MD5 = header.MD5, SHA1 = header.SHA1 };
}
/// <summary>
@@ -96,7 +51,7 @@ namespace SabreTools.FileTypes.CHD
/// </summary>
public override byte[] GetHash()
{
- return sha1;
+ return (_header as HeaderV3)?.SHA1 ?? [];
}
}
}
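V3 is where SHA-1 enters the header, and the refactored GetHash keeps returning each version's best checksum: MD5 for V1/V2, SHA-1 from V3 onward. A hypothetical usage sketch, assuming the static factory shown in CHDFile.cs is named `Create` and is the public entry point:

```csharp
using System.IO;

// Hypothetical caller; "image.chd" and the Create name are assumptions.
using var fs = File.OpenRead("image.chd");
CHDFile? chd = CHDFile.Create(fs); // null if the header fails validation
if (chd != null)
{
    // MD5 for V1/V2 headers, SHA-1 for V3 and later
    byte[] hash = chd.GetHash();
}
```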
diff --git a/SabreTools.FileTypes/CHD/CHDFileV4.cs b/SabreTools.FileTypes/CHD/CHDFileV4.cs
index 3caf50ee..a3324d35 100644
--- a/SabreTools.FileTypes/CHD/CHDFileV4.cs
+++ b/SabreTools.FileTypes/CHD/CHDFileV4.cs
@@ -1,7 +1,7 @@
-using System;
-using System.IO;
+using System.IO;
using System.Text;
using SabreTools.IO.Extensions;
+using SabreTools.Models.CHD;
namespace SabreTools.FileTypes.CHD
{
@@ -10,83 +10,39 @@ namespace SabreTools.FileTypes.CHD
/// </summary>
public class CHDFileV4 : CHDFile
{
- /// <summary>
- /// CHD flags
- /// </summary>
- [Flags]
- public enum Flags : uint
- {
- DriveHasParent = 0x00000001,
- DriveAllowsWrites = 0x00000002,
- }
-
- /// <summary>
- /// Compression being used in CHD
- /// </summary>
- public enum Compression : uint
- {
- CHDCOMPRESSION_NONE = 0,
- CHDCOMPRESSION_ZLIB = 1,
- CHDCOMPRESSION_ZLIB_PLUS = 2,
- CHDCOMPRESSION_AV = 3,
- }
-
- /// <summary>
- /// Map format
- /// </summary>
- public class Map
- {
- public ulong offset; // starting offset within the file
- public uint crc32; // 32-bit CRC of the uncompressed data
- public ushort length_lo; // lower 16 bits of length
- public byte length_hi; // upper 8 bits of length
- public byte flags; // flags, indicating compression info
- }
-
public const int HeaderSize = 108;
- public const uint Version = 4;
-
- // V4-specific header values
- public Flags flags; // flags (see above)
- public Compression compression; // compression type
- public uint totalhunks; // total # of hunks represented
- public ulong logicalbytes; // logical size of the data (in bytes)
- public ulong metaoffset; // offset to the first blob of metadata
- public uint hunkbytes; // number of bytes per hunk
- public byte[] sha1 = new byte[20]; // combined raw+meta SHA1
- public byte[] parentsha1 = new byte[20]; // combined raw+meta SHA1 of parent
- public byte[] rawsha1 = new byte[20]; // raw data SHA1
/// <summary>
/// Parse and validate the header as if it's V4
/// </summary>
- public static CHDFileV4 Deserialize(Stream stream)
+ public static CHDFileV4? Deserialize(Stream stream)
{
- CHDFileV4 chd = new();
+ var header = new HeaderV4();
-#if NET20 || NET35 || NET40
- using (BinaryReader br = new(stream, Encoding.Default))
-#else
- using (BinaryReader br = new(stream, Encoding.Default, true))
-#endif
- {
- chd.tag = br.ReadChars(8);
- chd.length = br.ReadUInt32BigEndian();
- chd.version = br.ReadUInt32BigEndian();
- chd.flags = (Flags)br.ReadUInt32BigEndian();
- chd.compression = (Compression)br.ReadUInt32BigEndian();
- chd.totalhunks = br.ReadUInt32BigEndian();
- chd.logicalbytes = br.ReadUInt64BigEndian();
- chd.metaoffset = br.ReadUInt64BigEndian();
- chd.hunkbytes = br.ReadUInt32BigEndian();
- chd.sha1 = br.ReadBytes(20);
- chd.parentsha1 = br.ReadBytes(20);
- chd.rawsha1 = br.ReadBytes(20);
+ byte[] tagBytes = stream.ReadBytes(8);
+ header.Tag = Encoding.ASCII.GetString(tagBytes);
+ if (header.Tag != Signature)
+ return null;
- chd.SHA1 = chd.sha1;
- }
+ header.Length = stream.ReadUInt32BigEndian();
+ if (header.Length != HeaderSize)
+ return null;
- return chd;
+ header.Version = stream.ReadUInt32BigEndian();
+ header.Flags = (Flags)stream.ReadUInt32BigEndian();
+ header.Compression = (CompressionType)stream.ReadUInt32BigEndian();
+ if (header.Compression > CompressionType.CHDCOMPRESSION_AV)
+ return null;
+
+ header.TotalHunks = stream.ReadUInt32BigEndian();
+ header.LogicalBytes = stream.ReadUInt64BigEndian();
+ header.MetaOffset = stream.ReadUInt64BigEndian();
+ header.HunkBytes = stream.ReadUInt32BigEndian();
+ header.SHA1 = stream.ReadBytes(20);
+ header.ParentSHA1 = stream.ReadBytes(20);
+ header.RawSHA1 = stream.ReadBytes(20);
+
+ return new CHDFileV4 { _header = header, SHA1 = header.SHA1 };
}
/// <summary>
@@ -94,7 +50,7 @@ namespace SabreTools.FileTypes.CHD
/// </summary>
public override byte[] GetHash()
{
- return sha1;
+ return (_header as HeaderV4)?.SHA1 ?? [];
}
}
}
diff --git a/SabreTools.FileTypes/CHD/CHDFileV5.cs b/SabreTools.FileTypes/CHD/CHDFileV5.cs
index a49b3b17..43060fae 100644
--- a/SabreTools.FileTypes/CHD/CHDFileV5.cs
+++ b/SabreTools.FileTypes/CHD/CHDFileV5.cs
@@ -1,6 +1,7 @@
using System.IO;
using System.Text;
using SabreTools.IO.Extensions;
+using SabreTools.Models.CHD;
namespace SabreTools.FileTypes.CHD
{
@@ -9,87 +10,41 @@ namespace SabreTools.FileTypes.CHD
/// </summary>
public class CHDFileV5 : CHDFile
{
- /// <summary>
- /// Uncompressed map format
- /// </summary>
- private class UncompressedMap
- {
- public uint offset; // starting offset within the file
- }
-
- /// <summary>
- /// Compressed map header format
- /// </summary>
- private class CompressedMapHeader
- {
- public uint length; // length of compressed map
- public byte[] datastart = new byte[12]; // UINT48; offset of first block
- public ushort crc; // crc-16 of the map
- public byte lengthbits; // bits used to encode complength
- public byte hunkbits; // bits used to encode self-refs
- public byte parentunitbits; // bits used to encode parent unit refs
- public byte reserved; // future use
- }
-
- /// <summary>
- /// Compressed map entry format
- /// </summary>
- private class CompressedMapEntry
- {
- public byte compression; // compression type
- public byte[] complength = new byte[6]; // UINT24; compressed length
- public byte[] offset = new byte[12]; // UINT48; offset
- public ushort crc; // crc-16 of the data
- }
-
public const int HeaderSize = 124;
- public const uint Version = 5;
-
- // V5-specific header values
- public uint[] compressors = new uint[4]; // which custom compressors are used?
- public ulong logicalbytes; // logical size of the data (in bytes)
- public ulong mapoffset; // offset to the map
- public ulong metaoffset; // offset to the first blob of metadata
- public uint hunkbytes; // number of bytes per hunk
- public uint unitbytes; // number of bytes per unit within each hunk
- public byte[] rawsha1 = new byte[20]; // raw data SHA1
- public byte[] sha1 = new byte[20]; // combined raw+meta SHA1
- public byte[] parentsha1 = new byte[20]; // combined raw+meta SHA1 of parent
/// <summary>
/// Parse and validate the header as if it's V5
/// </summary>
- public static CHDFileV5 Deserialize(Stream stream)
+ public static CHDFileV5? Deserialize(Stream stream)
{
- CHDFileV5 chd = new CHDFileV5();
+ var header = new HeaderV5();
-#if NET20 || NET35 || NET40
- using (BinaryReader br = new BinaryReader(stream, Encoding.Default))
-#else
- using (BinaryReader br = new BinaryReader(stream, Encoding.Default, true))
-#endif
+ byte[] tagBytes = stream.ReadBytes(8);
+ header.Tag = Encoding.ASCII.GetString(tagBytes);
+ if (header.Tag != Signature)
+ return null;
+
+ header.Length = stream.ReadUInt32BigEndian();
+ if (header.Length != HeaderSize)
+ return null;
+
+ header.Version = stream.ReadUInt32BigEndian();
+ header.Compressors = new uint[4];
+ for (int i = 0; i < header.Compressors.Length; i++)
{
- chd.tag = br.ReadChars(8);
- chd.length = br.ReadUInt32BigEndian();
- chd.version = br.ReadUInt32BigEndian();
- chd.compressors = new uint[4];
- for (int i = 0; i < 4; i++)
- {
- chd.compressors[i] = br.ReadUInt32BigEndian();
- }
- chd.logicalbytes = br.ReadUInt64BigEndian();
- chd.mapoffset = br.ReadUInt64BigEndian();
- chd.metaoffset = br.ReadUInt64BigEndian();
- chd.hunkbytes = br.ReadUInt32BigEndian();
- chd.unitbytes = br.ReadUInt32BigEndian();
- chd.rawsha1 = br.ReadBytes(20);
- chd.sha1 = br.ReadBytes(20);
- chd.parentsha1 = br.ReadBytes(20);
-
- chd.SHA1 = chd.sha1;
+ header.Compressors[i] = stream.ReadUInt32BigEndian();
}
- return chd;
+ header.LogicalBytes = stream.ReadUInt64BigEndian();
+ header.MapOffset = stream.ReadUInt64BigEndian();
+ header.MetaOffset = stream.ReadUInt64BigEndian();
+ header.HunkBytes = stream.ReadUInt32BigEndian();
+ header.UnitBytes = stream.ReadUInt32BigEndian();
+ header.RawSHA1 = stream.ReadBytes(20);
+ header.SHA1 = stream.ReadBytes(20);
+ header.ParentSHA1 = stream.ReadBytes(20);
+
+ return new CHDFileV5 { _header = header, SHA1 = header.SHA1 };
}
/// <summary>
@@ -97,7 +52,7 @@ namespace SabreTools.FileTypes.CHD
/// </summary>
public override byte[] GetHash()
{
- return sha1;
+ return (_header as HeaderV5)?.SHA1 ?? [];
}
}
}
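Unlike earlier versions, the V5 header carries up to four compressor slots instead of a single compression enum; in MAME-produced CHDs these are big-endian FourCC codes such as "zlib" or "lzma". A hedged display helper, assuming printable-ASCII codes and 0 for an unused slot:

```csharp
// Hypothetical helper for rendering one V5 compressor slot; assumes
// each slot holds a big-endian FourCC code and 0 means unused.
static string CompressorToFourCC(uint code)
{
    if (code == 0)
        return "(none)";

    return new string(new[]
    {
        (char)((code >> 24) & 0xFF),
        (char)((code >> 16) & 0xFF),
        (char)((code >> 8) & 0xFF),
        (char)(code & 0xFF),
    });
}
```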