Use CHD models from library

Matt Nadareski
2024-10-20 00:03:29 -04:00
parent 4d5ac92125
commit ab93ba406c
6 changed files with 158 additions and 394 deletions

CHDFile.cs

@@ -13,22 +13,17 @@ namespace SabreTools.FileTypes.CHD
     {
         #region Private instance variables
 
-        // Common header fields
-        protected char[] tag = new char[8];  // 'MComprHD'
-        protected uint length;               // length of header (including tag and length fields)
-        protected uint version;              // drive format version
+        protected const string Signature = "MComprHD";
+
+        /// <summary>
+        /// Model representing the correct CHD header
+        /// </summary>
+        protected Models.CHD.Header? _header;
 
         #endregion
 
         #region Constructors
 
-        /// <summary>
-        /// Empty constructor
-        /// </summary>
-        public CHDFile()
-        {
-        }
-
         /// <summary>
         /// Create a new CHDFile from an input file
         /// </summary>
@@ -47,13 +42,9 @@ namespace SabreTools.FileTypes.CHD
         {
             try
             {
-                // Read the standard CHD headers
-                (char[] tag, uint length, uint version) = GetHeaderValues(chdstream);
-                chdstream.SeekIfPossible(); // Seek back to start
-
                 // Validate that this is actually a valid CHD
-                uint validatedVersion = ValidateHeader(tag, length, version);
-                if (validatedVersion == 0)
+                uint version = ValidateHeader(chdstream);
+                if (version == 0)
                     return null;
 
                 // Read and return the current CHD
@@ -78,40 +69,26 @@ namespace SabreTools.FileTypes.CHD
 
         #region Header Parsing
 
-        /// <summary>
-        /// Get the generic header values of a CHD, if possible
-        /// </summary>
-        /// <param name="stream"></param>
-        /// <returns></returns>
-        private static (char[] tag, uint length, uint version) GetHeaderValues(Stream stream)
-        {
-            char[] parsedTag = new char[8];
-            uint parsedLength = 0;
-            uint parsedVersion = 0;
-
-#if NET20 || NET35 || NET40
-            using (BinaryReader br = new(stream, Encoding.Default))
-#else
-            using (BinaryReader br = new(stream, Encoding.Default, true))
-#endif
-            {
-                parsedTag = br.ReadChars(8);
-                parsedLength = br.ReadUInt32BigEndian();
-                parsedVersion = br.ReadUInt32BigEndian();
-            }
-
-            return (parsedTag, parsedLength, parsedVersion);
-        }
-
         /// <summary>
         /// Validate the header values
         /// </summary>
         /// <returns>Matching version, 0 if none</returns>
-        private static uint ValidateHeader(char[] tag, uint length, uint version)
+        private static uint ValidateHeader(Stream stream)
         {
-            if (!string.Equals(new string(tag), "MComprHD", StringComparison.Ordinal))
+            // Read the header values
+            byte[] tagBytes = stream.ReadBytes(8);
+            string tag = Encoding.ASCII.GetString(tagBytes);
+            uint length = stream.ReadUInt32BigEndian();
+            uint version = stream.ReadUInt32BigEndian();
+
+            // Seek back to start
+            stream.SeekIfPossible();
+
+            // Check the signature
+            if (!string.Equals(tag, Signature, StringComparison.Ordinal))
                 return 0;
 
+            // Match the version to header length
             return version switch
            {
                 1 => length == CHDFileV1.HeaderSize ? version : 0,
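
Note: the hunk above is truncated inside the version dispatch. A minimal sketch of how that switch presumably continues, extrapolated from the V1 arm using the per-version HeaderSize constants visible in the files below (arms 2 through 5 and the helper name MatchVersionToLength are assumptions for illustration, not code from this commit):

    // Hypothetical helper illustrating the version-to-length dispatch
    private static uint MatchVersionToLength(uint version, uint length) => version switch
    {
        1 => length == CHDFileV1.HeaderSize ? version : 0, // 76-byte header
        2 => length == CHDFileV2.HeaderSize ? version : 0, // 80-byte header
        3 => length == CHDFileV3.HeaderSize ? version : 0, // 120-byte header
        4 => length == CHDFileV4.HeaderSize ? version : 0, // 108-byte header
        5 => length == CHDFileV5.HeaderSize ? version : 0, // 124-byte header
        _ => 0, // unknown or unsupported version
    };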

CHDFileV1.cs

@@ -1,7 +1,7 @@
-using System;
 using System.IO;
 using System.Text;
 using SabreTools.IO.Extensions;
+using SabreTools.Models.CHD;
 
 namespace SabreTools.FileTypes.CHD
 {
@@ -10,78 +10,39 @@ namespace SabreTools.FileTypes.CHD
     /// </summary>
     public class CHDFileV1 : CHDFile
     {
-        /// <summary>
-        /// CHD flags
-        /// </summary>
-        [Flags]
-        public enum Flags : uint
-        {
-            DriveHasParent = 0x00000001,
-            DriveAllowsWrites = 0x00000002,
-        }
-
-        /// <summary>
-        /// Compression being used in CHD
-        /// </summary>
-        public enum Compression : uint
-        {
-            CHDCOMPRESSION_NONE = 0,
-            CHDCOMPRESSION_ZLIB = 1,
-        }
-
-        /// <summary>
-        /// Map format
-        /// </summary>
-        public class Map
-        {
-            public ulong offset;  // 44; starting offset within the file
-            public ulong length;  // 20; length of data; if == hunksize, data is uncompressed
-        }
-
         public const int HeaderSize = 76;
-        public const uint Version = 1;
-
-        // V1-specific header values
-        public Flags flags;                      // flags (see above)
-        public Compression compression;          // compression type
-        public uint hunksize;                    // 512-byte sectors per hunk
-        public uint totalhunks;                  // total # of hunks represented
-        public uint cylinders;                   // number of cylinders on hard disk
-        public uint heads;                       // number of heads on hard disk
-        public uint sectors;                     // number of sectors on hard disk
-        public byte[] md5 = new byte[16];        // MD5 checksum of raw data
-        public byte[] parentmd5 = new byte[16];  // MD5 checksum of parent file
 
         /// <summary>
         /// Parse and validate the header as if it's V1
         /// </summary>
-        public static CHDFileV1 Deserialize(Stream stream)
+        public static CHDFileV1? Deserialize(Stream stream)
         {
-            CHDFileV1 chd = new();
-
-#if NET20 || NET35 || NET40
-            using (BinaryReader br = new(stream, Encoding.Default))
-#else
-            using (BinaryReader br = new(stream, Encoding.Default, true))
-#endif
-            {
-                chd.tag = br.ReadChars(8);
-                chd.length = br.ReadUInt32BigEndian();
-                chd.version = br.ReadUInt32BigEndian();
-                chd.flags = (Flags)br.ReadUInt32BigEndian();
-                chd.compression = (Compression)br.ReadUInt32BigEndian();
-                chd.hunksize = br.ReadUInt32BigEndian();
-                chd.totalhunks = br.ReadUInt32BigEndian();
-                chd.cylinders = br.ReadUInt32BigEndian();
-                chd.heads = br.ReadUInt32BigEndian();
-                chd.sectors = br.ReadUInt32BigEndian();
-                chd.md5 = br.ReadBytes(16);
-                chd.parentmd5 = br.ReadBytes(16);
-
-                chd.MD5 = chd.md5;
-            }
-
-            return chd;
+            var header = new HeaderV1();
+
+            byte[] tagBytes = stream.ReadBytes(8);
+            header.Tag = Encoding.ASCII.GetString(tagBytes);
+            if (header.Tag != Signature)
+                return null;
+
+            header.Length = stream.ReadUInt32BigEndian();
+            if (header.Length != HeaderSize)
+                return null;
+
+            header.Version = stream.ReadUInt32BigEndian();
+            header.Flags = (Flags)stream.ReadUInt32BigEndian();
+            header.Compression = (CompressionType)stream.ReadUInt32BigEndian();
+            if (header.Compression > CompressionType.CHDCOMPRESSION_ZLIB)
+                return null;
+
+            header.HunkSize = stream.ReadUInt32BigEndian();
+            header.TotalHunks = stream.ReadUInt32BigEndian();
+            header.Cylinders = stream.ReadUInt32BigEndian();
+            header.Heads = stream.ReadUInt32BigEndian();
+            header.Sectors = stream.ReadUInt32BigEndian();
+            header.MD5 = stream.ReadBytes(16);
+            header.ParentMD5 = stream.ReadBytes(16);
+
+            return new CHDFileV1 { _header = header, MD5 = header.MD5 };
         }
@@ -89,7 +50,7 @@ namespace SabreTools.FileTypes.CHD
         /// </summary>
         public override byte[] GetHash()
         {
-            return md5;
+            return (_header as HeaderV1)?.MD5 ?? [];
         }
     }
 }
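
Note: the 76-byte size checked against header.Length is exactly the sum of the fields read here: 8 (tag) + 4 (length) + 4 (version) + 4 (flags) + 4 (compression) + 4 (hunk size) + 4 (total hunks) + 4 (cylinders) + 4 (heads) + 4 (sectors) + 16 (MD5) + 16 (parent MD5) = 76. A short usage sketch of the new nullable Deserialize; the input path and class name V1Example are hypothetical:

    using System;
    using System.IO;
    using SabreTools.FileTypes.CHD;

    class V1Example
    {
        static void Main()
        {
            // Hypothetical input; Deserialize now fails soft with null
            using (FileStream fs = File.OpenRead("disk.chd"))
            {
                CHDFileV1? chd = CHDFileV1.Deserialize(fs);
                if (chd == null)
                {
                    Console.WriteLine("Not a valid V1 CHD header");
                    return;
                }

                // For V1, GetHash returns the 16-byte raw-data MD5
                byte[] md5 = chd.GetHash();
                Console.WriteLine(BitConverter.ToString(md5).Replace("-", string.Empty));
            }
        }
    }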

CHDFileV2.cs

@@ -1,7 +1,7 @@
-using System;
 using System.IO;
 using System.Text;
 using SabreTools.IO.Extensions;
+using SabreTools.Models.CHD;
 
 namespace SabreTools.FileTypes.CHD
 {
@@ -10,80 +10,40 @@ namespace SabreTools.FileTypes.CHD
     /// </summary>
     public class CHDFileV2 : CHDFile
     {
-        /// <summary>
-        /// CHD flags
-        /// </summary>
-        [Flags]
-        public enum Flags : uint
-        {
-            DriveHasParent = 0x00000001,
-            DriveAllowsWrites = 0x00000002,
-        }
-
-        /// <summary>
-        /// Compression being used in CHD
-        /// </summary>
-        public enum Compression : uint
-        {
-            CHDCOMPRESSION_NONE = 0,
-            CHDCOMPRESSION_ZLIB = 1,
-        }
-
-        /// <summary>
-        /// Map format
-        /// </summary>
-        public class Map
-        {
-            public ulong offset;  // 44; starting offset within the file
-            public ulong length;  // 20; length of data; if == hunksize, data is uncompressed
-        }
-
         public const int HeaderSize = 80;
-        public const uint Version = 2;
-
-        // V2-specific header values
-        public Flags flags;                      // flags (see above)
-        public Compression compression;          // compression type
-        public uint hunksize;                    // 512-byte sectors per hunk
-        public uint totalhunks;                  // total # of hunks represented
-        public uint cylinders;                   // number of cylinders on hard disk
-        public uint heads;                       // number of heads on hard disk
-        public uint sectors;                     // number of sectors on hard disk
-        public byte[] md5 = new byte[16];        // MD5 checksum of raw data
-        public byte[] parentmd5 = new byte[16];  // MD5 checksum of parent file
-        public uint seclen;                      // number of bytes per sector
 
         /// <summary>
         /// Parse and validate the header as if it's V2
         /// </summary>
-        public static CHDFileV2 Deserialize(Stream stream)
+        public static CHDFileV2? Deserialize(Stream stream)
         {
-            CHDFileV2 chd = new();
-
-#if NET20 || NET35 || NET40
-            using (var br = new BinaryReader(stream, Encoding.Default))
-#else
-            using (var br = new BinaryReader(stream, Encoding.Default, true))
-#endif
-            {
-                chd.tag = br.ReadChars(8);
-                chd.length = br.ReadUInt32BigEndian();
-                chd.version = br.ReadUInt32BigEndian();
-                chd.flags = (Flags)br.ReadUInt32BigEndian();
-                chd.compression = (Compression)br.ReadUInt32BigEndian();
-                chd.hunksize = br.ReadUInt32BigEndian();
-                chd.totalhunks = br.ReadUInt32BigEndian();
-                chd.cylinders = br.ReadUInt32BigEndian();
-                chd.heads = br.ReadUInt32BigEndian();
-                chd.sectors = br.ReadUInt32BigEndian();
-                chd.md5 = br.ReadBytes(16);
-                chd.parentmd5 = br.ReadBytes(16);
-                chd.seclen = br.ReadUInt32BigEndian();
-
-                chd.MD5 = chd.md5;
-            }
-
-            return chd;
+            var header = new HeaderV2();
+
+            byte[] tagBytes = stream.ReadBytes(8);
+            header.Tag = Encoding.ASCII.GetString(tagBytes);
+            if (header.Tag != Signature)
+                return null;
+
+            header.Length = stream.ReadUInt32BigEndian();
+            if (header.Length != HeaderSize)
+                return null;
+
+            header.Version = stream.ReadUInt32BigEndian();
+            header.Flags = (Flags)stream.ReadUInt32BigEndian();
+            header.Compression = (CompressionType)stream.ReadUInt32BigEndian();
+            if (header.Compression > CompressionType.CHDCOMPRESSION_ZLIB)
+                return null;
+
+            header.HunkSize = stream.ReadUInt32BigEndian();
+            header.TotalHunks = stream.ReadUInt32BigEndian();
+            header.Cylinders = stream.ReadUInt32BigEndian();
+            header.Heads = stream.ReadUInt32BigEndian();
+            header.Sectors = stream.ReadUInt32BigEndian();
+            header.MD5 = stream.ReadBytes(16);
+            header.ParentMD5 = stream.ReadBytes(16);
+            header.BytesPerSector = stream.ReadUInt32BigEndian();
+
+            return new CHDFileV2 { _header = header, MD5 = header.MD5 };
         }
@@ -91,7 +51,7 @@ namespace SabreTools.FileTypes.CHD
         /// </summary>
         public override byte[] GetHash()
         {
-            return md5;
+            return (_header as HeaderV2)?.MD5 ?? [];
         }
     }
 }
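
Note: every multi-byte field in a CHD header is stored big-endian, which is why these parsers lean on the ReadUInt32BigEndian/ReadUInt64BigEndian extensions from SabreTools.IO.Extensions. A sketch of what such a helper plausibly looks like; this is an illustration under that assumption, not the library's actual implementation:

    using System.IO;

    internal static class BigEndianSketch
    {
        // Illustrative only: reads four bytes and assembles them MSB-first,
        // independent of the host machine's endianness.
        public static uint ReadUInt32BigEndianSketch(this Stream stream)
        {
            byte[] buffer = new byte[4];
            if (stream.Read(buffer, 0, 4) < 4)
                throw new EndOfStreamException();

            return ((uint)buffer[0] << 24) | ((uint)buffer[1] << 16) | ((uint)buffer[2] << 8) | buffer[3];
        }
    }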

CHDFileV3.cs

@@ -1,7 +1,7 @@
-using System;
 using System.IO;
 using System.Text;
 using SabreTools.IO.Extensions;
+using SabreTools.Models.CHD;
 
 namespace SabreTools.FileTypes.CHD
 {
@@ -10,85 +10,40 @@ namespace SabreTools.FileTypes.CHD
     /// </summary>
     public class CHDFileV3 : CHDFile
     {
-        /// <summary>
-        /// CHD flags
-        /// </summary>
-        [Flags]
-        public enum Flags : uint
-        {
-            DriveHasParent = 0x00000001,
-            DriveAllowsWrites = 0x00000002,
-        }
-
-        /// <summary>
-        /// Compression being used in CHD
-        /// </summary>
-        public enum Compression : uint
-        {
-            CHDCOMPRESSION_NONE = 0,
-            CHDCOMPRESSION_ZLIB = 1,
-            CHDCOMPRESSION_ZLIB_PLUS = 2,
-        }
-
-        /// <summary>
-        /// Map format
-        /// </summary>
-        public class Map
-        {
-            public ulong offset;      // starting offset within the file
-            public uint crc32;        // 32-bit CRC of the uncompressed data
-            public ushort length_lo;  // lower 16 bits of length
-            public byte length_hi;    // upper 8 bits of length
-            public byte flags;        // flags, indicating compression info
-        }
-
         public const int HeaderSize = 120;
-        public const uint Version = 3;
-
-        // V3-specific header values
-        public Flags flags;                       // flags (see above)
-        public Compression compression;           // compression type
-        public uint totalhunks;                   // total # of hunks represented
-        public ulong logicalbytes;                // logical size of the data (in bytes)
-        public ulong metaoffset;                  // offset to the first blob of metadata
-        public byte[] md5 = new byte[16];         // MD5 checksum of raw data
-        public byte[] parentmd5 = new byte[16];   // MD5 checksum of parent file
-        public uint hunkbytes;                    // number of bytes per hunk
-        public byte[] sha1 = new byte[20];        // SHA1 checksum of raw data
-        public byte[] parentsha1 = new byte[20];  // SHA1 checksum of parent file
 
         /// <summary>
         /// Parse and validate the header as if it's V3
         /// </summary>
-        public static CHDFileV3 Deserialize(Stream stream)
+        public static CHDFileV3? Deserialize(Stream stream)
        {
-            CHDFileV3 chd = new();
-
-#if NET20 || NET35 || NET40
-            using (var br = new BinaryReader(stream, Encoding.Default))
-#else
-            using (var br = new BinaryReader(stream, Encoding.Default, true))
-#endif
-            {
-                chd.tag = br.ReadChars(8);
-                chd.length = br.ReadUInt32BigEndian();
-                chd.version = br.ReadUInt32BigEndian();
-                chd.flags = (Flags)br.ReadUInt32BigEndian();
-                chd.compression = (Compression)br.ReadUInt32BigEndian();
-                chd.totalhunks = br.ReadUInt32BigEndian();
-                chd.logicalbytes = br.ReadUInt64BigEndian();
-                chd.metaoffset = br.ReadUInt64BigEndian();
-                chd.md5 = br.ReadBytes(16);
-                chd.parentmd5 = br.ReadBytes(16);
-                chd.hunkbytes = br.ReadUInt32BigEndian();
-                chd.sha1 = br.ReadBytes(20);
-                chd.parentsha1 = br.ReadBytes(20);
-
-                chd.MD5 = chd.md5;
-                chd.SHA1 = chd.sha1;
-            }
-
-            return chd;
+            var header = new HeaderV3();
+
+            byte[] tagBytes = stream.ReadBytes(8);
+            header.Tag = Encoding.ASCII.GetString(tagBytes);
+            if (header.Tag != Signature)
+                return null;
+
+            header.Length = stream.ReadUInt32BigEndian();
+            if (header.Length != HeaderSize)
+                return null;
+
+            header.Version = stream.ReadUInt32BigEndian();
+            header.Flags = (Flags)stream.ReadUInt32BigEndian();
+            header.Compression = (CompressionType)stream.ReadUInt32BigEndian();
+            if (header.Compression > CompressionType.CHDCOMPRESSION_ZLIB_PLUS)
+                return null;
+
+            header.TotalHunks = stream.ReadUInt32BigEndian();
+            header.LogicalBytes = stream.ReadUInt64BigEndian();
+            header.MetaOffset = stream.ReadUInt64BigEndian();
+            header.MD5 = stream.ReadBytes(16);
+            header.ParentMD5 = stream.ReadBytes(16);
+            header.HunkBytes = stream.ReadUInt32BigEndian();
+            header.SHA1 = stream.ReadBytes(20);
+            header.ParentSHA1 = stream.ReadBytes(20);
+
+            return new CHDFileV3 { _header = header, MD5 = header.MD5, SHA1 = header.SHA1 };
         }
@@ -96,7 +51,7 @@ namespace SabreTools.FileTypes.CHD
         /// </summary>
         public override byte[] GetHash()
         {
-            return sha1;
+            return (_header as HeaderV3)?.SHA1 ?? [];
         }
     }
 }
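
Note: V3 is the first version to carry SHA-1 digests alongside MD5, and GetHash switches accordingly: V1/V2 report the 16-byte MD5, V3 onward the 20-byte SHA-1. Because GetHash is an override on the shared base class, callers can stay version-agnostic. A small sketch; HexDigest and HashSketch are hypothetical names for illustration:

    using System;
    using SabreTools.FileTypes.CHD;

    internal static class HashSketch
    {
        // BitConverter.ToString is available on every framework this
        // project targets, including NET20.
        public static string HexDigest(CHDFile chd)
        {
            byte[] hash = chd.GetHash(); // 16 bytes for V1/V2, 20 bytes for V3+
            return BitConverter.ToString(hash).Replace("-", string.Empty);
        }
    }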

CHDFileV4.cs

@@ -1,7 +1,7 @@
-using System;
 using System.IO;
 using System.Text;
 using SabreTools.IO.Extensions;
+using SabreTools.Models.CHD;
 
 namespace SabreTools.FileTypes.CHD
 {
@@ -10,83 +10,39 @@ namespace SabreTools.FileTypes.CHD
     /// </summary>
     public class CHDFileV4 : CHDFile
     {
-        /// <summary>
-        /// CHD flags
-        /// </summary>
-        [Flags]
-        public enum Flags : uint
-        {
-            DriveHasParent = 0x00000001,
-            DriveAllowsWrites = 0x00000002,
-        }
-
-        /// <summary>
-        /// Compression being used in CHD
-        /// </summary>
-        public enum Compression : uint
-        {
-            CHDCOMPRESSION_NONE = 0,
-            CHDCOMPRESSION_ZLIB = 1,
-            CHDCOMPRESSION_ZLIB_PLUS = 2,
-            CHDCOMPRESSION_AV = 3,
-        }
-
-        /// <summary>
-        /// Map format
-        /// </summary>
-        public class Map
-        {
-            public ulong offset;      // starting offset within the file
-            public uint crc32;        // 32-bit CRC of the uncompressed data
-            public ushort length_lo;  // lower 16 bits of length
-            public byte length_hi;    // upper 8 bits of length
-            public byte flags;        // flags, indicating compression info
-        }
-
         public const int HeaderSize = 108;
-        public const uint Version = 4;
-
-        // V4-specific header values
-        public Flags flags;                       // flags (see above)
-        public Compression compression;           // compression type
-        public uint totalhunks;                   // total # of hunks represented
-        public ulong logicalbytes;                // logical size of the data (in bytes)
-        public ulong metaoffset;                  // offset to the first blob of metadata
-        public uint hunkbytes;                    // number of bytes per hunk
-        public byte[] sha1 = new byte[20];        // combined raw+meta SHA1
-        public byte[] parentsha1 = new byte[20];  // combined raw+meta SHA1 of parent
-        public byte[] rawsha1 = new byte[20];     // raw data SHA1
 
         /// <summary>
         /// Parse and validate the header as if it's V4
         /// </summary>
-        public static CHDFileV4 Deserialize(Stream stream)
+        public static CHDFileV4? Deserialize(Stream stream)
         {
-            CHDFileV4 chd = new();
-
-#if NET20 || NET35 || NET40
-            using (BinaryReader br = new(stream, Encoding.Default))
-#else
-            using (BinaryReader br = new(stream, Encoding.Default, true))
-#endif
-            {
-                chd.tag = br.ReadChars(8);
-                chd.length = br.ReadUInt32BigEndian();
-                chd.version = br.ReadUInt32BigEndian();
-                chd.flags = (Flags)br.ReadUInt32BigEndian();
-                chd.compression = (Compression)br.ReadUInt32BigEndian();
-                chd.totalhunks = br.ReadUInt32BigEndian();
-                chd.logicalbytes = br.ReadUInt64BigEndian();
-                chd.metaoffset = br.ReadUInt64BigEndian();
-                chd.hunkbytes = br.ReadUInt32BigEndian();
-                chd.sha1 = br.ReadBytes(20);
-                chd.parentsha1 = br.ReadBytes(20);
-                chd.rawsha1 = br.ReadBytes(20);
-
-                chd.SHA1 = chd.sha1;
-            }
-
-            return chd;
+            var header = new HeaderV4();
+
+            byte[] tagBytes = stream.ReadBytes(8);
+            header.Tag = Encoding.ASCII.GetString(tagBytes);
+            if (header.Tag != Signature)
+                return null;
+
+            header.Length = stream.ReadUInt32BigEndian();
+            if (header.Length != HeaderSize)
+                return null;
+
+            header.Version = stream.ReadUInt32BigEndian();
+            header.Flags = (Flags)stream.ReadUInt32BigEndian();
+            header.Compression = (CompressionType)stream.ReadUInt32BigEndian();
+            if (header.Compression > CompressionType.CHDCOMPRESSION_AV)
+                return null;
+
+            header.TotalHunks = stream.ReadUInt32BigEndian();
+            header.LogicalBytes = stream.ReadUInt64BigEndian();
+            header.MetaOffset = stream.ReadUInt64BigEndian();
+            header.HunkBytes = stream.ReadUInt32BigEndian();
+            header.SHA1 = stream.ReadBytes(20);
+            header.ParentSHA1 = stream.ReadBytes(20);
+            header.RawSHA1 = stream.ReadBytes(20);
+
+            return new CHDFileV4 { _header = header, SHA1 = header.SHA1 };
         }
@@ -94,7 +50,7 @@ namespace SabreTools.FileTypes.CHD
         /// </summary>
         public override byte[] GetHash()
         {
-            return sha1;
+            return (_header as HeaderV4)?.SHA1 ?? [];
         }
     }
 }
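
Note: V4 drops MD5 entirely and instead tracks two SHA-1s, the combined raw+meta digest (SHA1) and the data-only digest (RawSHA1). Its 108-byte length check again matches the fields read above: 8 (tag) + 4 (length) + 4 (version) + 4 (flags) + 4 (compression) + 4 (total hunks) + 8 (logical bytes) + 8 (meta offset) + 4 (hunk bytes) + 20 + 20 + 20 (the three SHA-1s) = 108.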

CHDFileV5.cs

@@ -1,6 +1,7 @@
 using System.IO;
 using System.Text;
 using SabreTools.IO.Extensions;
+using SabreTools.Models.CHD;
 
 namespace SabreTools.FileTypes.CHD
 {
@@ -9,87 +10,41 @@ namespace SabreTools.FileTypes.CHD
     /// </summary>
     public class CHDFileV5 : CHDFile
     {
-        /// <summary>
-        /// Uncompressed map format
-        /// </summary>
-        private class UncompressedMap
-        {
-            public uint offset;  // starting offset within the file
-        }
-
-        /// <summary>
-        /// Compressed map header format
-        /// </summary>
-        private class CompressedMapHeader
-        {
-            public uint length;                      // length of compressed map
-            public byte[] datastart = new byte[12];  // UINT48; offset of first block
-            public ushort crc;                       // crc-16 of the map
-            public byte lengthbits;                  // bits used to encode complength
-            public byte hunkbits;                    // bits used to encode self-refs
-            public byte parentunitbits;              // bits used to encode parent unit refs
-            public byte reserved;                    // future use
-        }
-
-        /// <summary>
-        /// Compressed map entry format
-        /// </summary>
-        private class CompressedMapEntry
-        {
-            public byte compression;                 // compression type
-            public byte[] complength = new byte[6];  // UINT24; compressed length
-            public byte[] offset = new byte[12];     // UINT48; offset
-            public ushort crc;                       // crc-16 of the data
-        }
-
         public const int HeaderSize = 124;
-        public const uint Version = 5;
-
-        // V5-specific header values
-        public uint[] compressors = new uint[4];  // which custom compressors are used?
-        public ulong logicalbytes;                // logical size of the data (in bytes)
-        public ulong mapoffset;                   // offset to the map
-        public ulong metaoffset;                  // offset to the first blob of metadata
-        public uint hunkbytes;                    // number of bytes per hunk
-        public uint unitbytes;                    // number of bytes per unit within each hunk
-        public byte[] rawsha1 = new byte[20];     // raw data SHA1
-        public byte[] sha1 = new byte[20];        // combined raw+meta SHA1
-        public byte[] parentsha1 = new byte[20];  // combined raw+meta SHA1 of parent
 
         /// <summary>
         /// Parse and validate the header as if it's V5
         /// </summary>
-        public static CHDFileV5 Deserialize(Stream stream)
+        public static CHDFileV5? Deserialize(Stream stream)
         {
-            CHDFileV5 chd = new CHDFileV5();
-
-#if NET20 || NET35 || NET40
-            using (BinaryReader br = new BinaryReader(stream, Encoding.Default))
-#else
-            using (BinaryReader br = new BinaryReader(stream, Encoding.Default, true))
-#endif
-            {
-                chd.tag = br.ReadChars(8);
-                chd.length = br.ReadUInt32BigEndian();
-                chd.version = br.ReadUInt32BigEndian();
-                chd.compressors = new uint[4];
-                for (int i = 0; i < 4; i++)
-                {
-                    chd.compressors[i] = br.ReadUInt32BigEndian();
-                }
-                chd.logicalbytes = br.ReadUInt64BigEndian();
-                chd.mapoffset = br.ReadUInt64BigEndian();
-                chd.metaoffset = br.ReadUInt64BigEndian();
-                chd.hunkbytes = br.ReadUInt32BigEndian();
-                chd.unitbytes = br.ReadUInt32BigEndian();
-                chd.rawsha1 = br.ReadBytes(20);
-                chd.sha1 = br.ReadBytes(20);
-                chd.parentsha1 = br.ReadBytes(20);
-
-                chd.SHA1 = chd.sha1;
-            }
-
-            return chd;
+            var header = new HeaderV5();
+
+            byte[] tagBytes = stream.ReadBytes(8);
+            header.Tag = Encoding.ASCII.GetString(tagBytes);
+            if (header.Tag != Signature)
+                return null;
+
+            header.Length = stream.ReadUInt32BigEndian();
+            if (header.Length != HeaderSize)
+                return null;
+
+            header.Version = stream.ReadUInt32BigEndian();
+
+            header.Compressors = new uint[4];
+            for (int i = 0; i < header.Compressors.Length; i++)
+            {
+                header.Compressors[i] = stream.ReadUInt32BigEndian();
+            }
+
+            header.LogicalBytes = stream.ReadUInt64BigEndian();
+            header.MapOffset = stream.ReadUInt64BigEndian();
+            header.MetaOffset = stream.ReadUInt64BigEndian();
+            header.HunkBytes = stream.ReadUInt32BigEndian();
+            header.UnitBytes = stream.ReadUInt32BigEndian();
+            header.RawSHA1 = stream.ReadBytes(20);
+            header.SHA1 = stream.ReadBytes(20);
+            header.ParentSHA1 = stream.ReadBytes(20);
+
+            return new CHDFileV5 { _header = header, SHA1 = header.SHA1 };
         }
@@ -97,7 +52,7 @@ namespace SabreTools.FileTypes.CHD
         /// </summary>
         public override byte[] GetHash()
         {
-            return sha1;
+            return (_header as HeaderV5)?.SHA1 ?? [];
         }
     }
 }
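
Note: V5 replaces the enum-based compression field of V1-V4 with four Compressors slots; in the CHD v5 format each slot holds a FourCC code identifying a codec (MAME's tooling uses tags such as "zlib" and "lzma"), with zero marking an unused slot. A sketch of decoding one slot for display; DecodeFourCC and CompressorSketch are hypothetical names for illustration:

    using System.Text;

    internal static class CompressorSketch
    {
        // Splits a big-endian FourCC back into its four ASCII characters.
        public static string DecodeFourCC(uint value)
        {
            if (value == 0)
                return "(none)"; // unused compressor slot

            byte[] bytes =
            {
                (byte)(value >> 24),
                (byte)(value >> 16),
                (byte)(value >> 8),
                (byte)value,
            };
            return Encoding.ASCII.GetString(bytes);
        }
    }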