mirror of https://github.com/claunia/SabreTools.git (synced 2025-12-16 19:14:27 +00:00)

Use CHD models from library
@@ -13,22 +13,17 @@ namespace SabreTools.FileTypes.CHD
    {
        #region Private instance variables

        // Common header fields
        protected char[] tag = new char[8]; // 'MComprHD'
        protected uint length; // length of header (including tag and length fields)
        protected uint version; // drive format version
        protected const string Signature = "MComprHD";

        /// <summary>
        /// Model representing the correct CHD header
        /// </summary>
        protected Models.CHD.Header? _header;

        #endregion

        #region Constructors

        /// <summary>
        /// Empty constructor
        /// </summary>
        public CHDFile()
        {
        }

        /// <summary>
        /// Create a new CHDFile from an input file
        /// </summary>
@@ -47,13 +42,9 @@ namespace SabreTools.FileTypes.CHD
        {
            try
            {
                // Read the standard CHD headers
                (char[] tag, uint length, uint version) = GetHeaderValues(chdstream);
                chdstream.SeekIfPossible(); // Seek back to start

                // Validate that this is actually a valid CHD
                uint validatedVersion = ValidateHeader(tag, length, version);
                if (validatedVersion == 0)
                uint version = ValidateHeader(chdstream);
                if (version == 0)
                    return null;

                // Read and return the current CHD
@@ -78,40 +69,26 @@ namespace SabreTools.FileTypes.CHD

        #region Header Parsing

        /// <summary>
        /// Get the generic header values of a CHD, if possible
        /// </summary>
        /// <param name="stream"></param>
        /// <returns></returns>
        private static (char[] tag, uint length, uint version) GetHeaderValues(Stream stream)
        {
            char[] parsedTag = new char[8];
            uint parsedLength = 0;
            uint parsedVersion = 0;

#if NET20 || NET35 || NET40
            using (BinaryReader br = new(stream, Encoding.Default))
#else
            using (BinaryReader br = new(stream, Encoding.Default, true))
#endif
            {
                parsedTag = br.ReadChars(8);
                parsedLength = br.ReadUInt32BigEndian();
                parsedVersion = br.ReadUInt32BigEndian();
            }

            return (parsedTag, parsedLength, parsedVersion);
        }

        /// <summary>
        /// Validate the header values
        /// </summary>
        /// <returns>Matching version, 0 if none</returns>
        private static uint ValidateHeader(char[] tag, uint length, uint version)
        private static uint ValidateHeader(Stream stream)
        {
            if (!string.Equals(new string(tag), "MComprHD", StringComparison.Ordinal))
            // Read the header values
            byte[] tagBytes = stream.ReadBytes(8);
            string tag = Encoding.ASCII.GetString(tagBytes);
            uint length = stream.ReadUInt32BigEndian();
            uint version = stream.ReadUInt32BigEndian();

            // Seek back to start
            stream.SeekIfPossible();

            // Check the signature
            if (!string.Equals(tag, Signature, StringComparison.Ordinal))
                return 0;

            // Match the version to header length
            return version switch
            {
                1 => length == CHDFileV1.HeaderSize ? version : 0,
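
For context, the version that ValidateHeader returns presumably selects the per-version parser later in Create; that part of the method is outside the hunks above, so the following dispatch is only a sketch under that assumption.

// Sketch only -- the real body of Create is not shown in this diff.
// Each case calls one of the per-version Deserialize methods changed below.
CHDFile? parsed = version switch
{
    1 => CHDFileV1.Deserialize(chdstream),
    2 => CHDFileV2.Deserialize(chdstream),
    3 => CHDFileV3.Deserialize(chdstream),
    4 => CHDFileV4.Deserialize(chdstream),
    5 => CHDFileV5.Deserialize(chdstream),
    _ => null,
};
return parsed;
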
@@ -1,7 +1,7 @@
using System;
using System.IO;
using System.Text;
using SabreTools.IO.Extensions;
using SabreTools.Models.CHD;

namespace SabreTools.FileTypes.CHD
{
@@ -10,78 +10,39 @@ namespace SabreTools.FileTypes.CHD
    /// </summary>
    public class CHDFileV1 : CHDFile
    {
        /// <summary>
        /// CHD flags
        /// </summary>
        [Flags]
        public enum Flags : uint
        {
            DriveHasParent = 0x00000001,
            DriveAllowsWrites = 0x00000002,
        }

        /// <summary>
        /// Compression being used in CHD
        /// </summary>
        public enum Compression : uint
        {
            CHDCOMPRESSION_NONE = 0,
            CHDCOMPRESSION_ZLIB = 1,
        }

        /// <summary>
        /// Map format
        /// </summary>
        public class Map
        {
            public ulong offset; // 44; starting offset within the file
            public ulong length; // 20; length of data; if == hunksize, data is uncompressed
        }

        public const int HeaderSize = 76;
        public const uint Version = 1;

        // V1-specific header values
        public Flags flags; // flags (see above)
        public Compression compression; // compression type
        public uint hunksize; // 512-byte sectors per hunk
        public uint totalhunks; // total # of hunks represented
        public uint cylinders; // number of cylinders on hard disk
        public uint heads; // number of heads on hard disk
        public uint sectors; // number of sectors on hard disk
        public byte[] md5 = new byte[16]; // MD5 checksum of raw data
        public byte[] parentmd5 = new byte[16]; // MD5 checksum of parent file

        /// <summary>
        /// Parse and validate the header as if it's V1
        /// </summary>
        public static CHDFileV1 Deserialize(Stream stream)
        public static CHDFileV1? Deserialize(Stream stream)
        {
            CHDFileV1 chd = new();
            var header = new HeaderV1();

#if NET20 || NET35 || NET40
            using (BinaryReader br = new(stream, Encoding.Default))
#else
            using (BinaryReader br = new(stream, Encoding.Default, true))
#endif
            {
                chd.tag = br.ReadChars(8);
                chd.length = br.ReadUInt32BigEndian();
                chd.version = br.ReadUInt32BigEndian();
                chd.flags = (Flags)br.ReadUInt32BigEndian();
                chd.compression = (Compression)br.ReadUInt32BigEndian();
                chd.hunksize = br.ReadUInt32BigEndian();
                chd.totalhunks = br.ReadUInt32BigEndian();
                chd.cylinders = br.ReadUInt32BigEndian();
                chd.heads = br.ReadUInt32BigEndian();
                chd.sectors = br.ReadUInt32BigEndian();
                chd.md5 = br.ReadBytes(16);
                chd.parentmd5 = br.ReadBytes(16);
            byte[] tagBytes = stream.ReadBytes(8);
            header.Tag = Encoding.ASCII.GetString(tagBytes);
            if (header.Tag != Signature)
                return null;

                chd.MD5 = chd.md5;
            }
            header.Length = stream.ReadUInt32BigEndian();
            if (header.Length != HeaderSize)
                return null;

            return chd;
            header.Version = stream.ReadUInt32BigEndian();
            header.Flags = (Flags)stream.ReadUInt32BigEndian();
            header.Compression = (CompressionType)stream.ReadUInt32BigEndian();
            if (header.Compression > CompressionType.CHDCOMPRESSION_ZLIB)
                return null;

            header.HunkSize = stream.ReadUInt32BigEndian();
            header.TotalHunks = stream.ReadUInt32BigEndian();
            header.Cylinders = stream.ReadUInt32BigEndian();
            header.Heads = stream.ReadUInt32BigEndian();
            header.Sectors = stream.ReadUInt32BigEndian();
            header.MD5 = stream.ReadBytes(16);
            header.ParentMD5 = stream.ReadBytes(16);

            return new CHDFileV1 { _header = header, MD5 = header.MD5 };
        }

        /// <summary>
@@ -89,7 +50,7 @@ namespace SabreTools.FileTypes.CHD
        /// </summary>
        public override byte[] GetHash()
        {
            return md5;
            return (_header as HeaderV1)?.MD5 ?? [];
        }
    }
}
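
A quick usage sketch of the reshaped API: Deserialize and GetHash come from this commit, while the caller and the file path below are hypothetical.

// Hypothetical caller (reuses the System.IO using above); "disk.chd" is a placeholder path.
using (FileStream fs = File.OpenRead("disk.chd"))
{
    CHDFileV1? chd = CHDFileV1.Deserialize(fs);

    // For V1/V2 images GetHash returns the header MD5; V3 and later return SHA-1.
    byte[] hash = chd?.GetHash() ?? [];
}
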
@@ -1,7 +1,7 @@
using System;
using System.IO;
using System.Text;
using SabreTools.IO.Extensions;
using SabreTools.Models.CHD;

namespace SabreTools.FileTypes.CHD
{
@@ -10,80 +10,40 @@ namespace SabreTools.FileTypes.CHD
    /// </summary>
    public class CHDFileV2 : CHDFile
    {
        /// <summary>
        /// CHD flags
        /// </summary>
        [Flags]
        public enum Flags : uint
        {
            DriveHasParent = 0x00000001,
            DriveAllowsWrites = 0x00000002,
        }

        /// <summary>
        /// Compression being used in CHD
        /// </summary>
        public enum Compression : uint
        {
            CHDCOMPRESSION_NONE = 0,
            CHDCOMPRESSION_ZLIB = 1,
        }

        /// <summary>
        /// Map format
        /// </summary>
        public class Map
        {
            public ulong offset; // 44; starting offset within the file
            public ulong length; // 20; length of data; if == hunksize, data is uncompressed
        }

        public const int HeaderSize = 80;
        public const uint Version = 2;

        // V2-specific header values
        public Flags flags; // flags (see above)
        public Compression compression; // compression type
        public uint hunksize; // 512-byte sectors per hunk
        public uint totalhunks; // total # of hunks represented
        public uint cylinders; // number of cylinders on hard disk
        public uint heads; // number of heads on hard disk
        public uint sectors; // number of sectors on hard disk
        public byte[] md5 = new byte[16]; // MD5 checksum of raw data
        public byte[] parentmd5 = new byte[16]; // MD5 checksum of parent file
        public uint seclen; // number of bytes per sector

        /// <summary>
        /// Parse and validate the header as if it's V2
        /// </summary>
        public static CHDFileV2 Deserialize(Stream stream)
        public static CHDFileV2? Deserialize(Stream stream)
        {
            CHDFileV2 chd = new();
            var header = new HeaderV2();

#if NET20 || NET35 || NET40
            using (var br = new BinaryReader(stream, Encoding.Default))
#else
            using (var br = new BinaryReader(stream, Encoding.Default, true))
#endif
            {
                chd.tag = br.ReadChars(8);
                chd.length = br.ReadUInt32BigEndian();
                chd.version = br.ReadUInt32BigEndian();
                chd.flags = (Flags)br.ReadUInt32BigEndian();
                chd.compression = (Compression)br.ReadUInt32BigEndian();
                chd.hunksize = br.ReadUInt32BigEndian();
                chd.totalhunks = br.ReadUInt32BigEndian();
                chd.cylinders = br.ReadUInt32BigEndian();
                chd.heads = br.ReadUInt32BigEndian();
                chd.sectors = br.ReadUInt32BigEndian();
                chd.md5 = br.ReadBytes(16);
                chd.parentmd5 = br.ReadBytes(16);
                chd.seclen = br.ReadUInt32BigEndian();
            byte[] tagBytes = stream.ReadBytes(8);
            header.Tag = Encoding.ASCII.GetString(tagBytes);
            if (header.Tag != Signature)
                return null;

                chd.MD5 = chd.md5;
            }
            header.Length = stream.ReadUInt32BigEndian();
            if (header.Length != HeaderSize)
                return null;

            return chd;
            header.Version = stream.ReadUInt32BigEndian();
            header.Flags = (Flags)stream.ReadUInt32BigEndian();
            header.Compression = (CompressionType)stream.ReadUInt32BigEndian();
            if (header.Compression > CompressionType.CHDCOMPRESSION_ZLIB)
                return null;

            header.HunkSize = stream.ReadUInt32BigEndian();
            header.TotalHunks = stream.ReadUInt32BigEndian();
            header.Cylinders = stream.ReadUInt32BigEndian();
            header.Heads = stream.ReadUInt32BigEndian();
            header.Sectors = stream.ReadUInt32BigEndian();
            header.MD5 = stream.ReadBytes(16);
            header.ParentMD5 = stream.ReadBytes(16);
            header.BytesPerSector = stream.ReadUInt32BigEndian();

            return new CHDFileV2 { _header = header, MD5 = header.MD5 };
        }

        /// <summary>
@@ -91,7 +51,7 @@ namespace SabreTools.FileTypes.CHD
        /// </summary>
        public override byte[] GetHash()
        {
            return md5;
            return (_header as HeaderV2)?.MD5 ?? [];
        }
    }
}
@@ -1,7 +1,7 @@
using System;
using System.IO;
using System.Text;
using SabreTools.IO.Extensions;
using SabreTools.Models.CHD;

namespace SabreTools.FileTypes.CHD
{
@@ -10,85 +10,40 @@ namespace SabreTools.FileTypes.CHD
    /// </summary>
    public class CHDFileV3 : CHDFile
    {
        /// <summary>
        /// CHD flags
        /// </summary>
        [Flags]
        public enum Flags : uint
        {
            DriveHasParent = 0x00000001,
            DriveAllowsWrites = 0x00000002,
        }

        /// <summary>
        /// Compression being used in CHD
        /// </summary>
        public enum Compression : uint
        {
            CHDCOMPRESSION_NONE = 0,
            CHDCOMPRESSION_ZLIB = 1,
            CHDCOMPRESSION_ZLIB_PLUS = 2,
        }

        /// <summary>
        /// Map format
        /// </summary>
        public class Map
        {
            public ulong offset; // starting offset within the file
            public uint crc32; // 32-bit CRC of the uncompressed data
            public ushort length_lo; // lower 16 bits of length
            public byte length_hi; // upper 8 bits of length
            public byte flags; // flags, indicating compression info
        }

        public const int HeaderSize = 120;
        public const uint Version = 3;

        // V3-specific header values
        public Flags flags; // flags (see above)
        public Compression compression; // compression type
        public uint totalhunks; // total # of hunks represented
        public ulong logicalbytes; // logical size of the data (in bytes)
        public ulong metaoffset; // offset to the first blob of metadata
        public byte[] md5 = new byte[16]; // MD5 checksum of raw data
        public byte[] parentmd5 = new byte[16]; // MD5 checksum of parent file
        public uint hunkbytes; // number of bytes per hunk
        public byte[] sha1 = new byte[20]; // SHA1 checksum of raw data
        public byte[] parentsha1 = new byte[20]; // SHA1 checksum of parent file

        /// <summary>
        /// Parse and validate the header as if it's V3
        /// </summary>
        public static CHDFileV3 Deserialize(Stream stream)
        public static CHDFileV3? Deserialize(Stream stream)
        {
            CHDFileV3 chd = new();
            var header = new HeaderV3();

#if NET20 || NET35 || NET40
            using (var br = new BinaryReader(stream, Encoding.Default))
#else
            using (var br = new BinaryReader(stream, Encoding.Default, true))
#endif
            {
                chd.tag = br.ReadChars(8);
                chd.length = br.ReadUInt32BigEndian();
                chd.version = br.ReadUInt32BigEndian();
                chd.flags = (Flags)br.ReadUInt32BigEndian();
                chd.compression = (Compression)br.ReadUInt32BigEndian();
                chd.totalhunks = br.ReadUInt32BigEndian();
                chd.logicalbytes = br.ReadUInt64BigEndian();
                chd.metaoffset = br.ReadUInt64BigEndian();
                chd.md5 = br.ReadBytes(16);
                chd.parentmd5 = br.ReadBytes(16);
                chd.hunkbytes = br.ReadUInt32BigEndian();
                chd.sha1 = br.ReadBytes(20);
                chd.parentsha1 = br.ReadBytes(20);
            byte[] tagBytes = stream.ReadBytes(8);
            header.Tag = Encoding.ASCII.GetString(tagBytes);
            if (header.Tag != Signature)
                return null;

                chd.MD5 = chd.md5;
                chd.SHA1 = chd.sha1;
            }
            header.Length = stream.ReadUInt32BigEndian();
            if (header.Length != HeaderSize)
                return null;

            return chd;
            header.Version = stream.ReadUInt32BigEndian();
            header.Flags = (Flags)stream.ReadUInt32BigEndian();
            header.Compression = (CompressionType)stream.ReadUInt32BigEndian();
            if (header.Compression > CompressionType.CHDCOMPRESSION_ZLIB_PLUS)
                return null;

            header.TotalHunks = stream.ReadUInt32BigEndian();
            header.LogicalBytes = stream.ReadUInt64BigEndian();
            header.MetaOffset = stream.ReadUInt64BigEndian();
            header.MD5 = stream.ReadBytes(16);
            header.ParentMD5 = stream.ReadBytes(16);
            header.HunkBytes = stream.ReadUInt32BigEndian();
            header.SHA1 = stream.ReadBytes(20);
            header.ParentSHA1 = stream.ReadBytes(20);

            return new CHDFileV3 { _header = header, MD5 = header.MD5, SHA1 = header.SHA1 };
        }

        /// <summary>
@@ -96,7 +51,7 @@ namespace SabreTools.FileTypes.CHD
        /// </summary>
        public override byte[] GetHash()
        {
            return sha1;
            return (_header as HeaderV3)?.SHA1 ?? [];
        }
    }
}
@@ -1,7 +1,7 @@
using System;
using System.IO;
using System.Text;
using SabreTools.IO.Extensions;
using SabreTools.Models.CHD;

namespace SabreTools.FileTypes.CHD
{
@@ -10,83 +10,39 @@ namespace SabreTools.FileTypes.CHD
    /// </summary>
    public class CHDFileV4 : CHDFile
    {
        /// <summary>
        /// CHD flags
        /// </summary>
        [Flags]
        public enum Flags : uint
        {
            DriveHasParent = 0x00000001,
            DriveAllowsWrites = 0x00000002,
        }

        /// <summary>
        /// Compression being used in CHD
        /// </summary>
        public enum Compression : uint
        {
            CHDCOMPRESSION_NONE = 0,
            CHDCOMPRESSION_ZLIB = 1,
            CHDCOMPRESSION_ZLIB_PLUS = 2,
            CHDCOMPRESSION_AV = 3,
        }

        /// <summary>
        /// Map format
        /// </summary>
        public class Map
        {
            public ulong offset; // starting offset within the file
            public uint crc32; // 32-bit CRC of the uncompressed data
            public ushort length_lo; // lower 16 bits of length
            public byte length_hi; // upper 8 bits of length
            public byte flags; // flags, indicating compression info
        }

        public const int HeaderSize = 108;
        public const uint Version = 4;

        // V4-specific header values
        public Flags flags; // flags (see above)
        public Compression compression; // compression type
        public uint totalhunks; // total # of hunks represented
        public ulong logicalbytes; // logical size of the data (in bytes)
        public ulong metaoffset; // offset to the first blob of metadata
        public uint hunkbytes; // number of bytes per hunk
        public byte[] sha1 = new byte[20]; // combined raw+meta SHA1
        public byte[] parentsha1 = new byte[20]; // combined raw+meta SHA1 of parent
        public byte[] rawsha1 = new byte[20]; // raw data SHA1

        /// <summary>
        /// Parse and validate the header as if it's V4
        /// </summary>
        public static CHDFileV4 Deserialize(Stream stream)
        public static CHDFileV4? Deserialize(Stream stream)
        {
            CHDFileV4 chd = new();
            var header = new HeaderV4();

#if NET20 || NET35 || NET40
            using (BinaryReader br = new(stream, Encoding.Default))
#else
            using (BinaryReader br = new(stream, Encoding.Default, true))
#endif
            {
                chd.tag = br.ReadChars(8);
                chd.length = br.ReadUInt32BigEndian();
                chd.version = br.ReadUInt32BigEndian();
                chd.flags = (Flags)br.ReadUInt32BigEndian();
                chd.compression = (Compression)br.ReadUInt32BigEndian();
                chd.totalhunks = br.ReadUInt32BigEndian();
                chd.logicalbytes = br.ReadUInt64BigEndian();
                chd.metaoffset = br.ReadUInt64BigEndian();
                chd.hunkbytes = br.ReadUInt32BigEndian();
                chd.sha1 = br.ReadBytes(20);
                chd.parentsha1 = br.ReadBytes(20);
                chd.rawsha1 = br.ReadBytes(20);
            byte[] tagBytes = stream.ReadBytes(8);
            header.Tag = Encoding.ASCII.GetString(tagBytes);
            if (header.Tag != Signature)
                return null;

                chd.SHA1 = chd.sha1;
            }
            header.Length = stream.ReadUInt32BigEndian();
            if (header.Length != HeaderSize)
                return null;

            return chd;
            header.Version = stream.ReadUInt32BigEndian();
            header.Flags = (Flags)stream.ReadUInt32BigEndian();
            header.Compression = (CompressionType)stream.ReadUInt32BigEndian();
            if (header.Compression > CompressionType.CHDCOMPRESSION_AV)
                return null;

            header.TotalHunks = stream.ReadUInt32BigEndian();
            header.LogicalBytes = stream.ReadUInt64BigEndian();
            header.MetaOffset = stream.ReadUInt64BigEndian();
            header.HunkBytes = stream.ReadUInt32BigEndian();
            header.SHA1 = stream.ReadBytes(20);
            header.ParentSHA1 = stream.ReadBytes(20);
            header.RawSHA1 = stream.ReadBytes(20);

            return new CHDFileV4 { _header = header, SHA1 = header.SHA1 };
        }

        /// <summary>
@@ -94,7 +50,7 @@ namespace SabreTools.FileTypes.CHD
        /// </summary>
        public override byte[] GetHash()
        {
            return sha1;
            return (_header as HeaderV4)?.SHA1 ?? [];
        }
    }
}
@@ -1,6 +1,7 @@
using System.IO;
using System.Text;
using SabreTools.IO.Extensions;
using SabreTools.Models.CHD;

namespace SabreTools.FileTypes.CHD
{
@@ -9,87 +10,41 @@ namespace SabreTools.FileTypes.CHD
    /// </summary>
    public class CHDFileV5 : CHDFile
    {
        /// <summary>
        /// Uncompressed map format
        /// </summary>
        private class UncompressedMap
        {
            public uint offset; // starting offset within the file
        }

        /// <summary>
        /// Compressed map header format
        /// </summary>
        private class CompressedMapHeader
        {
            public uint length; // length of compressed map
            public byte[] datastart = new byte[12]; // UINT48; offset of first block
            public ushort crc; // crc-16 of the map
            public byte lengthbits; // bits used to encode complength
            public byte hunkbits; // bits used to encode self-refs
            public byte parentunitbits; // bits used to encode parent unit refs
            public byte reserved; // future use
        }

        /// <summary>
        /// Compressed map entry format
        /// </summary>
        private class CompressedMapEntry
        {
            public byte compression; // compression type
            public byte[] complength = new byte[6]; // UINT24; compressed length
            public byte[] offset = new byte[12]; // UINT48; offset
            public ushort crc; // crc-16 of the data
        }

        public const int HeaderSize = 124;
        public const uint Version = 5;

        // V5-specific header values
        public uint[] compressors = new uint[4]; // which custom compressors are used?
        public ulong logicalbytes; // logical size of the data (in bytes)
        public ulong mapoffset; // offset to the map
        public ulong metaoffset; // offset to the first blob of metadata
        public uint hunkbytes; // number of bytes per hunk
        public uint unitbytes; // number of bytes per unit within each hunk
        public byte[] rawsha1 = new byte[20]; // raw data SHA1
        public byte[] sha1 = new byte[20]; // combined raw+meta SHA1
        public byte[] parentsha1 = new byte[20]; // combined raw+meta SHA1 of parent

        /// <summary>
        /// Parse and validate the header as if it's V5
        /// </summary>
        public static CHDFileV5 Deserialize(Stream stream)
        public static CHDFileV5? Deserialize(Stream stream)
        {
            CHDFileV5 chd = new CHDFileV5();
            var header = new HeaderV5();

#if NET20 || NET35 || NET40
            using (BinaryReader br = new BinaryReader(stream, Encoding.Default))
#else
            using (BinaryReader br = new BinaryReader(stream, Encoding.Default, true))
#endif
            {
                chd.tag = br.ReadChars(8);
                chd.length = br.ReadUInt32BigEndian();
                chd.version = br.ReadUInt32BigEndian();
                chd.compressors = new uint[4];
                for (int i = 0; i < 4; i++)
                {
                    chd.compressors[i] = br.ReadUInt32BigEndian();
                }
                chd.logicalbytes = br.ReadUInt64BigEndian();
                chd.mapoffset = br.ReadUInt64BigEndian();
                chd.metaoffset = br.ReadUInt64BigEndian();
                chd.hunkbytes = br.ReadUInt32BigEndian();
                chd.unitbytes = br.ReadUInt32BigEndian();
                chd.rawsha1 = br.ReadBytes(20);
                chd.sha1 = br.ReadBytes(20);
                chd.parentsha1 = br.ReadBytes(20);
            byte[] tagBytes = stream.ReadBytes(8);
            header.Tag = Encoding.ASCII.GetString(tagBytes);
            if (header.Tag != Signature)
                return null;

                chd.SHA1 = chd.sha1;
            header.Length = stream.ReadUInt32BigEndian();
            if (header.Length != HeaderSize)
                return null;

            header.Version = stream.ReadUInt32BigEndian();
            header.Compressors = new uint[4];
            for (int i = 0; i < header.Compressors.Length; i++)
            {
                header.Compressors[i] = stream.ReadUInt32BigEndian();
            }

            return chd;
            header.LogicalBytes = stream.ReadUInt64BigEndian();
            header.MapOffset = stream.ReadUInt64BigEndian();
            header.MetaOffset = stream.ReadUInt64BigEndian();
            header.HunkBytes = stream.ReadUInt32BigEndian();
            header.UnitBytes = stream.ReadUInt32BigEndian();
            header.RawSHA1 = stream.ReadBytes(20);
            header.SHA1 = stream.ReadBytes(20);
            header.ParentSHA1 = stream.ReadBytes(20);

            return new CHDFileV5 { _header = header, SHA1 = header.SHA1 };
        }

        /// <summary>
@@ -97,7 +52,7 @@ namespace SabreTools.FileTypes.CHD
        /// </summary>
        public override byte[] GetHash()
        {
            return sha1;
            return (_header as HeaderV5)?.SHA1 ?? [];
        }
    }
}
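
Every multi-byte header field above is read big-endian through the SabreTools.IO.Extensions helpers. The snippet below is only an illustration of what a ReadUInt32BigEndian-style extension typically does; it is not the library's actual implementation.

using System;
using System.IO;

// Illustration only -- not the SabreTools.IO implementation.
internal static class BigEndianExample
{
    public static uint ReadUInt32BigEndianExample(this Stream stream)
    {
        byte[] buffer = new byte[4];
        int offset = 0;
        while (offset < 4)
        {
            int read = stream.Read(buffer, offset, 4 - offset);
            if (read == 0)
                throw new EndOfStreamException();
            offset += read;
        }

        // CHD headers store values big-endian, so reverse the bytes
        // on little-endian hosts before converting.
        if (BitConverter.IsLittleEndian)
            Array.Reverse(buffer);

        return BitConverter.ToUInt32(buffer, 0);
    }
}
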