Use CHD models from library

Matt Nadareski
2024-10-20 00:03:29 -04:00
parent 4d5ac92125
commit ab93ba406c
6 changed files with 158 additions and 394 deletions
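
This change drops the header enums, the map structure, and the raw header fields that the class previously declared for itself, and parses into the shared header model from SabreTools.Models.CHD instead. For orientation, here is a hypothetical sketch of the HeaderV3 model consumed in the diff below, inferred from the fields Deserialize reads; the actual definition lives in the Models library and is not part of this commit:

// Hypothetical sketch of SabreTools.Models.CHD.HeaderV3, inferred from
// the reads in Deserialize below; not the library's actual source.
public class HeaderV3
{
    public string? Tag { get; set; }                  // 8-byte signature, "MComprHD"
    public uint Length { get; set; }                  // length of header (120 for V3)
    public uint Version { get; set; }                 // drive format version (3)
    public Flags Flags { get; set; }                  // drive flags
    public CompressionType Compression { get; set; }  // compression type
    public uint TotalHunks { get; set; }              // total # of hunks represented
    public ulong LogicalBytes { get; set; }           // logical size of the data (in bytes)
    public ulong MetaOffset { get; set; }             // offset to the first blob of metadata
    public byte[]? MD5 { get; set; }                  // MD5 checksum of raw data
    public byte[]? ParentMD5 { get; set; }            // MD5 checksum of parent file
    public uint HunkBytes { get; set; }               // number of bytes per hunk
    public byte[]? SHA1 { get; set; }                 // SHA1 checksum of raw data
    public byte[]? ParentSHA1 { get; set; }           // SHA1 checksum of parent file
}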

@@ -1,7 +1,7 @@
-using System;
 using System.IO;
 using System.Text;
 using SabreTools.IO.Extensions;
+using SabreTools.Models.CHD;
 
 namespace SabreTools.FileTypes.CHD
 {
@@ -10,85 +10,40 @@ namespace SabreTools.FileTypes.CHD
     /// </summary>
     public class CHDFileV3 : CHDFile
     {
-        /// <summary>
-        /// CHD flags
-        /// </summary>
-        [Flags]
-        public enum Flags : uint
-        {
-            DriveHasParent = 0x00000001,
-            DriveAllowsWrites = 0x00000002,
-        }
-
-        /// <summary>
-        /// Compression being used in CHD
-        /// </summary>
-        public enum Compression : uint
-        {
-            CHDCOMPRESSION_NONE = 0,
-            CHDCOMPRESSION_ZLIB = 1,
-            CHDCOMPRESSION_ZLIB_PLUS = 2,
-        }
-
-        /// <summary>
-        /// Map format
-        /// </summary>
-        public class Map
-        {
-            public ulong offset;     // starting offset within the file
-            public uint crc32;       // 32-bit CRC of the uncompressed data
-            public ushort length_lo; // lower 16 bits of length
-            public byte length_hi;   // upper 8 bits of length
-            public byte flags;       // flags, indicating compression info
-        }
-
-        public const int HeaderSize = 120;
-        public const uint Version = 3;
-
-        // V3-specific header values
-        public Flags flags;                      // flags (see above)
-        public Compression compression;          // compression type
-        public uint totalhunks;                  // total # of hunks represented
-        public ulong logicalbytes;               // logical size of the data (in bytes)
-        public ulong metaoffset;                 // offset to the first blob of metadata
-        public byte[] md5 = new byte[16];        // MD5 checksum of raw data
-        public byte[] parentmd5 = new byte[16];  // MD5 checksum of parent file
-        public uint hunkbytes;                   // number of bytes per hunk
-        public byte[] sha1 = new byte[20];       // SHA1 checksum of raw data
-        public byte[] parentsha1 = new byte[20]; // SHA1 checksum of parent file
-
         /// <summary>
         /// Parse and validate the header as if it's V3
         /// </summary>
-        public static CHDFileV3 Deserialize(Stream stream)
+        public static CHDFileV3? Deserialize(Stream stream)
         {
-            CHDFileV3 chd = new();
-
-#if NET20 || NET35 || NET40
-            using (var br = new BinaryReader(stream, Encoding.Default))
-#else
-            using (var br = new BinaryReader(stream, Encoding.Default, true))
-#endif
-            {
-                chd.tag = br.ReadChars(8);
-                chd.length = br.ReadUInt32BigEndian();
-                chd.version = br.ReadUInt32BigEndian();
-                chd.flags = (Flags)br.ReadUInt32BigEndian();
-                chd.compression = (Compression)br.ReadUInt32BigEndian();
-                chd.totalhunks = br.ReadUInt32BigEndian();
-                chd.logicalbytes = br.ReadUInt64BigEndian();
-                chd.metaoffset = br.ReadUInt64BigEndian();
-                chd.md5 = br.ReadBytes(16);
-                chd.parentmd5 = br.ReadBytes(16);
-                chd.hunkbytes = br.ReadUInt32BigEndian();
-                chd.sha1 = br.ReadBytes(20);
-                chd.parentsha1 = br.ReadBytes(20);
-
-                chd.MD5 = chd.md5;
-                chd.SHA1 = chd.sha1;
-            }
-
-            return chd;
+            var header = new HeaderV3();
+
+            byte[] tagBytes = stream.ReadBytes(8);
+            header.Tag = Encoding.ASCII.GetString(tagBytes);
+            if (header.Tag != Signature)
+                return null;
+
+            header.Length = stream.ReadUInt32BigEndian();
+            if (header.Length != HeaderSize)
+                return null;
+
+            header.Version = stream.ReadUInt32BigEndian();
+            header.Flags = (Flags)stream.ReadUInt32BigEndian();
+            header.Compression = (CompressionType)stream.ReadUInt32BigEndian();
+            if (header.Compression > CompressionType.CHDCOMPRESSION_ZLIB_PLUS)
+                return null;
+
+            header.TotalHunks = stream.ReadUInt32BigEndian();
+            header.LogicalBytes = stream.ReadUInt64BigEndian();
+            header.MetaOffset = stream.ReadUInt64BigEndian();
+            header.MD5 = stream.ReadBytes(16);
+            header.ParentMD5 = stream.ReadBytes(16);
+            header.HunkBytes = stream.ReadUInt32BigEndian();
+            header.SHA1 = stream.ReadBytes(20);
+            header.ParentSHA1 = stream.ReadBytes(20);
+
+            return new CHDFileV3 { _header = header, MD5 = header.MD5, SHA1 = header.SHA1 };
         }
 
         /// <summary>
@@ -96,7 +51,7 @@ namespace SabreTools.FileTypes.CHD
         /// </summary>
         public override byte[] GetHash()
        {
-            return sha1;
+            return (_header as HeaderV3)?.SHA1 ?? [];
         }
     }
 }
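
A minimal sketch of how the refactored parser might be driven; this caller is illustrative and not part of the commit:

// Illustrative caller: probe a file as CHD v3 and print its internal SHA-1.
using System;
using System.IO;
using SabreTools.FileTypes.CHD;

class ProbeChdV3
{
    static void Main(string[] args)
    {
        using Stream stream = File.OpenRead(args[0]);

        // Deserialize now returns null as soon as the tag, header length,
        // or compression type fails validation, rather than handing back a
        // half-populated object.
        CHDFileV3? chd = CHDFileV3.Deserialize(stream);
        if (chd == null)
        {
            Console.WriteLine("Not a valid CHD v3 file");
            return;
        }

        // GetHash() surfaces the SHA-1 carried by the parsed HeaderV3
        Console.WriteLine(BitConverter.ToString(chd.GetHash()).Replace("-", ""));
    }
}

Failing fast with null also means callers no longer have to inspect the tag and version fields themselves to decide whether the parse succeeded.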