Extract out FileTypes namespace

Matt Nadareski
2020-12-08 14:53:49 -08:00
parent 0512e393c8
commit 82e3a3939b
134 changed files with 500 additions and 641 deletions

@@ -0,0 +1,92 @@
using System;
using System.IO;
using System.Text;
using SabreTools.IO;
namespace SabreTools.FileTypes.CHD
{
/// <summary>
/// CHD V1 File
/// </summary>
internal class CHDFileV1 : CHDFile
{
/// <summary>
/// CHD flags
/// </summary>
[Flags]
public enum Flags : uint
{
DriveHasParent = 0x00000001,
DriveAllowsWrites = 0x00000002,
}
/// <summary>
/// Compression being used in CHD
/// </summary>
public enum Compression : uint
{
CHDCOMPRESSION_NONE = 0,
CHDCOMPRESSION_ZLIB = 1,
}
/// <summary>
/// Map format
/// </summary>
public class Map
{
public ulong offset; // 44 bits; starting offset within the file
public ulong length; // 20 bits; length of data; if == hunksize, data is uncompressed
}
public const int HeaderSize = 76;
public const uint Version = 1;
// V1-specific header values
public Flags flags; // flags (see above)
public Compression compression; // compression type
public uint hunksize; // 512-byte sectors per hunk
public uint totalhunks; // total # of hunks represented
public uint cylinders; // number of cylinders on hard disk
public uint heads; // number of heads on hard disk
public uint sectors; // number of sectors on hard disk
public byte[] md5 = new byte[16]; // MD5 checksum of raw data
public byte[] parentmd5 = new byte[16]; // MD5 checksum of parent file
/// <summary>
/// Parse and validate the header as if it's V1
/// </summary>
public static CHDFileV1 Deserialize(Stream stream)
{
CHDFileV1 chd = new CHDFileV1();
using (BinaryReader br = new BinaryReader(stream, Encoding.Default, true))
{
chd.tag = br.ReadChars(8);
chd.length = br.ReadUInt32BigEndian();
chd.version = br.ReadUInt32BigEndian();
chd.flags = (Flags)br.ReadUInt32BigEndian();
chd.compression = (Compression)br.ReadUInt32BigEndian();
chd.hunksize = br.ReadUInt32BigEndian();
chd.totalhunks = br.ReadUInt32BigEndian();
chd.cylinders = br.ReadUInt32BigEndian();
chd.heads = br.ReadUInt32BigEndian();
chd.sectors = br.ReadUInt32BigEndian();
chd.md5 = br.ReadBytes(16);
chd.parentmd5 = br.ReadBytes(16);
chd.MD5 = chd.md5;
}
return chd;
}
/// <summary>
/// Return internal MD5 hash
/// </summary>
public override byte[] GetHash()
{
return md5;
}
}
}
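
Usage note (illustrative, not part of this commit): because the V1 header is a fixed 76-byte, big-endian block, probing a file only takes a seekable stream with at least HeaderSize bytes available. A minimal sketch, assuming same-assembly access (the classes are internal) and the inherited tag/length/version fields set by Deserialize above; "MComprHD" is the standard CHD magic:

using System.IO;
using SabreTools.FileTypes.CHD;

internal static class ChdV1Probe
{
    // Returns true if the stream starts with a plausible V1 header
    public static bool LooksLikeV1(Stream stream)
    {
        CHDFileV1 chd = CHDFileV1.Deserialize(stream);
        return new string(chd.tag) == "MComprHD"   // 8-byte CHD magic
            && chd.version == CHDFileV1.Version    // header claims V1
            && chd.length == CHDFileV1.HeaderSize; // V1 headers are 76 bytes
    }
}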

@@ -0,0 +1,94 @@
using System;
using System.IO;
using System.Text;
using SabreTools.IO;
namespace SabreTools.FileTypes.CHD
{
/// <summary>
/// CHD V2 File
/// </summary>
internal class CHDFileV2 : CHDFile
{
/// <summary>
/// CHD flags
/// </summary>
[Flags]
public enum Flags : uint
{
DriveHasParent = 0x00000001,
DriveAllowsWrites = 0x00000002,
}
/// <summary>
/// Compression being used in CHD
/// </summary>
public enum Compression : uint
{
CHDCOMPRESSION_NONE = 0,
CHDCOMPRESSION_ZLIB = 1,
}
/// <summary>
/// Map format
/// </summary>
public class Map
{
public ulong offset; // 44 bits; starting offset within the file
public ulong length; // 20 bits; length of data; if == hunksize, data is uncompressed
}
public const int HeaderSize = 80;
public const uint Version = 2;
// V2-specific header values
public Flags flags; // flags (see above)
public Compression compression; // compression type
public uint hunksize; // 512-byte sectors per hunk
public uint totalhunks; // total # of hunks represented
public uint cylinders; // number of cylinders on hard disk
public uint heads; // number of heads on hard disk
public uint sectors; // number of sectors on hard disk
public byte[] md5 = new byte[16]; // MD5 checksum of raw data
public byte[] parentmd5 = new byte[16]; // MD5 checksum of parent file
public uint seclen; // number of bytes per sector
/// <summary>
/// Parse and validate the header as if it's V2
/// </summary>
public static CHDFileV2 Deserialize(Stream stream)
{
CHDFileV2 chd = new CHDFileV2();
using (BinaryReader br = new BinaryReader(stream, Encoding.Default, true))
{
chd.tag = br.ReadChars(8);
chd.length = br.ReadUInt32BigEndian();
chd.version = br.ReadUInt32BigEndian();
chd.flags = (Flags)br.ReadUInt32BigEndian();
chd.compression = (Compression)br.ReadUInt32BigEndian();
chd.hunksize = br.ReadUInt32BigEndian();
chd.totalhunks = br.ReadUInt32BigEndian();
chd.cylinders = br.ReadUInt32BigEndian();
chd.heads = br.ReadUInt32BigEndian();
chd.sectors = br.ReadUInt32BigEndian();
chd.md5 = br.ReadBytes(16);
chd.parentmd5 = br.ReadBytes(16);
chd.seclen = br.ReadUInt32BigEndian();
chd.MD5 = chd.md5;
}
return chd;
}
/// <summary>
/// Return internal MD5 hash
/// </summary>
public override byte[] GetHash()
{
return md5;
}
}
}
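
The only field V2 adds over V1 is seclen: V1 hard-codes 512-byte sectors, while V2 records the sector size in the header. A hedged sketch of the raw data size each header implies, taking hunksize as sectors-per-hunk per the comments above (helper class is illustrative, not part of this commit):

// Illustrative only: derive the raw data size implied by a V1/V2 header.
internal static class ChdSizeHelper
{
    public static ulong RawSizeV1(CHDFileV1 chd)
    {
        // hunksize counts 512-byte sectors per hunk; V1 fixes the sector size
        return (ulong)chd.totalhunks * chd.hunksize * 512;
    }

    public static ulong RawSizeV2(CHDFileV2 chd)
    {
        // V2 stores the sector size explicitly in seclen
        return (ulong)chd.totalhunks * chd.hunksize * chd.seclen;
    }
}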

@@ -0,0 +1,99 @@
using System;
using System.IO;
using System.Text;
using SabreTools.IO;
namespace SabreTools.FileTypes.CHD
{
/// <summary>
/// CHD V3 File
/// </summary>
internal class CHDFileV3 : CHDFile
{
/// <summary>
/// CHD flags
/// </summary>
[Flags]
public enum Flags : uint
{
DriveHasParent = 0x00000001,
DriveAllowsWrites = 0x00000002,
}
/// <summary>
/// Compression being used in CHD
/// </summary>
public enum Compression : uint
{
CHDCOMPRESSION_NONE = 0,
CHDCOMPRESSION_ZLIB = 1,
CHDCOMPRESSION_ZLIB_PLUS = 2,
}
/// <summary>
/// Map format
/// </summary>
public class Map
{
public ulong offset; // starting offset within the file
public uint crc32; // 32-bit CRC of the uncompressed data
public ushort length_lo; // lower 16 bits of length
public byte length_hi; // upper 8 bits of length
public byte flags; // flags, indicating compression info
}
public const int HeaderSize = 120;
public const uint Version = 3;
// V3-specific header values
public Flags flags; // flags (see above)
public Compression compression; // compression type
public uint totalhunks; // total # of hunks represented
public ulong logicalbytes; // logical size of the data (in bytes)
public ulong metaoffset; // offset to the first blob of metadata
public byte[] md5 = new byte[16]; // MD5 checksum of raw data
public byte[] parentmd5 = new byte[16]; // MD5 checksum of parent file
public uint hunkbytes; // number of bytes per hunk
public byte[] sha1 = new byte[20]; // SHA1 checksum of raw data
public byte[] parentsha1 = new byte[20]; // SHA1 checksum of parent file
/// <summary>
/// Parse and validate the header as if it's V3
/// </summary>
public static CHDFileV3 Deserialize(Stream stream)
{
CHDFileV3 chd = new CHDFileV3();
using (BinaryReader br = new BinaryReader(stream, Encoding.Default, true))
{
chd.tag = br.ReadChars(8);
chd.length = br.ReadUInt32BigEndian();
chd.version = br.ReadUInt32BigEndian();
chd.flags = (Flags)br.ReadUInt32BigEndian();
chd.compression = (Compression)br.ReadUInt32BigEndian();
chd.totalhunks = br.ReadUInt32BigEndian();
chd.logicalbytes = br.ReadUInt64BigEndian();
chd.metaoffset = br.ReadUInt64BigEndian();
chd.md5 = br.ReadBytes(16);
chd.parentmd5 = br.ReadBytes(16);
chd.hunkbytes = br.ReadUInt32BigEndian();
chd.sha1 = br.ReadBytes(20);
chd.parentsha1 = br.ReadBytes(20);
chd.MD5 = chd.md5;
chd.SHA1 = chd.sha1;
}
return chd;
}
/// <summary>
/// Return internal SHA1 hash
/// </summary>
public override byte[] GetHash()
{
return sha1;
}
}
}
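
The V3 map entry splits its 24-bit compressed length across length_lo and length_hi. A short sketch of recombining the two fields (helper is illustrative, not part of this commit):

// Illustrative only: recombine the split 24-bit length of a V3 map entry.
internal static class ChdV3MapHelper
{
    public static uint EntryLength(CHDFileV3.Map entry)
    {
        // length_hi holds the upper 8 bits, length_lo the lower 16
        return ((uint)entry.length_hi << 16) | entry.length_lo;
    }
}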

@@ -0,0 +1,97 @@
using System;
using System.IO;
using System.Text;
using SabreTools.IO;
namespace SabreTools.FileTypes.CHD
{
/// <summary>
/// CHD V4 File
/// </summary>
internal class CHDFileV4 : CHDFile
{
/// <summary>
/// CHD flags
/// </summary>
[Flags]
public enum Flags : uint
{
DriveHasParent = 0x00000001,
DriveAllowsWrites = 0x00000002,
}
/// <summary>
/// Compression being used in CHD
/// </summary>
public enum Compression : uint
{
CHDCOMPRESSION_NONE = 0,
CHDCOMPRESSION_ZLIB = 1,
CHDCOMPRESSION_ZLIB_PLUS = 2,
CHDCOMPRESSION_AV = 3,
}
/// <summary>
/// Map format
/// </summary>
public class Map
{
public ulong offset; // starting offset within the file
public uint crc32; // 32-bit CRC of the uncompressed data
public ushort length_lo; // lower 16 bits of length
public byte length_hi; // upper 8 bits of length
public byte flags; // flags, indicating compression info
}
public const int HeaderSize = 108;
public const uint Version = 4;
// V4-specific header values
public Flags flags; // flags (see above)
public Compression compression; // compression type
public uint totalhunks; // total # of hunks represented
public ulong logicalbytes; // logical size of the data (in bytes)
public ulong metaoffset; // offset to the first blob of metadata
public uint hunkbytes; // number of bytes per hunk
public byte[] sha1 = new byte[20]; // combined raw+meta SHA1
public byte[] parentsha1 = new byte[20]; // combined raw+meta SHA1 of parent
public byte[] rawsha1 = new byte[20]; // raw data SHA1
/// <summary>
/// Parse and validate the header as if it's V4
/// </summary>
public static CHDFileV4 Deserialize(Stream stream)
{
CHDFileV4 chd = new CHDFileV4();
using (BinaryReader br = new BinaryReader(stream, Encoding.Default, true))
{
chd.tag = br.ReadChars(8);
chd.length = br.ReadUInt32BigEndian();
chd.version = br.ReadUInt32BigEndian();
chd.flags = (Flags)br.ReadUInt32BigEndian();
chd.compression = (Compression)br.ReadUInt32BigEndian();
chd.totalhunks = br.ReadUInt32BigEndian();
chd.logicalbytes = br.ReadUInt64BigEndian();
chd.metaoffset = br.ReadUInt64BigEndian();
chd.hunkbytes = br.ReadUInt32BigEndian();
chd.sha1 = br.ReadBytes(20);
chd.parentsha1 = br.ReadBytes(20);
chd.rawsha1 = br.ReadBytes(20);
chd.SHA1 = chd.sha1;
}
return chd;
}
/// <summary>
/// Return internal SHA1 hash
/// </summary>
public override byte[] GetHash()
{
return sha1;
}
}
}
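
Whether parentsha1 (and parentmd5 in V1-V3) is meaningful depends on the DriveHasParent flag carried in the header. A hedged sketch of the check, shown against V4 but applicable to any version with the Flags enum (helper is illustrative, not part of this commit):

// Illustrative only: the parent hashes identify a required parent CHD,
// but only when the DriveHasParent flag is set in the header.
internal static class ChdParentCheck
{
    public static bool RequiresParent(CHDFileV4 chd)
    {
        return (chd.flags & CHDFileV4.Flags.DriveHasParent) != 0;
    }
}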

@@ -0,0 +1,100 @@
using System.IO;
using System.Text;
using SabreTools.IO;
namespace SabreTools.FileTypes.CHD
{
/// <summary>
/// CHD V5 File
/// </summary>
internal class CHDFileV5 : CHDFile
{
/// <summary>
/// Uncompressed map format
/// </summary>
private class UncompressedMap
{
public uint offset; // starting offset within the file
}
/// <summary>
/// Compressed map header format
/// </summary>
private class CompressedMapHeader
{
public uint length; // length of compressed map
public byte[] datastart = new byte[6]; // UINT48; offset of first block
public ushort crc; // crc-16 of the map
public byte lengthbits; // bits used to encode complength
public byte hunkbits; // bits used to encode self-refs
public byte parentunitbits; // bits used to encode parent unit refs
public byte reserved; // future use
}
/// <summary>
/// Compressed map entry format
/// </summary>
private class CompressedMapEntry
{
public byte compression; // compression type
public byte[] complength = new byte[3]; // UINT24; compressed length
public byte[] offset = new byte[6]; // UINT48; offset
public ushort crc; // crc-16 of the data
}
public const int HeaderSize = 124;
public const uint Version = 5;
// V5-specific header values
public uint[] compressors = new uint[4]; // which custom compressors are used?
public ulong logicalbytes; // logical size of the data (in bytes)
public ulong mapoffset; // offset to the map
public ulong metaoffset; // offset to the first blob of metadata
public uint hunkbytes; // number of bytes per hunk
public uint unitbytes; // number of bytes per unit within each hunk
public byte[] rawsha1 = new byte[20]; // raw data SHA1
public byte[] sha1 = new byte[20]; // combined raw+meta SHA1
public byte[] parentsha1 = new byte[20]; // combined raw+meta SHA1 of parent
/// <summary>
/// Parse and validate the header as if it's V5
/// </summary>
public static CHDFileV5 Deserialize(Stream stream)
{
CHDFileV5 chd = new CHDFileV5();
using (BinaryReader br = new BinaryReader(stream, Encoding.Default, true))
{
chd.tag = br.ReadChars(8);
chd.length = br.ReadUInt32BigEndian();
chd.version = br.ReadUInt32BigEndian();
chd.compressors = new uint[4];
for (int i = 0; i < 4; i++)
{
chd.compressors[i] = br.ReadUInt32BigEndian();
}
chd.logicalbytes = br.ReadUInt64BigEndian();
chd.mapoffset = br.ReadUInt64BigEndian();
chd.metaoffset = br.ReadUInt64BigEndian();
chd.hunkbytes = br.ReadUInt32BigEndian();
chd.unitbytes = br.ReadUInt32BigEndian();
chd.rawsha1 = br.ReadBytes(20);
chd.sha1 = br.ReadBytes(20);
chd.parentsha1 = br.ReadBytes(20);
chd.SHA1 = chd.sha1;
}
return chd;
}
/// <summary>
/// Return internal SHA1 hash
/// </summary>
public override byte[] GetHash()
{
return sha1;
}
}
}
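
Every CHD header starts with the same 16 bytes: an 8-byte tag, a 4-byte big-endian length, and a 4-byte big-endian version, so a caller can peek the version and dispatch to the matching parser. A hedged sketch assuming a seekable stream and same-assembly access to the internal classes (the dispatcher itself is illustrative, not part of this commit):

using System.IO;
using SabreTools.FileTypes.CHD;

internal static class ChdDispatch
{
    // Peek the big-endian version at offset 12, rewind, and parse accordingly.
    public static CHDFile DeserializeAny(Stream stream)
    {
        long start = stream.Position;
        byte[] buf = new byte[16];
        if (stream.Read(buf, 0, 16) != 16)
            return null;

        uint version = ((uint)buf[12] << 24) | ((uint)buf[13] << 16)
                     | ((uint)buf[14] << 8) | buf[15];
        stream.Seek(start, SeekOrigin.Begin);

        switch (version)
        {
            case 1: return CHDFileV1.Deserialize(stream);
            case 2: return CHDFileV2.Deserialize(stream);
            case 3: return CHDFileV3.Deserialize(stream);
            case 4: return CHDFileV4.Deserialize(stream);
            case 5: return CHDFileV5.Deserialize(stream);
            default: return null;
        }
    }
}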