mirror of
https://github.com/SabreTools/BinaryObjectScanner.git
synced 2026-04-24 23:30:07 +00:00
Migrate to Serialization package
This commit is contained in:
@@ -1,470 +0,0 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Text;
|
||||
using SabreTools.IO;
|
||||
using SabreTools.Models.AACS;
|
||||
|
||||
namespace BinaryObjectScanner.Builders
{
    /// <summary>
    /// Builder that parses AACS media key block (MKB) data from bytes or streams
    /// into <see cref="SabreTools.Models.AACS"/> model objects.
    /// </summary>
    public class AACS
    {
        #region Byte Data

        /// <summary>
        /// Parse a byte array into an AACS media key block
        /// </summary>
        /// <param name="data">Byte array to parse</param>
        /// <param name="offset">Offset into the byte array</param>
        /// <returns>Filled media key block on success, null on error</returns>
        public static MediaKeyBlock ParseMediaKeyBlock(byte[] data, int offset)
        {
            // If the data is invalid
            if (data == null)
                return null;

            // If the offset is out of bounds
            if (offset < 0 || offset >= data.Length)
                return null;

            // Create a memory stream over the remaining bytes and parse that
            MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
            return ParseMediaKeyBlock(dataStream);
        }

        #endregion

        #region Stream Data

        /// <summary>
        /// Parse a Stream into an AACS media key block
        /// </summary>
        /// <param name="data">Stream to parse; must be seekable and readable</param>
        /// <returns>Filled media key block on success, null on error</returns>
        public static MediaKeyBlock ParseMediaKeyBlock(Stream data)
        {
            // If the data is invalid
            if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
                return null;

            // If the offset is out of bounds
            if (data.Position < 0 || data.Position >= data.Length)
                return null;

            // Cache the current offset
            // NOTE(review): this value is never read again in this method; candidate for removal
            int initialOffset = (int)data.Position;

            // Create a new media key block to fill
            var mediaKeyBlock = new MediaKeyBlock();

            #region Records

            // Create the records list
            var records = new List<Record>();

            // Try to parse the records
            while (data.Position < data.Length)
            {
                // Try to parse the record; any unreadable record aborts the whole parse
                var record = ParseRecord(data);
                if (record == null)
                    return null;

                // Add the record
                records.Add(record);

                // An end-of-media-key-block record terminates the record list
                if (record.RecordType == RecordType.EndOfMediaKeyBlock)
                    break;

                // Align to the 4-byte boundary if we're not at the end
                if (data.Position != data.Length)
                {
                    while ((data.Position % 4) != 0)
                        _ = data.ReadByteValue();
                }
                else
                {
                    break;
                }
            }

            // Set the records
            mediaKeyBlock.Records = records.ToArray();

            #endregion

            return mediaKeyBlock;
        }

        /// <summary>
        /// Parse a Stream into a record
        /// </summary>
        /// <param name="data">Stream to parse</param>
        /// <returns>Filled record on success, null on error</returns>
        private static Record ParseRecord(Stream data)
        {
            // TODO: Use marshalling here instead of building

            // The first 4 bytes make up the type (1 byte) and length (3 bytes, big-endian)
            byte[] typeAndLength = data.ReadBytes(4);
            RecordType type = (RecordType)typeAndLength[0];

            // Zero out the type byte and parse the remaining 3 bytes as big-endian
            // NOTE(review): the Reverse + BitConverter combination assumes a
            // little-endian host -- TODO confirm if big-endian hosts matter here
            typeAndLength[0] = 0x00;
            Array.Reverse(typeAndLength);
            uint length = BitConverter.ToUInt32(typeAndLength, 0);

            // Dispatch to the type-specific parser; `length` includes the 4 header bytes
            switch (type)
            {
                // Recognized record types
                case RecordType.EndOfMediaKeyBlock: return ParseEndOfMediaKeyBlockRecord(data, type, length);
                case RecordType.ExplicitSubsetDifference: return ParseExplicitSubsetDifferenceRecord(data, type, length);
                case RecordType.MediaKeyData: return ParseMediaKeyDataRecord(data, type, length);
                case RecordType.SubsetDifferenceIndex: return ParseSubsetDifferenceIndexRecord(data, type, length);
                case RecordType.TypeAndVersion: return ParseTypeAndVersionRecord(data, type, length);
                case RecordType.DriveRevocationList: return ParseDriveRevocationListRecord(data, type, length);
                case RecordType.HostRevocationList: return ParseHostRevocationListRecord(data, type, length);
                case RecordType.VerifyMediaKey: return ParseVerifyMediaKeyRecord(data, type, length);
                case RecordType.Copyright: return ParseCopyrightRecord(data, type, length);

                // Unrecognized record type
                default:
                    return null;
            }
        }

        /// <summary>
        /// Parse a Stream into an end of media key block record
        /// </summary>
        /// <param name="data">Stream to parse</param>
        /// <param name="type">Record type read from the record header</param>
        /// <param name="length">Total record length including the 4 header bytes</param>
        /// <returns>Filled end of media key block record on success, null on error</returns>
        private static EndOfMediaKeyBlockRecord ParseEndOfMediaKeyBlockRecord(Stream data, RecordType type, uint length)
        {
            // Verify we're calling the right parser
            if (type != RecordType.EndOfMediaKeyBlock)
                return null;

            // TODO: Use marshalling here instead of building
            var record = new EndOfMediaKeyBlockRecord();

            record.RecordType = type;
            record.RecordLength = length;
            // Everything past the 4 header bytes is signature data
            if (length > 4)
                record.SignatureData = data.ReadBytes((int)(length - 4));

            return record;
        }

        /// <summary>
        /// Parse a Stream into an explicit subset-difference record
        /// </summary>
        /// <param name="data">Stream to parse</param>
        /// <param name="type">Record type read from the record header</param>
        /// <param name="length">Total record length including the 4 header bytes</param>
        /// <returns>Filled explicit subset-difference record on success, null on error</returns>
        private static ExplicitSubsetDifferenceRecord ParseExplicitSubsetDifferenceRecord(Stream data, RecordType type, uint length)
        {
            // Verify we're calling the right parser
            if (type != RecordType.ExplicitSubsetDifference)
                return null;

            // TODO: Use marshalling here instead of building
            var record = new ExplicitSubsetDifferenceRecord();

            record.RecordType = type;
            record.RecordLength = length;

            // Cache the record start (the 4 header bytes were already consumed)
            long initialOffset = data.Position - 4;

            // Create the subset difference list
            var subsetDifferences = new List<SubsetDifference>();

            // Try to parse the subset differences (each entry is 1 mask byte + 4 number bytes)
            // NOTE(review): the `- 5` bound means an entry that would exactly fill the
            // record is skipped and discarded below -- TODO confirm against the spec
            while (data.Position < initialOffset + length - 5)
            {
                var subsetDifference = new SubsetDifference();

                subsetDifference.Mask = data.ReadByteValue();
                subsetDifference.Number = data.ReadUInt32BigEndian();

                subsetDifferences.Add(subsetDifference);
            }

            // Set the subset differences
            record.SubsetDifferences = subsetDifferences.ToArray();

            // If there's any data left in the record, discard it
            if (data.Position < initialOffset + length)
                _ = data.ReadBytes((int)(initialOffset + length - data.Position));

            return record;
        }

        /// <summary>
        /// Parse a Stream into a media key data record
        /// </summary>
        /// <param name="data">Stream to parse</param>
        /// <param name="type">Record type read from the record header</param>
        /// <param name="length">Total record length including the 4 header bytes</param>
        /// <returns>Filled media key data record on success, null on error</returns>
        private static MediaKeyDataRecord ParseMediaKeyDataRecord(Stream data, RecordType type, uint length)
        {
            // Verify we're calling the right parser
            if (type != RecordType.MediaKeyData)
                return null;

            // TODO: Use marshalling here instead of building
            var record = new MediaKeyDataRecord();

            record.RecordType = type;
            record.RecordLength = length;

            // Cache the record start (the 4 header bytes were already consumed)
            long initialOffset = data.Position - 4;

            // Create the media key list
            var mediaKeys = new List<byte[]>();

            // Try to parse the media keys (fixed 16 bytes each)
            while (data.Position < initialOffset + length)
            {
                byte[] mediaKey = data.ReadBytes(0x10);
                mediaKeys.Add(mediaKey);
            }

            // Set the media keys
            record.MediaKeyData = mediaKeys.ToArray();

            return record;
        }

        /// <summary>
        /// Parse a Stream into a subset-difference index record
        /// </summary>
        /// <param name="data">Stream to parse</param>
        /// <param name="type">Record type read from the record header</param>
        /// <param name="length">Total record length including the 4 header bytes</param>
        /// <returns>Filled subset-difference index record on success, null on error</returns>
        private static SubsetDifferenceIndexRecord ParseSubsetDifferenceIndexRecord(Stream data, RecordType type, uint length)
        {
            // Verify we're calling the right parser
            if (type != RecordType.SubsetDifferenceIndex)
                return null;

            // TODO: Use marshalling here instead of building
            var record = new SubsetDifferenceIndexRecord();

            record.RecordType = type;
            record.RecordLength = length;

            // Cache the record start (the 4 header bytes were already consumed)
            long initialOffset = data.Position - 4;

            record.Span = data.ReadUInt32BigEndian();

            // Create the offset list
            var offsets = new List<uint>();

            // Try to parse the offsets until the record is exhausted
            while (data.Position < initialOffset + length)
            {
                uint offset = data.ReadUInt32BigEndian();
                offsets.Add(offset);
            }

            // Set the offsets
            record.Offsets = offsets.ToArray();

            return record;
        }

        /// <summary>
        /// Parse a Stream into a type and version record
        /// </summary>
        /// <param name="data">Stream to parse</param>
        /// <param name="type">Record type read from the record header</param>
        /// <param name="length">Total record length including the 4 header bytes</param>
        /// <returns>Filled type and version record on success, null on error</returns>
        private static TypeAndVersionRecord ParseTypeAndVersionRecord(Stream data, RecordType type, uint length)
        {
            // Verify we're calling the right parser
            if (type != RecordType.TypeAndVersion)
                return null;

            // TODO: Use marshalling here instead of building
            var record = new TypeAndVersionRecord();

            record.RecordType = type;
            record.RecordLength = length;
            record.MediaKeyBlockType = (MediaKeyBlockType)data.ReadUInt32BigEndian();
            record.VersionNumber = data.ReadUInt32BigEndian();

            return record;
        }

        /// <summary>
        /// Parse a Stream into a drive revocation list record
        /// </summary>
        /// <param name="data">Stream to parse</param>
        /// <param name="type">Record type read from the record header</param>
        /// <param name="length">Total record length including the 4 header bytes</param>
        /// <returns>Filled drive revocation list record on success, null on error</returns>
        private static DriveRevocationListRecord ParseDriveRevocationListRecord(Stream data, RecordType type, uint length)
        {
            // Verify we're calling the right parser
            if (type != RecordType.DriveRevocationList)
                return null;

            // TODO: Use marshalling here instead of building
            var record = new DriveRevocationListRecord();

            record.RecordType = type;
            record.RecordLength = length;

            // Cache the record start (the 4 header bytes were already consumed)
            long initialOffset = data.Position - 4;

            record.TotalNumberOfEntries = data.ReadUInt32BigEndian();

            // Create the signature blocks list
            var blocks = new List<DriveRevocationSignatureBlock>();

            // Try to parse the signature blocks, bounded by both the declared
            // total entry count and the record length
            int entryCount = 0;
            while (entryCount < record.TotalNumberOfEntries && data.Position < initialOffset + length)
            {
                var block = new DriveRevocationSignatureBlock();

                block.NumberOfEntries = data.ReadUInt32BigEndian();
                block.EntryFields = new DriveRevocationListEntry[block.NumberOfEntries];
                for (int i = 0; i < block.EntryFields.Length; i++)
                {
                    var entry = new DriveRevocationListEntry();

                    entry.Range = data.ReadUInt16BigEndian();
                    entry.DriveID = data.ReadBytes(6);

                    block.EntryFields[i] = entry;
                    entryCount++;
                }

                blocks.Add(block);

                // An empty block terminates the list
                if (block.NumberOfEntries == 0)
                    break;
            }

            // Set the signature blocks
            record.SignatureBlocks = blocks.ToArray();

            // If there's any data left in the record, discard it
            if (data.Position < initialOffset + length)
                _ = data.ReadBytes((int)(initialOffset + length - data.Position));

            return record;
        }

        /// <summary>
        /// Parse a Stream into a host revocation list record
        /// </summary>
        /// <param name="data">Stream to parse</param>
        /// <param name="type">Record type read from the record header</param>
        /// <param name="length">Total record length including the 4 header bytes</param>
        /// <returns>Filled host revocation list record on success, null on error</returns>
        private static HostRevocationListRecord ParseHostRevocationListRecord(Stream data, RecordType type, uint length)
        {
            // Verify we're calling the right parser
            if (type != RecordType.HostRevocationList)
                return null;

            // TODO: Use marshalling here instead of building
            var record = new HostRevocationListRecord();

            record.RecordType = type;
            record.RecordLength = length;

            // Cache the record start (the 4 header bytes were already consumed)
            long initialOffset = data.Position - 4;

            record.TotalNumberOfEntries = data.ReadUInt32BigEndian();

            // Create the signature blocks list
            var blocks = new List<HostRevocationSignatureBlock>();

            // Try to parse the signature blocks, bounded by both the declared
            // total entry count and the record length
            int entryCount = 0;
            while (entryCount < record.TotalNumberOfEntries && data.Position < initialOffset + length)
            {
                var block = new HostRevocationSignatureBlock();

                block.NumberOfEntries = data.ReadUInt32BigEndian();
                block.EntryFields = new HostRevocationListEntry[block.NumberOfEntries];
                for (int i = 0; i < block.EntryFields.Length; i++)
                {
                    var entry = new HostRevocationListEntry();

                    entry.Range = data.ReadUInt16BigEndian();
                    entry.HostID = data.ReadBytes(6);

                    block.EntryFields[i] = entry;
                    entryCount++;
                }

                blocks.Add(block);

                // An empty block terminates the list
                if (block.NumberOfEntries == 0)
                    break;
            }

            // Set the signature blocks
            record.SignatureBlocks = blocks.ToArray();

            // If there's any data left in the record, discard it
            if (data.Position < initialOffset + length)
                _ = data.ReadBytes((int)(initialOffset + length - data.Position));

            return record;
        }

        /// <summary>
        /// Parse a Stream into a verify media key record
        /// </summary>
        /// <param name="data">Stream to parse</param>
        /// <param name="type">Record type read from the record header</param>
        /// <param name="length">Total record length including the 4 header bytes</param>
        /// <returns>Filled verify media key record on success, null on error</returns>
        private static VerifyMediaKeyRecord ParseVerifyMediaKeyRecord(Stream data, RecordType type, uint length)
        {
            // Verify we're calling the right parser
            if (type != RecordType.VerifyMediaKey)
                return null;

            // TODO: Use marshalling here instead of building
            var record = new VerifyMediaKeyRecord();

            record.RecordType = type;
            record.RecordLength = length;
            // The ciphertext value is a fixed 16 bytes
            record.CiphertextValue = data.ReadBytes(0x10);

            return record;
        }

        /// <summary>
        /// Parse a Stream into a copyright record
        /// </summary>
        /// <param name="data">Stream to parse</param>
        /// <param name="type">Record type read from the record header</param>
        /// <param name="length">Total record length including the 4 header bytes</param>
        /// <returns>Filled copyright record on success, null on error</returns>
        private static CopyrightRecord ParseCopyrightRecord(Stream data, RecordType type, uint length)
        {
            // Verify we're calling the right parser
            if (type != RecordType.Copyright)
                return null;

            // TODO: Use marshalling here instead of building
            var record = new CopyrightRecord();

            record.RecordType = type;
            record.RecordLength = length;
            // Everything past the 4 header bytes is NUL-padded ASCII copyright text
            if (length > 4)
            {
                byte[] copyright = data.ReadBytes((int)(length - 4));
                record.Copyright = Encoding.ASCII.GetString(copyright).TrimEnd('\0');
            }

            return record;
        }

        #endregion
    }
}
|
||||
@@ -1,95 +0,0 @@
|
||||
using System.IO;
|
||||
using System.Text;
|
||||
using SabreTools.IO;
|
||||
using SabreTools.Models.BDPlus;
|
||||
using static SabreTools.Models.BDPlus.Constants;
|
||||
|
||||
namespace BinaryObjectScanner.Builders
{
    /// <summary>
    /// Builder that parses BD+ security virtual machine (SVM) data from bytes
    /// or streams into <see cref="SabreTools.Models.BDPlus"/> model objects.
    /// </summary>
    public class BDPlus
    {
        #region Byte Data

        /// <summary>
        /// Parse a byte array into a BD+ SVM
        /// </summary>
        /// <param name="data">Byte array to parse</param>
        /// <param name="offset">Offset into the byte array</param>
        /// <returns>Filled BD+ SVM on success, null on error</returns>
        public static SVM ParseSVM(byte[] data, int offset)
        {
            // If the data is invalid
            if (data == null)
                return null;

            // If the offset is out of bounds
            if (offset < 0 || offset >= data.Length)
                return null;

            // Create a memory stream over the remaining bytes and parse that
            MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
            return ParseSVM(dataStream);
        }

        #endregion

        #region Stream Data

        /// <summary>
        /// Parse a Stream into an BD+ SVM
        /// </summary>
        /// <param name="data">Stream to parse; must be seekable and readable</param>
        /// <returns>Filled BD+ SVM on success, null on error</returns>
        public static SVM ParseSVM(Stream data)
        {
            // If the data is invalid
            if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
                return null;

            // If the offset is out of bounds
            if (data.Position < 0 || data.Position >= data.Length)
                return null;

            // Cache the current offset
            // NOTE(review): this value is never read again in this method; candidate for removal
            int initialOffset = (int)data.Position;

            // Try to parse the SVM
            return ParseSVMData(data);
        }

        /// <summary>
        /// Parse a Stream into an SVM
        /// </summary>
        /// <param name="data">Stream to parse</param>
        /// <returns>Filled SVM on success, null on error</returns>
        private static SVM ParseSVMData(Stream data)
        {
            // TODO: Use marshalling here instead of building
            var svm = new SVM();

            // The 8-byte ASCII signature must match the known BD+ constant
            byte[] signature = data.ReadBytes(8);
            svm.Signature = Encoding.ASCII.GetString(signature);
            if (svm.Signature != SignatureString)
                return null;

            svm.Unknown1 = data.ReadBytes(5);
            svm.Year = data.ReadUInt16BigEndian();
            // Sanity-check the creation date fields
            svm.Month = data.ReadByteValue();
            if (svm.Month < 1 || svm.Month > 12)
                return null;

            svm.Day = data.ReadByteValue();
            if (svm.Day < 1 || svm.Day > 31)
                return null;

            svm.Unknown2 = data.ReadBytes(4);
            // NOTE(review): Length is read little-endian while Year is big-endian;
            // looks inconsistent -- TODO confirm against the format
            svm.Length = data.ReadUInt32();
            // The data payload is deliberately not read here (kept for reference):
            // if (svm.Length > 0)
            //     svm.Data = data.ReadBytes((int)svm.Length);

            return svm;
        }

        #endregion
    }
}
|
||||
@@ -1,150 +0,0 @@
|
||||
using System.IO;
|
||||
using System.Text;
|
||||
using SabreTools.IO;
|
||||
using SabreTools.Models.BFPK;
|
||||
using static SabreTools.Models.BFPK.Constants;
|
||||
|
||||
namespace BinaryObjectScanner.Builders
{
    /// <summary>
    /// Builder that parses BFPK archive data from bytes or streams into
    /// <see cref="SabreTools.Models.BFPK"/> model objects.
    /// </summary>
    public class BFPK
    {
        #region Byte Data

        /// <summary>
        /// Parse a byte array into a BFPK archive
        /// </summary>
        /// <param name="data">Byte array to parse</param>
        /// <param name="offset">Offset into the byte array</param>
        /// <returns>Filled archive on success, null on error</returns>
        public static Archive ParseArchive(byte[] data, int offset)
        {
            // If the data is invalid
            if (data == null)
                return null;

            // If the offset is out of bounds
            if (offset < 0 || offset >= data.Length)
                return null;

            // Create a memory stream over the remaining bytes and parse that
            MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
            return ParseArchive(dataStream);
        }

        #endregion

        #region Stream Data

        /// <summary>
        /// Parse a Stream into a BFPK archive
        /// </summary>
        /// <param name="data">Stream to parse; must be seekable and readable</param>
        /// <returns>Filled archive on success, null on error</returns>
        public static Archive ParseArchive(Stream data)
        {
            // If the data is invalid
            if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
                return null;

            // If the offset is out of bounds
            if (data.Position < 0 || data.Position >= data.Length)
                return null;

            // Cache the current offset
            // NOTE(review): this value is never read again in this method; candidate for removal
            int initialOffset = (int)data.Position;

            // Create a new archive to fill
            var archive = new Archive();

            #region Header

            // Try to parse the header; a bad magic or short read aborts the parse
            var header = ParseHeader(data);
            if (header == null)
                return null;

            // Set the archive header
            archive.Header = header;

            #endregion

            #region Files

            // If we have any files
            if (header.Files > 0)
            {
                var files = new FileEntry[header.Files];

                // Read all entries in turn; any unreadable entry aborts the parse
                for (int i = 0; i < header.Files; i++)
                {
                    var file = ParseFileEntry(data);
                    if (file == null)
                        return null;

                    files[i] = file;
                }

                // Set the files
                archive.Files = files;
            }

            #endregion

            return archive;
        }

        /// <summary>
        /// Parse a Stream into a header
        /// </summary>
        /// <param name="data">Stream to parse</param>
        /// <returns>Filled header on success, null on error</returns>
        private static Header ParseHeader(Stream data)
        {
            // TODO: Use marshalling here instead of building
            Header header = new Header();

            // The 4-byte ASCII magic must match the known BFPK constant
            byte[] magic = data.ReadBytes(4);
            header.Magic = Encoding.ASCII.GetString(magic);
            if (header.Magic != SignatureString)
                return null;

            header.Version = data.ReadInt32();
            header.Files = data.ReadInt32();

            return header;
        }

        /// <summary>
        /// Parse a Stream into a file entry
        /// </summary>
        /// <param name="data">Stream to parse</param>
        /// <returns>Filled file entry on success, null on error</returns>
        private static FileEntry ParseFileEntry(Stream data)
        {
            // TODO: Use marshalling here instead of building
            FileEntry fileEntry = new FileEntry();

            fileEntry.NameSize = data.ReadInt32();
            if (fileEntry.NameSize > 0)
            {
                byte[] name = data.ReadBytes(fileEntry.NameSize);
                fileEntry.Name = Encoding.ASCII.GetString(name);
            }

            fileEntry.UncompressedSize = data.ReadInt32();
            fileEntry.Offset = data.ReadInt32();
            // The compressed size lives at the entry's data offset, so peek there
            // and restore the table position before continuing
            if (fileEntry.Offset > 0)
            {
                long currentOffset = data.Position;
                data.Seek(fileEntry.Offset, SeekOrigin.Begin);
                fileEntry.CompressedSize = data.ReadInt32();
                data.Seek(currentOffset, SeekOrigin.Begin);
            }

            return fileEntry;
        }

        #endregion
    }
}
|
||||
@@ -1,250 +0,0 @@
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using SabreTools.IO;
|
||||
using SabreTools.Models.BSP;
|
||||
using static SabreTools.Models.BSP.Constants;
|
||||
|
||||
namespace BinaryObjectScanner.Builders
{
    /// <summary>
    /// Builder that parses Half-Life (GoldSrc) BSP level data from bytes or
    /// streams into <see cref="SabreTools.Models.BSP"/> model objects.
    /// </summary>
    public static class BSP
    {
        #region Byte Data

        /// <summary>
        /// Parse a byte array into a Half-Life Level
        /// </summary>
        /// <param name="data">Byte array to parse</param>
        /// <param name="offset">Offset into the byte array</param>
        /// <returns>Filled Half-Life Level on success, null on error</returns>
        public static SabreTools.Models.BSP.File ParseFile(byte[] data, int offset)
        {
            // If the data is invalid
            if (data == null)
                return null;

            // If the offset is out of bounds
            if (offset < 0 || offset >= data.Length)
                return null;

            // Create a memory stream over the remaining bytes and parse that
            MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
            return ParseFile(dataStream);
        }

        #endregion

        #region Stream Data

        /// <summary>
        /// Parse a Stream into a Half-Life Level
        /// </summary>
        /// <param name="data">Stream to parse; must be seekable and readable</param>
        /// <returns>Filled Half-Life Level on success, null on error</returns>
        public static SabreTools.Models.BSP.File ParseFile(Stream data)
        {
            // If the data is invalid
            if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
                return null;

            // If the offset is out of bounds
            if (data.Position < 0 || data.Position >= data.Length)
                return null;

            // Cache the current offset
            // NOTE(review): this value is never read again in this method; candidate for removal
            int initialOffset = (int)data.Position;

            // Create a new Half-Life Level to fill
            var file = new SabreTools.Models.BSP.File();

            #region Header

            // Try to parse the header; an unsupported version aborts the parse
            var header = ParseHeader(data);
            if (header == null)
                return null;

            // Set the level header
            file.Header = header;

            #endregion

            #region Lumps

            // Create the lump array
            file.Lumps = new Lump[HL_BSP_LUMP_COUNT];

            // Try to parse the lump directory entries in order
            for (int i = 0; i < HL_BSP_LUMP_COUNT; i++)
            {
                var lump = ParseLump(data);
                file.Lumps[i] = lump;
            }

            #endregion

            #region Texture header

            // Try to get the texture header lump; without texture data the
            // parse is treated as a failure
            var textureDataLump = file.Lumps[HL_BSP_LUMP_TEXTUREDATA];
            if (textureDataLump.Offset == 0 || textureDataLump.Length == 0)
                return null;

            // Seek to the texture header
            data.Seek(textureDataLump.Offset, SeekOrigin.Begin);

            // Try to parse the texture header
            var textureHeader = ParseTextureHeader(data);
            if (textureHeader == null)
                return null;

            // Set the texture header
            file.TextureHeader = textureHeader;

            #endregion

            #region Textures

            // Create the texture array
            file.Textures = new Texture[textureHeader.TextureCount];

            // Try to parse the textures
            for (int i = 0; i < textureHeader.TextureCount; i++)
            {
                // Texture offsets are relative to the texture data lump
                int offset = (int)(textureHeader.Offsets[i] + file.Lumps[HL_BSP_LUMP_TEXTUREDATA].Offset);
                // Skip textures whose offset falls outside the stream,
                // leaving a null slot in the array
                if (offset < 0 || offset >= data.Length)
                    continue;

                // Seek to the texture
                data.Seek(offset, SeekOrigin.Begin);

                var texture = ParseTexture(data);
                file.Textures[i] = texture;
            }

            #endregion

            return file;
        }

        /// <summary>
        /// Parse a Stream into a Half-Life Level header
        /// </summary>
        /// <param name="data">Stream to parse</param>
        /// <returns>Filled Half-Life Level header on success, null on error</returns>
        private static Header ParseHeader(Stream data)
        {
            // TODO: Use marshalling here instead of building
            Header header = new Header();

            // Only recognized versions are 29 (Quake) and 30 (Half-Life)
            header.Version = data.ReadUInt32();
            if (header.Version != 29 && header.Version != 30)
                return null;

            return header;
        }

        /// <summary>
        /// Parse a Stream into a lump
        /// </summary>
        /// <param name="data">Stream to parse</param>
        /// <returns>Filled lump on success, null on error</returns>
        private static Lump ParseLump(Stream data)
        {
            // TODO: Use marshalling here instead of building
            Lump lump = new Lump();

            // Each directory entry is an absolute offset followed by a length
            lump.Offset = data.ReadUInt32();
            lump.Length = data.ReadUInt32();

            return lump;
        }

        /// <summary>
        /// Parse a Stream into a Half-Life Level texture header
        /// </summary>
        /// <param name="data">Stream to parse</param>
        /// <returns>Filled Half-Life Level texture header on success, null on error</returns>
        private static TextureHeader ParseTextureHeader(Stream data)
        {
            // TODO: Use marshalling here instead of building
            TextureHeader textureHeader = new TextureHeader();

            textureHeader.TextureCount = data.ReadUInt32();

            // One offset per texture, each relative to the texture data lump
            var offsets = new uint[textureHeader.TextureCount];

            for (int i = 0; i < textureHeader.TextureCount; i++)
            {
                offsets[i] = data.ReadUInt32();
            }

            textureHeader.Offsets = offsets;

            return textureHeader;
        }

        /// <summary>
        /// Parse a Stream into a texture
        /// </summary>
        /// <param name="data">Stream to parse</param>
        /// <param name="mipmap">Mipmap level (0-3); levels above 0 halve the
        /// stored dimensions per level</param>
        /// <returns>Filled texture on success, null on error</returns>
        private static Texture ParseTexture(Stream data, uint mipmap = 0)
        {
            // TODO: Use marshalling here instead of building
            Texture texture = new Texture();

            // The name is a fixed 16-byte field, truncated at the first NUL
            byte[] name = data.ReadBytes(16).TakeWhile(c => c != '\0').ToArray();
            texture.Name = Encoding.ASCII.GetString(name);
            texture.Width = data.ReadUInt32();
            texture.Height = data.ReadUInt32();
            texture.Offsets = new uint[4];
            for (int i = 0; i < 4; i++)
            {
                texture.Offsets[i] = data.ReadUInt32();
            }

            // Sum the sizes of all populated mipmap levels; level i is a
            // (Width >> i) x (Height >> i) block of 8-bit palette indices
            uint pixelSize = 0;
            for (int i = 0; i < HL_BSP_MIPMAP_COUNT; i++)
            {
                if (texture.Offsets[i] != 0)
                {
                    pixelSize += (texture.Width >> i) * (texture.Height >> i);
                }
            }

            // If we have no pixel data, return the metadata-only texture
            if (pixelSize == 0)
                return texture;

            texture.TextureData = data.ReadBytes((int)pixelSize);
            texture.PaletteSize = data.ReadUInt16();
            // The palette is 3 bytes (RGB) per entry
            texture.PaletteData = data.ReadBytes((int)(texture.PaletteSize * 3));

            // Adjust the reported dimensions based on the requested mipmap level
            switch (mipmap)
            {
                case 1:
                    texture.Width /= 2;
                    texture.Height /= 2;
                    break;
                case 2:
                    texture.Width /= 4;
                    texture.Height /= 4;
                    break;
                case 3:
                    texture.Width /= 8;
                    texture.Height /= 8;
                    break;
            }

            return texture;
        }

        #endregion
    }
}
|
||||
@@ -1,32 +0,0 @@
|
||||
<Project Sdk="Microsoft.NET.Sdk">
|
||||
|
||||
<PropertyGroup>
|
||||
<TargetFrameworks>net48;net6.0;net7.0</TargetFrameworks>
|
||||
<RuntimeIdentifiers>win-x86;win-x64;linux-x64;osx-x64</RuntimeIdentifiers>
|
||||
<Title>BinaryObjectScanner.Builders</Title>
|
||||
<AssemblyName>BinaryObjectScanner.Builders</AssemblyName>
|
||||
<Authors>Matt Nadareski</Authors>
|
||||
<Product>BurnOutSharp</Product>
|
||||
<Copyright>Copyright (c)2022 Matt Nadareski</Copyright>
|
||||
<RepositoryUrl>https://github.com/mnadareski/BurnOutSharp</RepositoryUrl>
|
||||
<Version>2.8</Version>
|
||||
<AssemblyVersion>2.8</AssemblyVersion>
|
||||
<FileVersion>2.8</FileVersion>
|
||||
<IncludeSource>true</IncludeSource>
|
||||
<IncludeSymbols>true</IncludeSymbols>
|
||||
</PropertyGroup>
|
||||
|
||||
<PropertyGroup>
|
||||
<AllowUnsafeBlocks>true</AllowUnsafeBlocks>
|
||||
</PropertyGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<ProjectReference Include="..\BinaryObjectScanner.Utilities\BinaryObjectScanner.Utilities.csproj" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<PackageReference Include="SabreTools.IO" Version="1.1.1" />
|
||||
<PackageReference Include="SabreTools.Models" Version="1.1.1" />
|
||||
</ItemGroup>
|
||||
|
||||
</Project>
|
||||
@@ -1,419 +0,0 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Text;
|
||||
using SabreTools.IO;
|
||||
using SabreTools.Models.CFB;
|
||||
using static SabreTools.Models.CFB.Constants;
|
||||
|
||||
namespace BinaryObjectScanner.Builders
|
||||
{
|
||||
public class CFB
|
||||
{
|
||||
#region Byte Data
|
||||
|
||||
/// <summary>
|
||||
/// Parse a byte array into a Compound File Binary
|
||||
/// </summary>
|
||||
/// <param name="data">Byte array to parse</param>
|
||||
/// <param name="offset">Offset into the byte array</param>
|
||||
/// <returns>Filled Compound File Binary on success, null on error</returns>
|
||||
public static Binary ParseBinary(byte[] data, int offset)
|
||||
{
|
||||
// If the data is invalid
|
||||
if (data == null)
|
||||
return null;
|
||||
|
||||
// If the offset is out of bounds
|
||||
if (offset < 0 || offset >= data.Length)
|
||||
return null;
|
||||
|
||||
// Create a memory stream and parse that
|
||||
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
|
||||
return ParseBinary(dataStream);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Stream Data
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a Compound File Binary
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled Compound File Binary on success, null on error</returns>
|
||||
public static Binary ParseBinary(Stream data)
|
||||
{
|
||||
// If the data is invalid
|
||||
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
|
||||
return null;
|
||||
|
||||
// If the offset is out of bounds
|
||||
if (data.Position < 0 || data.Position >= data.Length)
|
||||
return null;
|
||||
|
||||
// Cache the current offset
|
||||
int initialOffset = (int)data.Position;
|
||||
|
||||
// Create a new binary to fill
|
||||
var binary = new Binary();
|
||||
|
||||
#region Header
|
||||
|
||||
// Try to parse the file header
|
||||
var fileHeader = ParseFileHeader(data);
|
||||
if (fileHeader == null)
|
||||
return null;
|
||||
|
||||
// Set the file header
|
||||
binary.Header = fileHeader;
|
||||
|
||||
#endregion
|
||||
|
||||
#region DIFAT Sector Numbers
|
||||
|
||||
// Create a DIFAT sector table
|
||||
#if NET48
|
||||
var difatSectors = new List<SectorNumber>();
|
||||
#else
|
||||
var difatSectors = new List<SectorNumber?>();
|
||||
#endif
|
||||
|
||||
// Add the sectors from the header
|
||||
difatSectors.AddRange(fileHeader.DIFAT);
|
||||
|
||||
// Loop through and add the DIFAT sectors
|
||||
#if NET48
|
||||
var currentSector = (SectorNumber)fileHeader.FirstDIFATSectorLocation;
|
||||
#else
|
||||
var currentSector = (SectorNumber?)fileHeader.FirstDIFATSectorLocation;
|
||||
#endif
|
||||
for (int i = 0; i < fileHeader.NumberOfDIFATSectors; i++)
|
||||
{
|
||||
// If we have a readable sector
|
||||
if (currentSector <= SectorNumber.MAXREGSECT)
|
||||
{
|
||||
// Get the new next sector information
|
||||
long sectorOffset = (long)((long)(currentSector + 1) * Math.Pow(2, fileHeader.SectorShift));
|
||||
if (sectorOffset < 0 || sectorOffset >= data.Length)
|
||||
return null;
|
||||
|
||||
// Seek to the next sector
|
||||
data.Seek(sectorOffset, SeekOrigin.Begin);
|
||||
|
||||
// Try to parse the sectors
|
||||
var sectorNumbers = ParseSectorNumbers(data, fileHeader.SectorShift);
|
||||
if (sectorNumbers == null)
|
||||
return null;
|
||||
|
||||
// Add the sector shifts
|
||||
difatSectors.AddRange(sectorNumbers);
|
||||
}
|
||||
|
||||
// Get the next sector from the DIFAT
|
||||
currentSector = difatSectors[i];
|
||||
}
|
||||
|
||||
// Assign the DIFAT sectors table
|
||||
binary.DIFATSectorNumbers = difatSectors.ToArray();
|
||||
|
||||
#endregion
|
||||
|
||||
#region FAT Sector Numbers
|
||||
|
||||
// Create a FAT sector table
|
||||
#if NET48
|
||||
var fatSectors = new List<SectorNumber>();
|
||||
#else
|
||||
var fatSectors = new List<SectorNumber?>();
|
||||
#endif
|
||||
|
||||
// Loop through and add the FAT sectors
|
||||
currentSector = binary.DIFATSectorNumbers[0];
|
||||
for (int i = 0; i < fileHeader.NumberOfFATSectors; i++)
|
||||
{
|
||||
// If we have a readable sector
|
||||
if (currentSector <= SectorNumber.MAXREGSECT)
|
||||
{
|
||||
// Get the new next sector information
|
||||
long sectorOffset = (long)((long)(currentSector + 1) * Math.Pow(2, fileHeader.SectorShift));
|
||||
if (sectorOffset < 0 || sectorOffset >= data.Length)
|
||||
return null;
|
||||
|
||||
// Seek to the next sector
|
||||
data.Seek(sectorOffset, SeekOrigin.Begin);
|
||||
|
||||
// Try to parse the sectors
|
||||
var sectorNumbers = ParseSectorNumbers(data, fileHeader.SectorShift);
|
||||
if (sectorNumbers == null)
|
||||
return null;
|
||||
|
||||
// Add the sector shifts
|
||||
fatSectors.AddRange(sectorNumbers);
|
||||
}
|
||||
|
||||
// Get the next sector from the DIFAT
|
||||
currentSector = binary.DIFATSectorNumbers[i];
|
||||
}
|
||||
|
||||
// Assign the FAT sectors table
|
||||
binary.FATSectorNumbers = fatSectors.ToArray();
|
||||
|
||||
#endregion
|
||||
|
||||
#region Mini FAT Sector Numbers
|
||||
|
||||
// Create a mini FAT sector table
|
||||
#if NET48
|
||||
var miniFatSectors = new List<SectorNumber>();
|
||||
#else
|
||||
var miniFatSectors = new List<SectorNumber?>();
|
||||
#endif
|
||||
|
||||
// Loop through and add the mini FAT sectors
|
||||
currentSector = (SectorNumber)fileHeader.FirstMiniFATSectorLocation;
|
||||
for (int i = 0; i < fileHeader.NumberOfMiniFATSectors; i++)
|
||||
{
|
||||
// If we have a readable sector
|
||||
if (currentSector <= SectorNumber.MAXREGSECT)
|
||||
{
|
||||
// Get the new next sector information
|
||||
long sectorOffset = (long)((long)(currentSector + 1) * Math.Pow(2, fileHeader.SectorShift));
|
||||
if (sectorOffset < 0 || sectorOffset >= data.Length)
|
||||
return null;
|
||||
|
||||
// Seek to the next sector
|
||||
data.Seek(sectorOffset, SeekOrigin.Begin);
|
||||
|
||||
// Try to parse the sectors
|
||||
var sectorNumbers = ParseSectorNumbers(data, fileHeader.SectorShift);
|
||||
if (sectorNumbers == null)
|
||||
return null;
|
||||
|
||||
// Add the sector shifts
|
||||
miniFatSectors.AddRange(sectorNumbers);
|
||||
}
|
||||
|
||||
// Get the next sector from the DIFAT
|
||||
currentSector = binary.DIFATSectorNumbers[i];
|
||||
}
|
||||
|
||||
// Assign the mini FAT sectors table
|
||||
binary.MiniFATSectorNumbers = miniFatSectors.ToArray();
|
||||
|
||||
#endregion
|
||||
|
||||
#region Directory Entries
|
||||
|
||||
// Get the offset of the first directory sector
|
||||
long firstDirectoryOffset = (long)(fileHeader.FirstDirectorySectorLocation * Math.Pow(2, fileHeader.SectorShift));
|
||||
if (firstDirectoryOffset < 0 || firstDirectoryOffset >= data.Length)
|
||||
return null;
|
||||
|
||||
// Seek to the first directory sector
|
||||
data.Seek(firstDirectoryOffset, SeekOrigin.Begin);
|
||||
|
||||
// Create a directory sector table
|
||||
var directorySectors = new List<DirectoryEntry>();
|
||||
|
||||
// Get the number of directory sectors
|
||||
uint directorySectorCount = 0;
|
||||
switch (fileHeader.MajorVersion)
|
||||
{
|
||||
case 3:
|
||||
directorySectorCount = int.MaxValue;
|
||||
break;
|
||||
case 4:
|
||||
directorySectorCount = fileHeader.NumberOfDirectorySectors;
|
||||
break;
|
||||
}
|
||||
|
||||
// Loop through and add the directory sectors
|
||||
currentSector = (SectorNumber)fileHeader.FirstDirectorySectorLocation;
|
||||
for (int i = 0; i < directorySectorCount; i++)
|
||||
{
|
||||
// If we have an end of chain
|
||||
if (currentSector == SectorNumber.ENDOFCHAIN)
|
||||
break;
|
||||
|
||||
// If we have a readable sector
|
||||
if (currentSector <= SectorNumber.MAXREGSECT)
|
||||
{
|
||||
// Get the new next sector information
|
||||
long sectorOffset = (long)((long)(currentSector + 1) * Math.Pow(2, fileHeader.SectorShift));
|
||||
if (sectorOffset < 0 || sectorOffset >= data.Length)
|
||||
return null;
|
||||
|
||||
// Seek to the next sector
|
||||
data.Seek(sectorOffset, SeekOrigin.Begin);
|
||||
|
||||
// Try to parse the sectors
|
||||
var directoryEntries = ParseDirectoryEntries(data, fileHeader.SectorShift, fileHeader.MajorVersion);
|
||||
if (directoryEntries == null)
|
||||
return null;
|
||||
|
||||
// Add the sector shifts
|
||||
directorySectors.AddRange(directoryEntries);
|
||||
}
|
||||
|
||||
// Get the next sector from the DIFAT
|
||||
currentSector = binary.DIFATSectorNumbers[i];
|
||||
}
|
||||
|
||||
// Assign the Directory sectors table
|
||||
binary.DirectoryEntries = directorySectors.ToArray();
|
||||
|
||||
#endregion
|
||||
|
||||
return binary;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a file header
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled file header on success, null on error</returns>
|
||||
private static FileHeader ParseFileHeader(Stream data)
|
||||
{
|
||||
// TODO: Use marshalling here instead of building
|
||||
FileHeader header = new FileHeader();
|
||||
|
||||
header.Signature = data.ReadUInt64();
|
||||
if (header.Signature != SignatureUInt64)
|
||||
return null;
|
||||
|
||||
header.CLSID = data.ReadGuid();
|
||||
header.MinorVersion = data.ReadUInt16();
|
||||
header.MajorVersion = data.ReadUInt16();
|
||||
header.ByteOrder = data.ReadUInt16();
|
||||
if (header.ByteOrder != 0xFFFE)
|
||||
return null;
|
||||
|
||||
header.SectorShift = data.ReadUInt16();
|
||||
if (header.MajorVersion == 3 && header.SectorShift != 0x0009)
|
||||
return null;
|
||||
else if (header.MajorVersion == 4 && header.SectorShift != 0x000C)
|
||||
return null;
|
||||
|
||||
header.MiniSectorShift = data.ReadUInt16();
|
||||
header.Reserved = data.ReadBytes(6);
|
||||
header.NumberOfDirectorySectors = data.ReadUInt32();
|
||||
if (header.MajorVersion == 3 && header.NumberOfDirectorySectors != 0)
|
||||
return null;
|
||||
|
||||
header.NumberOfFATSectors = data.ReadUInt32();
|
||||
header.FirstDirectorySectorLocation = data.ReadUInt32();
|
||||
header.TransactionSignatureNumber = data.ReadUInt32();
|
||||
header.MiniStreamCutoffSize = data.ReadUInt32();
|
||||
if (header.MiniStreamCutoffSize != 0x00001000)
|
||||
return null;
|
||||
|
||||
header.FirstMiniFATSectorLocation = data.ReadUInt32();
|
||||
header.NumberOfMiniFATSectors = data.ReadUInt32();
|
||||
header.FirstDIFATSectorLocation = data.ReadUInt32();
|
||||
header.NumberOfDIFATSectors = data.ReadUInt32();
|
||||
#if NET48
|
||||
header.DIFAT = new SectorNumber[109];
|
||||
#else
|
||||
header.DIFAT = new SectorNumber?[109];
|
||||
#endif
|
||||
for (int i = 0; i < header.DIFAT.Length; i++)
|
||||
{
|
||||
header.DIFAT[i] = (SectorNumber)data.ReadUInt32();
|
||||
}
|
||||
|
||||
// Skip rest of sector for version 4
|
||||
if (header.MajorVersion == 4)
|
||||
_ = data.ReadBytes(3584);
|
||||
|
||||
return header;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a sector full of sector numbers
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <param name="sectorShift">Sector shift from the header</param>
|
||||
/// <returns>Filled sector full of sector numbers on success, null on error</returns>
|
||||
#if NET48
|
||||
private static SectorNumber[] ParseSectorNumbers(Stream data, ushort sectorShift)
|
||||
#else
|
||||
private static SectorNumber?[] ParseSectorNumbers(Stream data, ushort sectorShift)
|
||||
#endif
|
||||
{
|
||||
// TODO: Use marshalling here instead of building
|
||||
int sectorCount = (int)(Math.Pow(2, sectorShift) / sizeof(uint));
|
||||
#if NET48
|
||||
var sectorNumbers = new SectorNumber[sectorCount];
|
||||
#else
|
||||
var sectorNumbers = new SectorNumber?[sectorCount];
|
||||
#endif
|
||||
|
||||
for (int i = 0; i < sectorNumbers.Length; i++)
|
||||
{
|
||||
sectorNumbers[i] = (SectorNumber)data.ReadUInt32();
|
||||
}
|
||||
|
||||
return sectorNumbers;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a sector full of directory entries
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <param name="sectorShift">Sector shift from the header</param>
|
||||
/// <param name="majorVersion">Major version from the header</param>
|
||||
/// <returns>Filled sector full of directory entries on success, null on error</returns>
|
||||
private static DirectoryEntry[] ParseDirectoryEntries(Stream data, ushort sectorShift, ushort majorVersion)
|
||||
{
|
||||
// TODO: Use marshalling here instead of building
|
||||
const int directoryEntrySize = 64 + 2 + 1 + 1 + 4 + 4 + 4 + 16 + 4 + 8 + 8 + 4 + 8;
|
||||
int sectorCount = (int)(Math.Pow(2, sectorShift) / directoryEntrySize);
|
||||
DirectoryEntry[] directoryEntries = new DirectoryEntry[sectorCount];
|
||||
|
||||
for (int i = 0; i < directoryEntries.Length; i++)
|
||||
{
|
||||
var directoryEntry = ParseDirectoryEntry(data, majorVersion);
|
||||
if (directoryEntry == null)
|
||||
return null;
|
||||
|
||||
directoryEntries[i] = directoryEntry;
|
||||
}
|
||||
|
||||
return directoryEntries;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a directory entry
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <param name="majorVersion">Major version from the header</param>
|
||||
/// <returns>Filled directory entry on success, null on error</returns>
|
||||
private static DirectoryEntry ParseDirectoryEntry(Stream data, ushort majorVersion)
|
||||
{
|
||||
// TODO: Use marshalling here instead of building
|
||||
DirectoryEntry directoryEntry = new DirectoryEntry();
|
||||
|
||||
byte[] name = data.ReadBytes(64);
|
||||
directoryEntry.Name = Encoding.Unicode.GetString(name).TrimEnd('\0');
|
||||
directoryEntry.NameLength = data.ReadUInt16();
|
||||
directoryEntry.ObjectType = (ObjectType)data.ReadByteValue();
|
||||
directoryEntry.ColorFlag = (ColorFlag)data.ReadByteValue();
|
||||
directoryEntry.LeftSiblingID = (StreamID)data.ReadUInt32();
|
||||
directoryEntry.RightSiblingID = (StreamID)data.ReadUInt32();
|
||||
directoryEntry.ChildID = (StreamID)data.ReadUInt32();
|
||||
directoryEntry.CLSID = data.ReadGuid();
|
||||
directoryEntry.StateBits = data.ReadUInt32();
|
||||
directoryEntry.CreationTime = data.ReadUInt64();
|
||||
directoryEntry.ModifiedTime = data.ReadUInt64();
|
||||
directoryEntry.StartingSectorLocation = data.ReadUInt32();
|
||||
directoryEntry.StreamSize = data.ReadUInt64();
|
||||
if (majorVersion == 3)
|
||||
directoryEntry.StreamSize &= 0x0000FFFF;
|
||||
|
||||
return directoryEntry;
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
}
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,775 +0,0 @@
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Text;
|
||||
using SabreTools.IO;
|
||||
using SabreTools.Models.GCF;
|
||||
|
||||
namespace BinaryObjectScanner.Builders
|
||||
{
|
||||
public static class GCF
|
||||
{
|
||||
#region Byte Data
|
||||
|
||||
/// <summary>
|
||||
/// Parse a byte array into a Half-Life Game Cache
|
||||
/// </summary>
|
||||
/// <param name="data">Byte array to parse</param>
|
||||
/// <param name="offset">Offset into the byte array</param>
|
||||
/// <returns>Filled Half-Life Game Cache on success, null on error</returns>
|
||||
public static SabreTools.Models.GCF.File ParseFile(byte[] data, int offset)
|
||||
{
|
||||
// If the data is invalid
|
||||
if (data == null)
|
||||
return null;
|
||||
|
||||
// If the offset is out of bounds
|
||||
if (offset < 0 || offset >= data.Length)
|
||||
return null;
|
||||
|
||||
// Create a memory stream and parse that
|
||||
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
|
||||
return ParseFile(dataStream);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Stream Data
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a Half-Life Game Cache
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled Half-Life Game Cache on success, null on error</returns>
|
||||
public static SabreTools.Models.GCF.File ParseFile(Stream data)
|
||||
{
|
||||
// If the data is invalid
|
||||
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
|
||||
return null;
|
||||
|
||||
// If the offset is out of bounds
|
||||
if (data.Position < 0 || data.Position >= data.Length)
|
||||
return null;
|
||||
|
||||
// Cache the current offset
|
||||
long initialOffset = data.Position;
|
||||
|
||||
// Create a new Half-Life Game Cache to fill
|
||||
var file = new SabreTools.Models.GCF.File();
|
||||
|
||||
#region Header
|
||||
|
||||
// Try to parse the header
|
||||
var header = ParseHeader(data);
|
||||
if (header == null)
|
||||
return null;
|
||||
|
||||
// Set the game cache header
|
||||
file.Header = header;
|
||||
|
||||
#endregion
|
||||
|
||||
#region Block Entry Header
|
||||
|
||||
// Try to parse the block entry header
|
||||
var blockEntryHeader = ParseBlockEntryHeader(data);
|
||||
if (blockEntryHeader == null)
|
||||
return null;
|
||||
|
||||
// Set the game cache block entry header
|
||||
file.BlockEntryHeader = blockEntryHeader;
|
||||
|
||||
#endregion
|
||||
|
||||
#region Block Entries
|
||||
|
||||
// Create the block entry array
|
||||
file.BlockEntries = new BlockEntry[blockEntryHeader.BlockCount];
|
||||
|
||||
// Try to parse the block entries
|
||||
for (int i = 0; i < blockEntryHeader.BlockCount; i++)
|
||||
{
|
||||
var blockEntry = ParseBlockEntry(data);
|
||||
file.BlockEntries[i] = blockEntry;
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Fragmentation Map Header
|
||||
|
||||
// Try to parse the fragmentation map header
|
||||
var fragmentationMapHeader = ParseFragmentationMapHeader(data);
|
||||
if (fragmentationMapHeader == null)
|
||||
return null;
|
||||
|
||||
// Set the game cache fragmentation map header
|
||||
file.FragmentationMapHeader = fragmentationMapHeader;
|
||||
|
||||
#endregion
|
||||
|
||||
#region Fragmentation Maps
|
||||
|
||||
// Create the fragmentation map array
|
||||
file.FragmentationMaps = new FragmentationMap[fragmentationMapHeader.BlockCount];
|
||||
|
||||
// Try to parse the fragmentation maps
|
||||
for (int i = 0; i < fragmentationMapHeader.BlockCount; i++)
|
||||
{
|
||||
var fragmentationMap = ParseFragmentationMap(data);
|
||||
file.FragmentationMaps[i] = fragmentationMap;
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Block Entry Map Header
|
||||
|
||||
if (header.MinorVersion < 6)
|
||||
{
|
||||
// Try to parse the block entry map header
|
||||
var blockEntryMapHeader = ParseBlockEntryMapHeader(data);
|
||||
if (blockEntryMapHeader == null)
|
||||
return null;
|
||||
|
||||
// Set the game cache block entry map header
|
||||
file.BlockEntryMapHeader = blockEntryMapHeader;
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Block Entry Maps
|
||||
|
||||
if (header.MinorVersion < 6)
|
||||
{
|
||||
// Create the block entry map array
|
||||
file.BlockEntryMaps = new BlockEntryMap[file.BlockEntryMapHeader.BlockCount];
|
||||
|
||||
// Try to parse the block entry maps
|
||||
for (int i = 0; i < file.BlockEntryMapHeader.BlockCount; i++)
|
||||
{
|
||||
var blockEntryMap = ParseBlockEntryMap(data);
|
||||
file.BlockEntryMaps[i] = blockEntryMap;
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
// Cache the current offset
|
||||
initialOffset = data.Position;
|
||||
|
||||
#region Directory Header
|
||||
|
||||
// Try to parse the directory header
|
||||
var directoryHeader = ParseDirectoryHeader(data);
|
||||
if (directoryHeader == null)
|
||||
return null;
|
||||
|
||||
// Set the game cache directory header
|
||||
file.DirectoryHeader = directoryHeader;
|
||||
|
||||
#endregion
|
||||
|
||||
#region Directory Entries
|
||||
|
||||
// Create the directory entry array
|
||||
file.DirectoryEntries = new DirectoryEntry[directoryHeader.ItemCount];
|
||||
|
||||
// Try to parse the directory entries
|
||||
for (int i = 0; i < directoryHeader.ItemCount; i++)
|
||||
{
|
||||
var directoryEntry = ParseDirectoryEntry(data);
|
||||
file.DirectoryEntries[i] = directoryEntry;
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Directory Names
|
||||
|
||||
if (directoryHeader.NameSize > 0)
|
||||
{
|
||||
// Get the current offset for adjustment
|
||||
long directoryNamesStart = data.Position;
|
||||
|
||||
// Get the ending offset
|
||||
long directoryNamesEnd = data.Position + directoryHeader.NameSize;
|
||||
|
||||
// Create the string dictionary
|
||||
file.DirectoryNames = new Dictionary<long, string>();
|
||||
|
||||
// Loop and read the null-terminated strings
|
||||
while (data.Position < directoryNamesEnd)
|
||||
{
|
||||
long nameOffset = data.Position - directoryNamesStart;
|
||||
string directoryName = data.ReadString(Encoding.ASCII);
|
||||
if (data.Position > directoryNamesEnd)
|
||||
{
|
||||
data.Seek(-directoryName.Length, SeekOrigin.Current);
|
||||
byte[] endingData = data.ReadBytes((int)(directoryNamesEnd - data.Position));
|
||||
if (endingData != null)
|
||||
directoryName = Encoding.ASCII.GetString(endingData);
|
||||
else
|
||||
directoryName = null;
|
||||
}
|
||||
|
||||
file.DirectoryNames[nameOffset] = directoryName;
|
||||
}
|
||||
|
||||
// Loop and assign to entries
|
||||
foreach (var directoryEntry in file.DirectoryEntries)
|
||||
{
|
||||
directoryEntry.Name = file.DirectoryNames[directoryEntry.NameOffset];
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Directory Info 1 Entries
|
||||
|
||||
// Create the directory info 1 entry array
|
||||
file.DirectoryInfo1Entries = new DirectoryInfo1Entry[directoryHeader.Info1Count];
|
||||
|
||||
// Try to parse the directory info 1 entries
|
||||
for (int i = 0; i < directoryHeader.Info1Count; i++)
|
||||
{
|
||||
var directoryInfo1Entry = ParseDirectoryInfo1Entry(data);
|
||||
file.DirectoryInfo1Entries[i] = directoryInfo1Entry;
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Directory Info 2 Entries
|
||||
|
||||
// Create the directory info 2 entry array
|
||||
file.DirectoryInfo2Entries = new DirectoryInfo2Entry[directoryHeader.ItemCount];
|
||||
|
||||
// Try to parse the directory info 2 entries
|
||||
for (int i = 0; i < directoryHeader.ItemCount; i++)
|
||||
{
|
||||
var directoryInfo2Entry = ParseDirectoryInfo2Entry(data);
|
||||
file.DirectoryInfo2Entries[i] = directoryInfo2Entry;
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Directory Copy Entries
|
||||
|
||||
// Create the directory copy entry array
|
||||
file.DirectoryCopyEntries = new DirectoryCopyEntry[directoryHeader.CopyCount];
|
||||
|
||||
// Try to parse the directory copy entries
|
||||
for (int i = 0; i < directoryHeader.CopyCount; i++)
|
||||
{
|
||||
var directoryCopyEntry = ParseDirectoryCopyEntry(data);
|
||||
file.DirectoryCopyEntries[i] = directoryCopyEntry;
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Directory Local Entries
|
||||
|
||||
// Create the directory local entry array
|
||||
file.DirectoryLocalEntries = new DirectoryLocalEntry[directoryHeader.LocalCount];
|
||||
|
||||
// Try to parse the directory local entries
|
||||
for (int i = 0; i < directoryHeader.LocalCount; i++)
|
||||
{
|
||||
var directoryLocalEntry = ParseDirectoryLocalEntry(data);
|
||||
file.DirectoryLocalEntries[i] = directoryLocalEntry;
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
// Seek to end of directory section, just in case
|
||||
data.Seek(initialOffset + directoryHeader.DirectorySize, SeekOrigin.Begin);
|
||||
|
||||
#region Directory Map Header
|
||||
|
||||
if (header.MinorVersion >= 5)
|
||||
{
|
||||
// Try to parse the directory map header
|
||||
var directoryMapHeader = ParseDirectoryMapHeader(data);
|
||||
if (directoryMapHeader == null)
|
||||
return null;
|
||||
|
||||
// Set the game cache directory map header
|
||||
file.DirectoryMapHeader = directoryMapHeader;
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Directory Map Entries
|
||||
|
||||
// Create the directory map entry array
|
||||
file.DirectoryMapEntries = new DirectoryMapEntry[directoryHeader.ItemCount];
|
||||
|
||||
// Try to parse the directory map entries
|
||||
for (int i = 0; i < directoryHeader.ItemCount; i++)
|
||||
{
|
||||
var directoryMapEntry = ParseDirectoryMapEntry(data);
|
||||
file.DirectoryMapEntries[i] = directoryMapEntry;
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Checksum Header
|
||||
|
||||
// Try to parse the checksum header
|
||||
var checksumHeader = ParseChecksumHeader(data);
|
||||
if (checksumHeader == null)
|
||||
return null;
|
||||
|
||||
// Set the game cache checksum header
|
||||
file.ChecksumHeader = checksumHeader;
|
||||
|
||||
#endregion
|
||||
|
||||
// Cache the current offset
|
||||
initialOffset = data.Position;
|
||||
|
||||
#region Checksum Map Header
|
||||
|
||||
// Try to parse the checksum map header
|
||||
var checksumMapHeader = ParseChecksumMapHeader(data);
|
||||
if (checksumMapHeader == null)
|
||||
return null;
|
||||
|
||||
// Set the game cache checksum map header
|
||||
file.ChecksumMapHeader = checksumMapHeader;
|
||||
|
||||
#endregion
|
||||
|
||||
#region Checksum Map Entries
|
||||
|
||||
// Create the checksum map entry array
|
||||
file.ChecksumMapEntries = new ChecksumMapEntry[checksumMapHeader.ItemCount];
|
||||
|
||||
// Try to parse the checksum map entries
|
||||
for (int i = 0; i < checksumMapHeader.ItemCount; i++)
|
||||
{
|
||||
var checksumMapEntry = ParseChecksumMapEntry(data);
|
||||
file.ChecksumMapEntries[i] = checksumMapEntry;
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Checksum Entries
|
||||
|
||||
// Create the checksum entry array
|
||||
file.ChecksumEntries = new ChecksumEntry[checksumMapHeader.ChecksumCount];
|
||||
|
||||
// Try to parse the checksum entries
|
||||
for (int i = 0; i < checksumMapHeader.ChecksumCount; i++)
|
||||
{
|
||||
var checksumEntry = ParseChecksumEntry(data);
|
||||
file.ChecksumEntries[i] = checksumEntry;
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
// Seek to end of checksum section, just in case
|
||||
data.Seek(initialOffset + checksumHeader.ChecksumSize, SeekOrigin.Begin);
|
||||
|
||||
#region Data Block Header
|
||||
|
||||
// Try to parse the data block header
|
||||
var dataBlockHeader = ParseDataBlockHeader(data, header.MinorVersion);
|
||||
if (dataBlockHeader == null)
|
||||
return null;
|
||||
|
||||
// Set the game cache data block header
|
||||
file.DataBlockHeader = dataBlockHeader;
|
||||
|
||||
#endregion
|
||||
|
||||
return file;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a Half-Life Game Cache header
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled Half-Life Game Cache on success, null on error</returns>
|
||||
private static Header ParseHeader(Stream data)
|
||||
{
|
||||
// TODO: Use marshalling here instead of building
|
||||
Header header = new Header();
|
||||
|
||||
header.Dummy0 = data.ReadUInt32();
|
||||
if (header.Dummy0 != 0x00000001)
|
||||
return null;
|
||||
|
||||
header.MajorVersion = data.ReadUInt32();
|
||||
if (header.MajorVersion != 0x00000001)
|
||||
return null;
|
||||
|
||||
header.MinorVersion = data.ReadUInt32();
|
||||
if (header.MinorVersion != 3 && header.MinorVersion != 5 && header.MinorVersion != 6)
|
||||
return null;
|
||||
|
||||
header.CacheID = data.ReadUInt32();
|
||||
header.LastVersionPlayed = data.ReadUInt32();
|
||||
header.Dummy1 = data.ReadUInt32();
|
||||
header.Dummy2 = data.ReadUInt32();
|
||||
header.FileSize = data.ReadUInt32();
|
||||
header.BlockSize = data.ReadUInt32();
|
||||
header.BlockCount = data.ReadUInt32();
|
||||
header.Dummy3 = data.ReadUInt32();
|
||||
|
||||
return header;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a Half-Life Game Cache block entry header
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled Half-Life Game Cache block entry header on success, null on error</returns>
|
||||
private static BlockEntryHeader ParseBlockEntryHeader(Stream data)
|
||||
{
|
||||
// TODO: Use marshalling here instead of building
|
||||
BlockEntryHeader blockEntryHeader = new BlockEntryHeader();
|
||||
|
||||
blockEntryHeader.BlockCount = data.ReadUInt32();
|
||||
blockEntryHeader.BlocksUsed = data.ReadUInt32();
|
||||
blockEntryHeader.Dummy0 = data.ReadUInt32();
|
||||
blockEntryHeader.Dummy1 = data.ReadUInt32();
|
||||
blockEntryHeader.Dummy2 = data.ReadUInt32();
|
||||
blockEntryHeader.Dummy3 = data.ReadUInt32();
|
||||
blockEntryHeader.Dummy4 = data.ReadUInt32();
|
||||
blockEntryHeader.Checksum = data.ReadUInt32();
|
||||
|
||||
return blockEntryHeader;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a Half-Life Game Cache block entry
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled Half-Life Game Cache block entry on success, null on error</returns>
|
||||
private static BlockEntry ParseBlockEntry(Stream data)
|
||||
{
|
||||
// TODO: Use marshalling here instead of building
|
||||
BlockEntry blockEntry = new BlockEntry();
|
||||
|
||||
blockEntry.EntryFlags = data.ReadUInt32();
|
||||
blockEntry.FileDataOffset = data.ReadUInt32();
|
||||
blockEntry.FileDataSize = data.ReadUInt32();
|
||||
blockEntry.FirstDataBlockIndex = data.ReadUInt32();
|
||||
blockEntry.NextBlockEntryIndex = data.ReadUInt32();
|
||||
blockEntry.PreviousBlockEntryIndex = data.ReadUInt32();
|
||||
blockEntry.DirectoryIndex = data.ReadUInt32();
|
||||
|
||||
return blockEntry;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a Half-Life Game Cache fragmentation map header
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled Half-Life Game Cache fragmentation map header on success, null on error</returns>
|
||||
private static FragmentationMapHeader ParseFragmentationMapHeader(Stream data)
|
||||
{
|
||||
// TODO: Use marshalling here instead of building
|
||||
FragmentationMapHeader fragmentationMapHeader = new FragmentationMapHeader();
|
||||
|
||||
fragmentationMapHeader.BlockCount = data.ReadUInt32();
|
||||
fragmentationMapHeader.FirstUnusedEntry = data.ReadUInt32();
|
||||
fragmentationMapHeader.Terminator = data.ReadUInt32();
|
||||
fragmentationMapHeader.Checksum = data.ReadUInt32();
|
||||
|
||||
return fragmentationMapHeader;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a Half-Life Game Cache fragmentation map
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled Half-Life Game Cache fragmentation map on success, null on error</returns>
|
||||
private static FragmentationMap ParseFragmentationMap(Stream data)
|
||||
{
|
||||
// TODO: Use marshalling here instead of building
|
||||
FragmentationMap fragmentationMap = new FragmentationMap();
|
||||
|
||||
fragmentationMap.NextDataBlockIndex = data.ReadUInt32();
|
||||
|
||||
return fragmentationMap;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a Half-Life Game Cache block entry map header
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled Half-Life Game Cache block entry map header on success, null on error</returns>
|
||||
private static BlockEntryMapHeader ParseBlockEntryMapHeader(Stream data)
|
||||
{
|
||||
// TODO: Use marshalling here instead of building
|
||||
BlockEntryMapHeader blockEntryMapHeader = new BlockEntryMapHeader();
|
||||
|
||||
blockEntryMapHeader.BlockCount = data.ReadUInt32();
|
||||
blockEntryMapHeader.FirstBlockEntryIndex = data.ReadUInt32();
|
||||
blockEntryMapHeader.LastBlockEntryIndex = data.ReadUInt32();
|
||||
blockEntryMapHeader.Dummy0 = data.ReadUInt32();
|
||||
blockEntryMapHeader.Checksum = data.ReadUInt32();
|
||||
|
||||
return blockEntryMapHeader;
|
||||
}
|
||||
|
||||
/// <summary>
/// Parse a Stream into a Half-Life Game Cache block entry map
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache block entry map on success, null on error</returns>
private static BlockEntryMap ParseBlockEntryMap(Stream data)
{
    // TODO: Use marshalling here instead of building
    // Doubly-linked list node: previous index first, then next index.
    return new BlockEntryMap
    {
        PreviousBlockEntryIndex = data.ReadUInt32(),
        NextBlockEntryIndex = data.ReadUInt32(),
    };
}
|
||||
|
||||
/// <summary>
/// Parse a Stream into a Half-Life Game Cache directory header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache directory header on success, null on error</returns>
private static DirectoryHeader ParseDirectoryHeader(Stream data)
{
    // TODO: Use marshalling here instead of building
    // Fields are initialized in on-disk order; the sequential reads are preserved.
    return new DirectoryHeader
    {
        Dummy0 = data.ReadUInt32(),
        CacheID = data.ReadUInt32(),
        LastVersionPlayed = data.ReadUInt32(),
        ItemCount = data.ReadUInt32(),
        FileCount = data.ReadUInt32(),
        Dummy1 = data.ReadUInt32(),
        DirectorySize = data.ReadUInt32(),
        NameSize = data.ReadUInt32(),
        Info1Count = data.ReadUInt32(),
        CopyCount = data.ReadUInt32(),
        LocalCount = data.ReadUInt32(),
        Dummy2 = data.ReadUInt32(),
        Dummy3 = data.ReadUInt32(),
        Checksum = data.ReadUInt32(),
    };
}
|
||||
|
||||
/// <summary>
/// Parse a Stream into a Half-Life Game Cache directory entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache directory entry on success, null on error</returns>
private static DirectoryEntry ParseDirectoryEntry(Stream data)
{
    // TODO: Use marshalling here instead of building
    // Fields are initialized in on-disk order; DirectoryFlags is stored
    // as a raw 32-bit value and reinterpreted as the flags enum.
    return new DirectoryEntry
    {
        NameOffset = data.ReadUInt32(),
        ItemSize = data.ReadUInt32(),
        ChecksumIndex = data.ReadUInt32(),
        DirectoryFlags = (HL_GCF_FLAG)data.ReadUInt32(),
        ParentIndex = data.ReadUInt32(),
        NextIndex = data.ReadUInt32(),
        FirstIndex = data.ReadUInt32(),
    };
}
|
||||
|
||||
/// <summary>
/// Parse a Stream into a Half-Life Game Cache directory info 1 entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache directory info 1 entry on success, null on error</returns>
private static DirectoryInfo1Entry ParseDirectoryInfo1Entry(Stream data)
{
    // TODO: Use marshalling here instead of building
    // Single 32-bit field of unknown purpose.
    return new DirectoryInfo1Entry
    {
        Dummy0 = data.ReadUInt32(),
    };
}
|
||||
|
||||
/// <summary>
/// Parse a Stream into a Half-Life Game Cache directory info 2 entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache directory info 2 entry on success, null on error</returns>
private static DirectoryInfo2Entry ParseDirectoryInfo2Entry(Stream data)
{
    // TODO: Use marshalling here instead of building
    // Single 32-bit field of unknown purpose.
    return new DirectoryInfo2Entry
    {
        Dummy0 = data.ReadUInt32(),
    };
}
|
||||
|
||||
/// <summary>
/// Parse a Stream into a Half-Life Game Cache directory copy entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache directory copy entry on success, null on error</returns>
private static DirectoryCopyEntry ParseDirectoryCopyEntry(Stream data)
{
    // TODO: Use marshalling here instead of building
    // Single 32-bit directory index.
    return new DirectoryCopyEntry
    {
        DirectoryIndex = data.ReadUInt32(),
    };
}
|
||||
|
||||
/// <summary>
/// Parse a Stream into a Half-Life Game Cache directory local entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache directory local entry on success, null on error</returns>
private static DirectoryLocalEntry ParseDirectoryLocalEntry(Stream data)
{
    // TODO: Use marshalling here instead of building
    // Single 32-bit directory index.
    return new DirectoryLocalEntry
    {
        DirectoryIndex = data.ReadUInt32(),
    };
}
|
||||
|
||||
/// <summary>
/// Parse a Stream into a Half-Life Game Cache directory map header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache directory map header on success, null on error</returns>
private static DirectoryMapHeader ParseDirectoryMapHeader(Stream data)
{
    // TODO: Use marshalling here instead of building
    var header = new DirectoryMapHeader();

    // Both dummy fields carry fixed expected values; any mismatch
    // means this is not a valid directory map header.
    header.Dummy0 = data.ReadUInt32();
    if (header.Dummy0 != 0x00000001)
        return null;

    header.Dummy1 = data.ReadUInt32();
    if (header.Dummy1 != 0x00000000)
        return null;

    return header;
}
|
||||
|
||||
/// <summary>
/// Parse a Stream into a Half-Life Game Cache directory map entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache directory map entry on success, null on error</returns>
private static DirectoryMapEntry ParseDirectoryMapEntry(Stream data)
{
    // TODO: Use marshalling here instead of building
    // Single 32-bit block index.
    return new DirectoryMapEntry
    {
        FirstBlockIndex = data.ReadUInt32(),
    };
}
|
||||
|
||||
/// <summary>
/// Parse a Stream into a Half-Life Game Cache checksum header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache checksum header on success, null on error</returns>
private static ChecksumHeader ParseChecksumHeader(Stream data)
{
    // TODO: Use marshalling here instead of building
    var header = new ChecksumHeader();

    // The dummy field has a fixed expected value; any mismatch
    // means this is not a valid checksum header.
    header.Dummy0 = data.ReadUInt32();
    if (header.Dummy0 != 0x00000001)
        return null;

    header.ChecksumSize = data.ReadUInt32();

    return header;
}
|
||||
|
||||
/// <summary>
/// Parse a Stream into a Half-Life Game Cache checksum map header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache checksum map header on success, null on error</returns>
private static ChecksumMapHeader ParseChecksumMapHeader(Stream data)
{
    // TODO: Use marshalling here instead of building
    var header = new ChecksumMapHeader();

    // Both dummy fields carry fixed expected values; any mismatch
    // means this is not a valid checksum map header.
    header.Dummy0 = data.ReadUInt32();
    if (header.Dummy0 != 0x14893721)
        return null;

    header.Dummy1 = data.ReadUInt32();
    if (header.Dummy1 != 0x00000001)
        return null;

    header.ItemCount = data.ReadUInt32();
    header.ChecksumCount = data.ReadUInt32();

    return header;
}
|
||||
|
||||
/// <summary>
/// Parse a Stream into a Half-Life Game Cache checksum map entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache checksum map entry on success, null on error</returns>
private static ChecksumMapEntry ParseChecksumMapEntry(Stream data)
{
    // TODO: Use marshalling here instead of building
    // Count first, then the index of the first checksum in the run.
    return new ChecksumMapEntry
    {
        ChecksumCount = data.ReadUInt32(),
        FirstChecksumIndex = data.ReadUInt32(),
    };
}
|
||||
|
||||
/// <summary>
/// Parse a Stream into a Half-Life Game Cache checksum entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Game Cache checksum entry on success, null on error</returns>
private static ChecksumEntry ParseChecksumEntry(Stream data)
{
    // TODO: Use marshalling here instead of building
    // Single 32-bit checksum value.
    return new ChecksumEntry
    {
        Checksum = data.ReadUInt32(),
    };
}
|
||||
|
||||
/// <summary>
/// Parse a Stream into a Half-Life Game Cache data block header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="minorVersion">Minor version field from the header</param>
/// <returns>Filled Half-Life Game Cache data block header on success, null on error</returns>
private static DataBlockHeader ParseDataBlockHeader(Stream data, uint minorVersion)
{
    // TODO: Use marshalling here instead of building
    var header = new DataBlockHeader();

    // Older caches omit the LastVersionPlayed field, so it is only read
    // for minor version 5 and above. (NOTE(review): the original comment
    // said "version 3" while the code checks >= 5 — confirm the cutoff.)
    if (minorVersion >= 5)
        header.LastVersionPlayed = data.ReadUInt32();

    header.BlockCount = data.ReadUInt32();
    header.BlockSize = data.ReadUInt32();
    header.FirstBlockOffset = data.ReadUInt32();
    header.BlocksUsed = data.ReadUInt32();
    header.Checksum = data.ReadUInt32();

    return header;
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
}
|
||||
@@ -1,808 +0,0 @@
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Text;
|
||||
using SabreTools.IO;
|
||||
using SabreTools.Models.InstallShieldCabinet;
|
||||
using static SabreTools.Models.InstallShieldCabinet.Constants;
|
||||
|
||||
namespace BinaryObjectScanner.Builders
|
||||
{
|
||||
// TODO: Add multi-cabinet reading
|
||||
public class InstallShieldCabinet
|
||||
{
|
||||
#region Byte Data
|
||||
|
||||
/// <summary>
/// Parse a byte array into a InstallShield Cabinet file
/// </summary>
/// <param name="data">Byte array to parse</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>Filled cabinet on success, null on error</returns>
public static Cabinet ParseCabinet(byte[] data, int offset)
{
    // Nothing to parse without a buffer
    if (data == null)
        return null;

    // Reject offsets outside of the buffer
    if (offset < 0 || offset >= data.Length)
        return null;

    // Wrap the remaining bytes in a stream and defer to the stream parser
    var dataStream = new MemoryStream(data, offset, data.Length - offset);
    return ParseCabinet(dataStream);
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Stream Data
|
||||
|
||||
/// <summary>
/// Parse a Stream into a InstallShield Cabinet file
/// </summary>
/// <param name="data">Stream to parse; must be seekable and readable</param>
/// <returns>Filled cabinet on success, null on error</returns>
public static Cabinet ParseCabinet(Stream data)
{
    // If the data is invalid
    if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
        return null;

    // If the offset is out of bounds
    if (data.Position < 0 || data.Position >= data.Length)
        return null;

    // Create a new cabinet to fill
    var cabinet = new Cabinet();

    #region Common Header

    // Try to parse the cabinet header
    var commonHeader = ParseCommonHeader(data);
    if (commonHeader == null)
        return null;

    // Set the cabinet header
    cabinet.CommonHeader = commonHeader;

    #endregion

    #region Volume Header

    // Try to parse the volume header
    var volumeHeader = ParseVolumeHeader(data, GetMajorVersion(commonHeader));
    if (volumeHeader == null)
        return null;

    // Set the volume header
    cabinet.VolumeHeader = volumeHeader;

    #endregion

    #region Descriptor

    // Get the descriptor offset (unsigned, so only the upper bound needs checking)
    uint descriptorOffset = commonHeader.DescriptorOffset;
    if (descriptorOffset >= data.Length)
        return null;

    // Seek to the descriptor
    data.Seek(descriptorOffset, SeekOrigin.Begin);

    // Try to parse the descriptor
    var descriptor = ParseDescriptor(data);
    if (descriptor == null)
        return null;

    // Set the descriptor
    cabinet.Descriptor = descriptor;

    #endregion

    #region File Descriptor Offsets

    // Get the file table offset (unsigned, so only the upper bound needs checking)
    uint fileTableOffset = commonHeader.DescriptorOffset + descriptor.FileTableOffset;
    if (fileTableOffset >= data.Length)
        return null;

    // Seek to the file table
    data.Seek(fileTableOffset, SeekOrigin.Begin);

    // Get the number of file table items; V5 and below store
    // directory and file entries in the same table
    uint fileTableItems;
    if (GetMajorVersion(commonHeader) <= 5)
        fileTableItems = descriptor.DirectoryCount + descriptor.FileCount;
    else
        fileTableItems = descriptor.DirectoryCount;

    // Create and fill the file table
    cabinet.FileDescriptorOffsets = new uint[fileTableItems];
    for (int i = 0; i < cabinet.FileDescriptorOffsets.Length; i++)
    {
        cabinet.FileDescriptorOffsets[i] = data.ReadUInt32();
    }

    #endregion

    #region Directory Descriptors

    // Create and fill the directory descriptors
    cabinet.DirectoryNames = new string[descriptor.DirectoryCount];
    for (int i = 0; i < descriptor.DirectoryCount; i++)
    {
        // Get the directory descriptor offset
        uint offset = descriptorOffset
            + descriptor.FileTableOffset
            + cabinet.FileDescriptorOffsets[i];

        // If we have an invalid offset
        if (offset >= data.Length)
            continue;

        // Seek to the file descriptor offset
        data.Seek(offset, SeekOrigin.Begin);

        // Create and add the file descriptor
        string directoryName = ParseDirectoryName(data, GetMajorVersion(commonHeader));
        cabinet.DirectoryNames[i] = directoryName;
    }

    #endregion

    #region File Descriptors

    // Create and fill the file descriptors
    cabinet.FileDescriptors = new FileDescriptor[descriptor.FileCount];
    for (int i = 0; i < descriptor.FileCount; i++)
    {
        // Get the file descriptor offset; V5 and below index into the shared
        // file table after the directory entries, newer versions use a
        // fixed-size (0x57 byte) record array
        uint offset;
        if (GetMajorVersion(commonHeader) <= 5)
        {
            offset = descriptorOffset
                + descriptor.FileTableOffset
                + cabinet.FileDescriptorOffsets[descriptor.DirectoryCount + i];
        }
        else
        {
            offset = descriptorOffset
                + descriptor.FileTableOffset
                + descriptor.FileTableOffset2
                + (uint)(i * 0x57);
        }

        // If we have an invalid offset
        if (offset >= data.Length)
            continue;

        // Seek to the file descriptor offset
        data.Seek(offset, SeekOrigin.Begin);

        // Create and add the file descriptor
        FileDescriptor fileDescriptor = ParseFileDescriptor(data, GetMajorVersion(commonHeader), descriptorOffset + descriptor.FileTableOffset);
        cabinet.FileDescriptors[i] = fileDescriptor;
    }

    #endregion

    #region File Group Offsets

    // Create and fill the file group offsets
    cabinet.FileGroupOffsets = new Dictionary<long, OffsetList>();
    for (int i = 0; i < descriptor.FileGroupOffsets.Length; i++)
    {
        // Get the file group offset
        uint offset = descriptor.FileGroupOffsets[i];
        if (offset == 0)
            continue;

        // Adjust the file group offset
        offset += commonHeader.DescriptorOffset;
        if (offset >= data.Length)
            continue;

        // Seek to the file group offset
        data.Seek(offset, SeekOrigin.Begin);

        // Create and add the offset
        OffsetList offsetList = ParseOffsetList(data, GetMajorVersion(commonHeader), descriptorOffset);
        cabinet.FileGroupOffsets[descriptor.FileGroupOffsets[i]] = offsetList;

        // Follow the linked list of additional offsets, if any
        uint nextOffset = offsetList.NextOffset;
        while (nextOffset != 0)
        {
            // Get the next offset to read
            uint internalOffset = nextOffset + commonHeader.DescriptorOffset;

            // Seek to the file group offset
            data.Seek(internalOffset, SeekOrigin.Begin);

            // Create and add the offset
            offsetList = ParseOffsetList(data, GetMajorVersion(commonHeader), descriptorOffset);
            cabinet.FileGroupOffsets[nextOffset] = offsetList;

            // Set the next offset
            nextOffset = offsetList.NextOffset;
        }
    }

    #endregion

    #region File Groups

    // Create the file groups array
    cabinet.FileGroups = new FileGroup[cabinet.FileGroupOffsets.Count];

    // Create and fill the file groups
    int fileGroupId = 0;
    foreach (var kvp in cabinet.FileGroupOffsets)
    {
        // Get the offset
        OffsetList list = kvp.Value;
        if (list == null)
        {
            fileGroupId++;
            continue;
        }

        // If we have an invalid offset
        if (list.DescriptorOffset <= 0)
        {
            fileGroupId++;
            continue;
        }

        // Seek to the file group
        data.Seek(list.DescriptorOffset + descriptorOffset, SeekOrigin.Begin);

        // Try to parse the file group
        var fileGroup = ParseFileGroup(data, GetMajorVersion(commonHeader), descriptorOffset);
        if (fileGroup == null)
            return null;

        // Add the file group
        cabinet.FileGroups[fileGroupId++] = fileGroup;
    }

    #endregion

    #region Component Offsets

    // Create and fill the component offsets
    cabinet.ComponentOffsets = new Dictionary<long, OffsetList>();
    for (int i = 0; i < descriptor.ComponentOffsets.Length; i++)
    {
        // Get the component offset
        uint offset = descriptor.ComponentOffsets[i];
        if (offset == 0)
            continue;

        // Adjust the component offset
        offset += commonHeader.DescriptorOffset;
        if (offset >= data.Length)
            continue;

        // Seek to the component offset
        data.Seek(offset, SeekOrigin.Begin);

        // Create and add the offset
        OffsetList offsetList = ParseOffsetList(data, GetMajorVersion(commonHeader), descriptorOffset);
        cabinet.ComponentOffsets[descriptor.ComponentOffsets[i]] = offsetList;

        // Follow the linked list of additional offsets, if any
        uint nextOffset = offsetList.NextOffset;
        while (nextOffset != 0)
        {
            // Get the next offset to read
            uint internalOffset = nextOffset + commonHeader.DescriptorOffset;

            // Seek to the component offset
            data.Seek(internalOffset, SeekOrigin.Begin);

            // Create and add the offset
            offsetList = ParseOffsetList(data, GetMajorVersion(commonHeader), descriptorOffset);
            cabinet.ComponentOffsets[nextOffset] = offsetList;

            // Set the next offset
            nextOffset = offsetList.NextOffset;
        }
    }

    #endregion

    #region Components

    // Create the components array
    cabinet.Components = new Component[cabinet.ComponentOffsets.Count];

    // Create and fill the components
    int componentId = 0;
    foreach (KeyValuePair<long, OffsetList> kvp in cabinet.ComponentOffsets)
    {
        // Get the offset
        OffsetList list = kvp.Value;
        if (list == null)
        {
            componentId++;
            continue;
        }

        // If we have an invalid offset
        if (list.DescriptorOffset <= 0)
        {
            componentId++;
            continue;
        }

        // Seek to the component
        data.Seek(list.DescriptorOffset + descriptorOffset, SeekOrigin.Begin);

        // Try to parse the component
        var component = ParseComponent(data, GetMajorVersion(commonHeader), descriptorOffset);
        if (component == null)
            return null;

        // Add the component
        cabinet.Components[componentId++] = component;
    }

    #endregion

    // TODO: Parse setup types

    return cabinet;
}
|
||||
|
||||
/// <summary>
/// Parse a Stream into a common header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled common header on success, null on error</returns>
private static CommonHeader ParseCommonHeader(Stream data)
{
    // The first four bytes must match the known signature string
    byte[] signature = data.ReadBytes(4);
    string signatureString = Encoding.ASCII.GetString(signature);
    if (signatureString != SignatureString)
        return null;

    // Remaining fields are initialized in on-disk order
    return new CommonHeader
    {
        Signature = signatureString,
        Version = data.ReadUInt32(),
        VolumeInfo = data.ReadUInt32(),
        DescriptorOffset = data.ReadUInt32(),
        DescriptorSize = data.ReadUInt32(),
    };
}
|
||||
|
||||
/// <summary>
/// Parse a Stream into a volume header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="majorVersion">Major version of the cabinet</param>
/// <returns>Filled volume header on success, null on error</returns>
private static VolumeHeader ParseVolumeHeader(Stream data, int majorVersion)
{
    // The layout differs between legacy (V5 and below) and newer cabinets
    if (majorVersion <= 5)
    {
        var legacyHeader = new VolumeHeader();

        legacyHeader.DataOffset = data.ReadUInt32();
        _ = data.ReadBytes(0x04); // Skip 0x04 bytes, unknown data?
        legacyHeader.FirstFileIndex = data.ReadUInt32();
        legacyHeader.LastFileIndex = data.ReadUInt32();
        legacyHeader.FirstFileOffset = data.ReadUInt32();
        legacyHeader.FirstFileSizeExpanded = data.ReadUInt32();
        legacyHeader.FirstFileSizeCompressed = data.ReadUInt32();
        legacyHeader.LastFileOffset = data.ReadUInt32();
        legacyHeader.LastFileSizeExpanded = data.ReadUInt32();
        legacyHeader.LastFileSizeCompressed = data.ReadUInt32();

        return legacyHeader;
    }

    // Newer cabinets add "high" halves for each value
    // TODO: Should standard and high values be combined?
    return new VolumeHeader
    {
        DataOffset = data.ReadUInt32(),
        DataOffsetHigh = data.ReadUInt32(),
        FirstFileIndex = data.ReadUInt32(),
        LastFileIndex = data.ReadUInt32(),
        FirstFileOffset = data.ReadUInt32(),
        FirstFileOffsetHigh = data.ReadUInt32(),
        FirstFileSizeExpanded = data.ReadUInt32(),
        FirstFileSizeExpandedHigh = data.ReadUInt32(),
        FirstFileSizeCompressed = data.ReadUInt32(),
        FirstFileSizeCompressedHigh = data.ReadUInt32(),
        LastFileOffset = data.ReadUInt32(),
        LastFileOffsetHigh = data.ReadUInt32(),
        LastFileSizeExpanded = data.ReadUInt32(),
        LastFileSizeExpandedHigh = data.ReadUInt32(),
        LastFileSizeCompressed = data.ReadUInt32(),
        LastFileSizeCompressedHigh = data.ReadUInt32(),
    };
}
|
||||
|
||||
/// <summary>
/// Parse a Stream into a descriptor
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled descriptor on success, null on error</returns>
private static Descriptor ParseDescriptor(Stream data)
{
    var desc = new Descriptor();

    // Fixed-position header fields, read in on-disk order
    desc.StringsOffset = data.ReadUInt32();
    desc.Reserved0 = data.ReadBytes(4);
    desc.ComponentListOffset = data.ReadUInt32();
    desc.FileTableOffset = data.ReadUInt32();
    desc.Reserved1 = data.ReadBytes(4);
    desc.FileTableSize = data.ReadUInt32();
    desc.FileTableSize2 = data.ReadUInt32();
    desc.DirectoryCount = data.ReadUInt16();
    desc.Reserved2 = data.ReadBytes(4);
    desc.Reserved3 = data.ReadBytes(2);
    desc.Reserved4 = data.ReadBytes(4);
    desc.FileCount = data.ReadUInt32();
    desc.FileTableOffset2 = data.ReadUInt32();
    desc.ComponentTableInfoCount = data.ReadUInt16();
    desc.ComponentTableOffset = data.ReadUInt32();
    desc.Reserved5 = data.ReadBytes(4);
    desc.Reserved6 = data.ReadBytes(4);

    // Fixed-capacity offset tables; unused slots read as zero
    desc.FileGroupOffsets = new uint[MAX_FILE_GROUP_COUNT];
    for (int index = 0; index < desc.FileGroupOffsets.Length; index++)
    {
        desc.FileGroupOffsets[index] = data.ReadUInt32();
    }

    desc.ComponentOffsets = new uint[MAX_COMPONENT_COUNT];
    for (int index = 0; index < desc.ComponentOffsets.Length; index++)
    {
        desc.ComponentOffsets[index] = data.ReadUInt32();
    }

    // Trailing fields
    desc.SetupTypesOffset = data.ReadUInt32();
    desc.SetupTableOffset = data.ReadUInt32();
    desc.Reserved7 = data.ReadBytes(4);
    desc.Reserved8 = data.ReadBytes(4);

    return desc;
}
|
||||
|
||||
/// <summary>
/// Parse a Stream into an offset list
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="majorVersion">Major version of the cabinet</param>
/// <param name="descriptorOffset">Offset of the cabinet descriptor</param>
/// <returns>Filled offset list on success, null on error</returns>
private static OffsetList ParseOffsetList(Stream data, int majorVersion, uint descriptorOffset)
{
    var list = new OffsetList();

    list.NameOffset = data.ReadUInt32();
    list.DescriptorOffset = data.ReadUInt32();
    list.NextOffset = data.ReadUInt32();

    // Remember where the fixed-size record ended so we can return here
    long savedPosition = data.Position;

    // The name lives elsewhere in the file, relative to the descriptor
    data.Seek(list.NameOffset + descriptorOffset, SeekOrigin.Begin);

    // V17+ cabinets store strings as UTF-16, earlier ones as ASCII
    Encoding encoding = majorVersion >= 17 ? Encoding.Unicode : Encoding.ASCII;
    list.Name = data.ReadString(encoding);

    // Restore the stream position for the caller
    data.Seek(savedPosition, SeekOrigin.Begin);

    return list;
}
|
||||
|
||||
/// <summary>
/// Parse a Stream into a file group
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="majorVersion">Major version of the cabinet</param>
/// <param name="descriptorOffset">Offset of the cabinet descriptor</param>
/// <returns>Filled file group on success, null on error</returns>
private static FileGroup ParseFileGroup(Stream data, int majorVersion, uint descriptorOffset)
{
    var group = new FileGroup();

    group.NameOffset = data.ReadUInt32();

    group.ExpandedSize = data.ReadUInt32();
    group.Reserved0 = data.ReadBytes(4);
    group.CompressedSize = data.ReadUInt32();
    group.Reserved1 = data.ReadBytes(4);
    group.Reserved2 = data.ReadBytes(2);
    group.Attribute1 = data.ReadUInt16();
    group.Attribute2 = data.ReadUInt16();

    // TODO: Figure out what data lives in this area for V5 and below
    if (majorVersion <= 5)
        data.Seek(0x36, SeekOrigin.Current);

    group.FirstFile = data.ReadUInt32();
    group.LastFile = data.ReadUInt32();
    group.UnknownOffset = data.ReadUInt32();
    group.Var4Offset = data.ReadUInt32();
    group.Var1Offset = data.ReadUInt32();
    group.HTTPLocationOffset = data.ReadUInt32();
    group.FTPLocationOffset = data.ReadUInt32();
    group.MiscOffset = data.ReadUInt32();
    group.Var2Offset = data.ReadUInt32();
    group.TargetDirectoryOffset = data.ReadUInt32();
    group.Reserved3 = data.ReadBytes(2);
    group.Reserved4 = data.ReadBytes(2);
    group.Reserved5 = data.ReadBytes(2);
    group.Reserved6 = data.ReadBytes(2);
    group.Reserved7 = data.ReadBytes(2);

    // Remember where the fixed-size record ended so we can return here
    long savedPosition = data.Position;

    // Resolve the name, if one is present; it lives elsewhere in the file,
    // relative to the descriptor
    if (group.NameOffset != 0)
    {
        data.Seek(group.NameOffset + descriptorOffset, SeekOrigin.Begin);

        // V17+ cabinets store strings as UTF-16, earlier ones as ASCII
        Encoding encoding = majorVersion >= 17 ? Encoding.Unicode : Encoding.ASCII;
        group.Name = data.ReadString(encoding);
    }

    // Restore the stream position for the caller
    data.Seek(savedPosition, SeekOrigin.Begin);

    return group;
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a component
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <param name="majorVersion">Major version of the cabinet</param>
|
||||
/// <param name="descriptorOffset">Offset of the cabinet descriptor</param>
|
||||
/// <returns>Filled component on success, null on error</returns>
|
||||
private static Component ParseComponent(Stream data, int majorVersion, uint descriptorOffset)
{
    Component component = new Component();

    // Fixed-size header: a run of offsets (relative to the cabinet
    // descriptor), counts, and reserved fields, read in on-disk order.
    component.IdentifierOffset = data.ReadUInt32();
    component.DescriptorOffset = data.ReadUInt32();
    component.DisplayNameOffset = data.ReadUInt32();
    component.Reserved0 = data.ReadBytes(2);
    component.ReservedOffset0 = data.ReadUInt32();
    component.ReservedOffset1 = data.ReadUInt32();
    component.ComponentIndex = data.ReadUInt16();
    component.NameOffset = data.ReadUInt32();
    component.ReservedOffset2 = data.ReadUInt32();
    component.ReservedOffset3 = data.ReadUInt32();
    component.ReservedOffset4 = data.ReadUInt32();
    component.Reserved1 = data.ReadBytes(32);
    component.CLSIDOffset = data.ReadUInt32();
    component.Reserved2 = data.ReadBytes(28);
    // Older cabinets (major version <= 5) pad with 2 bytes here, newer with 1
    component.Reserved3 = data.ReadBytes(majorVersion <= 5 ? 2 : 1);
    component.DependsCount = data.ReadUInt16();
    component.DependsOffset = data.ReadUInt32();
    component.FileGroupCount = data.ReadUInt16();
    component.FileGroupNamesOffset = data.ReadUInt32();
    component.X3Count = data.ReadUInt16();
    component.X3Offset = data.ReadUInt32();
    component.SubComponentsCount = data.ReadUInt16();
    component.SubComponentsOffset = data.ReadUInt32();
    component.NextComponentOffset = data.ReadUInt32();
    component.ReservedOffset5 = data.ReadUInt32();
    component.ReservedOffset6 = data.ReadUInt32();
    component.ReservedOffset7 = data.ReadUInt32();
    component.ReservedOffset8 = data.ReadUInt32();

    // Cache the current position so it can be restored after the
    // out-of-line string/GUID reads below
    long currentPosition = data.Position;

    // Read the identifier, if possible
    if (component.IdentifierOffset != 0)
    {
        // Seek to the identifier (offset is relative to the descriptor)
        data.Seek(component.IdentifierOffset + descriptorOffset, SeekOrigin.Begin);

        // Read the string; version 17+ stores UTF-16, earlier versions ASCII
        if (majorVersion >= 17)
            component.Identifier = data.ReadString(Encoding.Unicode);
        else
            component.Identifier = data.ReadString(Encoding.ASCII);
    }

    // Read the display name, if possible
    if (component.DisplayNameOffset != 0)
    {
        // Seek to the name
        data.Seek(component.DisplayNameOffset + descriptorOffset, SeekOrigin.Begin);

        // Read the string
        if (majorVersion >= 17)
            component.DisplayName = data.ReadString(Encoding.Unicode);
        else
            component.DisplayName = data.ReadString(Encoding.ASCII);
    }

    // Read the name, if possible
    if (component.NameOffset != 0)
    {
        // Seek to the name
        data.Seek(component.NameOffset + descriptorOffset, SeekOrigin.Begin);

        // Read the string
        if (majorVersion >= 17)
            component.Name = data.ReadString(Encoding.Unicode);
        else
            component.Name = data.ReadString(Encoding.ASCII);
    }

    // Read the CLSID, if possible
    if (component.CLSIDOffset != 0)
    {
        // Seek to the CLSID
        data.Seek(component.CLSIDOffset + descriptorOffset, SeekOrigin.Begin);

        // Read the GUID
        component.CLSID = data.ReadGuid();
    }

    // Read the file group names, if possible
    if (component.FileGroupCount != 0 && component.FileGroupNamesOffset != 0)
    {
        // Seek to the file group table offset
        data.Seek(component.FileGroupNamesOffset + descriptorOffset, SeekOrigin.Begin);

        // Read the file group names table: an array of string offsets,
        // each of which is dereferenced (and the position restored) in turn
        component.FileGroupNames = new string[component.FileGroupCount];
        for (int j = 0; j < component.FileGroupCount; j++)
        {
            // Get the name offset
            uint nameOffset = data.ReadUInt32();

            // Cache the current offset
            long preNameOffset = data.Position;

            // Seek to the name offset
            data.Seek(nameOffset + descriptorOffset, SeekOrigin.Begin);

            if (majorVersion >= 17)
                component.FileGroupNames[j] = data.ReadString(Encoding.Unicode);
            else
                component.FileGroupNames[j] = data.ReadString(Encoding.ASCII);

            // Seek back to the original position
            data.Seek(preNameOffset, SeekOrigin.Begin);
        }
    }

    // Seek back to the correct offset
    data.Seek(currentPosition, SeekOrigin.Begin);

    return component;
}
|
||||
|
||||
/// <summary>
/// Parse a Stream into a directory name
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="majorVersion">Major version of the cabinet</param>
/// <returns>Filled directory name on success, null on error</returns>
private static string ParseDirectoryName(Stream data, int majorVersion)
{
    // Version 17 and later store names as UTF-16; older versions use ASCII
    Encoding encoding = majorVersion >= 17 ? Encoding.Unicode : Encoding.ASCII;
    return data.ReadString(encoding);
}
|
||||
|
||||
/// <summary>
/// Parse a Stream into a file descriptor
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="majorVersion">Major version of the cabinet</param>
/// <param name="descriptorOffset">Offset of the cabinet descriptor</param>
/// <returns>Filled file descriptor on success, null on error</returns>
private static FileDescriptor ParseFileDescriptor(Stream data, int majorVersion, uint descriptorOffset)
{
    FileDescriptor fileDescriptor = new FileDescriptor();

    // Read the descriptor based on version
    if (majorVersion <= 5)
    {
        // Legacy layout (v5 and earlier): 32-bit sizes and offsets
        fileDescriptor.Volume = 0xFFFF; // Set by the header index
        fileDescriptor.NameOffset = data.ReadUInt32();
        fileDescriptor.DirectoryIndex = data.ReadUInt32();
        fileDescriptor.Flags = (FileFlags)data.ReadUInt16();
        fileDescriptor.ExpandedSize = data.ReadUInt32();
        fileDescriptor.CompressedSize = data.ReadUInt32();
        _ = data.ReadBytes(0x14); // Skip 0x14 bytes, unknown data?
        fileDescriptor.DataOffset = data.ReadUInt32();

        // Only version 5 carries an MD5 digest in the legacy layout
        if (majorVersion == 5)
            fileDescriptor.MD5 = data.ReadBytes(0x10);
    }
    else
    {
        // Modern layout (v6+): 64-bit sizes and offsets, MD5 always present
        fileDescriptor.Flags = (FileFlags)data.ReadUInt16();
        fileDescriptor.ExpandedSize = data.ReadUInt64();
        fileDescriptor.CompressedSize = data.ReadUInt64();
        fileDescriptor.DataOffset = data.ReadUInt64();
        fileDescriptor.MD5 = data.ReadBytes(0x10);
        _ = data.ReadBytes(0x10); // Skip 0x10 bytes, unknown data?
        fileDescriptor.NameOffset = data.ReadUInt32();
        fileDescriptor.DirectoryIndex = data.ReadUInt16();
        _ = data.ReadBytes(0x0C); // Skip 0x0C bytes, unknown data?
        fileDescriptor.LinkPrevious = data.ReadUInt32();
        fileDescriptor.LinkNext = data.ReadUInt32();
        fileDescriptor.LinkFlags = (LinkFlags)data.ReadByteValue();
        fileDescriptor.Volume = data.ReadUInt16();
    }

    // Cache the current position so it can be restored after the name read
    long currentPosition = data.Position;

    // Read the name, if possible
    if (fileDescriptor.NameOffset != 0)
    {
        // Seek to the name (offset is relative to the cabinet descriptor)
        data.Seek(fileDescriptor.NameOffset + descriptorOffset, SeekOrigin.Begin);

        // Read the string; version 17+ stores UTF-16, earlier versions ASCII
        if (majorVersion >= 17)
            fileDescriptor.Name = data.ReadString(Encoding.Unicode);
        else
            fileDescriptor.Name = data.ReadString(Encoding.ASCII);
    }

    // Seek back to the correct offset
    data.Seek(currentPosition, SeekOrigin.Begin);

    return fileDescriptor;
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Helpers
|
||||
|
||||
/// <summary>
/// Get the major version of the cabinet
/// </summary>
/// <remarks>This should live in the wrapper but is needed during parsing</remarks>
private static int GetMajorVersion(CommonHeader commonHeader)
{
    uint version = commonHeader.Version;

    // The top byte of the raw version selects the encoding scheme
    switch (version >> 24)
    {
        case 1:
            // Packed encoding: the major version lives in bits 12-15
            version = (version >> 12) & 0x0F;
            break;

        case 2:
        case 4:
            // Decimal encoding: low 16 bits hold the version scaled by 100
            version &= 0xFFFF;
            if (version != 0)
                version /= 100;
            break;
    }

    return (int)version;
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
}
|
||||
@@ -1,943 +0,0 @@
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Text;
|
||||
using SabreTools.IO;
|
||||
using SabreTools.Models.LinearExecutable;
|
||||
using static SabreTools.Models.LinearExecutable.Constants;
|
||||
|
||||
namespace BinaryObjectScanner.Builders
|
||||
{
|
||||
public static class LinearExecutable
|
||||
{
|
||||
#region Byte Data
|
||||
|
||||
/// <summary>
/// Parse a byte array into a Linear Executable
/// </summary>
/// <param name="data">Byte array to parse</param>
/// <param name="offset">Offset into the byte array</param>
/// <returns>Filled executable on success, null on error</returns>
public static Executable ParseExecutable(byte[] data, int offset)
{
    // Nothing to parse without data
    if (data == null)
        return null;

    // Reject offsets outside the buffer
    if (offset < 0 || offset >= data.Length)
        return null;

    // Wrap the remaining bytes in a stream and delegate to the stream parser
    var dataStream = new MemoryStream(data, offset, data.Length - offset);
    return ParseExecutable(dataStream);
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Stream Data
|
||||
|
||||
/// <summary>
/// Parse a Stream into a Linear Executable
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled executable on success, null on error</returns>
public static Executable ParseExecutable(Stream data)
{
    // If the data is invalid
    if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
        return null;

    // If the offset is out of bounds
    if (data.Position < 0 || data.Position >= data.Length)
        return null;

    // Cache the current offset so header seeks can be made relative to it
    int initialOffset = (int)data.Position;

    // Create a new executable to fill
    var executable = new Executable();

    #region MS-DOS Stub

    // Parse the MS-DOS stub; a valid LE/LX module must start with one
    // that points at the new-executable header
    var stub = MSDOS.ParseExecutable(data);
    if (stub?.Header == null || stub.Header.NewExeHeaderAddr == 0)
        return null;

    // Set the MS-DOS stub
    executable.Stub = stub;

    #endregion

    #region Information Block

    // Try to parse the executable header
    data.Seek(initialOffset + stub.Header.NewExeHeaderAddr, SeekOrigin.Begin);
    var informationBlock = ParseInformationBlock(data);
    if (informationBlock == null)
        return null;

    // Set the executable header
    executable.InformationBlock = informationBlock;

    #endregion

    #region Object Table

    // Get the object table offset
    // NOTE(review): unlike the header seek above, the table seeks below do not
    // add initialOffset, so a stream whose executable does not start at
    // position 0 would seek to the wrong place — confirm intended usage
    long offset = informationBlock.ObjectTableOffset + stub.Header.NewExeHeaderAddr;
    if (offset > stub.Header.NewExeHeaderAddr && offset < data.Length)
    {
        // Seek to the object table
        data.Seek(offset, SeekOrigin.Begin);

        // Create the object table
        executable.ObjectTable = new ObjectTableEntry[informationBlock.ObjectTableCount];

        // Try to parse the object table
        for (int i = 0; i < executable.ObjectTable.Length; i++)
        {
            var entry = ParseObjectTableEntry(data);
            if (entry == null)
                return null;

            executable.ObjectTable[i] = entry;
        }
    }

    #endregion

    #region Object Page Map

    // Get the object page map offset
    offset = informationBlock.ObjectPageMapOffset + stub.Header.NewExeHeaderAddr;
    if (offset > stub.Header.NewExeHeaderAddr && offset < data.Length)
    {
        // Seek to the object page map
        data.Seek(offset, SeekOrigin.Begin);

        // Create the object page map
        // NOTE(review): sized by ObjectTableCount, but the LX page map normally
        // has one entry per page (ModuleNumberPages) — confirm against the spec
        executable.ObjectPageMap = new ObjectPageMapEntry[informationBlock.ObjectTableCount];

        // Try to parse the object page map
        for (int i = 0; i < executable.ObjectPageMap.Length; i++)
        {
            var entry = ParseObjectPageMapEntry(data);
            if (entry == null)
                return null;

            executable.ObjectPageMap[i] = entry;
        }
    }

    #endregion

    #region Object Iterate Data Map

    offset = informationBlock.ObjectIterateDataMapOffset + stub.Header.NewExeHeaderAddr;
    if (offset > stub.Header.NewExeHeaderAddr && offset < data.Length)
    {
        // Seek to the object page map
        data.Seek(offset, SeekOrigin.Begin);

        // TODO: Implement when model found
        // No model has been found in the documentation about what
        // each of the entries looks like for this map.
    }

    #endregion

    #region Resource Table

    // Get the resource table offset
    offset = informationBlock.ResourceTableOffset + stub.Header.NewExeHeaderAddr;
    if (offset > stub.Header.NewExeHeaderAddr && offset < data.Length)
    {
        // Seek to the resource table
        data.Seek(offset, SeekOrigin.Begin);

        // Create the resource table
        executable.ResourceTable = new ResourceTableEntry[informationBlock.ResourceTableCount];

        // Try to parse the resource table
        for (int i = 0; i < executable.ResourceTable.Length; i++)
        {
            var entry = ParseResourceTableEntry(data);
            if (entry == null)
                return null;

            executable.ResourceTable[i] = entry;
        }
    }

    #endregion

    #region Resident Names Table

    // Get the resident names table offset
    offset = informationBlock.ResidentNamesTableOffset + stub.Header.NewExeHeaderAddr;
    if (offset > stub.Header.NewExeHeaderAddr && offset < data.Length)
    {
        // Seek to the resident names table
        data.Seek(offset, SeekOrigin.Begin);

        // Create the resident names table
        var residentNamesTable = new List<ResidentNamesTableEntry>();

        // Try to parse the resident names table; a zero-length entry terminates it
        while (true)
        {
            var entry = ParseResidentNamesTableEntry(data);
            residentNamesTable.Add(entry);

            // If we have a 0-length entry
            if (entry.Length == 0)
                break;
        }

        // Assign the resident names table
        executable.ResidentNamesTable = residentNamesTable.ToArray();
    }

    #endregion

    #region Entry Table

    // Get the entry table offset
    offset = informationBlock.EntryTableOffset + stub.Header.NewExeHeaderAddr;
    if (offset > stub.Header.NewExeHeaderAddr && offset < data.Length)
    {
        // Seek to the entry table
        data.Seek(offset, SeekOrigin.Begin);

        // Create the entry table
        var entryTable = new List<EntryTableBundle>();

        // Try to parse the entry table; a zero-entry bundle terminates it
        // NOTE(review): ParseEntryTableBundle can return null for unknown
        // bundle types, which would NRE on bundle.Entries below — confirm
        while (true)
        {
            var bundle = ParseEntryTableBundle(data);
            entryTable.Add(bundle);

            // If we have a 0-length entry
            if (bundle.Entries == 0)
                break;
        }

        // Assign the entry table
        executable.EntryTable = entryTable.ToArray();
    }

    #endregion

    #region Module Format Directives Table

    // Get the module format directives table offset
    offset = informationBlock.ModuleDirectivesTableOffset + stub.Header.NewExeHeaderAddr;
    if (offset > stub.Header.NewExeHeaderAddr && offset < data.Length)
    {
        // Seek to the module format directives table
        data.Seek(offset, SeekOrigin.Begin);

        // Create the module format directives table
        executable.ModuleFormatDirectivesTable = new ModuleFormatDirectivesTableEntry[informationBlock.ModuleDirectivesCount];

        // Try to parse the module format directives table
        for (int i = 0; i < executable.ModuleFormatDirectivesTable.Length; i++)
        {
            var entry = ParseModuleFormatDirectivesTableEntry(data);
            if (entry == null)
                return null;

            executable.ModuleFormatDirectivesTable[i] = entry;
        }
    }

    #endregion

    #region Verify Record Directive Table

    // TODO: Figure out where the offset to this table is stored
    // The documentation suggests it's either part of or immediately following
    // the Module Format Directives Table

    #endregion

    #region Fix-up Page Table

    // Get the fix-up page table offset
    offset = informationBlock.FixupPageTableOffset + stub.Header.NewExeHeaderAddr;
    if (offset > stub.Header.NewExeHeaderAddr && offset < data.Length)
    {
        // Seek to the fix-up page table
        data.Seek(offset, SeekOrigin.Begin);

        // Create the fix-up page table (one entry per page plus a sentinel)
        // NOTE(review): ObjectPageMap is null when its region was skipped
        // above, which would NRE here — confirm this cannot happen in practice
        executable.FixupPageTable = new FixupPageTableEntry[executable.ObjectPageMap.Length + 1];

        // Try to parse the fix-up page table
        for (int i = 0; i < executable.FixupPageTable.Length; i++)
        {
            var entry = ParseFixupPageTableEntry(data);
            if (entry == null)
                return null;

            executable.FixupPageTable[i] = entry;
        }
    }

    #endregion

    #region Fix-up Record Table

    // Get the fix-up record table offset
    offset = informationBlock.FixupRecordTableOffset + stub.Header.NewExeHeaderAddr;
    if (offset > stub.Header.NewExeHeaderAddr && offset < data.Length)
    {
        // Seek to the fix-up record table
        data.Seek(offset, SeekOrigin.Begin);

        // Create the fix-up record table
        // NOTE(review): same ObjectPageMap null-dereference risk as above
        executable.FixupRecordTable = new FixupRecordTableEntry[executable.ObjectPageMap.Length + 1];

        // Try to parse the fix-up record table
        for (int i = 0; i < executable.FixupRecordTable.Length; i++)
        {
            var entry = ParseFixupRecordTableEntry(data);
            if (entry == null)
                return null;

            executable.FixupRecordTable[i] = entry;
        }
    }

    #endregion

    #region Imported Module Name Table

    // Get the imported module name table offset
    offset = informationBlock.ImportedModulesNameTableOffset + stub.Header.NewExeHeaderAddr;
    if (offset > stub.Header.NewExeHeaderAddr && offset < data.Length)
    {
        // Seek to the imported module name table
        data.Seek(offset, SeekOrigin.Begin);

        // Create the imported module name table
        executable.ImportModuleNameTable = new ImportModuleNameTableEntry[informationBlock.ImportedModulesCount];

        // Try to parse the imported module name table
        for (int i = 0; i < executable.ImportModuleNameTable.Length; i++)
        {
            var entry = ParseImportModuleNameTableEntry(data);
            if (entry == null)
                return null;

            executable.ImportModuleNameTable[i] = entry;
        }
    }

    #endregion

    #region Imported Module Procedure Name Table

    // Get the imported module procedure name table offset
    offset = informationBlock.ImportProcedureNameTableOffset + stub.Header.NewExeHeaderAddr;
    if (offset > stub.Header.NewExeHeaderAddr && offset < data.Length)
    {
        // Seek to the imported module procedure name table
        data.Seek(offset, SeekOrigin.Begin);

        // Get the size of the imported module procedure name table: it runs
        // to the end of the fixup section
        long tableSize = informationBlock.FixupPageTableOffset
            + informationBlock.FixupSectionSize
            - informationBlock.ImportProcedureNameTableOffset;

        // Create the imported module procedure name table
        var importModuleProcedureNameTable = new List<ImportModuleProcedureNameTableEntry>();

        // Try to parse the imported module procedure name table
        while (data.Position < offset + tableSize)
        {
            var entry = ParseImportModuleProcedureNameTableEntry(data);
            if (entry == null)
                return null;

            importModuleProcedureNameTable.Add(entry);
        }

        // Assign the resident names table
        executable.ImportModuleProcedureNameTable = importModuleProcedureNameTable.ToArray();
    }

    #endregion

    #region Per-Page Checksum Table

    // Get the per-page checksum table offset
    offset = informationBlock.PerPageChecksumTableOffset + stub.Header.NewExeHeaderAddr;
    if (offset > stub.Header.NewExeHeaderAddr && offset < data.Length)
    {
        // Seek to the per-page checksum name table
        data.Seek(offset, SeekOrigin.Begin);

        // Create the per-page checksum name table
        executable.PerPageChecksumTable = new PerPageChecksumTableEntry[informationBlock.ModuleNumberPages];

        // Try to parse the per-page checksum name table
        for (int i = 0; i < executable.PerPageChecksumTable.Length; i++)
        {
            var entry = ParsePerPageChecksumTableEntry(data);
            if (entry == null)
                return null;

            executable.PerPageChecksumTable[i] = entry;
        }
    }

    #endregion

    #region Non-Resident Names Table

    // Get the non-resident names table offset
    // NOTE(review): the LX spec documents this offset as relative to the start
    // of the file, not the LX header — adding NewExeHeaderAddr here may be
    // incorrect; confirm against real modules
    offset = informationBlock.NonResidentNamesTableOffset + stub.Header.NewExeHeaderAddr;
    if (offset > stub.Header.NewExeHeaderAddr && offset < data.Length)
    {
        // Seek to the non-resident names table
        data.Seek(offset, SeekOrigin.Begin);

        // Create the non-resident names table
        var nonResidentNamesTable = new List<NonResidentNamesTableEntry>();

        // Try to parse the non-resident names table; a zero-length entry terminates it
        while (true)
        {
            var entry = ParseNonResidentNameTableEntry(data);
            nonResidentNamesTable.Add(entry);

            // If we have a 0-length entry
            if (entry.Length == 0)
                break;
        }

        // Assign the non-resident names table
        executable.NonResidentNamesTable = nonResidentNamesTable.ToArray();
    }

    #endregion

    #region Debug Information

    // Get the debug information offset
    // NOTE(review): like the non-resident names table, this offset may be
    // file-relative per the LX spec — confirm
    offset = informationBlock.DebugInformationOffset + stub.Header.NewExeHeaderAddr;
    if (offset > stub.Header.NewExeHeaderAddr && offset < data.Length)
    {
        // Seek to the debug information
        data.Seek(offset, SeekOrigin.Begin);

        // Try to parse the debug information
        var debugInformation = ParseDebugInformation(data, informationBlock.DebugInformationLength);
        if (debugInformation == null)
            return null;

        // Set the debug information
        executable.DebugInformation = debugInformation;
    }

    #endregion

    return executable;
}
|
||||
|
||||
/// <summary>
/// Parse a Stream into an information block
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled information block on success, null on error</returns>
private static InformationBlock ParseInformationBlock(Stream data)
{
    // TODO: Use marshalling here instead of building
    var block = new InformationBlock();

    // The signature must read "LE" or "LX" before anything else is parsed
    byte[] signatureBytes = data.ReadBytes(2);
    block.Signature = Encoding.ASCII.GetString(signatureBytes);
    if (block.Signature != LESignatureString && block.Signature != LXSignatureString)
        return null;

    // The remaining header fields are read in on-disk order
    block.ByteOrder = (ByteOrder)data.ReadByteValue();
    block.WordOrder = (WordOrder)data.ReadByteValue();
    block.ExecutableFormatLevel = data.ReadUInt32();
    block.CPUType = (CPUType)data.ReadUInt16();
    block.ModuleOS = (OperatingSystem)data.ReadUInt16();
    block.ModuleVersion = data.ReadUInt32();
    block.ModuleTypeFlags = (ModuleFlags)data.ReadUInt32();
    block.ModuleNumberPages = data.ReadUInt32();
    block.InitialObjectCS = data.ReadUInt32();
    block.InitialEIP = data.ReadUInt32();
    block.InitialObjectSS = data.ReadUInt32();
    block.InitialESP = data.ReadUInt32();
    block.MemoryPageSize = data.ReadUInt32();
    block.BytesOnLastPage = data.ReadUInt32();
    block.FixupSectionSize = data.ReadUInt32();
    block.FixupSectionChecksum = data.ReadUInt32();
    block.LoaderSectionSize = data.ReadUInt32();
    block.LoaderSectionChecksum = data.ReadUInt32();
    block.ObjectTableOffset = data.ReadUInt32();
    block.ObjectTableCount = data.ReadUInt32();
    block.ObjectPageMapOffset = data.ReadUInt32();
    block.ObjectIterateDataMapOffset = data.ReadUInt32();
    block.ResourceTableOffset = data.ReadUInt32();
    block.ResourceTableCount = data.ReadUInt32();
    block.ResidentNamesTableOffset = data.ReadUInt32();
    block.EntryTableOffset = data.ReadUInt32();
    block.ModuleDirectivesTableOffset = data.ReadUInt32();
    block.ModuleDirectivesCount = data.ReadUInt32();
    block.FixupPageTableOffset = data.ReadUInt32();
    block.FixupRecordTableOffset = data.ReadUInt32();
    block.ImportedModulesNameTableOffset = data.ReadUInt32();
    block.ImportedModulesCount = data.ReadUInt32();
    block.ImportProcedureNameTableOffset = data.ReadUInt32();
    block.PerPageChecksumTableOffset = data.ReadUInt32();
    block.DataPagesOffset = data.ReadUInt32();
    block.PreloadPageCount = data.ReadUInt32();
    block.NonResidentNamesTableOffset = data.ReadUInt32();
    block.NonResidentNamesTableLength = data.ReadUInt32();
    block.NonResidentNamesTableChecksum = data.ReadUInt32();
    block.AutomaticDataObject = data.ReadUInt32();
    block.DebugInformationOffset = data.ReadUInt32();
    block.DebugInformationLength = data.ReadUInt32();
    block.PreloadInstancePagesNumber = data.ReadUInt32();
    block.DemandInstancePagesNumber = data.ReadUInt32();
    block.ExtraHeapAllocation = data.ReadUInt32();

    return block;
}
|
||||
|
||||
/// <summary>
/// Parse a Stream into an object table entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled object table entry on success, null on error</returns>
private static ObjectTableEntry ParseObjectTableEntry(Stream data)
{
    // TODO: Use marshalling here instead of building
    // Initializer expressions evaluate in textual order, matching the on-disk layout
    return new ObjectTableEntry
    {
        VirtualSegmentSize = data.ReadUInt32(),
        RelocationBaseAddress = data.ReadUInt32(),
        ObjectFlags = (ObjectFlags)data.ReadUInt16(),
        PageTableIndex = data.ReadUInt32(),
        PageTableEntries = data.ReadUInt32(),
        Reserved = data.ReadUInt32(),
    };
}
|
||||
|
||||
/// <summary>
/// Parse a Stream into an object page map entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled object page map entry on success, null on error</returns>
private static ObjectPageMapEntry ParseObjectPageMapEntry(Stream data)
{
    // TODO: Use marshalling here instead of building
    // Initializer expressions evaluate in textual order, matching the on-disk layout
    return new ObjectPageMapEntry
    {
        PageDataOffset = data.ReadUInt32(),
        DataSize = data.ReadUInt16(),
        Flags = (ObjectPageFlags)data.ReadUInt16(),
    };
}
|
||||
|
||||
/// <summary>
/// Parse a Stream into a resource table entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled resource table entry on success, null on error</returns>
private static ResourceTableEntry ParseResourceTableEntry(Stream data)
{
    // TODO: Use marshalling here instead of building
    // Initializer expressions evaluate in textual order, matching the on-disk layout
    return new ResourceTableEntry
    {
        TypeID = (ResourceTableEntryType)data.ReadUInt32(),
        NameID = data.ReadUInt16(),
        ResourceSize = data.ReadUInt32(),
        ObjectNumber = data.ReadUInt16(),
        Offset = data.ReadUInt32(),
    };
}
|
||||
|
||||
/// <summary>
/// Parse a Stream into a resident names table entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled resident names table entry on success, null on error</returns>
private static ResidentNamesTableEntry ParseResidentNamesTableEntry(Stream data)
{
    // TODO: Use marshalling here instead of building
    var entry = new ResidentNamesTableEntry();

    // A zero length marks the table terminator; no name bytes follow in that case
    entry.Length = data.ReadByteValue();
    if (entry.Length > 0)
    {
        byte[] nameBytes = data.ReadBytes(entry.Length);
        entry.Name = Encoding.ASCII.GetString(nameBytes).TrimEnd('\0');
    }

    // The ordinal trails every entry, including the terminator
    entry.OrdinalNumber = data.ReadUInt16();

    return entry;
}
|
||||
|
||||
/// <summary>
/// Parse a Stream into an entry table bundle
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled entry table bundle on success, null on error</returns>
private static EntryTableBundle ParseEntryTableBundle(Stream data)
{
    // TODO: Use marshalling here instead of building
    var bundle = new EntryTableBundle();

    // A zero entry count terminates the table; nothing else follows it
    bundle.Entries = data.ReadByteValue();
    if (bundle.Entries == 0)
        return bundle;

    bundle.BundleType = (BundleType)data.ReadByteValue();
    bundle.TableEntries = new EntryTableEntry[bundle.Entries];
    for (int index = 0; index < bundle.Entries; index++)
    {
        var entry = new EntryTableEntry();

        // Mask off the parameter-typing bit before dispatching on the bundle type
        switch (bundle.BundleType & ~BundleType.ParameterTypingInformationPresent)
        {
            case BundleType.UnusedEntry:
                // Empty entry with no information
                break;

            case BundleType.SixteenBitEntry:
                entry.SixteenBitObjectNumber = data.ReadUInt16();
                entry.SixteenBitEntryFlags = (EntryFlags)data.ReadByteValue();
                entry.SixteenBitOffset = data.ReadUInt16();
                break;

            case BundleType.TwoEightySixCallGateEntry:
                entry.TwoEightySixObjectNumber = data.ReadUInt16();
                entry.TwoEightySixEntryFlags = (EntryFlags)data.ReadByteValue();
                entry.TwoEightySixOffset = data.ReadUInt16();
                entry.TwoEightySixCallgate = data.ReadUInt16();
                break;

            case BundleType.ThirtyTwoBitEntry:
                entry.ThirtyTwoBitObjectNumber = data.ReadUInt16();
                entry.ThirtyTwoBitEntryFlags = (EntryFlags)data.ReadByteValue();
                entry.ThirtyTwoBitOffset = data.ReadUInt32();
                break;

            case BundleType.ForwarderEntry:
                entry.ForwarderReserved = data.ReadUInt16();
                entry.ForwarderFlags = (ForwarderFlags)data.ReadByteValue();
                entry.ForwarderModuleOrdinalNumber = data.ReadUInt16();
                entry.ProcedureNameOffset = data.ReadUInt32();
                entry.ImportOrdinalNumber = data.ReadUInt32();
                break;

            default:
                // Unrecognized bundle type; the entry cannot be parsed
                return null;
        }

        bundle.TableEntries[index] = entry;
    }

    return bundle;
}
|
||||
|
||||
/// <summary>
/// Parse a Stream into a module format directives table entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled module format directives table entry on success, null on error</returns>
private static ModuleFormatDirectivesTableEntry ParseModuleFormatDirectivesTableEntry(Stream data)
{
    // TODO: Use marshalling here instead of building
    // Initializer expressions evaluate in textual order, matching the on-disk layout
    return new ModuleFormatDirectivesTableEntry
    {
        DirectiveNumber = (DirectiveNumber)data.ReadUInt16(),
        DirectiveDataLength = data.ReadUInt16(),
        DirectiveDataOffset = data.ReadUInt32(),
    };
}
|
||||
|
||||
/// <summary>
/// Parse a Stream into a verify record directive table entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled verify record directive table entry on success, null on error</returns>
private static VerifyRecordDirectiveTableEntry ParseVerifyRecordDirectiveTableEntry(Stream data)
{
    // TODO: Use marshalling here instead of building
    // Initializer expressions evaluate in textual order, matching the on-disk layout
    return new VerifyRecordDirectiveTableEntry
    {
        EntryCount = data.ReadUInt16(),
        OrdinalIndex = data.ReadUInt16(),
        Version = data.ReadUInt16(),
        ObjectEntriesCount = data.ReadUInt16(),
        ObjectNumberInModule = data.ReadUInt16(),
        ObjectLoadBaseAddress = data.ReadUInt16(),
        ObjectVirtualAddressSize = data.ReadUInt16(),
    };
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a fix-up page table entry
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled fix-up page table entry on success, null on error</returns>
|
||||
private static FixupPageTableEntry ParseFixupPageTableEntry(Stream data)
|
||||
{
|
||||
// TODO: Use marshalling here instead of building
|
||||
var entry = new FixupPageTableEntry();
|
||||
|
||||
entry.Offset = data.ReadUInt32();
|
||||
|
||||
return entry;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a fix-up record table entry
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled fix-up record table entry on success, null on error</returns>
|
||||
private static FixupRecordTableEntry ParseFixupRecordTableEntry(Stream data)
|
||||
{
|
||||
// TODO: Use marshalling here instead of building
|
||||
var entry = new FixupRecordTableEntry();
|
||||
|
||||
entry.SourceType = (FixupRecordSourceType)data.ReadByteValue();
|
||||
entry.TargetFlags = (FixupRecordTargetFlags)data.ReadByteValue();
|
||||
|
||||
// Source list flag
|
||||
if (entry.SourceType.HasFlag(FixupRecordSourceType.SourceListFlag))
|
||||
entry.SourceOffsetListCount = data.ReadByteValue();
|
||||
else
|
||||
entry.SourceOffset = data.ReadUInt16();
|
||||
|
||||
// OBJECT / TRGOFF
|
||||
if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.InternalReference))
|
||||
{
|
||||
// 16-bit Object Number/Module Ordinal Flag
|
||||
if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.SixteenBitObjectNumberModuleOrdinalFlag))
|
||||
entry.TargetObjectNumberWORD = data.ReadUInt16();
|
||||
else
|
||||
entry.TargetObjectNumberByte = data.ReadByteValue();
|
||||
|
||||
// 16-bit Selector fixup
|
||||
if (!entry.SourceType.HasFlag(FixupRecordSourceType.SixteenBitSelectorFixup))
|
||||
{
|
||||
// 32-bit Target Offset Flag
|
||||
if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.ThirtyTwoBitTargetOffsetFlag))
|
||||
entry.TargetOffsetDWORD = data.ReadUInt32();
|
||||
else
|
||||
entry.TargetOffsetWORD = data.ReadUInt16();
|
||||
}
|
||||
}
|
||||
|
||||
// MOD ORD# / IMPORT ORD / ADDITIVE
|
||||
else if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.ImportedReferenceByOrdinal))
|
||||
{
|
||||
// 16-bit Object Number/Module Ordinal Flag
|
||||
if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.SixteenBitObjectNumberModuleOrdinalFlag))
|
||||
entry.OrdinalIndexImportModuleNameTableWORD = data.ReadUInt16();
|
||||
else
|
||||
entry.OrdinalIndexImportModuleNameTableByte = data.ReadByteValue();
|
||||
|
||||
// 8-bit Ordinal Flag & 32-bit Target Offset Flag
|
||||
if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.EightBitOrdinalFlag))
|
||||
entry.ImportedOrdinalNumberByte = data.ReadByteValue();
|
||||
else if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.ThirtyTwoBitTargetOffsetFlag))
|
||||
entry.ImportedOrdinalNumberDWORD = data.ReadUInt32();
|
||||
else
|
||||
entry.ImportedOrdinalNumberWORD = data.ReadUInt16();
|
||||
|
||||
// Additive Fixup Flag
|
||||
if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.AdditiveFixupFlag))
|
||||
{
|
||||
// 32-bit Additive Flag
|
||||
if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.ThirtyTwoBitAdditiveFixupFlag))
|
||||
entry.AdditiveFixupValueDWORD = data.ReadUInt32();
|
||||
else
|
||||
entry.AdditiveFixupValueWORD = data.ReadUInt16();
|
||||
}
|
||||
}
|
||||
|
||||
// MOD ORD# / PROCEDURE NAME OFFSET / ADDITIVE
|
||||
else if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.ImportedReferenceByName))
|
||||
{
|
||||
// 16-bit Object Number/Module Ordinal Flag
|
||||
if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.SixteenBitObjectNumberModuleOrdinalFlag))
|
||||
entry.OrdinalIndexImportModuleNameTableWORD = data.ReadUInt16();
|
||||
else
|
||||
entry.OrdinalIndexImportModuleNameTableByte = data.ReadByteValue();
|
||||
|
||||
// 32-bit Target Offset Flag
|
||||
if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.ThirtyTwoBitTargetOffsetFlag))
|
||||
entry.OffsetImportProcedureNameTableDWORD = data.ReadUInt32();
|
||||
else
|
||||
entry.OffsetImportProcedureNameTableWORD = data.ReadUInt16();
|
||||
|
||||
// Additive Fixup Flag
|
||||
if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.AdditiveFixupFlag))
|
||||
{
|
||||
// 32-bit Additive Flag
|
||||
if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.ThirtyTwoBitAdditiveFixupFlag))
|
||||
entry.AdditiveFixupValueDWORD = data.ReadUInt32();
|
||||
else
|
||||
entry.AdditiveFixupValueWORD = data.ReadUInt16();
|
||||
}
|
||||
}
|
||||
|
||||
// ORD # / ADDITIVE
|
||||
else if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.InternalReferenceViaEntryTable))
|
||||
{
|
||||
// 16-bit Object Number/Module Ordinal Flag
|
||||
if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.SixteenBitObjectNumberModuleOrdinalFlag))
|
||||
entry.OrdinalIndexImportModuleNameTableWORD = data.ReadUInt16();
|
||||
else
|
||||
entry.OrdinalIndexImportModuleNameTableByte = data.ReadByteValue();
|
||||
|
||||
// Additive Fixup Flag
|
||||
if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.AdditiveFixupFlag))
|
||||
{
|
||||
// 32-bit Additive Flag
|
||||
if (entry.TargetFlags.HasFlag(FixupRecordTargetFlags.ThirtyTwoBitAdditiveFixupFlag))
|
||||
entry.AdditiveFixupValueDWORD = data.ReadUInt32();
|
||||
else
|
||||
entry.AdditiveFixupValueWORD = data.ReadUInt16();
|
||||
}
|
||||
}
|
||||
|
||||
// No other top-level flags recognized
|
||||
else
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
#region SCROFFn
|
||||
|
||||
if (entry.SourceType.HasFlag(FixupRecordSourceType.SourceListFlag))
|
||||
{
|
||||
entry.SourceOffsetList = new ushort[entry.SourceOffsetListCount];
|
||||
for (int i = 0; i < entry.SourceOffsetList.Length; i++)
|
||||
{
|
||||
entry.SourceOffsetList[i] = data.ReadUInt16();
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
return entry;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a import module name table entry
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled import module name table entry on success, null on error</returns>
|
||||
private static ImportModuleNameTableEntry ParseImportModuleNameTableEntry(Stream data)
|
||||
{
|
||||
// TODO: Use marshalling here instead of building
|
||||
var entry = new ImportModuleNameTableEntry();
|
||||
|
||||
entry.Length = data.ReadByteValue();
|
||||
if (entry.Length > 0)
|
||||
{
|
||||
byte[] name = data.ReadBytes(entry.Length);
|
||||
entry.Name = Encoding.ASCII.GetString(name).TrimEnd('\0');
|
||||
}
|
||||
|
||||
return entry;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a import module name table entry
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled import module name table entry on success, null on error</returns>
|
||||
private static ImportModuleProcedureNameTableEntry ParseImportModuleProcedureNameTableEntry(Stream data)
|
||||
{
|
||||
// TODO: Use marshalling here instead of building
|
||||
var entry = new ImportModuleProcedureNameTableEntry();
|
||||
|
||||
entry.Length = data.ReadByteValue();
|
||||
if (entry.Length > 0)
|
||||
{
|
||||
byte[] name = data.ReadBytes(entry.Length);
|
||||
entry.Name = Encoding.ASCII.GetString(name).TrimEnd('\0');
|
||||
}
|
||||
|
||||
return entry;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a per-page checksum table entry
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled per-page checksum table entry on success, null on error</returns>
|
||||
private static PerPageChecksumTableEntry ParsePerPageChecksumTableEntry(Stream data)
|
||||
{
|
||||
// TODO: Use marshalling here instead of building
|
||||
var entry = new PerPageChecksumTableEntry();
|
||||
|
||||
entry.Checksum = data.ReadUInt32();
|
||||
|
||||
return entry;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a non-resident names table entry
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled non-resident names table entry on success, null on error</returns>
|
||||
private static NonResidentNamesTableEntry ParseNonResidentNameTableEntry(Stream data)
|
||||
{
|
||||
// TODO: Use marshalling here instead of building
|
||||
var entry = new NonResidentNamesTableEntry();
|
||||
|
||||
entry.Length = data.ReadByteValue();
|
||||
if (entry.Length > 0)
|
||||
{
|
||||
byte[] name = data.ReadBytes(entry.Length);
|
||||
entry.Name = Encoding.ASCII.GetString(name).TrimEnd('\0');
|
||||
}
|
||||
entry.OrdinalNumber = data.ReadUInt16();
|
||||
|
||||
return entry;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a debug information
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <param name="size">Total size of the debug information</param>
|
||||
/// <returns>Filled debug information on success, null on error</returns>
|
||||
private static DebugInformation ParseDebugInformation(Stream data, long size)
|
||||
{
|
||||
// TODO: Use marshalling here instead of building
|
||||
var debugInformation = new DebugInformation();
|
||||
|
||||
byte[] signature = data.ReadBytes(3);
|
||||
debugInformation.Signature = Encoding.ASCII.GetString(signature);
|
||||
if (debugInformation.Signature != DebugInformationSignatureString)
|
||||
return null;
|
||||
|
||||
debugInformation.FormatType = (DebugFormatType)data.ReadByteValue();
|
||||
debugInformation.DebuggerData = data.ReadBytes((int)(size - 4));
|
||||
|
||||
return debugInformation;
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
}
|
||||
@@ -1,175 +0,0 @@
|
||||
using System.IO;
|
||||
using System.Text;
|
||||
using SabreTools.IO;
|
||||
using SabreTools.Models.MSDOS;
|
||||
using static SabreTools.Models.MSDOS.Constants;
|
||||
|
||||
namespace BinaryObjectScanner.Builders
|
||||
{
|
||||
public static class MSDOS
|
||||
{
|
||||
#region Byte Data
|
||||
|
||||
/// <summary>
|
||||
/// Parse a byte array into an MS-DOS executable
|
||||
/// </summary>
|
||||
/// <param name="data">Byte array to parse</param>
|
||||
/// <param name="offset">Offset into the byte array</param>
|
||||
/// <returns>Filled executable on success, null on error</returns>
|
||||
public static Executable ParseExecutable(byte[] data, int offset)
|
||||
{
|
||||
// If the data is invalid
|
||||
if (data == null)
|
||||
return null;
|
||||
|
||||
// If the offset is out of bounds
|
||||
if (offset < 0 || offset >= data.Length)
|
||||
return null;
|
||||
|
||||
// Create a memory stream and parse that
|
||||
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
|
||||
return ParseExecutable(dataStream);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Stream Data
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into an MS-DOS executable
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled executable on success, null on error</returns>
|
||||
public static Executable ParseExecutable(Stream data)
|
||||
{
|
||||
// If the data is invalid
|
||||
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
|
||||
return null;
|
||||
|
||||
// If the offset is out of bounds
|
||||
if (data.Position < 0 || data.Position >= data.Length)
|
||||
return null;
|
||||
|
||||
// Cache the current offset
|
||||
int initialOffset = (int)data.Position;
|
||||
|
||||
// Create a new executable to fill
|
||||
var executable = new Executable();
|
||||
|
||||
#region Executable Header
|
||||
|
||||
// Try to parse the executable header
|
||||
var executableHeader = ParseExecutableHeader(data);
|
||||
if (executableHeader == null)
|
||||
return null;
|
||||
|
||||
// Set the executable header
|
||||
executable.Header = executableHeader;
|
||||
|
||||
#endregion
|
||||
|
||||
#region Relocation Table
|
||||
|
||||
// If the offset for the relocation table doesn't exist
|
||||
int tableAddress = initialOffset + executableHeader.RelocationTableAddr;
|
||||
if (tableAddress >= data.Length)
|
||||
return executable;
|
||||
|
||||
// Try to parse the relocation table
|
||||
data.Seek(tableAddress, SeekOrigin.Begin);
|
||||
var relocationTable = ParseRelocationTable(data, executableHeader.RelocationItems);
|
||||
if (relocationTable == null)
|
||||
return null;
|
||||
|
||||
// Set the relocation table
|
||||
executable.RelocationTable = relocationTable;
|
||||
|
||||
#endregion
|
||||
|
||||
// Return the executable
|
||||
return executable;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into an MS-DOS executable header
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled executable header on success, null on error</returns>
|
||||
private static ExecutableHeader ParseExecutableHeader(Stream data)
|
||||
{
|
||||
// TODO: Use marshalling here instead of building
|
||||
var header = new ExecutableHeader();
|
||||
|
||||
#region Standard Fields
|
||||
|
||||
byte[] magic = data.ReadBytes(2);
|
||||
header.Magic = Encoding.ASCII.GetString(magic);
|
||||
if (header.Magic != SignatureString)
|
||||
return null;
|
||||
|
||||
header.LastPageBytes = data.ReadUInt16();
|
||||
header.Pages = data.ReadUInt16();
|
||||
header.RelocationItems = data.ReadUInt16();
|
||||
header.HeaderParagraphSize = data.ReadUInt16();
|
||||
header.MinimumExtraParagraphs = data.ReadUInt16();
|
||||
header.MaximumExtraParagraphs = data.ReadUInt16();
|
||||
header.InitialSSValue = data.ReadUInt16();
|
||||
header.InitialSPValue = data.ReadUInt16();
|
||||
header.Checksum = data.ReadUInt16();
|
||||
header.InitialIPValue = data.ReadUInt16();
|
||||
header.InitialCSValue = data.ReadUInt16();
|
||||
header.RelocationTableAddr = data.ReadUInt16();
|
||||
header.OverlayNumber = data.ReadUInt16();
|
||||
|
||||
#endregion
|
||||
|
||||
// If we don't have enough data for PE extensions
|
||||
if (data.Position >= data.Length || data.Length - data.Position < 36)
|
||||
return header;
|
||||
|
||||
#region PE Extensions
|
||||
|
||||
header.Reserved1 = new ushort[4];
|
||||
for (int i = 0; i < header.Reserved1.Length; i++)
|
||||
{
|
||||
header.Reserved1[i] = data.ReadUInt16();
|
||||
}
|
||||
header.OEMIdentifier = data.ReadUInt16();
|
||||
header.OEMInformation = data.ReadUInt16();
|
||||
header.Reserved2 = new ushort[10];
|
||||
for (int i = 0; i < header.Reserved2.Length; i++)
|
||||
{
|
||||
header.Reserved2[i] = data.ReadUInt16();
|
||||
}
|
||||
header.NewExeHeaderAddr = data.ReadUInt32();
|
||||
|
||||
#endregion
|
||||
|
||||
return header;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a relocation table
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <param name="count">Number of relocation table entries to read</param>
|
||||
/// <returns>Filled relocation table on success, null on error</returns>
|
||||
private static RelocationEntry[] ParseRelocationTable(Stream data, int count)
|
||||
{
|
||||
// TODO: Use marshalling here instead of building
|
||||
var relocationTable = new RelocationEntry[count];
|
||||
|
||||
for (int i = 0; i < count; i++)
|
||||
{
|
||||
var entry = new RelocationEntry();
|
||||
entry.Offset = data.ReadUInt16();
|
||||
entry.Segment = data.ReadUInt16();
|
||||
relocationTable[i] = entry;
|
||||
}
|
||||
|
||||
return relocationTable;
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
}
|
||||
@@ -1,258 +0,0 @@
|
||||
using System.IO;
|
||||
using System.Text;
|
||||
using SabreTools.IO;
|
||||
using SabreTools.Models.MicrosoftCabinet;
|
||||
using static SabreTools.Models.MicrosoftCabinet.Constants;
|
||||
|
||||
namespace BinaryObjectScanner.Builders
|
||||
{
|
||||
// TODO: Add multi-cabinet reading
|
||||
public class MicrosoftCabinet
|
||||
{
|
||||
#region Byte Data
|
||||
|
||||
/// <summary>
|
||||
/// Parse a byte array into a Microsoft Cabinet file
|
||||
/// </summary>
|
||||
/// <param name="data">Byte array to parse</param>
|
||||
/// <param name="offset">Offset into the byte array</param>
|
||||
/// <returns>Filled cabinet on success, null on error</returns>
|
||||
public static Cabinet ParseCabinet(byte[] data, int offset)
|
||||
{
|
||||
// If the data is invalid
|
||||
if (data == null)
|
||||
return null;
|
||||
|
||||
// If the offset is out of bounds
|
||||
if (offset < 0 || offset >= data.Length)
|
||||
return null;
|
||||
|
||||
// Create a memory stream and parse that
|
||||
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
|
||||
return ParseCabinet(dataStream);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Stream Data
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a Microsoft Cabinet file
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled cabinet on success, null on error</returns>
|
||||
public static Cabinet ParseCabinet(Stream data)
|
||||
{
|
||||
// If the data is invalid
|
||||
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
|
||||
return null;
|
||||
|
||||
// If the offset is out of bounds
|
||||
if (data.Position < 0 || data.Position >= data.Length)
|
||||
return null;
|
||||
|
||||
// Cache the current offset
|
||||
int initialOffset = (int)data.Position;
|
||||
|
||||
// Create a new cabinet to fill
|
||||
var cabinet = new Cabinet();
|
||||
|
||||
#region Cabinet Header
|
||||
|
||||
// Try to parse the cabinet header
|
||||
var cabinetHeader = ParseCabinetHeader(data);
|
||||
if (cabinetHeader == null)
|
||||
return null;
|
||||
|
||||
// Set the cabinet header
|
||||
cabinet.Header = cabinetHeader;
|
||||
|
||||
#endregion
|
||||
|
||||
#region Folders
|
||||
|
||||
// Set the folder array
|
||||
cabinet.Folders = new CFFOLDER[cabinetHeader.FolderCount];
|
||||
|
||||
// Try to parse each folder, if we have any
|
||||
for (int i = 0; i < cabinetHeader.FolderCount; i++)
|
||||
{
|
||||
var folder = ParseFolder(data, cabinetHeader);
|
||||
if (folder == null)
|
||||
return null;
|
||||
|
||||
// Set the folder
|
||||
cabinet.Folders[i] = folder;
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Files
|
||||
|
||||
// Get the files offset
|
||||
int filesOffset = (int)cabinetHeader.FilesOffset + initialOffset;
|
||||
if (filesOffset > data.Length)
|
||||
return null;
|
||||
|
||||
// Seek to the offset
|
||||
data.Seek(filesOffset, SeekOrigin.Begin);
|
||||
|
||||
// Set the file array
|
||||
cabinet.Files = new CFFILE[cabinetHeader.FileCount];
|
||||
|
||||
// Try to parse each file, if we have any
|
||||
for (int i = 0; i < cabinetHeader.FileCount; i++)
|
||||
{
|
||||
var file = ParseFile(data);
|
||||
if (file == null)
|
||||
return null;
|
||||
|
||||
// Set the file
|
||||
cabinet.Files[i] = file;
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
return cabinet;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a cabinet header
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled cabinet header on success, null on error</returns>
|
||||
private static CFHEADER ParseCabinetHeader(Stream data)
|
||||
{
|
||||
CFHEADER header = new CFHEADER();
|
||||
|
||||
byte[] signature = data.ReadBytes(4);
|
||||
header.Signature = Encoding.ASCII.GetString(signature);
|
||||
if (header.Signature != SignatureString)
|
||||
return null;
|
||||
|
||||
header.Reserved1 = data.ReadUInt32();
|
||||
header.CabinetSize = data.ReadUInt32();
|
||||
header.Reserved2 = data.ReadUInt32();
|
||||
header.FilesOffset = data.ReadUInt32();
|
||||
header.Reserved3 = data.ReadUInt32();
|
||||
header.VersionMinor = data.ReadByteValue();
|
||||
header.VersionMajor = data.ReadByteValue();
|
||||
header.FolderCount = data.ReadUInt16();
|
||||
header.FileCount = data.ReadUInt16();
|
||||
header.Flags = (HeaderFlags)data.ReadUInt16();
|
||||
header.SetID = data.ReadUInt16();
|
||||
header.CabinetIndex = data.ReadUInt16();
|
||||
|
||||
if (header.Flags.HasFlag(HeaderFlags.RESERVE_PRESENT))
|
||||
{
|
||||
header.HeaderReservedSize = data.ReadUInt16();
|
||||
if (header.HeaderReservedSize > 60_000)
|
||||
return null;
|
||||
|
||||
header.FolderReservedSize = data.ReadByteValue();
|
||||
header.DataReservedSize = data.ReadByteValue();
|
||||
|
||||
if (header.HeaderReservedSize > 0)
|
||||
header.ReservedData = data.ReadBytes(header.HeaderReservedSize);
|
||||
}
|
||||
|
||||
if (header.Flags.HasFlag(HeaderFlags.PREV_CABINET))
|
||||
{
|
||||
header.CabinetPrev = data.ReadString(Encoding.ASCII);
|
||||
header.DiskPrev = data.ReadString(Encoding.ASCII);
|
||||
}
|
||||
|
||||
if (header.Flags.HasFlag(HeaderFlags.NEXT_CABINET))
|
||||
{
|
||||
header.CabinetNext = data.ReadString(Encoding.ASCII);
|
||||
header.DiskNext = data.ReadString(Encoding.ASCII);
|
||||
}
|
||||
|
||||
return header;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a folder
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <param name="header">Cabinet header to get flags and sizes from</param>
|
||||
/// <returns>Filled folder on success, null on error</returns>
|
||||
private static CFFOLDER ParseFolder(Stream data, CFHEADER header)
|
||||
{
|
||||
CFFOLDER folder = new CFFOLDER();
|
||||
|
||||
folder.CabStartOffset = data.ReadUInt32();
|
||||
folder.DataCount = data.ReadUInt16();
|
||||
folder.CompressionType = (CompressionType)data.ReadUInt16();
|
||||
|
||||
if (header.FolderReservedSize > 0)
|
||||
folder.ReservedData = data.ReadBytes(header.FolderReservedSize);
|
||||
|
||||
if (folder.CabStartOffset > 0)
|
||||
{
|
||||
long currentPosition = data.Position;
|
||||
data.Seek(folder.CabStartOffset, SeekOrigin.Begin);
|
||||
|
||||
folder.DataBlocks = new CFDATA[folder.DataCount];
|
||||
for (int i = 0; i < folder.DataCount; i++)
|
||||
{
|
||||
CFDATA dataBlock = ParseDataBlock(data, header.DataReservedSize);
|
||||
folder.DataBlocks[i] = dataBlock;
|
||||
}
|
||||
|
||||
data.Seek(currentPosition, SeekOrigin.Begin);
|
||||
}
|
||||
|
||||
return folder;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a data block
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <param name="dataReservedSize">Reserved byte size for data blocks</param>
|
||||
/// <returns>Filled folder on success, null on error</returns>
|
||||
private static CFDATA ParseDataBlock(Stream data, byte dataReservedSize)
|
||||
{
|
||||
CFDATA dataBlock = new CFDATA();
|
||||
|
||||
dataBlock.Checksum = data.ReadUInt32();
|
||||
dataBlock.CompressedSize = data.ReadUInt16();
|
||||
dataBlock.UncompressedSize = data.ReadUInt16();
|
||||
|
||||
if (dataReservedSize > 0)
|
||||
dataBlock.ReservedData = data.ReadBytes(dataReservedSize);
|
||||
|
||||
if (dataBlock.CompressedSize > 0)
|
||||
dataBlock.CompressedData = data.ReadBytes(dataBlock.CompressedSize);
|
||||
|
||||
return dataBlock;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a file
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled file on success, null on error</returns>
|
||||
private static CFFILE ParseFile(Stream data)
|
||||
{
|
||||
CFFILE file = new CFFILE();
|
||||
|
||||
file.FileSize = data.ReadUInt32();
|
||||
file.FolderStartOffset = data.ReadUInt32();
|
||||
file.FolderIndex = (FolderIndex)data.ReadUInt16();
|
||||
file.Date = data.ReadUInt16();
|
||||
file.Time = data.ReadUInt16();
|
||||
file.Attributes = (SabreTools.Models.MicrosoftCabinet.FileAttributes)data.ReadUInt16();
|
||||
|
||||
if (file.Attributes.HasFlag(SabreTools.Models.MicrosoftCabinet.FileAttributes.NAME_IS_UTF))
|
||||
file.Name = data.ReadString(Encoding.Unicode);
|
||||
else
|
||||
file.Name = data.ReadString(Encoding.ASCII);
|
||||
|
||||
return file;
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
}
|
||||
@@ -1,651 +0,0 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Text;
|
||||
using SabreTools.IO;
|
||||
using SabreTools.Models.MoPaQ;
|
||||
using static SabreTools.Models.MoPaQ.Constants;
|
||||
|
||||
namespace BinaryObjectScanner.Builders
|
||||
{
|
||||
public class MoPaQ
|
||||
{
|
||||
#region Byte Data
|
||||
|
||||
/// <summary>
|
||||
/// Parse a byte array into a MoPaQ archive
|
||||
/// </summary>
|
||||
/// <param name="data">Byte array to parse</param>
|
||||
/// <param name="offset">Offset into the byte array</param>
|
||||
/// <returns>Filled archive on success, null on error</returns>
|
||||
public static Archive ParseArchive(byte[] data, int offset)
|
||||
{
|
||||
// If the data is invalid
|
||||
if (data == null)
|
||||
return null;
|
||||
|
||||
// If the offset is out of bounds
|
||||
if (offset < 0 || offset >= data.Length)
|
||||
return null;
|
||||
|
||||
// Create a memory stream and parse that
|
||||
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
|
||||
return ParseArchive(dataStream);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Stream Data
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a MoPaQ archive
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled archive on success, null on error</returns>
|
||||
public static Archive ParseArchive(Stream data)
|
||||
{
|
||||
// If the data is invalid
|
||||
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
|
||||
return null;
|
||||
|
||||
// If the offset is out of bounds
|
||||
if (data.Position < 0 || data.Position >= data.Length)
|
||||
return null;
|
||||
|
||||
// Cache the current offset
|
||||
int initialOffset = (int)data.Position;
|
||||
|
||||
// Create a new archive to fill
|
||||
var archive = new Archive();
|
||||
|
||||
#region User Data
|
||||
|
||||
// Check for User Data
|
||||
uint possibleSignature = data.ReadUInt32();
|
||||
data.Seek(-4, SeekOrigin.Current);
|
||||
if (possibleSignature == 0x1B51504D)
|
||||
{
|
||||
// Save the current position for offset correction
|
||||
long basePtr = data.Position;
|
||||
|
||||
// Deserialize the user data, returning null if invalid
|
||||
var userData = ParseUserData(data);
|
||||
if (userData == null)
|
||||
return null;
|
||||
|
||||
// Set the user data
|
||||
archive.UserData = userData;
|
||||
|
||||
// Set the starting position according to the header offset
|
||||
data.Seek(basePtr + (int)archive.UserData.HeaderOffset, SeekOrigin.Begin);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Archive Header
|
||||
|
||||
// Check for the Header
|
||||
possibleSignature = data.ReadUInt32();
|
||||
data.Seek(-4, SeekOrigin.Current);
|
||||
if (possibleSignature == 0x1A51504D)
|
||||
{
|
||||
// Try to parse the archive header
|
||||
var archiveHeader = ParseArchiveHeader(data);
|
||||
if (archiveHeader == null)
|
||||
return null;
|
||||
|
||||
// Set the archive header
|
||||
archive.ArchiveHeader = archiveHeader;
|
||||
}
|
||||
else
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Hash Table
|
||||
|
||||
// TODO: The hash table has to be be decrypted before reading
|
||||
|
||||
// Version 1
|
||||
if (archive.ArchiveHeader.FormatVersion == FormatVersion.Format1)
|
||||
{
|
||||
// If we have a hash table
|
||||
long hashTableOffset = archive.ArchiveHeader.HashTablePosition;
|
||||
if (hashTableOffset != 0)
|
||||
{
|
||||
// Seek to the offset
|
||||
data.Seek(hashTableOffset, SeekOrigin.Begin);
|
||||
|
||||
// Find the ending offset based on size
|
||||
long hashTableEnd = hashTableOffset + archive.ArchiveHeader.HashTableSize;
|
||||
|
||||
// Read in the hash table
|
||||
var hashTable = new List<HashEntry>();
|
||||
|
||||
while (data.Position < hashTableEnd)
|
||||
{
|
||||
var hashEntry = ParseHashEntry(data);
|
||||
if (hashEntry == null)
|
||||
return null;
|
||||
|
||||
hashTable.Add(hashEntry);
|
||||
}
|
||||
|
||||
archive.HashTable = hashTable.ToArray();
|
||||
}
|
||||
}
|
||||
|
||||
// Version 2 and 3
|
||||
else if (archive.ArchiveHeader.FormatVersion == FormatVersion.Format2 || archive.ArchiveHeader.FormatVersion == FormatVersion.Format3)
|
||||
{
|
||||
// If we have a hash table
|
||||
long hashTableOffset = ((uint)archive.ArchiveHeader.HashTablePositionHi << 23) | archive.ArchiveHeader.HashTablePosition;
|
||||
if (hashTableOffset != 0)
|
||||
{
|
||||
// Seek to the offset
|
||||
data.Seek(hashTableOffset, SeekOrigin.Begin);
|
||||
|
||||
// Find the ending offset based on size
|
||||
long hashTableEnd = hashTableOffset + archive.ArchiveHeader.HashTableSize;
|
||||
|
||||
// Read in the hash table
|
||||
var hashTable = new List<HashEntry>();
|
||||
|
||||
while (data.Position < hashTableEnd)
|
||||
{
|
||||
var hashEntry = ParseHashEntry(data);
|
||||
if (hashEntry == null)
|
||||
return null;
|
||||
|
||||
hashTable.Add(hashEntry);
|
||||
}
|
||||
|
||||
archive.HashTable = hashTable.ToArray();
|
||||
}
|
||||
}
|
||||
|
||||
// Version 4
|
||||
else if (archive.ArchiveHeader.FormatVersion == FormatVersion.Format4)
|
||||
{
|
||||
// If we have a hash table
|
||||
long hashTableOffset = ((uint)archive.ArchiveHeader.HashTablePositionHi << 23) | archive.ArchiveHeader.HashTablePosition;
|
||||
if (hashTableOffset != 0)
|
||||
{
|
||||
// Seek to the offset
|
||||
data.Seek(hashTableOffset, SeekOrigin.Begin);
|
||||
|
||||
// Find the ending offset based on size
|
||||
long hashTableEnd = hashTableOffset + (long)archive.ArchiveHeader.HashTableSizeLong;
|
||||
|
||||
// Read in the hash table
|
||||
var hashTable = new List<HashEntry>();
|
||||
|
||||
while (data.Position < hashTableEnd)
|
||||
{
|
||||
var hashEntry = ParseHashEntry(data);
|
||||
if (hashEntry == null)
|
||||
return null;
|
||||
|
||||
hashTable.Add(hashEntry);
|
||||
}
|
||||
|
||||
archive.HashTable = hashTable.ToArray();
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Block Table
|
||||
|
||||
// Version 1
|
||||
if (archive.ArchiveHeader.FormatVersion == FormatVersion.Format1)
|
||||
{
|
||||
// If we have a block table
|
||||
long blockTableOffset = archive.ArchiveHeader.BlockTablePosition;
|
||||
if (blockTableOffset != 0)
|
||||
{
|
||||
// Seek to the offset
|
||||
data.Seek(blockTableOffset, SeekOrigin.Begin);
|
||||
|
||||
// Find the ending offset based on size
|
||||
long blockTableEnd = blockTableOffset + archive.ArchiveHeader.BlockTableSize;
|
||||
|
||||
// Read in the block table
|
||||
var blockTable = new List<BlockEntry>();
|
||||
|
||||
while (data.Position < blockTableEnd)
|
||||
{
|
||||
var blockEntry = ParseBlockEntry(data);
|
||||
if (blockEntry == null)
|
||||
return null;
|
||||
|
||||
blockTable.Add(blockEntry);
|
||||
}
|
||||
|
||||
archive.BlockTable = blockTable.ToArray();
|
||||
}
|
||||
}
|
||||
|
||||
// Version 2 and 3
|
||||
else if (archive.ArchiveHeader.FormatVersion == FormatVersion.Format2 || archive.ArchiveHeader.FormatVersion == FormatVersion.Format3)
|
||||
{
|
||||
// If we have a block table
|
||||
long blockTableOffset = ((uint)archive.ArchiveHeader.BlockTablePositionHi << 23) | archive.ArchiveHeader.BlockTablePosition;
|
||||
if (blockTableOffset != 0)
|
||||
{
|
||||
// Seek to the offset
|
||||
data.Seek(blockTableOffset, SeekOrigin.Begin);
|
||||
|
||||
// Find the ending offset based on size
|
||||
long blockTableEnd = blockTableOffset + archive.ArchiveHeader.BlockTableSize;
|
||||
|
||||
// Read in the block table
|
||||
var blockTable = new List<BlockEntry>();
|
||||
|
||||
while (data.Position < blockTableEnd)
|
||||
{
|
||||
var blockEntry = ParseBlockEntry(data);
|
||||
if (blockEntry == null)
|
||||
return null;
|
||||
|
||||
blockTable.Add(blockEntry);
|
||||
}
|
||||
|
||||
archive.BlockTable = blockTable.ToArray();
|
||||
}
|
||||
}
|
||||
|
||||
// Version 4
|
||||
else if (archive.ArchiveHeader.FormatVersion == FormatVersion.Format4)
|
||||
{
|
||||
// If we have a block table
|
||||
long blockTableOffset = ((uint)archive.ArchiveHeader.BlockTablePositionHi << 23) | archive.ArchiveHeader.BlockTablePosition;
|
||||
if (blockTableOffset != 0)
|
||||
{
|
||||
// Seek to the offset
|
||||
data.Seek(blockTableOffset, SeekOrigin.Begin);
|
||||
|
||||
// Find the ending offset based on size
|
||||
long blockTableEnd = blockTableOffset + (long)archive.ArchiveHeader.BlockTableSizeLong;
|
||||
|
||||
// Read in the block table
|
||||
var blockTable = new List<BlockEntry>();
|
||||
|
||||
while (data.Position < blockTableEnd)
|
||||
{
|
||||
var blockEntry = ParseBlockEntry(data);
|
||||
if (blockEntry == null)
|
||||
return null;
|
||||
|
||||
blockTable.Add(blockEntry);
|
||||
}
|
||||
|
||||
archive.BlockTable = blockTable.ToArray();
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Hi-Block Table
|
||||
|
||||
// Version 2, 3, and 4
|
||||
if (archive.ArchiveHeader.FormatVersion >= FormatVersion.Format2)
|
||||
{
|
||||
// If we have a hi-block table
|
||||
long hiBlockTableOffset = (long)archive.ArchiveHeader.HiBlockTablePosition;
|
||||
if (hiBlockTableOffset != 0)
|
||||
{
|
||||
// Seek to the offset
|
||||
data.Seek(hiBlockTableOffset, SeekOrigin.Begin);
|
||||
|
||||
// Read in the hi-block table
|
||||
var hiBlockTable = new List<short>();
|
||||
|
||||
for (int i = 0; i < archive.BlockTable.Length; i++)
|
||||
{
|
||||
short hiBlockEntry = data.ReadInt16();
|
||||
hiBlockTable.Add(hiBlockEntry);
|
||||
}
|
||||
|
||||
archive.HiBlockTable = hiBlockTable.ToArray();
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region BET Table
|
||||
|
||||
// Version 3 and 4
|
||||
if (archive.ArchiveHeader.FormatVersion >= FormatVersion.Format3)
|
||||
{
|
||||
// If we have a BET table
|
||||
long betTableOffset = (long)archive.ArchiveHeader.BetTablePosition;
|
||||
if (betTableOffset != 0)
|
||||
{
|
||||
// Seek to the offset
|
||||
data.Seek(betTableOffset, SeekOrigin.Begin);
|
||||
|
||||
// Read in the BET table
|
||||
var betTable = ParseBetTable(data);
|
||||
if (betTable != null)
|
||||
return null;
|
||||
|
||||
archive.BetTable = betTable;
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region HET Table
|
||||
|
||||
// Version 3 and 4
|
||||
if (archive.ArchiveHeader.FormatVersion >= FormatVersion.Format3)
|
||||
{
|
||||
// If we have a HET table
|
||||
long hetTableOffset = (long)archive.ArchiveHeader.HetTablePosition;
|
||||
if (hetTableOffset != 0)
|
||||
{
|
||||
// Seek to the offset
|
||||
data.Seek(hetTableOffset, SeekOrigin.Begin);
|
||||
|
||||
// Read in the HET table
|
||||
var hetTable = ParseHetTable(data);
|
||||
if (hetTable != null)
|
||||
return null;
|
||||
|
||||
archive.HetTable = hetTable;
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
return archive;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a archive header
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled archive header on success, null on error</returns>
|
||||
private static ArchiveHeader ParseArchiveHeader(Stream data)
|
||||
{
|
||||
ArchiveHeader archiveHeader = new ArchiveHeader();
|
||||
|
||||
// V1 - Common
|
||||
byte[] signature = data.ReadBytes(4);
|
||||
archiveHeader.Signature = Encoding.ASCII.GetString(signature);
|
||||
if (archiveHeader.Signature != ArchiveHeaderSignatureString)
|
||||
return null;
|
||||
|
||||
archiveHeader.HeaderSize = data.ReadUInt32();
|
||||
archiveHeader.ArchiveSize = data.ReadUInt32();
|
||||
archiveHeader.FormatVersion = (FormatVersion)data.ReadUInt16();
|
||||
archiveHeader.BlockSize = data.ReadUInt16();
|
||||
archiveHeader.HashTablePosition = data.ReadUInt32();
|
||||
archiveHeader.BlockTablePosition = data.ReadUInt32();
|
||||
archiveHeader.HashTableSize = data.ReadUInt32();
|
||||
archiveHeader.BlockTableSize = data.ReadUInt32();
|
||||
|
||||
// V2
|
||||
if (archiveHeader.FormatVersion >= FormatVersion.Format2)
|
||||
{
|
||||
archiveHeader.HiBlockTablePosition = data.ReadUInt64();
|
||||
archiveHeader.HashTablePositionHi = data.ReadUInt16();
|
||||
archiveHeader.BlockTablePositionHi = data.ReadUInt16();
|
||||
}
|
||||
|
||||
// V3
|
||||
if (archiveHeader.FormatVersion >= FormatVersion.Format3)
|
||||
{
|
||||
archiveHeader.ArchiveSizeLong = data.ReadUInt64();
|
||||
archiveHeader.BetTablePosition = data.ReadUInt64();
|
||||
archiveHeader.HetTablePosition = data.ReadUInt64();
|
||||
}
|
||||
|
||||
// V4
|
||||
if (archiveHeader.FormatVersion >= FormatVersion.Format4)
|
||||
{
|
||||
archiveHeader.HashTableSizeLong = data.ReadUInt64();
|
||||
archiveHeader.BlockTableSizeLong = data.ReadUInt64();
|
||||
archiveHeader.HiBlockTableSize = data.ReadUInt64();
|
||||
archiveHeader.HetTableSize = data.ReadUInt64();
|
||||
archiveHeader.BetTablesize = data.ReadUInt64();
|
||||
archiveHeader.RawChunkSize = data.ReadUInt32();
|
||||
|
||||
archiveHeader.BlockTableMD5 = data.ReadBytes(0x10);
|
||||
archiveHeader.HashTableMD5 = data.ReadBytes(0x10);
|
||||
archiveHeader.HiBlockTableMD5 = data.ReadBytes(0x10);
|
||||
archiveHeader.BetTableMD5 = data.ReadBytes(0x10);
|
||||
archiveHeader.HetTableMD5 = data.ReadBytes(0x10);
|
||||
archiveHeader.HetTableMD5 = data.ReadBytes(0x10);
|
||||
}
|
||||
|
||||
return archiveHeader;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a user data object
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled user data on success, null on error</returns>
|
||||
private static UserData ParseUserData(Stream data)
|
||||
{
|
||||
UserData userData = new UserData();
|
||||
|
||||
byte[] signature = data.ReadBytes(4);
|
||||
userData.Signature = Encoding.ASCII.GetString(signature);
|
||||
if (userData.Signature != UserDataSignatureString)
|
||||
return null;
|
||||
|
||||
userData.UserDataSize = data.ReadUInt32();
|
||||
userData.HeaderOffset = data.ReadUInt32();
|
||||
userData.UserDataHeaderSize = data.ReadUInt32();
|
||||
|
||||
return userData;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a HET table
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled HET table on success, null on error</returns>
|
||||
private static HetTable ParseHetTable(Stream data)
|
||||
{
|
||||
HetTable hetTable = new HetTable();
|
||||
|
||||
// Common Headers
|
||||
byte[] signature = data.ReadBytes(4);
|
||||
hetTable.Signature = Encoding.ASCII.GetString(signature);
|
||||
if (hetTable.Signature != HetTableSignatureString)
|
||||
return null;
|
||||
|
||||
hetTable.Version = data.ReadUInt32();
|
||||
hetTable.DataSize = data.ReadUInt32();
|
||||
|
||||
// HET-Specific
|
||||
hetTable.TableSize = data.ReadUInt32();
|
||||
hetTable.MaxFileCount = data.ReadUInt32();
|
||||
hetTable.HashTableSize = data.ReadUInt32();
|
||||
hetTable.TotalIndexSize = data.ReadUInt32();
|
||||
hetTable.IndexSizeExtra = data.ReadUInt32();
|
||||
hetTable.IndexSize = data.ReadUInt32();
|
||||
hetTable.BlockTableSize = data.ReadUInt32();
|
||||
hetTable.HashTable = data.ReadBytes((int)hetTable.HashTableSize);
|
||||
|
||||
// TODO: Populate the file indexes array
|
||||
hetTable.FileIndexes = new byte[(int)hetTable.HashTableSize][];
|
||||
|
||||
return hetTable;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a BET table
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled BET table on success, null on error</returns>
|
||||
private static BetTable ParseBetTable(Stream data)
|
||||
{
|
||||
BetTable betTable = new BetTable();
|
||||
|
||||
// Common Headers
|
||||
byte[] signature = data.ReadBytes(4);
|
||||
betTable.Signature = Encoding.ASCII.GetString(signature);
|
||||
if (betTable.Signature != BetTableSignatureString)
|
||||
return null;
|
||||
|
||||
betTable.Version = data.ReadUInt32();
|
||||
betTable.DataSize = data.ReadUInt32();
|
||||
|
||||
// BET-Specific
|
||||
betTable.TableSize = data.ReadUInt32();
|
||||
betTable.FileCount = data.ReadUInt32();
|
||||
betTable.Unknown = data.ReadUInt32();
|
||||
betTable.TableEntrySize = data.ReadUInt32();
|
||||
|
||||
betTable.FilePositionBitIndex = data.ReadUInt32();
|
||||
betTable.FileSizeBitIndex = data.ReadUInt32();
|
||||
betTable.CompressedSizeBitIndex = data.ReadUInt32();
|
||||
betTable.FlagIndexBitIndex = data.ReadUInt32();
|
||||
betTable.UnknownBitIndex = data.ReadUInt32();
|
||||
|
||||
betTable.FilePositionBitCount = data.ReadUInt32();
|
||||
betTable.FileSizeBitCount = data.ReadUInt32();
|
||||
betTable.CompressedSizeBitCount = data.ReadUInt32();
|
||||
betTable.FlagIndexBitCount = data.ReadUInt32();
|
||||
betTable.UnknownBitCount = data.ReadUInt32();
|
||||
|
||||
betTable.TotalBetHashSize = data.ReadUInt32();
|
||||
betTable.BetHashSizeExtra = data.ReadUInt32();
|
||||
betTable.BetHashSize = data.ReadUInt32();
|
||||
betTable.BetHashArraySize = data.ReadUInt32();
|
||||
betTable.FlagCount = data.ReadUInt32();
|
||||
|
||||
betTable.FlagsArray = new uint[betTable.FlagCount];
|
||||
byte[] flagsArray = data.ReadBytes((int)betTable.FlagCount * 4);
|
||||
Buffer.BlockCopy(flagsArray, 0, betTable.FlagsArray, 0, (int)betTable.FlagCount * 4);
|
||||
|
||||
// TODO: Populate the file table
|
||||
// TODO: Populate the hash table
|
||||
|
||||
return betTable;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a hash entry
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled hash entry on success, null on error</returns>
|
||||
private static HashEntry ParseHashEntry(Stream data)
|
||||
{
|
||||
// TODO: Use marshalling here instead of building
|
||||
HashEntry hashEntry = new HashEntry();
|
||||
|
||||
hashEntry.NameHashPartA = data.ReadUInt32();
|
||||
hashEntry.NameHashPartB = data.ReadUInt32();
|
||||
hashEntry.Locale = (Locale)data.ReadUInt16();
|
||||
hashEntry.Platform = data.ReadUInt16();
|
||||
hashEntry.BlockIndex = data.ReadUInt32();
|
||||
|
||||
return hashEntry;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a block entry
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled block entry on success, null on error</returns>
|
||||
private static BlockEntry ParseBlockEntry(Stream data)
|
||||
{
|
||||
BlockEntry blockEntry = new BlockEntry();
|
||||
|
||||
blockEntry.FilePosition = data.ReadUInt32();
|
||||
blockEntry.CompressedSize = data.ReadUInt32();
|
||||
blockEntry.UncompressedSize = data.ReadUInt32();
|
||||
blockEntry.Flags = (FileFlags)data.ReadUInt32();
|
||||
|
||||
return blockEntry;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a patch info
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled patch info on success, null on error</returns>
|
||||
private static PatchInfo ParsePatchInfo(Stream data)
|
||||
{
|
||||
// TODO: Use marshalling here instead of building
|
||||
PatchInfo patchInfo = new PatchInfo();
|
||||
|
||||
patchInfo.Length = data.ReadUInt32();
|
||||
patchInfo.Flags = data.ReadUInt32();
|
||||
patchInfo.DataSize = data.ReadUInt32();
|
||||
patchInfo.MD5 = data.ReadBytes(0x10);
|
||||
|
||||
// TODO: Fill the sector offset table
|
||||
|
||||
return patchInfo;
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Helpers
|
||||
|
||||
/// <summary>
|
||||
/// Buffer for encryption and decryption
|
||||
/// </summary>
|
||||
private uint[] _stormBuffer = new uint[STORM_BUFFER_SIZE];
|
||||
|
||||
/// <summary>
|
||||
/// Prepare the encryption table
|
||||
/// </summary>
|
||||
private void PrepareCryptTable()
|
||||
{
|
||||
uint seed = 0x00100001;
|
||||
for (uint index1 = 0; index1 < 0x100; index1++)
|
||||
{
|
||||
for (uint index2 = index1, i = 0; i < 5; i++, index2 += 0x100)
|
||||
{
|
||||
seed = (seed * 125 + 3) % 0x2AAAAB;
|
||||
uint temp1 = (seed & 0xFFFF) << 0x10;
|
||||
|
||||
seed = (seed * 125 + 3) % 0x2AAAAB;
|
||||
uint temp2 = (seed & 0xFFFF);
|
||||
|
||||
_stormBuffer[index2] = (temp1 | temp2);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Decrypt a single block of data
|
||||
/// </summary>
|
||||
private unsafe byte[] DecryptBlock(byte[] block, uint length, uint key)
|
||||
{
|
||||
uint seed = 0xEEEEEEEE;
|
||||
|
||||
uint[] castBlock = new uint[length / 4];
|
||||
Buffer.BlockCopy(block, 0, castBlock, 0, (int)length);
|
||||
int castBlockPtr = 0;
|
||||
|
||||
// Round to uints
|
||||
length >>= 2;
|
||||
|
||||
while (length-- > 0)
|
||||
{
|
||||
seed += _stormBuffer[MPQ_HASH_KEY2_MIX + (key & 0xFF)];
|
||||
uint ch = castBlock[castBlockPtr] ^ (key + seed);
|
||||
|
||||
key = ((~key << 0x15) + 0x11111111) | (key >> 0x0B);
|
||||
seed = ch + seed + (seed << 5) + 3;
|
||||
castBlock[castBlockPtr++] = ch;
|
||||
}
|
||||
|
||||
Buffer.BlockCopy(castBlock, 0, block, 0, (int)length);
|
||||
return block;
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
}
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,544 +0,0 @@
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Text;
|
||||
using SabreTools.IO;
|
||||
using SabreTools.Models.NCF;
|
||||
|
||||
namespace BinaryObjectScanner.Builders
|
||||
{
|
||||
public static class NCF
|
||||
{
|
||||
#region Byte Data
|
||||
|
||||
/// <summary>
|
||||
/// Parse a byte array into a Half-Life No Cache
|
||||
/// </summary>
|
||||
/// <param name="data">Byte array to parse</param>
|
||||
/// <param name="offset">Offset into the byte array</param>
|
||||
/// <returns>Filled Half-Life No Cache on success, null on error</returns>
|
||||
public static SabreTools.Models.NCF.File ParseFile(byte[] data, int offset)
|
||||
{
|
||||
// If the data is invalid
|
||||
if (data == null)
|
||||
return null;
|
||||
|
||||
// If the offset is out of bounds
|
||||
if (offset < 0 || offset >= data.Length)
|
||||
return null;
|
||||
|
||||
// Create a memory stream and parse that
|
||||
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
|
||||
return ParseFile(dataStream);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Stream Data
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a Half-Life No Cache
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled Half-Life No Cache on success, null on error</returns>
|
||||
public static SabreTools.Models.NCF.File ParseFile(Stream data)
|
||||
{
|
||||
// If the data is invalid
|
||||
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
|
||||
return null;
|
||||
|
||||
// If the offset is out of bounds
|
||||
if (data.Position < 0 || data.Position >= data.Length)
|
||||
return null;
|
||||
|
||||
// Cache the current offset
|
||||
long initialOffset = data.Position;
|
||||
|
||||
// Create a new Half-Life No Cache to fill
|
||||
var file = new SabreTools.Models.NCF.File();
|
||||
|
||||
#region Header
|
||||
|
||||
// Try to parse the header
|
||||
var header = ParseHeader(data);
|
||||
if (header == null)
|
||||
return null;
|
||||
|
||||
// Set the no cache header
|
||||
file.Header = header;
|
||||
|
||||
#endregion
|
||||
|
||||
// Cache the current offset
|
||||
initialOffset = data.Position;
|
||||
|
||||
#region Directory Header
|
||||
|
||||
// Try to parse the directory header
|
||||
var directoryHeader = ParseDirectoryHeader(data);
|
||||
if (directoryHeader == null)
|
||||
return null;
|
||||
|
||||
// Set the game cache directory header
|
||||
file.DirectoryHeader = directoryHeader;
|
||||
|
||||
#endregion
|
||||
|
||||
#region Directory Entries
|
||||
|
||||
// Create the directory entry array
|
||||
file.DirectoryEntries = new DirectoryEntry[directoryHeader.ItemCount];
|
||||
|
||||
// Try to parse the directory entries
|
||||
for (int i = 0; i < directoryHeader.ItemCount; i++)
|
||||
{
|
||||
var directoryEntry = ParseDirectoryEntry(data);
|
||||
file.DirectoryEntries[i] = directoryEntry;
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Directory Names
|
||||
|
||||
if (directoryHeader.NameSize > 0)
|
||||
{
|
||||
// Get the current offset for adjustment
|
||||
long directoryNamesStart = data.Position;
|
||||
|
||||
// Get the ending offset
|
||||
long directoryNamesEnd = data.Position + directoryHeader.NameSize;
|
||||
|
||||
// Create the string dictionary
|
||||
file.DirectoryNames = new Dictionary<long, string>();
|
||||
|
||||
// Loop and read the null-terminated strings
|
||||
while (data.Position < directoryNamesEnd)
|
||||
{
|
||||
long nameOffset = data.Position - directoryNamesStart;
|
||||
string directoryName = data.ReadString(Encoding.ASCII);
|
||||
if (data.Position > directoryNamesEnd)
|
||||
{
|
||||
data.Seek(-directoryName.Length, SeekOrigin.Current);
|
||||
byte[] endingData = data.ReadBytes((int)(directoryNamesEnd - data.Position));
|
||||
if (endingData != null)
|
||||
directoryName = Encoding.ASCII.GetString(endingData);
|
||||
else
|
||||
directoryName = null;
|
||||
}
|
||||
|
||||
file.DirectoryNames[nameOffset] = directoryName;
|
||||
}
|
||||
|
||||
// Loop and assign to entries
|
||||
foreach (var directoryEntry in file.DirectoryEntries)
|
||||
{
|
||||
directoryEntry.Name = file.DirectoryNames[directoryEntry.NameOffset];
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Directory Info 1 Entries
|
||||
|
||||
// Create the directory info 1 entry array
|
||||
file.DirectoryInfo1Entries = new DirectoryInfo1Entry[directoryHeader.Info1Count];
|
||||
|
||||
// Try to parse the directory info 1 entries
|
||||
for (int i = 0; i < directoryHeader.Info1Count; i++)
|
||||
{
|
||||
var directoryInfo1Entry = ParseDirectoryInfo1Entry(data);
|
||||
file.DirectoryInfo1Entries[i] = directoryInfo1Entry;
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Directory Info 2 Entries
|
||||
|
||||
// Create the directory info 2 entry array
|
||||
file.DirectoryInfo2Entries = new DirectoryInfo2Entry[directoryHeader.ItemCount];
|
||||
|
||||
// Try to parse the directory info 2 entries
|
||||
for (int i = 0; i < directoryHeader.ItemCount; i++)
|
||||
{
|
||||
var directoryInfo2Entry = ParseDirectoryInfo2Entry(data);
|
||||
file.DirectoryInfo2Entries[i] = directoryInfo2Entry;
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Directory Copy Entries
|
||||
|
||||
// Create the directory copy entry array
|
||||
file.DirectoryCopyEntries = new DirectoryCopyEntry[directoryHeader.CopyCount];
|
||||
|
||||
// Try to parse the directory copy entries
|
||||
for (int i = 0; i < directoryHeader.CopyCount; i++)
|
||||
{
|
||||
var directoryCopyEntry = ParseDirectoryCopyEntry(data);
|
||||
file.DirectoryCopyEntries[i] = directoryCopyEntry;
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Directory Local Entries
|
||||
|
||||
// Create the directory local entry array
|
||||
file.DirectoryLocalEntries = new DirectoryLocalEntry[directoryHeader.LocalCount];
|
||||
|
||||
// Try to parse the directory local entries
|
||||
for (int i = 0; i < directoryHeader.LocalCount; i++)
|
||||
{
|
||||
var directoryLocalEntry = ParseDirectoryLocalEntry(data);
|
||||
file.DirectoryLocalEntries[i] = directoryLocalEntry;
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
// Seek to end of directory section, just in case
|
||||
data.Seek(initialOffset + directoryHeader.DirectorySize, SeekOrigin.Begin);
|
||||
|
||||
#region Unknown Header
|
||||
|
||||
// Try to parse the unknown header
|
||||
var unknownHeader = ParseUnknownHeader(data);
|
||||
if (unknownHeader == null)
|
||||
return null;
|
||||
|
||||
// Set the game cache unknown header
|
||||
file.UnknownHeader = unknownHeader;
|
||||
|
||||
#endregion
|
||||
|
||||
#region Unknown Entries
|
||||
|
||||
// Create the unknown entry array
|
||||
file.UnknownEntries = new UnknownEntry[directoryHeader.ItemCount];
|
||||
|
||||
// Try to parse the unknown entries
|
||||
for (int i = 0; i < directoryHeader.ItemCount; i++)
|
||||
{
|
||||
var unknownEntry = ParseUnknownEntry(data);
|
||||
file.UnknownEntries[i] = unknownEntry;
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Checksum Header
|
||||
|
||||
// Try to parse the checksum header
|
||||
var checksumHeader = ParseChecksumHeader(data);
|
||||
if (checksumHeader == null)
|
||||
return null;
|
||||
|
||||
// Set the game cache checksum header
|
||||
file.ChecksumHeader = checksumHeader;
|
||||
|
||||
#endregion
|
||||
|
||||
// Cache the current offset
|
||||
initialOffset = data.Position;
|
||||
|
||||
#region Checksum Map Header
|
||||
|
||||
// Try to parse the checksum map header
|
||||
var checksumMapHeader = ParseChecksumMapHeader(data);
|
||||
if (checksumMapHeader == null)
|
||||
return null;
|
||||
|
||||
// Set the game cache checksum map header
|
||||
file.ChecksumMapHeader = checksumMapHeader;
|
||||
|
||||
#endregion
|
||||
|
||||
#region Checksum Map Entries
|
||||
|
||||
// Create the checksum map entry array
|
||||
file.ChecksumMapEntries = new ChecksumMapEntry[checksumMapHeader.ItemCount];
|
||||
|
||||
// Try to parse the checksum map entries
|
||||
for (int i = 0; i < checksumMapHeader.ItemCount; i++)
|
||||
{
|
||||
var checksumMapEntry = ParseChecksumMapEntry(data);
|
||||
file.ChecksumMapEntries[i] = checksumMapEntry;
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Checksum Entries
|
||||
|
||||
// Create the checksum entry array
|
||||
file.ChecksumEntries = new ChecksumEntry[checksumMapHeader.ChecksumCount];
|
||||
|
||||
// Try to parse the checksum entries
|
||||
for (int i = 0; i < checksumMapHeader.ChecksumCount; i++)
|
||||
{
|
||||
var checksumEntry = ParseChecksumEntry(data);
|
||||
file.ChecksumEntries[i] = checksumEntry;
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
// Seek to end of checksum section, just in case
|
||||
data.Seek(initialOffset + checksumHeader.ChecksumSize, SeekOrigin.Begin);
|
||||
|
||||
return file;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a Half-Life No Cache header
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled Half-Life No Cache header on success, null on error</returns>
|
||||
private static Header ParseHeader(Stream data)
|
||||
{
|
||||
// TODO: Use marshalling here instead of building
|
||||
Header header = new Header();
|
||||
|
||||
header.Dummy0 = data.ReadUInt32();
|
||||
if (header.Dummy0 != 0x00000001)
|
||||
return null;
|
||||
|
||||
header.MajorVersion = data.ReadUInt32();
|
||||
if (header.MajorVersion != 0x00000002)
|
||||
return null;
|
||||
|
||||
header.MinorVersion = data.ReadUInt32();
|
||||
if (header.MinorVersion != 1)
|
||||
return null;
|
||||
|
||||
header.CacheID = data.ReadUInt32();
|
||||
header.LastVersionPlayed = data.ReadUInt32();
|
||||
header.Dummy1 = data.ReadUInt32();
|
||||
header.Dummy2 = data.ReadUInt32();
|
||||
header.FileSize = data.ReadUInt32();
|
||||
header.BlockSize = data.ReadUInt32();
|
||||
header.BlockCount = data.ReadUInt32();
|
||||
header.Dummy3 = data.ReadUInt32();
|
||||
|
||||
return header;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a Half-Life No Cache directory header
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled Half-Life No Cache directory header on success, null on error</returns>
|
||||
private static DirectoryHeader ParseDirectoryHeader(Stream data)
|
||||
{
|
||||
// TODO: Use marshalling here instead of building
|
||||
DirectoryHeader directoryHeader = new DirectoryHeader();
|
||||
|
||||
directoryHeader.Dummy0 = data.ReadUInt32();
|
||||
if (directoryHeader.Dummy0 != 0x00000004)
|
||||
return null;
|
||||
|
||||
directoryHeader.CacheID = data.ReadUInt32();
|
||||
directoryHeader.LastVersionPlayed = data.ReadUInt32();
|
||||
directoryHeader.ItemCount = data.ReadUInt32();
|
||||
directoryHeader.FileCount = data.ReadUInt32();
|
||||
directoryHeader.ChecksumDataLength = data.ReadUInt32();
|
||||
directoryHeader.DirectorySize = data.ReadUInt32();
|
||||
directoryHeader.NameSize = data.ReadUInt32();
|
||||
directoryHeader.Info1Count = data.ReadUInt32();
|
||||
directoryHeader.CopyCount = data.ReadUInt32();
|
||||
directoryHeader.LocalCount = data.ReadUInt32();
|
||||
directoryHeader.Dummy1 = data.ReadUInt32();
|
||||
directoryHeader.Dummy2 = data.ReadUInt32();
|
||||
directoryHeader.Checksum = data.ReadUInt32();
|
||||
|
||||
return directoryHeader;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a Half-Life No Cache directory entry
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled Half-Life No Cache directory entry on success, null on error</returns>
|
||||
private static DirectoryEntry ParseDirectoryEntry(Stream data)
|
||||
{
|
||||
// TODO: Use marshalling here instead of building
|
||||
DirectoryEntry directoryEntry = new DirectoryEntry();
|
||||
|
||||
directoryEntry.NameOffset = data.ReadUInt32();
|
||||
directoryEntry.ItemSize = data.ReadUInt32();
|
||||
directoryEntry.ChecksumIndex = data.ReadUInt32();
|
||||
directoryEntry.DirectoryFlags = (HL_NCF_FLAG)data.ReadUInt32();
|
||||
directoryEntry.ParentIndex = data.ReadUInt32();
|
||||
directoryEntry.NextIndex = data.ReadUInt32();
|
||||
directoryEntry.FirstIndex = data.ReadUInt32();
|
||||
|
||||
return directoryEntry;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a Half-Life No Cache directory info 1 entry
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled Half-Life No Cache directory info 1 entry on success, null on error</returns>
|
||||
private static DirectoryInfo1Entry ParseDirectoryInfo1Entry(Stream data)
|
||||
{
|
||||
// TODO: Use marshalling here instead of building
|
||||
DirectoryInfo1Entry directoryInfo1Entry = new DirectoryInfo1Entry();
|
||||
|
||||
directoryInfo1Entry.Dummy0 = data.ReadUInt32();
|
||||
|
||||
return directoryInfo1Entry;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a Half-Life No Cache directory info 2 entry
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled Half-Life No Cache directory info 2 entry on success, null on error</returns>
|
||||
private static DirectoryInfo2Entry ParseDirectoryInfo2Entry(Stream data)
|
||||
{
|
||||
// TODO: Use marshalling here instead of building
|
||||
DirectoryInfo2Entry directoryInfo2Entry = new DirectoryInfo2Entry();
|
||||
|
||||
directoryInfo2Entry.Dummy0 = data.ReadUInt32();
|
||||
|
||||
return directoryInfo2Entry;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a Half-Life No Cache directory copy entry
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled Half-Life No Cache directory copy entry on success, null on error</returns>
|
||||
private static DirectoryCopyEntry ParseDirectoryCopyEntry(Stream data)
|
||||
{
|
||||
// TODO: Use marshalling here instead of building
|
||||
DirectoryCopyEntry directoryCopyEntry = new DirectoryCopyEntry();
|
||||
|
||||
directoryCopyEntry.DirectoryIndex = data.ReadUInt32();
|
||||
|
||||
return directoryCopyEntry;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a Half-Life No Cache directory local entry
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled Half-Life No Cache directory local entry on success, null on error</returns>
|
||||
private static DirectoryLocalEntry ParseDirectoryLocalEntry(Stream data)
|
||||
{
|
||||
// TODO: Use marshalling here instead of building
|
||||
DirectoryLocalEntry directoryLocalEntry = new DirectoryLocalEntry();
|
||||
|
||||
directoryLocalEntry.DirectoryIndex = data.ReadUInt32();
|
||||
|
||||
return directoryLocalEntry;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a Half-Life No Cache unknown header
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled Half-Life No Cache unknown header on success, null on error</returns>
|
||||
private static UnknownHeader ParseUnknownHeader(Stream data)
|
||||
{
|
||||
// TODO: Use marshalling here instead of building
|
||||
UnknownHeader unknownHeader = new UnknownHeader();
|
||||
|
||||
unknownHeader.Dummy0 = data.ReadUInt32();
|
||||
if (unknownHeader.Dummy0 != 0x00000001)
|
||||
return null;
|
||||
|
||||
unknownHeader.Dummy1 = data.ReadUInt32();
|
||||
if (unknownHeader.Dummy1 != 0x00000000)
|
||||
return null;
|
||||
|
||||
return unknownHeader;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a Half-Life No Cache unknown entry
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled Half-Life No Cacheunknown entry on success, null on error</returns>
|
||||
private static UnknownEntry ParseUnknownEntry(Stream data)
|
||||
{
|
||||
// TODO: Use marshalling here instead of building
|
||||
UnknownEntry unknownEntry = new UnknownEntry();
|
||||
|
||||
unknownEntry.Dummy0 = data.ReadUInt32();
|
||||
|
||||
return unknownEntry;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a Half-Life No Cache checksum header
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled Half-Life No Cache checksum header on success, null on error</returns>
|
||||
private static ChecksumHeader ParseChecksumHeader(Stream data)
|
||||
{
|
||||
// TODO: Use marshalling here instead of building
|
||||
ChecksumHeader checksumHeader = new ChecksumHeader();
|
||||
|
||||
checksumHeader.Dummy0 = data.ReadUInt32();
|
||||
if (checksumHeader.Dummy0 != 0x00000001)
|
||||
return null;
|
||||
|
||||
checksumHeader.ChecksumSize = data.ReadUInt32();
|
||||
|
||||
return checksumHeader;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a Half-Life No Cache checksum map header
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled Half-Life No Cache checksum map header on success, null on error</returns>
|
||||
private static ChecksumMapHeader ParseChecksumMapHeader(Stream data)
|
||||
{
|
||||
// TODO: Use marshalling here instead of building
|
||||
ChecksumMapHeader checksumMapHeader = new ChecksumMapHeader();
|
||||
|
||||
checksumMapHeader.Dummy0 = data.ReadUInt32();
|
||||
if (checksumMapHeader.Dummy0 != 0x14893721)
|
||||
return null;
|
||||
|
||||
checksumMapHeader.Dummy1 = data.ReadUInt32();
|
||||
if (checksumMapHeader.Dummy1 != 0x00000001)
|
||||
return null;
|
||||
|
||||
checksumMapHeader.ItemCount = data.ReadUInt32();
|
||||
checksumMapHeader.ChecksumCount = data.ReadUInt32();
|
||||
|
||||
return checksumMapHeader;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a Half-Life No Cache checksum map entry
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled Half-Life No Cache checksum map entry on success, null on error</returns>
|
||||
private static ChecksumMapEntry ParseChecksumMapEntry(Stream data)
|
||||
{
|
||||
// TODO: Use marshalling here instead of building
|
||||
ChecksumMapEntry checksumMapEntry = new ChecksumMapEntry();
|
||||
|
||||
checksumMapEntry.ChecksumCount = data.ReadUInt32();
|
||||
checksumMapEntry.FirstChecksumIndex = data.ReadUInt32();
|
||||
|
||||
return checksumMapEntry;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a Half-Life No Cache checksum entry
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled Half-Life No Cache checksum entry on success, null on error</returns>
|
||||
private static ChecksumEntry ParseChecksumEntry(Stream data)
{
    // TODO: Use marshalling here instead of building
    // A checksum entry is a single 32-bit value
    var entry = new ChecksumEntry();
    entry.Checksum = data.ReadUInt32();
    return entry;
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
}
|
||||
@@ -1,508 +0,0 @@
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using SabreTools.IO;
|
||||
using SabreTools.Models.NewExecutable;
|
||||
using static SabreTools.Models.NewExecutable.Constants;
|
||||
|
||||
namespace BinaryObjectScanner.Builders
|
||||
{
|
||||
public static class NewExecutable
{
    #region Byte Data

    /// <summary>
    /// Parse a byte array into a New Executable
    /// </summary>
    /// <param name="data">Byte array to parse</param>
    /// <param name="offset">Offset into the byte array</param>
    /// <returns>Filled executable on success, null on error</returns>
    public static Executable ParseExecutable(byte[] data, int offset)
    {
        // If the data is invalid
        if (data == null)
            return null;

        // If the offset is out of bounds
        if (offset < 0 || offset >= data.Length)
            return null;

        // Create a memory stream and parse that
        MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
        return ParseExecutable(dataStream);
    }

    #endregion

    #region Stream Data

    /// <summary>
    /// Parse a Stream into a New Executable
    /// </summary>
    /// <param name="data">Stream to parse</param>
    /// <returns>Filled executable on success, null on error</returns>
    public static Executable ParseExecutable(Stream data)
    {
        // If the data is invalid
        if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
            return null;

        // If the offset is out of bounds
        if (data.Position < 0 || data.Position >= data.Length)
            return null;

        // Cache the current offset
        int initialOffset = (int)data.Position;

        // Create a new executable to fill
        var executable = new Executable();

        #region MS-DOS Stub

        // Parse the MS-DOS stub; a valid NE file must carry a stub with
        // a nonzero pointer to the new-style header
        var stub = MSDOS.ParseExecutable(data);
        if (stub?.Header == null || stub.Header.NewExeHeaderAddr == 0)
            return null;

        // Set the MS-DOS stub
        executable.Stub = stub;

        #endregion

        #region Executable Header

        // Try to parse the executable header
        data.Seek(initialOffset + stub.Header.NewExeHeaderAddr, SeekOrigin.Begin);
        var executableHeader = ParseExecutableHeader(data);
        if (executableHeader == null)
            return null;

        // Set the executable header
        executable.Header = executableHeader;

        #endregion

        #region Segment Table

        // If the offset for the segment table doesn't exist
        int tableAddress = initialOffset
            + (int)stub.Header.NewExeHeaderAddr
            + executableHeader.SegmentTableOffset;
        if (tableAddress >= data.Length)
            return executable;

        // Try to parse the segment table
        data.Seek(tableAddress, SeekOrigin.Begin);
        var segmentTable = ParseSegmentTable(data, executableHeader.FileSegmentCount);
        if (segmentTable == null)
            return null;

        // Set the segment table
        executable.SegmentTable = segmentTable;

        #endregion

        #region Resource Table

        // If the offset for the resource table doesn't exist
        // BUGFIX: this previously reused SegmentTableOffset (copy-paste error),
        // which made the resource table parse from the segment table location
        tableAddress = initialOffset
            + (int)stub.Header.NewExeHeaderAddr
            + executableHeader.ResourceTableOffset;
        if (tableAddress >= data.Length)
            return executable;

        // Try to parse the resource table
        data.Seek(tableAddress, SeekOrigin.Begin);
        var resourceTable = ParseResourceTable(data, executableHeader.ResourceEntriesCount);
        if (resourceTable == null)
            return null;

        // Set the resource table
        executable.ResourceTable = resourceTable;

        #endregion

        #region Resident-Name Table

        // If the offset for the resident-name table doesn't exist
        // The resident-name table runs up to the module-reference table
        tableAddress = initialOffset
            + (int)stub.Header.NewExeHeaderAddr
            + executableHeader.ResidentNameTableOffset;
        int endOffset = initialOffset
            + (int)stub.Header.NewExeHeaderAddr
            + executableHeader.ModuleReferenceTableOffset;
        if (tableAddress >= data.Length)
            return executable;

        // Try to parse the resident-name table
        data.Seek(tableAddress, SeekOrigin.Begin);
        var residentNameTable = ParseResidentNameTable(data, endOffset);
        if (residentNameTable == null)
            return null;

        // Set the resident-name table
        executable.ResidentNameTable = residentNameTable;

        #endregion

        #region Module-Reference Table

        // If the offset for the module-reference table doesn't exist
        tableAddress = initialOffset
            + (int)stub.Header.NewExeHeaderAddr
            + executableHeader.ModuleReferenceTableOffset;
        if (tableAddress >= data.Length)
            return executable;

        // Try to parse the module-reference table
        data.Seek(tableAddress, SeekOrigin.Begin);
        var moduleReferenceTable = ParseModuleReferenceTable(data, executableHeader.ModuleReferenceTableSize);
        if (moduleReferenceTable == null)
            return null;

        // Set the module-reference table
        executable.ModuleReferenceTable = moduleReferenceTable;

        #endregion

        #region Imported-Name Table

        // If the offset for the imported-name table doesn't exist
        // The imported-name table runs up to the entry table
        tableAddress = initialOffset
            + (int)stub.Header.NewExeHeaderAddr
            + executableHeader.ImportedNamesTableOffset;
        endOffset = initialOffset
            + (int)stub.Header.NewExeHeaderAddr
            + executableHeader.EntryTableOffset;
        if (tableAddress >= data.Length)
            return executable;

        // Try to parse the imported-name table
        data.Seek(tableAddress, SeekOrigin.Begin);
        var importedNameTable = ParseImportedNameTable(data, endOffset);
        if (importedNameTable == null)
            return null;

        // Set the imported-name table
        executable.ImportedNameTable = importedNameTable;

        #endregion

        #region Entry Table

        // If the offset for the entry table doesn't exist
        // The entry table length is given explicitly by the header
        tableAddress = initialOffset
            + (int)stub.Header.NewExeHeaderAddr
            + executableHeader.EntryTableOffset;
        endOffset = initialOffset
            + (int)stub.Header.NewExeHeaderAddr
            + executableHeader.EntryTableOffset
            + executableHeader.EntryTableSize;
        if (tableAddress >= data.Length)
            return executable;

        // Try to parse the entry table
        data.Seek(tableAddress, SeekOrigin.Begin);
        var entryTable = ParseEntryTable(data, endOffset);
        if (entryTable == null)
            return null;

        // Set the entry table
        executable.EntryTable = entryTable;

        #endregion

        #region Nonresident-Name Table

        // If the offset for the nonresident-name table doesn't exist
        // NOTE: unlike the other tables, this offset is relative to the
        // start of the file, not to the new-style header
        tableAddress = initialOffset
            + (int)executableHeader.NonResidentNamesTableOffset;
        endOffset = initialOffset
            + (int)executableHeader.NonResidentNamesTableOffset
            + executableHeader.NonResidentNameTableSize;
        if (tableAddress >= data.Length)
            return executable;

        // Try to parse the nonresident-name table
        data.Seek(tableAddress, SeekOrigin.Begin);
        var nonResidentNameTable = ParseNonResidentNameTable(data, endOffset);
        if (nonResidentNameTable == null)
            return null;

        // Set the nonresident-name table
        executable.NonResidentNameTable = nonResidentNameTable;

        #endregion

        return executable;
    }

    /// <summary>
    /// Parse a Stream into a New Executable header
    /// </summary>
    /// <param name="data">Stream to parse</param>
    /// <returns>Filled executable header on success, null on error</returns>
    private static ExecutableHeader ParseExecutableHeader(Stream data)
    {
        // TODO: Use marshalling here instead of building
        var header = new ExecutableHeader();

        // Validate the "NE" signature before reading anything else
        byte[] magic = data.ReadBytes(2);
        header.Magic = Encoding.ASCII.GetString(magic);
        if (header.Magic != SignatureString)
            return null;

        header.LinkerVersion = data.ReadByteValue();
        header.LinkerRevision = data.ReadByteValue();
        header.EntryTableOffset = data.ReadUInt16();
        header.EntryTableSize = data.ReadUInt16();
        header.CrcChecksum = data.ReadUInt32();
        header.FlagWord = (HeaderFlag)data.ReadUInt16();
        header.AutomaticDataSegmentNumber = data.ReadUInt16();
        header.InitialHeapAlloc = data.ReadUInt16();
        header.InitialStackAlloc = data.ReadUInt16();
        header.InitialCSIPSetting = data.ReadUInt32();
        header.InitialSSSPSetting = data.ReadUInt32();
        header.FileSegmentCount = data.ReadUInt16();
        header.ModuleReferenceTableSize = data.ReadUInt16();
        header.NonResidentNameTableSize = data.ReadUInt16();
        header.SegmentTableOffset = data.ReadUInt16();
        header.ResourceTableOffset = data.ReadUInt16();
        header.ResidentNameTableOffset = data.ReadUInt16();
        header.ModuleReferenceTableOffset = data.ReadUInt16();
        header.ImportedNamesTableOffset = data.ReadUInt16();
        header.NonResidentNamesTableOffset = data.ReadUInt32();
        header.MovableEntriesCount = data.ReadUInt16();
        header.SegmentAlignmentShiftCount = data.ReadUInt16();
        header.ResourceEntriesCount = data.ReadUInt16();
        header.TargetOperatingSystem = (OperatingSystem)data.ReadByteValue();
        header.AdditionalFlags = (OS2Flag)data.ReadByteValue();
        header.ReturnThunkOffset = data.ReadUInt16();
        header.SegmentReferenceThunkOffset = data.ReadUInt16();
        header.MinCodeSwapAreaSize = data.ReadUInt16();
        header.WindowsSDKRevision = data.ReadByteValue();
        header.WindowsSDKVersion = data.ReadByteValue();

        return header;
    }

    /// <summary>
    /// Parse a Stream into a segment table
    /// </summary>
    /// <param name="data">Stream to parse</param>
    /// <param name="count">Number of segment table entries to read</param>
    /// <returns>Filled segment table on success, null on error</returns>
    private static SegmentTableEntry[] ParseSegmentTable(Stream data, int count)
    {
        // TODO: Use marshalling here instead of building
        var segmentTable = new SegmentTableEntry[count];

        for (int i = 0; i < count; i++)
        {
            var entry = new SegmentTableEntry();
            entry.Offset = data.ReadUInt16();
            entry.Length = data.ReadUInt16();
            entry.FlagWord = (SegmentTableEntryFlag)data.ReadUInt16();
            entry.MinimumAllocationSize = data.ReadUInt16();
            segmentTable[i] = entry;
        }

        return segmentTable;
    }

    /// <summary>
    /// Parse a Stream into a resource table
    /// </summary>
    /// <param name="data">Stream to parse</param>
    /// <param name="count">Number of resource table entries to read</param>
    /// <returns>Filled resource table on success, null on error</returns>
    private static ResourceTable ParseResourceTable(Stream data, int count)
    {
        // Remember where the table started; string offsets are relative to it
        long initialOffset = data.Position;

        // TODO: Use marshalling here instead of building
        var resourceTable = new ResourceTable();

        resourceTable.AlignmentShiftCount = data.ReadUInt16();
        resourceTable.ResourceTypes = new ResourceTypeInformationEntry[count];
        for (int i = 0; i < resourceTable.ResourceTypes.Length; i++)
        {
            var entry = new ResourceTypeInformationEntry();
            entry.TypeID = data.ReadUInt16();
            entry.ResourceCount = data.ReadUInt16();
            entry.Reserved = data.ReadUInt32();
            entry.Resources = new ResourceTypeResourceEntry[entry.ResourceCount];
            for (int j = 0; j < entry.ResourceCount; j++)
            {
                // TODO: Should we read and store the resource data?
                var resource = new ResourceTypeResourceEntry();
                resource.Offset = data.ReadUInt16();
                resource.Length = data.ReadUInt16();
                resource.FlagWord = (ResourceTypeResourceFlag)data.ReadUInt16();
                resource.ResourceID = data.ReadUInt16();
                resource.Reserved = data.ReadUInt32();
                entry.Resources[j] = resource;
            }
            resourceTable.ResourceTypes[i] = entry;
        }

        // Get the full list of unique string offsets; non-integer type and
        // resource IDs are offsets into the table's string area
        var stringOffsets = resourceTable.ResourceTypes
            .Where(rt => rt.IsIntegerType() == false)
            .Select(rt => rt.TypeID)
            .Union(resourceTable.ResourceTypes
                .SelectMany(rt => rt.Resources)
                .Where(r => r.IsIntegerType() == false)
                .Select(r => r.ResourceID))
            .Distinct()
            .OrderBy(o => o)
            .ToList();

        // Populate the type and name string dictionary, keyed by offset
        resourceTable.TypeAndNameStrings = new Dictionary<ushort, ResourceTypeAndNameString>();
        for (int i = 0; i < stringOffsets.Count; i++)
        {
            int stringOffset = (int)(stringOffsets[i] + initialOffset);
            data.Seek(stringOffset, SeekOrigin.Begin);
            var str = new ResourceTypeAndNameString();
            str.Length = data.ReadByteValue();
            str.Text = data.ReadBytes(str.Length);
            resourceTable.TypeAndNameStrings[stringOffsets[i]] = str;
        }

        return resourceTable;
    }

    /// <summary>
    /// Parse a Stream into a resident-name table
    /// </summary>
    /// <param name="data">Stream to parse</param>
    /// <param name="endOffset">First address not part of the resident-name table</param>
    /// <returns>Filled resident-name table on success, null on error</returns>
    private static ResidentNameTableEntry[] ParseResidentNameTable(Stream data, int endOffset)
    {
        // TODO: Use marshalling here instead of building
        var residentNameTable = new List<ResidentNameTableEntry>();

        // Entries are length-prefixed strings followed by an ordinal
        while (data.Position < endOffset)
        {
            var entry = new ResidentNameTableEntry();
            entry.Length = data.ReadByteValue();
            entry.NameString = data.ReadBytes(entry.Length);
            entry.OrdinalNumber = data.ReadUInt16();
            residentNameTable.Add(entry);
        }

        return residentNameTable.ToArray();
    }

    /// <summary>
    /// Parse a Stream into a module-reference table
    /// </summary>
    /// <param name="data">Stream to parse</param>
    /// <param name="count">Number of module-reference table entries to read</param>
    /// <returns>Filled module-reference table on success, null on error</returns>
    private static ModuleReferenceTableEntry[] ParseModuleReferenceTable(Stream data, int count)
    {
        // TODO: Use marshalling here instead of building
        var moduleReferenceTable = new ModuleReferenceTableEntry[count];

        // Each entry is a single offset into the imported-name table
        for (int i = 0; i < count; i++)
        {
            var entry = new ModuleReferenceTableEntry();
            entry.Offset = data.ReadUInt16();
            moduleReferenceTable[i] = entry;
        }

        return moduleReferenceTable;
    }

    /// <summary>
    /// Parse a Stream into an imported-name table
    /// </summary>
    /// <param name="data">Stream to parse</param>
    /// <param name="endOffset">First address not part of the imported-name table</param>
    /// <returns>Filled imported-name table on success, null on error</returns>
    private static Dictionary<ushort, ImportedNameTableEntry> ParseImportedNameTable(Stream data, int endOffset)
    {
        // TODO: Use marshalling here instead of building
        var importedNameTable = new Dictionary<ushort, ImportedNameTableEntry>();

        // Entries are keyed by their own offset so module references can
        // be resolved back to names
        while (data.Position < endOffset)
        {
            ushort currentOffset = (ushort)data.Position;
            var entry = new ImportedNameTableEntry();
            entry.Length = data.ReadByteValue();
            entry.NameString = data.ReadBytes(entry.Length);
            importedNameTable[currentOffset] = entry;
        }

        return importedNameTable;
    }

    /// <summary>
    /// Parse a Stream into an entry table
    /// </summary>
    /// <param name="data">Stream to parse</param>
    /// <param name="endOffset">First address not part of the entry table</param>
    /// <returns>Filled entry table on success, null on error</returns>
    private static EntryTableBundle[] ParseEntryTable(Stream data, int endOffset)
    {
        // TODO: Use marshalling here instead of building
        var entryTable = new List<EntryTableBundle>();

        while (data.Position < endOffset)
        {
            var entry = new EntryTableBundle();
            entry.EntryCount = data.ReadByteValue();
            entry.SegmentIndicator = data.ReadByteValue();

            // The bundle payload depends on the segment indicator
            switch (entry.GetEntryType())
            {
                case SegmentEntryType.Unused:
                    break;

                case SegmentEntryType.FixedSegment:
                    entry.FixedFlagWord = (FixedSegmentEntryFlag)data.ReadByteValue();
                    entry.FixedOffset = data.ReadUInt16();
                    break;

                case SegmentEntryType.MoveableSegment:
                    entry.MoveableFlagWord = (MoveableSegmentEntryFlag)data.ReadByteValue();
                    entry.MoveableReserved = data.ReadUInt16();
                    entry.MoveableSegmentNumber = data.ReadByteValue();
                    entry.MoveableOffset = data.ReadUInt16();
                    break;
            }
            entryTable.Add(entry);
        }

        return entryTable.ToArray();
    }

    /// <summary>
    /// Parse a Stream into a nonresident-name table
    /// </summary>
    /// <param name="data">Stream to parse</param>
    /// <param name="endOffset">First address not part of the nonresident-name table</param>
    /// <returns>Filled nonresident-name table on success, null on error</returns>
    private static NonResidentNameTableEntry[] ParseNonResidentNameTable(Stream data, int endOffset)
    {
        // TODO: Use marshalling here instead of building
        var nonResidentNameTable = new List<NonResidentNameTableEntry>();

        // Same layout as the resident-name table: length-prefixed string
        // plus an ordinal number
        while (data.Position < endOffset)
        {
            var entry = new NonResidentNameTableEntry();
            entry.Length = data.ReadByteValue();
            entry.NameString = data.ReadBytes(entry.Length);
            entry.OrdinalNumber = data.ReadUInt16();
            nonResidentNameTable.Add(entry);
        }

        return nonResidentNameTable.ToArray();
    }

    #endregion
}
|
||||
}
|
||||
@@ -1,393 +0,0 @@
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Text;
|
||||
using SabreTools.IO;
|
||||
using SabreTools.Models.Nitro;
|
||||
|
||||
namespace BinaryObjectScanner.Builders
|
||||
{
|
||||
public class Nitro
|
||||
{
|
||||
#region Byte Data
|
||||
|
||||
/// <summary>
|
||||
/// Parse a byte array into a NDS cart image
|
||||
/// </summary>
|
||||
/// <param name="data">Byte array to parse</param>
|
||||
/// <param name="offset">Offset into the byte array</param>
|
||||
/// <returns>Filled cart image on success, null on error</returns>
|
||||
public static Cart ParseCart(byte[] data, int offset)
{
    // Reject missing input
    if (data == null)
        return null;

    // Reject an offset that falls outside of the array
    if (offset < 0 || offset >= data.Length)
        return null;

    // Wrap the remaining bytes in a stream and defer to the stream parser
    var dataStream = new MemoryStream(data, offset, data.Length - offset);
    return ParseCart(dataStream);
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Stream Data
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a NDS cart image
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled cart image on success, null on error</returns>
|
||||
public static Cart ParseCart(Stream data)
{
    // Reject unusable streams
    if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
        return null;

    // Reject a starting position outside of the stream
    if (data.Position < 0 || data.Position >= data.Length)
        return null;

    // Remember where parsing started
    int initialOffset = (int)data.Position;

    // The cart image that gets filled in section by section
    var cart = new Cart();

    #region Header

    // The common header is mandatory for every cart image
    var header = ParseCommonHeader(data);
    if (header == null)
        return null;

    cart.CommonHeader = header;

    #endregion

    #region Extended DSi Header

    // DSi-compatible carts carry an additional extended header
    bool isDSi = header.UnitCode == Unitcode.NDSPlusDSi
        || header.UnitCode == Unitcode.DSi;
    if (isDSi)
    {
        var extendedDSiHeader = ParseExtendedDSiHeader(data);
        if (extendedDSiHeader == null)
            return null;

        cart.ExtendedDSiHeader = extendedDSiHeader;
    }

    #endregion

    #region Secure Area

    // The secure area lives at a fixed location in the image
    long secureAreaOffset = 0x4000;
    if (secureAreaOffset > data.Length)
        return null;

    // Capture the secure area bytes without decrypting or processing them
    data.Seek(secureAreaOffset, SeekOrigin.Begin);
    cart.SecureArea = data.ReadBytes(0x800);

    #endregion

    #region Name Table

    // Locate the file name table from the header
    long nameTableOffset = header.FileNameTableOffset;
    if (nameTableOffset < 0 || nameTableOffset > data.Length)
        return null;

    data.Seek(nameTableOffset, SeekOrigin.Begin);

    var nameTable = ParseNameTable(data);
    if (nameTable == null)
        return null;

    cart.NameTable = nameTable;

    #endregion

    #region File Allocation Table

    // Locate the file allocation table from the header
    long fileAllocationTableOffset = header.FileAllocationTableOffset;
    if (fileAllocationTableOffset < 0 || fileAllocationTableOffset > data.Length)
        return null;

    data.Seek(fileAllocationTableOffset, SeekOrigin.Begin);

    // Read entries until the declared table length is exhausted
    var fileAllocationTable = new List<FileAllocationTableEntry>();
    while (data.Position - fileAllocationTableOffset < header.FileAllocationTableLength)
    {
        var entry = ParseFileAllocationTableEntry(data);
        fileAllocationTable.Add(entry);
    }

    cart.FileAllocationTable = fileAllocationTable.ToArray();

    #endregion

    // TODO: Read and optionally parse out the other areas
    // Look for offsets and lengths in the header pieces

    return cart;
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a common header
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled common header on success, null on error</returns>
|
||||
private static CommonHeader ParseCommonHeader(Stream data)
{
    // TODO: Use marshalling here instead of building
    var header = new CommonHeader();

    // Fixed-width ASCII fields are trimmed of trailing NUL padding
    byte[] gameTitle = data.ReadBytes(12);
    header.GameTitle = Encoding.ASCII.GetString(gameTitle).TrimEnd('\0');
    header.GameCode = data.ReadUInt32();
    byte[] makerCode = data.ReadBytes(2);
    header.MakerCode = Encoding.ASCII.GetString(makerCode).TrimEnd('\0');

    header.UnitCode = (Unitcode)data.ReadByteValue();
    header.EncryptionSeedSelect = data.ReadByteValue();
    header.DeviceCapacity = data.ReadByteValue();
    header.Reserved1 = data.ReadBytes(7);
    header.GameRevision = data.ReadUInt16();
    header.RomVersion = data.ReadByteValue();
    header.InternalFlags = data.ReadByteValue();

    // ARM9 and ARM7 binary locations
    header.ARM9RomOffset = data.ReadUInt32();
    header.ARM9EntryAddress = data.ReadUInt32();
    header.ARM9LoadAddress = data.ReadUInt32();
    header.ARM9Size = data.ReadUInt32();
    header.ARM7RomOffset = data.ReadUInt32();
    header.ARM7EntryAddress = data.ReadUInt32();
    header.ARM7LoadAddress = data.ReadUInt32();
    header.ARM7Size = data.ReadUInt32();

    // Filesystem tables and overlays
    header.FileNameTableOffset = data.ReadUInt32();
    header.FileNameTableLength = data.ReadUInt32();
    header.FileAllocationTableOffset = data.ReadUInt32();
    header.FileAllocationTableLength = data.ReadUInt32();
    header.ARM9OverlayOffset = data.ReadUInt32();
    header.ARM9OverlayLength = data.ReadUInt32();
    header.ARM7OverlayOffset = data.ReadUInt32();
    header.ARM7OverlayLength = data.ReadUInt32();

    header.NormalCardControlRegisterSettings = data.ReadUInt32();
    header.SecureCardControlRegisterSettings = data.ReadUInt32();
    header.IconBannerOffset = data.ReadUInt32();
    header.SecureAreaCRC = data.ReadUInt16();
    header.SecureTransferTimeout = data.ReadUInt16();
    header.ARM9Autoload = data.ReadUInt32();
    header.ARM7Autoload = data.ReadUInt32();
    header.SecureDisable = data.ReadBytes(8);
    header.NTRRegionRomSize = data.ReadUInt32();
    header.HeaderSize = data.ReadUInt32();
    header.Reserved2 = data.ReadBytes(56);

    // Logo and checksums
    header.NintendoLogo = data.ReadBytes(156);
    header.NintendoLogoCRC = data.ReadUInt16();
    header.HeaderCRC = data.ReadUInt16();
    header.DebuggerReserved = data.ReadBytes(0x20);

    return header;
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into an extended DSi header
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled extended DSi header on success, null on error</returns>
|
||||
private static ExtendedDSiHeader ParseExtendedDSiHeader(Stream data)
{
    // TODO: Use marshalling here instead of building
    var header = new ExtendedDSiHeader();

    // Read a fixed-length run of 32-bit values from the stream
    uint[] ReadUInt32Array(int length)
    {
        var values = new uint[length];
        for (int i = 0; i < length; i++)
        {
            values[i] = data.ReadUInt32();
        }
        return values;
    }

    // Memory bank (MBK) settings
    header.GlobalMBK15Settings = ReadUInt32Array(5);
    header.LocalMBK68SettingsARM9 = ReadUInt32Array(3);
    header.LocalMBK68SettingsARM7 = ReadUInt32Array(3);
    header.GlobalMBK9Setting = data.ReadUInt32();

    header.RegionFlags = data.ReadUInt32();
    header.AccessControl = data.ReadUInt32();
    header.ARM7SCFGEXTMask = data.ReadUInt32();
    header.ReservedFlags = data.ReadUInt32();

    // DSi-mode ARM9i/ARM7i binary locations
    header.ARM9iRomOffset = data.ReadUInt32();
    header.Reserved3 = data.ReadUInt32();
    header.ARM9iLoadAddress = data.ReadUInt32();
    header.ARM9iSize = data.ReadUInt32();
    header.ARM7iRomOffset = data.ReadUInt32();
    header.Reserved4 = data.ReadUInt32();
    header.ARM7iLoadAddress = data.ReadUInt32();
    header.ARM7iSize = data.ReadUInt32();

    // Digest regions
    header.DigestNTRRegionOffset = data.ReadUInt32();
    header.DigestNTRRegionLength = data.ReadUInt32();
    header.DigestTWLRegionOffset = data.ReadUInt32();
    header.DigestTWLRegionLength = data.ReadUInt32();
    header.DigestSectorHashtableRegionOffset = data.ReadUInt32();
    header.DigestSectorHashtableRegionLength = data.ReadUInt32();
    header.DigestBlockHashtableRegionOffset = data.ReadUInt32();
    header.DigestBlockHashtableRegionLength = data.ReadUInt32();
    header.DigestSectorSize = data.ReadUInt32();
    header.DigestBlockSectorCount = data.ReadUInt32();

    header.IconBannerSize = data.ReadUInt32();
    header.Unknown1 = data.ReadUInt32();

    // Modcrypt areas
    header.ModcryptArea1Offset = data.ReadUInt32();
    header.ModcryptArea1Size = data.ReadUInt32();
    header.ModcryptArea2Offset = data.ReadUInt32();
    header.ModcryptArea2Size = data.ReadUInt32();

    header.TitleID = data.ReadBytes(8);
    header.DSiWarePublicSavSize = data.ReadUInt32();
    header.DSiWarePrivateSavSize = data.ReadUInt32();
    header.ReservedZero = data.ReadBytes(176);
    header.Unknown2 = data.ReadBytes(0x10);

    // HMAC hashes and signature
    header.ARM9WithSecureAreaSHA1HMACHash = data.ReadBytes(20);
    header.ARM7SHA1HMACHash = data.ReadBytes(20);
    header.DigestMasterSHA1HMACHash = data.ReadBytes(20);
    header.BannerSHA1HMACHash = data.ReadBytes(20);
    header.ARM9iDecryptedSHA1HMACHash = data.ReadBytes(20);
    header.ARM7iDecryptedSHA1HMACHash = data.ReadBytes(20);
    header.Reserved5 = data.ReadBytes(40);
    header.ARM9NoSecureAreaSHA1HMACHash = data.ReadBytes(20);
    header.Reserved6 = data.ReadBytes(2636);
    header.ReservedAndUnchecked = data.ReadBytes(0x180);
    header.RSASignature = data.ReadBytes(0x80);

    return header;
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a name table
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled name table on success, null on error</returns>
|
||||
private static NameTable ParseNameTable(Stream data)
{
    // TODO: Use marshalling here instead of building
    var nameTable = new NameTable();

    // The folder allocation table's length is only known after the root
    // entry has been read, so use a sentinel until then
    var folderAllocationTable = new List<FolderAllocationTableEntry>();
    int remainingEntries = int.MaxValue;
    while (remainingEntries > 0)
    {
        var entry = ParseFolderAllocationTableEntry(data);
        folderAllocationTable.Add(entry);

        // The root entry encodes the total entry count in its
        // Unknown/ParentFolderIndex bytes
        if (remainingEntries == int.MaxValue)
            remainingEntries = (entry.Unknown << 8) | entry.ParentFolderIndex;

        remainingEntries--;
    }

    nameTable.FolderAllocationTable = folderAllocationTable.ToArray();

    // The name list is terminated by a 0xFF length byte, signalled here
    // by a null entry
    var nameList = new List<NameListEntry>();
    while (true)
    {
        var entry = ParseNameListEntry(data);
        if (entry == null)
            break;

        nameList.Add(entry);
    }

    nameTable.NameList = nameList.ToArray();

    return nameTable;
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a folder allocation table entry
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled folder allocation table entry on success, null on error</returns>
|
||||
private static FolderAllocationTableEntry ParseFolderAllocationTableEntry(Stream data)
|
||||
{
|
||||
// TODO: Use marshalling here instead of building
|
||||
FolderAllocationTableEntry entry = new FolderAllocationTableEntry();
|
||||
|
||||
entry.StartOffset = data.ReadUInt32();
|
||||
entry.FirstFileIndex = data.ReadUInt16();
|
||||
entry.ParentFolderIndex = data.ReadByteValue();
|
||||
entry.Unknown = data.ReadByteValue();
|
||||
|
||||
return entry;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a name list entry
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled name list entry on success, null on error</returns>
|
||||
private static NameListEntry ParseNameListEntry(Stream data)
|
||||
{
|
||||
// TODO: Use marshalling here instead of building
|
||||
NameListEntry entry = new NameListEntry();
|
||||
|
||||
byte flagAndSize = data.ReadByteValue();
|
||||
if (flagAndSize == 0xFF)
|
||||
return null;
|
||||
|
||||
entry.Folder = (flagAndSize & 0x80) != 0;
|
||||
|
||||
byte size = (byte)(flagAndSize & ~0x80);
|
||||
if (size > 0)
|
||||
{
|
||||
byte[] name = data.ReadBytes(size);
|
||||
entry.Name = Encoding.UTF8.GetString(name);
|
||||
}
|
||||
|
||||
if (entry.Folder)
|
||||
entry.Index = data.ReadUInt16();
|
||||
|
||||
return entry;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a name list entry
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled name list entry on success, null on error</returns>
|
||||
private static FileAllocationTableEntry ParseFileAllocationTableEntry(Stream data)
|
||||
{
|
||||
// TODO: Use marshalling here instead of building
|
||||
FileAllocationTableEntry entry = new FileAllocationTableEntry();
|
||||
|
||||
entry.StartOffset = data.ReadUInt32();
|
||||
entry.EndOffset = data.ReadUInt32();
|
||||
|
||||
return entry;
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
}
|
||||
@@ -1,137 +0,0 @@
|
||||
using System.IO;
|
||||
using System.Text;
|
||||
using SabreTools.IO;
|
||||
using SabreTools.Models.PAK;
|
||||
using static SabreTools.Models.PAK.Constants;
|
||||
|
||||
namespace BinaryObjectScanner.Builders
|
||||
{
|
||||
public static class PAK
|
||||
{
|
||||
#region Byte Data
|
||||
|
||||
/// <summary>
|
||||
/// Parse a byte array into a Half-Life Package
|
||||
/// </summary>
|
||||
/// <param name="data">Byte array to parse</param>
|
||||
/// <param name="offset">Offset into the byte array</param>
|
||||
/// <returns>Filled Half-Life Package on success, null on error</returns>
|
||||
public static SabreTools.Models.PAK.File ParseFile(byte[] data, int offset)
|
||||
{
|
||||
// If the data is invalid
|
||||
if (data == null)
|
||||
return null;
|
||||
|
||||
// If the offset is out of bounds
|
||||
if (offset < 0 || offset >= data.Length)
|
||||
return null;
|
||||
|
||||
// Create a memory stream and parse that
|
||||
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
|
||||
return ParseFile(dataStream);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Stream Data
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a Half-Life Package
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled Half-Life Package on success, null on error</returns>
|
||||
public static SabreTools.Models.PAK.File ParseFile(Stream data)
|
||||
{
|
||||
// If the data is invalid
|
||||
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
|
||||
return null;
|
||||
|
||||
// If the offset is out of bounds
|
||||
if (data.Position < 0 || data.Position >= data.Length)
|
||||
return null;
|
||||
|
||||
// Cache the current offset
|
||||
long initialOffset = data.Position;
|
||||
|
||||
// Create a new Half-Life Package to fill
|
||||
var file = new SabreTools.Models.PAK.File();
|
||||
|
||||
#region Header
|
||||
|
||||
// Try to parse the header
|
||||
var header = ParseHeader(data);
|
||||
if (header == null)
|
||||
return null;
|
||||
|
||||
// Set the package header
|
||||
file.Header = header;
|
||||
|
||||
#endregion
|
||||
|
||||
#region Directory Items
|
||||
|
||||
// Get the directory items offset
|
||||
uint directoryItemsOffset = header.DirectoryOffset;
|
||||
if (directoryItemsOffset < 0 || directoryItemsOffset >= data.Length)
|
||||
return null;
|
||||
|
||||
// Seek to the directory items
|
||||
data.Seek(directoryItemsOffset, SeekOrigin.Begin);
|
||||
|
||||
// Create the directory item array
|
||||
file.DirectoryItems = new DirectoryItem[header.DirectoryLength / 64];
|
||||
|
||||
// Try to parse the directory items
|
||||
for (int i = 0; i < file.DirectoryItems.Length; i++)
|
||||
{
|
||||
var directoryItem = ParseDirectoryItem(data);
|
||||
file.DirectoryItems[i] = directoryItem;
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
return file;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a Half-Life Package header
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled Half-Life Package header on success, null on error</returns>
|
||||
private static Header ParseHeader(Stream data)
|
||||
{
|
||||
// TODO: Use marshalling here instead of building
|
||||
Header header = new Header();
|
||||
|
||||
byte[] signature = data.ReadBytes(4);
|
||||
header.Signature = Encoding.ASCII.GetString(signature);
|
||||
if (header.Signature != SignatureString)
|
||||
return null;
|
||||
|
||||
header.DirectoryOffset = data.ReadUInt32();
|
||||
header.DirectoryLength = data.ReadUInt32();
|
||||
|
||||
return header;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a Half-Life Package directory item
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled Half-Life Package directory item on success, null on error</returns>
|
||||
private static DirectoryItem ParseDirectoryItem(Stream data)
|
||||
{
|
||||
// TODO: Use marshalling here instead of building
|
||||
DirectoryItem directoryItem = new DirectoryItem();
|
||||
|
||||
byte[] itemName = data.ReadBytes(56);
|
||||
directoryItem.ItemName = Encoding.ASCII.GetString(itemName).TrimEnd('\0');
|
||||
directoryItem.ItemOffset = data.ReadUInt32();
|
||||
directoryItem.ItemLength = data.ReadUInt32();
|
||||
|
||||
return directoryItem;
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
}
|
||||
@@ -1,211 +0,0 @@
|
||||
using System.IO;
|
||||
using System.Text;
|
||||
using SabreTools.IO;
|
||||
using SabreTools.Models.PFF;
|
||||
using static SabreTools.Models.PFF.Constants;
|
||||
|
||||
namespace BinaryObjectScanner.Builders
|
||||
{
|
||||
public class PFF
|
||||
{
|
||||
#region Byte Data
|
||||
|
||||
/// <summary>
|
||||
/// Parse a byte array into a PFF archive
|
||||
/// </summary>
|
||||
/// <param name="data">Byte array to parse</param>
|
||||
/// <param name="offset">Offset into the byte array</param>
|
||||
/// <returns>Filled archive on success, null on error</returns>
|
||||
public static Archive ParseArchive(byte[] data, int offset)
|
||||
{
|
||||
// If the data is invalid
|
||||
if (data == null)
|
||||
return null;
|
||||
|
||||
// If the offset is out of bounds
|
||||
if (offset < 0 || offset >= data.Length)
|
||||
return null;
|
||||
|
||||
// Create a memory stream and parse that
|
||||
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
|
||||
return ParseArchive(dataStream);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Stream Data
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a PFF archive
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled archive on success, null on error</returns>
|
||||
public static Archive ParseArchive(Stream data)
|
||||
{
|
||||
// If the data is invalid
|
||||
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
|
||||
return null;
|
||||
|
||||
// If the offset is out of bounds
|
||||
if (data.Position < 0 || data.Position >= data.Length)
|
||||
return null;
|
||||
|
||||
// Cache the current offset
|
||||
int initialOffset = (int)data.Position;
|
||||
|
||||
// Create a new archive to fill
|
||||
var archive = new Archive();
|
||||
|
||||
#region Header
|
||||
|
||||
// Try to parse the header
|
||||
var header = ParseHeader(data);
|
||||
if (header == null)
|
||||
return null;
|
||||
|
||||
// Set the archive header
|
||||
archive.Header = header;
|
||||
|
||||
#endregion
|
||||
|
||||
#region Segments
|
||||
|
||||
// Get the segments
|
||||
long offset = header.FileListOffset;
|
||||
if (offset < 0 || offset >= data.Length)
|
||||
return null;
|
||||
|
||||
// Seek to the segments
|
||||
data.Seek(offset, SeekOrigin.Begin);
|
||||
|
||||
// Create the segments array
|
||||
archive.Segments = new Segment[header.NumberOfFiles];
|
||||
|
||||
// Read all segments in turn
|
||||
for (int i = 0; i < header.NumberOfFiles; i++)
|
||||
{
|
||||
var file = ParseSegment(data, header.FileSegmentSize);
|
||||
if (file == null)
|
||||
return null;
|
||||
|
||||
archive.Segments[i] = file;
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Footer
|
||||
|
||||
// Get the footer offset
|
||||
offset = header.FileListOffset + (header.FileSegmentSize * header.NumberOfFiles);
|
||||
if (offset < 0 || offset >= data.Length)
|
||||
return null;
|
||||
|
||||
// Seek to the footer
|
||||
data.Seek(offset, SeekOrigin.Begin);
|
||||
|
||||
// Try to parse the footer
|
||||
var footer = ParseFooter(data);
|
||||
if (footer == null)
|
||||
return null;
|
||||
|
||||
// Set the archive footer
|
||||
archive.Footer = footer;
|
||||
|
||||
#endregion
|
||||
|
||||
return archive;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a header
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled header on success, null on error</returns>
|
||||
private static Header ParseHeader(Stream data)
|
||||
{
|
||||
// TODO: Use marshalling here instead of building
|
||||
Header header = new Header();
|
||||
|
||||
header.HeaderSize = data.ReadUInt32();
|
||||
byte[] signature = data.ReadBytes(4);
|
||||
header.Signature = Encoding.ASCII.GetString(signature);
|
||||
header.NumberOfFiles = data.ReadUInt32();
|
||||
header.FileSegmentSize = data.ReadUInt32();
|
||||
switch (header.Signature)
|
||||
{
|
||||
case Version0SignatureString:
|
||||
if (header.FileSegmentSize != Version0HSegmentSize)
|
||||
return null;
|
||||
break;
|
||||
|
||||
case Version2SignatureString:
|
||||
if (header.FileSegmentSize != Version2SegmentSize)
|
||||
return null;
|
||||
break;
|
||||
|
||||
// Version 3 can sometimes have Version 2 segment sizes
|
||||
case Version3SignatureString:
|
||||
if (header.FileSegmentSize != Version2SegmentSize && header.FileSegmentSize != Version3SegmentSize)
|
||||
return null;
|
||||
break;
|
||||
|
||||
case Version4SignatureString:
|
||||
if (header.FileSegmentSize != Version4SegmentSize)
|
||||
return null;
|
||||
break;
|
||||
|
||||
default:
|
||||
return null;
|
||||
}
|
||||
|
||||
header.FileListOffset = data.ReadUInt32();
|
||||
|
||||
return header;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a footer
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled footer on success, null on error</returns>
|
||||
private static Footer ParseFooter(Stream data)
|
||||
{
|
||||
// TODO: Use marshalling here instead of building
|
||||
Footer footer = new Footer();
|
||||
|
||||
footer.SystemIP = data.ReadUInt32();
|
||||
footer.Reserved = data.ReadUInt32();
|
||||
byte[] kingTag = data.ReadBytes(4);
|
||||
footer.KingTag = Encoding.ASCII.GetString(kingTag);
|
||||
|
||||
return footer;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a file entry
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <param name="segmentSize">PFF segment size</param>
|
||||
/// <returns>Filled file entry on success, null on error</returns>
|
||||
private static Segment ParseSegment(Stream data, uint segmentSize)
|
||||
{
|
||||
// TODO: Use marshalling here instead of building
|
||||
Segment segment = new Segment();
|
||||
|
||||
segment.Deleted = data.ReadUInt32();
|
||||
segment.FileLocation = data.ReadUInt32();
|
||||
segment.FileSize = data.ReadUInt32();
|
||||
segment.PackedDate = data.ReadUInt32();
|
||||
byte[] fileName = data.ReadBytes(0x10);
|
||||
segment.FileName = Encoding.ASCII.GetString(fileName).TrimEnd('\0');
|
||||
if (segmentSize > Version2SegmentSize)
|
||||
segment.ModifiedDate = data.ReadUInt32();
|
||||
if (segmentSize > Version3SegmentSize)
|
||||
segment.CompressionLevel = data.ReadUInt32();
|
||||
|
||||
return segment;
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
}
|
||||
@@ -1,463 +0,0 @@
|
||||
using System.IO;
|
||||
using System.Text;
|
||||
using SabreTools.IO;
|
||||
using SabreTools.Models.PlayJ;
|
||||
using static SabreTools.Models.PlayJ.Constants;
|
||||
|
||||
namespace BinaryObjectScanner.Builders
|
||||
{
|
||||
public class PlayJ
|
||||
{
|
||||
#region Byte Data
|
||||
|
||||
/// <summary>
|
||||
/// Parse a byte array into a PlayJ playlist
|
||||
/// </summary>
|
||||
/// <param name="data">Byte array to parse</param>
|
||||
/// <param name="offset">Offset into the byte array</param>
|
||||
/// <returns>Filled playlist on success, null on error</returns>
|
||||
public static Playlist ParsePlaylist(byte[] data, int offset)
|
||||
{
|
||||
// If the data is invalid
|
||||
if (data == null)
|
||||
return null;
|
||||
|
||||
// If the offset is out of bounds
|
||||
if (offset < 0 || offset >= data.Length)
|
||||
return null;
|
||||
|
||||
// Create a memory stream and parse that
|
||||
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
|
||||
return ParsePlaylist(dataStream);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a byte array into a PlayJ audio file
|
||||
/// </summary>
|
||||
/// <param name="data">Byte array to parse</param>
|
||||
/// <param name="offset">Offset into the byte array</param>
|
||||
/// <returns>Filled audio file on success, null on error</returns>
|
||||
public static AudioFile ParseAudioFile(byte[] data, int offset)
|
||||
{
|
||||
// If the data is invalid
|
||||
if (data == null)
|
||||
return null;
|
||||
|
||||
// If the offset is out of bounds
|
||||
if (offset < 0 || offset >= data.Length)
|
||||
return null;
|
||||
|
||||
// Create a memory stream and parse that
|
||||
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
|
||||
return ParseAudioFile(dataStream);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Stream Data
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a PlayJ playlist
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled playlist on success, null on error</returns>
|
||||
public static Playlist ParsePlaylist(Stream data)
|
||||
{
|
||||
// If the data is invalid
|
||||
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
|
||||
return null;
|
||||
|
||||
// If the offset is out of bounds
|
||||
if (data.Position < 0 || data.Position >= data.Length)
|
||||
return null;
|
||||
|
||||
// Cache the current offset
|
||||
int initialOffset = (int)data.Position;
|
||||
|
||||
// Create a new playlist to fill
|
||||
var playlist = new Playlist();
|
||||
|
||||
#region Playlist Header
|
||||
|
||||
// Try to parse the playlist header
|
||||
var playlistHeader = ParsePlaylistHeader(data);
|
||||
if (playlistHeader == null)
|
||||
return null;
|
||||
|
||||
// Set the playlist header
|
||||
playlist.Header = playlistHeader;
|
||||
|
||||
#endregion
|
||||
|
||||
#region Audio Files
|
||||
|
||||
// Create the audio files array
|
||||
playlist.AudioFiles = new AudioFile[playlistHeader.TrackCount];
|
||||
|
||||
// Try to parse the audio files
|
||||
for (int i = 0; i < playlist.AudioFiles.Length; i++)
|
||||
{
|
||||
long currentOffset = data.Position;
|
||||
var entryHeader = ParseAudioFile(data, currentOffset);
|
||||
if (entryHeader == null)
|
||||
return null;
|
||||
|
||||
playlist.AudioFiles[i] = entryHeader;
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
return playlist;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a PlayJ audio file
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <param name="adjust">Offset to adjust all seeking by</param>
|
||||
/// <returns>Filled audio file on success, null on error</returns>
|
||||
public static AudioFile ParseAudioFile(Stream data, long adjust = 0)
|
||||
{
|
||||
// If the data is invalid
|
||||
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
|
||||
return null;
|
||||
|
||||
// If the offset is out of bounds
|
||||
if (data.Position < 0 || data.Position >= data.Length)
|
||||
return null;
|
||||
|
||||
// Cache the current offset
|
||||
int initialOffset = (int)data.Position;
|
||||
|
||||
// Create a new audio file to fill
|
||||
var audioFile = new AudioFile();
|
||||
|
||||
#region Audio Header
|
||||
|
||||
// Try to parse the audio header
|
||||
var audioHeader = ParseAudioHeader(data);
|
||||
if (audioHeader == null)
|
||||
return null;
|
||||
|
||||
// Set the audio header
|
||||
audioFile.Header = audioHeader;
|
||||
|
||||
#endregion
|
||||
|
||||
#region Unknown Block 1
|
||||
|
||||
uint unknownOffset1 = (audioHeader.Version == 0x00000000)
|
||||
? (audioHeader as AudioHeaderV1).UnknownOffset1
|
||||
: (audioHeader as AudioHeaderV2).UnknownOffset1 + 0x54;
|
||||
|
||||
// If we have an unknown block 1 offset
|
||||
if (unknownOffset1 > 0)
|
||||
{
|
||||
// Get the unknown block 1 offset
|
||||
long offset = unknownOffset1 + adjust;
|
||||
if (offset < 0 || offset >= data.Length)
|
||||
return null;
|
||||
|
||||
// Seek to the unknown block 1
|
||||
data.Seek(offset, SeekOrigin.Begin);
|
||||
}
|
||||
|
||||
// Try to parse the unknown block 1
|
||||
var unknownBlock1 = ParseUnknownBlock1(data);
|
||||
if (unknownBlock1 == null)
|
||||
return null;
|
||||
|
||||
// Set the unknown block 1
|
||||
audioFile.UnknownBlock1 = unknownBlock1;
|
||||
|
||||
#endregion
|
||||
|
||||
#region V1 Only
|
||||
|
||||
// If we have a V1 file
|
||||
if (audioHeader.Version == 0x00000000)
|
||||
{
|
||||
#region Unknown Value 2
|
||||
|
||||
// Get the V1 unknown offset 2
|
||||
uint? unknownOffset2 = (audioHeader as AudioHeaderV1)?.UnknownOffset2;
|
||||
|
||||
// If we have an unknown value 2 offset
|
||||
if (unknownOffset2 != null && unknownOffset2 > 0)
|
||||
{
|
||||
// Get the unknown value 2 offset
|
||||
long offset = unknownOffset2.Value + adjust;
|
||||
if (offset < 0 || offset >= data.Length)
|
||||
return null;
|
||||
|
||||
// Seek to the unknown value 2
|
||||
data.Seek(offset, SeekOrigin.Begin);
|
||||
}
|
||||
|
||||
// Set the unknown value 2
|
||||
audioFile.UnknownValue2 = data.ReadUInt32();
|
||||
|
||||
#endregion
|
||||
|
||||
#region Unknown Block 3
|
||||
|
||||
// Get the V1 unknown offset 3
|
||||
uint? unknownOffset3 = (audioHeader as AudioHeaderV1)?.UnknownOffset3;
|
||||
|
||||
// If we have an unknown block 3 offset
|
||||
if (unknownOffset3 != null && unknownOffset3 > 0)
|
||||
{
|
||||
// Get the unknown block 3 offset
|
||||
long offset = unknownOffset3.Value + adjust;
|
||||
if (offset < 0 || offset >= data.Length)
|
||||
return null;
|
||||
|
||||
// Seek to the unknown block 3
|
||||
data.Seek(offset, SeekOrigin.Begin);
|
||||
}
|
||||
|
||||
// Try to parse the unknown block 3
|
||||
var unknownBlock3 = ParseUnknownBlock3(data);
|
||||
if (unknownBlock3 == null)
|
||||
return null;
|
||||
|
||||
// Set the unknown block 3
|
||||
audioFile.UnknownBlock3 = unknownBlock3;
|
||||
|
||||
#endregion
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region V2 Only
|
||||
|
||||
// If we have a V2 file
|
||||
if (audioHeader.Version == 0x0000000A)
|
||||
{
|
||||
#region Data Files Count
|
||||
|
||||
// Set the data files count
|
||||
audioFile.DataFilesCount = data.ReadUInt32();
|
||||
|
||||
#endregion
|
||||
|
||||
#region Data Files
|
||||
|
||||
// Create the data files array
|
||||
audioFile.DataFiles = new DataFile[audioFile.DataFilesCount];
|
||||
|
||||
// Try to parse the data files
|
||||
for (int i = 0; i < audioFile.DataFiles.Length; i++)
|
||||
{
|
||||
var dataFile = ParseDataFile(data);
|
||||
if (dataFile == null)
|
||||
return null;
|
||||
|
||||
audioFile.DataFiles[i] = dataFile;
|
||||
}
|
||||
|
||||
|
||||
#endregion
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
return audioFile;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a playlist header
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled playlist header on success, null on error</returns>
|
||||
private static PlaylistHeader ParsePlaylistHeader(Stream data)
|
||||
{
|
||||
// TODO: Use marshalling here instead of building
|
||||
PlaylistHeader playlistHeader = new PlaylistHeader();
|
||||
|
||||
playlistHeader.TrackCount = data.ReadUInt32();
|
||||
playlistHeader.Data = data.ReadBytes(52);
|
||||
|
||||
return playlistHeader;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into an audio header
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled audio header on success, null on error</returns>
|
||||
private static AudioHeader ParseAudioHeader(Stream data)
|
||||
{
|
||||
// Cache the current offset
|
||||
long initialOffset = data.Position;
|
||||
|
||||
// TODO: Use marshalling here instead of building
|
||||
AudioHeader audioHeader;
|
||||
|
||||
// Get the common header pieces
|
||||
uint signature = data.ReadUInt32();
|
||||
if (signature != SignatureUInt32)
|
||||
return null;
|
||||
|
||||
uint version = data.ReadUInt32();
|
||||
|
||||
// Build the header according to version
|
||||
uint unknownOffset1;
|
||||
switch (version)
|
||||
{
|
||||
// Version 1
|
||||
case 0x00000000:
|
||||
AudioHeaderV1 v1 = new AudioHeaderV1();
|
||||
|
||||
v1.Signature = signature;
|
||||
v1.Version = version;
|
||||
v1.TrackID = data.ReadUInt32();
|
||||
v1.UnknownOffset1 = data.ReadUInt32();
|
||||
v1.UnknownOffset2 = data.ReadUInt32();
|
||||
v1.UnknownOffset3 = data.ReadUInt32();
|
||||
v1.Unknown1 = data.ReadUInt32();
|
||||
v1.Unknown2 = data.ReadUInt32();
|
||||
v1.Year = data.ReadUInt32();
|
||||
v1.TrackNumber = data.ReadByteValue();
|
||||
v1.Subgenre = (Subgenre)data.ReadByteValue();
|
||||
v1.Duration = data.ReadUInt32();
|
||||
|
||||
audioHeader = v1;
|
||||
unknownOffset1 = v1.UnknownOffset1;
|
||||
break;
|
||||
|
||||
// Version 2
|
||||
case 0x0000000A:
|
||||
AudioHeaderV2 v2 = new AudioHeaderV2();
|
||||
|
||||
v2.Signature = signature;
|
||||
v2.Version = version;
|
||||
v2.Unknown1 = data.ReadUInt32();
|
||||
v2.Unknown2 = data.ReadUInt32();
|
||||
v2.Unknown3 = data.ReadUInt32();
|
||||
v2.Unknown4 = data.ReadUInt32();
|
||||
v2.Unknown5 = data.ReadUInt32();
|
||||
v2.Unknown6 = data.ReadUInt32();
|
||||
v2.UnknownOffset1 = data.ReadUInt32();
|
||||
v2.Unknown7 = data.ReadUInt32();
|
||||
v2.Unknown8 = data.ReadUInt32();
|
||||
v2.Unknown9 = data.ReadUInt32();
|
||||
v2.UnknownOffset2 = data.ReadUInt32();
|
||||
v2.Unknown10 = data.ReadUInt32();
|
||||
v2.Unknown11 = data.ReadUInt32();
|
||||
v2.Unknown12 = data.ReadUInt32();
|
||||
v2.Unknown13 = data.ReadUInt32();
|
||||
v2.Unknown14 = data.ReadUInt32();
|
||||
v2.Unknown15 = data.ReadUInt32();
|
||||
v2.Unknown16 = data.ReadUInt32();
|
||||
v2.Unknown17 = data.ReadUInt32();
|
||||
v2.TrackID = data.ReadUInt32();
|
||||
v2.Year = data.ReadUInt32();
|
||||
v2.TrackNumber = data.ReadUInt32();
|
||||
v2.Unknown18 = data.ReadUInt32();
|
||||
|
||||
audioHeader = v2;
|
||||
unknownOffset1 = v2.UnknownOffset1 + 0x54;
|
||||
break;
|
||||
|
||||
// No other version are recognized
|
||||
default:
|
||||
return null;
|
||||
}
|
||||
|
||||
audioHeader.TrackLength = data.ReadUInt16();
|
||||
byte[] track = data.ReadBytes(audioHeader.TrackLength);
|
||||
if (track != null)
|
||||
audioHeader.Track = Encoding.ASCII.GetString(track);
|
||||
|
||||
audioHeader.ArtistLength = data.ReadUInt16();
|
||||
byte[] artist = data.ReadBytes(audioHeader.ArtistLength);
|
||||
if (artist != null)
|
||||
audioHeader.Artist = Encoding.ASCII.GetString(artist);
|
||||
|
||||
audioHeader.AlbumLength = data.ReadUInt16();
|
||||
byte[] album = data.ReadBytes(audioHeader.AlbumLength);
|
||||
if (album != null)
|
||||
audioHeader.Album = Encoding.ASCII.GetString(album);
|
||||
|
||||
audioHeader.WriterLength = data.ReadUInt16();
|
||||
byte[] writer = data.ReadBytes(audioHeader.WriterLength);
|
||||
if (writer != null)
|
||||
audioHeader.Writer = Encoding.ASCII.GetString(writer);
|
||||
|
||||
audioHeader.PublisherLength = data.ReadUInt16();
|
||||
byte[] publisher = data.ReadBytes(audioHeader.PublisherLength);
|
||||
if (publisher != null)
|
||||
audioHeader.Publisher = Encoding.ASCII.GetString(publisher);
|
||||
|
||||
audioHeader.LabelLength = data.ReadUInt16();
|
||||
byte[] label = data.ReadBytes(audioHeader.LabelLength);
|
||||
if (label != null)
|
||||
audioHeader.Label = Encoding.ASCII.GetString(label);
|
||||
|
||||
if (data.Position - initialOffset < unknownOffset1)
|
||||
{
|
||||
audioHeader.CommentsLength = data.ReadUInt16();
|
||||
byte[] comments = data.ReadBytes(audioHeader.CommentsLength);
|
||||
if (comments != null)
|
||||
audioHeader.Comments = Encoding.ASCII.GetString(comments);
|
||||
}
|
||||
|
||||
return audioHeader;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into an unknown block 1
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled unknown block 1 on success, null on error</returns>
|
||||
private static UnknownBlock1 ParseUnknownBlock1(Stream data)
|
||||
{
|
||||
// TODO: Use marshalling here instead of building
|
||||
UnknownBlock1 unknownBlock1 = new UnknownBlock1();
|
||||
|
||||
unknownBlock1.Length = data.ReadUInt32();
|
||||
unknownBlock1.Data = data.ReadBytes((int)unknownBlock1.Length);
|
||||
|
||||
return unknownBlock1;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into an unknown block 3
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled unknown block 3 on success, null on error</returns>
|
||||
private static UnknownBlock3 ParseUnknownBlock3(Stream data)
|
||||
{
|
||||
// TODO: Use marshalling here instead of building
|
||||
UnknownBlock3 unknownBlock3 = new UnknownBlock3();
|
||||
|
||||
// No-op because we don't even know the length
|
||||
|
||||
return unknownBlock3;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a data file
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled data file on success, null on error</returns>
|
||||
private static DataFile ParseDataFile(Stream data)
|
||||
{
|
||||
// TODO: Use marshalling here instead of building
|
||||
DataFile dataFile = new DataFile();
|
||||
|
||||
dataFile.FileNameLength = data.ReadUInt16();
|
||||
byte[] fileName = data.ReadBytes(dataFile.FileNameLength);
|
||||
if (fileName != null)
|
||||
dataFile.FileName = Encoding.ASCII.GetString(fileName);
|
||||
|
||||
dataFile.DataLength = data.ReadUInt32();
|
||||
dataFile.Data = data.ReadBytes((int)dataFile.DataLength);
|
||||
|
||||
return dataFile;
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
}
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,184 +0,0 @@
|
||||
using System.IO;
|
||||
using System.Text;
|
||||
using SabreTools.IO;
|
||||
using SabreTools.Models.Quantum;
|
||||
using static SabreTools.Models.Quantum.Constants;
|
||||
|
||||
namespace BinaryObjectScanner.Builders
|
||||
{
|
||||
public class Quantum
|
||||
{
|
||||
#region Byte Data
|
||||
|
||||
/// <summary>
|
||||
/// Parse a byte array into a Quantum archive
|
||||
/// </summary>
|
||||
/// <param name="data">Byte array to parse</param>
|
||||
/// <param name="offset">Offset into the byte array</param>
|
||||
/// <returns>Filled archive on success, null on error</returns>
|
||||
public static Archive ParseArchive(byte[] data, int offset)
|
||||
{
|
||||
// If the data is invalid
|
||||
if (data == null)
|
||||
return null;
|
||||
|
||||
// If the offset is out of bounds
|
||||
if (offset < 0 || offset >= data.Length)
|
||||
return null;
|
||||
|
||||
// Create a memory stream and parse that
|
||||
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
|
||||
return ParseArchive(dataStream);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Stream Data
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a Quantum archive
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled archive on success, null on error</returns>
|
||||
public static Archive ParseArchive(Stream data)
|
||||
{
|
||||
// If the data is invalid
|
||||
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
|
||||
return null;
|
||||
|
||||
// If the offset is out of bounds
|
||||
if (data.Position < 0 || data.Position >= data.Length)
|
||||
return null;
|
||||
|
||||
// Cache the current offset
|
||||
int initialOffset = (int)data.Position;
|
||||
|
||||
// Create a new archive to fill
|
||||
var archive = new Archive();
|
||||
|
||||
#region Header
|
||||
|
||||
// Try to parse the header
|
||||
var header = ParseHeader(data);
|
||||
if (header == null)
|
||||
return null;
|
||||
|
||||
// Set the archive header
|
||||
archive.Header = header;
|
||||
|
||||
#endregion
|
||||
|
||||
#region File List
|
||||
|
||||
// If we have any files
|
||||
if (header.FileCount > 0)
|
||||
{
|
||||
var fileDescriptors = new FileDescriptor[header.FileCount];
|
||||
|
||||
// Read all entries in turn
|
||||
for (int i = 0; i < header.FileCount; i++)
|
||||
{
|
||||
var file = ParseFileDescriptor(data, header.MinorVersion);
|
||||
if (file == null)
|
||||
return null;
|
||||
|
||||
fileDescriptors[i] = file;
|
||||
}
|
||||
|
||||
// Set the file list
|
||||
archive.FileList = fileDescriptors;
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
// Cache the compressed data offset
|
||||
archive.CompressedDataOffset = data.Position;
|
||||
|
||||
return archive;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a header
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled header on success, null on error</returns>
|
||||
private static Header ParseHeader(Stream data)
|
||||
{
|
||||
// TODO: Use marshalling here instead of building
|
||||
Header header = new Header();
|
||||
|
||||
byte[] signature = data.ReadBytes(2);
|
||||
header.Signature = Encoding.ASCII.GetString(signature);
|
||||
if (header.Signature != SignatureString)
|
||||
return null;
|
||||
|
||||
header.MajorVersion = data.ReadByteValue();
|
||||
header.MinorVersion = data.ReadByteValue();
|
||||
header.FileCount = data.ReadUInt16();
|
||||
header.TableSize = data.ReadByteValue();
|
||||
header.CompressionFlags = data.ReadByteValue();
|
||||
|
||||
return header;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a file descriptor
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <param name="minorVersion">Minor version of the archive</param>
|
||||
/// <returns>Filled file descriptor on success, null on error</returns>
|
||||
private static FileDescriptor ParseFileDescriptor(Stream data, byte minorVersion)
|
||||
{
|
||||
// TODO: Use marshalling here instead of building
|
||||
FileDescriptor fileDescriptor = new FileDescriptor();
|
||||
|
||||
fileDescriptor.FileNameSize = ReadVariableLength(data);
|
||||
if (fileDescriptor.FileNameSize > 0)
|
||||
{
|
||||
byte[] fileName = data.ReadBytes(fileDescriptor.FileNameSize);
|
||||
fileDescriptor.FileName = Encoding.ASCII.GetString(fileName);
|
||||
}
|
||||
|
||||
fileDescriptor.CommentFieldSize = ReadVariableLength(data);
|
||||
if (fileDescriptor.CommentFieldSize > 0)
|
||||
{
|
||||
byte[] commentField = data.ReadBytes(fileDescriptor.CommentFieldSize);
|
||||
fileDescriptor.CommentField = Encoding.ASCII.GetString(commentField);
|
||||
}
|
||||
|
||||
fileDescriptor.ExpandedFileSize = data.ReadUInt32();
|
||||
fileDescriptor.FileTime = data.ReadUInt16();
|
||||
fileDescriptor.FileDate = data.ReadUInt16();
|
||||
|
||||
// Hack for unknown format data
|
||||
if (minorVersion == 22)
|
||||
fileDescriptor.Unknown = data.ReadUInt16();
|
||||
|
||||
return fileDescriptor;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a variable-length size prefix
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Variable-length size prefix</returns>
|
||||
/// <remarks>
|
||||
/// Strings are prefixed with their length. If the length is less than 128
|
||||
/// then it is stored directly in one byte. If it is greater than 127 then
|
||||
/// the high bit of the first byte is set to 1 and the remaining fifteen bits
|
||||
/// contain the actual length in big-endian format.
|
||||
/// </remarks>
|
||||
private static int ReadVariableLength(Stream data)
|
||||
{
|
||||
byte b0 = data.ReadByteValue();
|
||||
if (b0 < 0x7F)
|
||||
return b0;
|
||||
|
||||
b0 &= 0x7F;
|
||||
byte b1 = data.ReadByteValue();
|
||||
return (b0 << 8) | b1;
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
}
|
||||
@@ -1,732 +0,0 @@
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Text;
|
||||
using SabreTools.IO;
|
||||
using SabreTools.Models.SGA;
|
||||
using static SabreTools.Models.SGA.Constants;
|
||||
|
||||
namespace BinaryObjectScanner.Builders
|
||||
{
|
||||
public static class SGA
|
||||
{
|
||||
#region Byte Data
|
||||
|
||||
/// <summary>
|
||||
/// Parse a byte array into an SGA
|
||||
/// </summary>
|
||||
/// <param name="data">Byte array to parse</param>
|
||||
/// <param name="offset">Offset into the byte array</param>
|
||||
/// <returns>Filled SGA on success, null on error</returns>
|
||||
public static SabreTools.Models.SGA.File ParseFile(byte[] data, int offset)
|
||||
{
|
||||
// If the data is invalid
|
||||
if (data == null)
|
||||
return null;
|
||||
|
||||
// If the offset is out of bounds
|
||||
if (offset < 0 || offset >= data.Length)
|
||||
return null;
|
||||
|
||||
// Create a memory stream and parse that
|
||||
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
|
||||
return ParseFile(dataStream);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Stream Data
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into an SGA
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled SGA on success, null on error</returns>
|
||||
public static SabreTools.Models.SGA.File ParseFile(Stream data)
|
||||
{
|
||||
// If the data is invalid
|
||||
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
|
||||
return null;
|
||||
|
||||
// If the offset is out of bounds
|
||||
if (data.Position < 0 || data.Position >= data.Length)
|
||||
return null;
|
||||
|
||||
// Cache the current offset
|
||||
long initialOffset = data.Position;
|
||||
|
||||
// Create a new SGA to fill
|
||||
var file = new SabreTools.Models.SGA.File();
|
||||
|
||||
#region Header
|
||||
|
||||
// Try to parse the header
|
||||
var header = ParseHeader(data);
|
||||
if (header == null)
|
||||
return null;
|
||||
|
||||
// Set the SGA header
|
||||
file.Header = header;
|
||||
|
||||
#endregion
|
||||
|
||||
#region Directory
|
||||
|
||||
// Try to parse the directory
|
||||
var directory = ParseDirectory(data, header.MajorVersion);
|
||||
if (directory == null)
|
||||
return null;
|
||||
|
||||
// Set the SGA directory
|
||||
file.Directory = directory;
|
||||
#endregion
|
||||
|
||||
return file;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into an SGA header
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled SGA header on success, null on error</returns>
|
||||
private static Header ParseHeader(Stream data)
|
||||
{
|
||||
// TODO: Use marshalling here instead of building
|
||||
byte[] signatureBytes = data.ReadBytes(8);
|
||||
string signature = Encoding.ASCII.GetString(signatureBytes);
|
||||
if (signature != SignatureString)
|
||||
return null;
|
||||
|
||||
ushort majorVersion = data.ReadUInt16();
|
||||
ushort minorVersion = data.ReadUInt16();
|
||||
if (minorVersion != 0)
|
||||
return null;
|
||||
|
||||
switch (majorVersion)
|
||||
{
|
||||
// Versions 4 and 5 share the same header
|
||||
case 4:
|
||||
case 5:
|
||||
Header4 header4 = new Header4();
|
||||
|
||||
header4.Signature = signature;
|
||||
header4.MajorVersion = majorVersion;
|
||||
header4.MinorVersion = minorVersion;
|
||||
header4.FileMD5 = data.ReadBytes(0x10);
|
||||
byte[] header4Name = data.ReadBytes(count: 128);
|
||||
header4.Name = Encoding.Unicode.GetString(header4Name).TrimEnd('\0');
|
||||
header4.HeaderMD5 = data.ReadBytes(0x10);
|
||||
header4.HeaderLength = data.ReadUInt32();
|
||||
header4.FileDataOffset = data.ReadUInt32();
|
||||
header4.Dummy0 = data.ReadUInt32();
|
||||
|
||||
return header4;
|
||||
|
||||
// Versions 6 and 7 share the same header
|
||||
case 6:
|
||||
case 7:
|
||||
Header6 header6 = new Header6();
|
||||
|
||||
header6.Signature = signature;
|
||||
header6.MajorVersion = majorVersion;
|
||||
header6.MinorVersion = minorVersion;
|
||||
byte[] header6Name = data.ReadBytes(count: 128);
|
||||
header6.Name = Encoding.Unicode.GetString(header6Name).TrimEnd('\0');
|
||||
header6.HeaderLength = data.ReadUInt32();
|
||||
header6.FileDataOffset = data.ReadUInt32();
|
||||
header6.Dummy0 = data.ReadUInt32();
|
||||
|
||||
return header6;
|
||||
|
||||
// No other major versions are recognized
|
||||
default:
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
        /// <summary>
        /// Parse a Stream into an SGA directory
        /// </summary>
        /// <param name="data">Stream to parse</param>
        /// <param name="majorVersion">SGA major version</param>
        /// <returns>Filled SGA directory on success, null on error</returns>
        /// <remarks>
        /// Each table (sections, folders, files, string table) is located by an
        /// offset stored in the directory header. Those offsets are relative to the
        /// start of the directory, so each one is rebased against the stream
        /// position captured just after the directory type is chosen, then
        /// bounds-checked before seeking. Version 6 reuses the version 5 directory
        /// header, section, and folder layouts; only its file entries differ.
        /// </remarks>
        private static SabreTools.Models.SGA.Directory ParseDirectory(Stream data, ushort majorVersion)
        {
            #region Directory

            // Create the appropriate type of directory
            SabreTools.Models.SGA.Directory directory;
            switch (majorVersion)
            {
                case 4: directory = new Directory4(); break;
                case 5: directory = new Directory5(); break;
                case 6: directory = new Directory6(); break;
                case 7: directory = new Directory7(); break;
                default: return null;
            }

            #endregion

            // Cache the current offset; all table offsets below are relative to here
            long currentOffset = data.Position;

            #region Directory Header

            // Try to parse the directory header
            var directoryHeader = ParseDirectoryHeader(data, majorVersion);
            if (directoryHeader == null)
                return null;

            // Set the directory header (version 6 shares the version 5 header type)
            switch (majorVersion)
            {
                case 4: (directory as Directory4).DirectoryHeader = directoryHeader as DirectoryHeader4; break;
                case 5: (directory as Directory5).DirectoryHeader = directoryHeader as DirectoryHeader5; break;
                case 6: (directory as Directory6).DirectoryHeader = directoryHeader as DirectoryHeader5; break;
                case 7: (directory as Directory7).DirectoryHeader = directoryHeader as DirectoryHeader7; break;
                default: return null;
            }

            #endregion

            #region Sections

            // Get the sections offset
            long sectionOffset;
            switch (majorVersion)
            {
                case 4: sectionOffset = (directoryHeader as DirectoryHeader4).SectionOffset; break;
                case 5:
                case 6: sectionOffset = (directoryHeader as DirectoryHeader5).SectionOffset; break;
                case 7: sectionOffset = (directoryHeader as DirectoryHeader7).SectionOffset; break;
                default: return null;
            }

            // Adjust the sections offset based on the directory
            sectionOffset += currentOffset;

            // Validate the offset
            if (sectionOffset < 0 || sectionOffset >= data.Length)
                return null;

            // Seek to the sections
            data.Seek(sectionOffset, SeekOrigin.Begin);

            // Get the section count
            uint sectionCount;
            switch (majorVersion)
            {
                case 4: sectionCount = (directoryHeader as DirectoryHeader4).SectionCount; break;
                case 5:
                case 6: sectionCount = (directoryHeader as DirectoryHeader5).SectionCount; break;
                case 7: sectionCount = (directoryHeader as DirectoryHeader7).SectionCount; break;
                default: return null;
            }

            // Create the sections array (versions 5-7 share the Section5 layout)
            object[] sections;
            switch (majorVersion)
            {
                case 4: sections = new Section4[sectionCount]; break;
                case 5:
                case 6:
                case 7: sections = new Section5[sectionCount]; break;
                default: return null;
            }

            // Try to parse the sections
            for (int i = 0; i < sections.Length; i++)
            {
                switch (majorVersion)
                {
                    case 4: sections[i] = ParseSection4(data); break;
                    case 5:
                    case 6:
                    case 7: sections[i] = ParseSection5(data); break;
                    default: return null;
                }
            }

            // Assign the sections
            switch (majorVersion)
            {
                case 4: (directory as Directory4).Sections = sections as Section4[]; break;
                case 5: (directory as Directory5).Sections = sections as Section5[]; break;
                case 6: (directory as Directory6).Sections = sections as Section5[]; break;
                case 7: (directory as Directory7).Sections = sections as Section5[]; break;
                default: return null;
            }

            #endregion

            #region Folders

            // Get the folders offset
            long folderOffset;
            switch (majorVersion)
            {
                case 4: folderOffset = (directoryHeader as DirectoryHeader4).FolderOffset; break;
                case 5: folderOffset = (directoryHeader as DirectoryHeader5).FolderOffset; break;
                case 6: folderOffset = (directoryHeader as DirectoryHeader5).FolderOffset; break;
                case 7: folderOffset = (directoryHeader as DirectoryHeader7).FolderOffset; break;
                default: return null;
            }

            // Adjust the folders offset based on the directory
            folderOffset += currentOffset;

            // Validate the offset
            if (folderOffset < 0 || folderOffset >= data.Length)
                return null;

            // Seek to the folders
            data.Seek(folderOffset, SeekOrigin.Begin);

            // Get the folder count
            uint folderCount;
            switch (majorVersion)
            {
                case 4: folderCount = (directoryHeader as DirectoryHeader4).FolderCount; break;
                case 5: folderCount = (directoryHeader as DirectoryHeader5).FolderCount; break;
                case 6: folderCount = (directoryHeader as DirectoryHeader5).FolderCount; break;
                case 7: folderCount = (directoryHeader as DirectoryHeader7).FolderCount; break;
                default: return null;
            }

            // Create the folders array (versions 5-7 share the Folder5 layout)
            object[] folders;
            switch (majorVersion)
            {
                case 4: folders = new Folder4[folderCount]; break;
                case 5: folders = new Folder5[folderCount]; break;
                case 6: folders = new Folder5[folderCount]; break;
                case 7: folders = new Folder5[folderCount]; break;
                default: return null;
            }

            // Try to parse the folders
            for (int i = 0; i < folders.Length; i++)
            {
                switch (majorVersion)
                {
                    case 4: folders[i] = ParseFolder4(data); break;
                    case 5: folders[i] = ParseFolder5(data); break;
                    case 6: folders[i] = ParseFolder5(data); break;
                    case 7: folders[i] = ParseFolder5(data); break;
                    default: return null;
                }
            }

            // Assign the folders
            switch (majorVersion)
            {
                case 4: (directory as Directory4).Folders = folders as Folder4[]; break;
                case 5: (directory as Directory5).Folders = folders as Folder5[]; break;
                case 6: (directory as Directory6).Folders = folders as Folder5[]; break;
                case 7: (directory as Directory7).Folders = folders as Folder5[]; break;
                default: return null;
            }

            #endregion

            #region Files

            // Get the files offset
            long fileOffset;
            switch (majorVersion)
            {
                case 4: fileOffset = (directoryHeader as DirectoryHeader4).FileOffset; break;
                case 5: fileOffset = (directoryHeader as DirectoryHeader5).FileOffset; break;
                case 6: fileOffset = (directoryHeader as DirectoryHeader5).FileOffset; break;
                case 7: fileOffset = (directoryHeader as DirectoryHeader7).FileOffset; break;
                default: return null;
            }

            // Adjust the files offset based on the directory
            fileOffset += currentOffset;

            // Validate the offset
            if (fileOffset < 0 || fileOffset >= data.Length)
                return null;

            // Seek to the files
            data.Seek(fileOffset, SeekOrigin.Begin);

            // Get the file count
            uint fileCount;
            switch (majorVersion)
            {
                case 4: fileCount = (directoryHeader as DirectoryHeader4).FileCount; break;
                case 5: fileCount = (directoryHeader as DirectoryHeader5).FileCount; break;
                case 6: fileCount = (directoryHeader as DirectoryHeader5).FileCount; break;
                case 7: fileCount = (directoryHeader as DirectoryHeader7).FileCount; break;
                default: return null;
            }

            // Create the files array (version 5 reuses File4; 6 and 7 have their own layouts)
            object[] files;
            switch (majorVersion)
            {
                case 4: files = new File4[fileCount]; break;
                case 5: files = new File4[fileCount]; break;
                case 6: files = new File6[fileCount]; break;
                case 7: files = new File7[fileCount]; break;
                default: return null;
            }

            // Try to parse the files
            for (int i = 0; i < files.Length; i++)
            {
                switch (majorVersion)
                {
                    case 4: files[i] = ParseFile4(data); break;
                    case 5: files[i] = ParseFile4(data); break;
                    case 6: files[i] = ParseFile6(data); break;
                    case 7: files[i] = ParseFile7(data); break;
                    default: return null;
                }
            }

            // Assign the files
            switch (majorVersion)
            {
                case 4: (directory as Directory4).Files = files as File4[]; break;
                case 5: (directory as Directory5).Files = files as File4[]; break;
                case 6: (directory as Directory6).Files = files as File6[]; break;
                case 7: (directory as Directory7).Files = files as File7[]; break;
                default: return null;
            }

            #endregion

            #region String Table

            // Get the string table offset
            long stringTableOffset;
            switch (majorVersion)
            {
                case 4: stringTableOffset = (directoryHeader as DirectoryHeader4).StringTableOffset; break;
                case 5: stringTableOffset = (directoryHeader as DirectoryHeader5).StringTableOffset; break;
                case 6: stringTableOffset = (directoryHeader as DirectoryHeader5).StringTableOffset; break;
                case 7: stringTableOffset = (directoryHeader as DirectoryHeader7).StringTableOffset; break;
                default: return null;
            }

            // Adjust the string table offset based on the directory
            stringTableOffset += currentOffset;

            // Validate the offset
            if (stringTableOffset < 0 || stringTableOffset >= data.Length)
                return null;

            // Seek to the string table
            data.Seek(stringTableOffset, SeekOrigin.Begin);

            // Get the string table count
            uint stringCount;
            switch (majorVersion)
            {
                case 4: stringCount = (directoryHeader as DirectoryHeader4).StringTableCount; break;
                case 5: stringCount = (directoryHeader as DirectoryHeader5).StringTableCount; break;
                case 6: stringCount = (directoryHeader as DirectoryHeader5).StringTableCount; break;
                case 7: stringCount = (directoryHeader as DirectoryHeader7).StringTableCount; break;
                default: return null;
            }

            // TODO: Are these strings actually indexed by number and not position?
            // TODO: If indexed by position, I think it needs to be adjusted by start of table

            // Create the strings dictionary, keyed by byte offset within the table
            Dictionary<long, string> strings = new Dictionary<long, string>((int)stringCount);

            // Get the current position to adjust the offsets
            long stringTableStart = data.Position;

            // Try to parse the strings; each entry is keyed by its offset from the table start
            for (int i = 0; i < stringCount; i++)
            {
                long currentPosition = data.Position - stringTableStart;
                strings[currentPosition] = data.ReadString(Encoding.ASCII);
            }

            // Assign the string table
            switch (majorVersion)
            {
                case 4: (directory as Directory4).StringTable = strings; break;
                case 5: (directory as Directory5).StringTable = strings; break;
                case 6: (directory as Directory6).StringTable = strings; break;
                case 7: (directory as Directory7).StringTable = strings; break;
                default: return null;
            }

            // Loop through all folders to assign names
            // NOTE(review): a NameOffset that does not land exactly on a parsed string
            // start will throw KeyNotFoundException here — verify against real archives
            for (int i = 0; i < folderCount; i++)
            {
                switch (majorVersion)
                {
                    case 4: (directory as Directory4).Folders[i].Name = strings[(directory as Directory4).Folders[i].NameOffset]; break;
                    case 5: (directory as Directory5).Folders[i].Name = strings[(directory as Directory5).Folders[i].NameOffset]; break;
                    case 6: (directory as Directory6).Folders[i].Name = strings[(directory as Directory6).Folders[i].NameOffset]; break;
                    case 7: (directory as Directory7).Folders[i].Name = strings[(directory as Directory7).Folders[i].NameOffset]; break;
                    default: return null;
                }
            }

            // Loop through all files to assign names
            for (int i = 0; i < fileCount; i++)
            {
                switch (majorVersion)
                {
                    case 4: (directory as Directory4).Files[i].Name = strings[(directory as Directory4).Files[i].NameOffset]; break;
                    case 5: (directory as Directory5).Files[i].Name = strings[(directory as Directory5).Files[i].NameOffset]; break;
                    case 6: (directory as Directory6).Files[i].Name = strings[(directory as Directory6).Files[i].NameOffset]; break;
                    case 7: (directory as Directory7).Files[i].Name = strings[(directory as Directory7).Files[i].NameOffset]; break;
                    default: return null;
                }
            }

            #endregion

            return directory;
        }
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into an SGA directory header
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <param name="majorVersion">SGA major version</param>
|
||||
/// <returns>Filled SGA directory header on success, null on error</returns>
|
||||
private static object ParseDirectoryHeader(Stream data, ushort majorVersion)
|
||||
{
|
||||
switch (majorVersion)
|
||||
{
|
||||
case 4: return ParseDirectory4Header(data);
|
||||
case 5: return ParseDirectory5Header(data);
|
||||
case 6: return ParseDirectory5Header(data);
|
||||
case 7: return ParseDirectory7Header(data);
|
||||
default: return null;
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into an SGA directory header version 4
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled SGA directory header version 4 on success, null on error</returns>
|
||||
private static DirectoryHeader4 ParseDirectory4Header(Stream data)
|
||||
{
|
||||
DirectoryHeader4 directoryHeader4 = new DirectoryHeader4();
|
||||
|
||||
directoryHeader4.SectionOffset = data.ReadUInt32();
|
||||
directoryHeader4.SectionCount = data.ReadUInt16();
|
||||
directoryHeader4.FolderOffset = data.ReadUInt32();
|
||||
directoryHeader4.FolderCount = data.ReadUInt16();
|
||||
directoryHeader4.FileOffset = data.ReadUInt32();
|
||||
directoryHeader4.FileCount = data.ReadUInt16();
|
||||
directoryHeader4.StringTableOffset = data.ReadUInt32();
|
||||
directoryHeader4.StringTableCount = data.ReadUInt16();
|
||||
|
||||
return directoryHeader4;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into an SGA directory header version 5
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled SGA directory header version 5 on success, null on error</returns>
|
||||
private static DirectoryHeader5 ParseDirectory5Header(Stream data)
|
||||
{
|
||||
DirectoryHeader5 directoryHeader5 = new DirectoryHeader5();
|
||||
|
||||
directoryHeader5.SectionOffset = data.ReadUInt32();
|
||||
directoryHeader5.SectionCount = data.ReadUInt32();
|
||||
directoryHeader5.FolderOffset = data.ReadUInt32();
|
||||
directoryHeader5.FolderCount = data.ReadUInt32();
|
||||
directoryHeader5.FileOffset = data.ReadUInt32();
|
||||
directoryHeader5.FileCount = data.ReadUInt32();
|
||||
directoryHeader5.StringTableOffset = data.ReadUInt32();
|
||||
directoryHeader5.StringTableCount = data.ReadUInt32();
|
||||
|
||||
return directoryHeader5;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into an SGA directory header version 7
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled SGA directory header version 7 on success, null on error</returns>
|
||||
private static DirectoryHeader7 ParseDirectory7Header(Stream data)
|
||||
{
|
||||
DirectoryHeader7 directoryHeader7 = new DirectoryHeader7();
|
||||
|
||||
directoryHeader7.SectionOffset = data.ReadUInt32();
|
||||
directoryHeader7.SectionCount = data.ReadUInt32();
|
||||
directoryHeader7.FolderOffset = data.ReadUInt32();
|
||||
directoryHeader7.FolderCount = data.ReadUInt32();
|
||||
directoryHeader7.FileOffset = data.ReadUInt32();
|
||||
directoryHeader7.FileCount = data.ReadUInt32();
|
||||
directoryHeader7.StringTableOffset = data.ReadUInt32();
|
||||
directoryHeader7.StringTableCount = data.ReadUInt32();
|
||||
directoryHeader7.HashTableOffset = data.ReadUInt32();
|
||||
directoryHeader7.BlockSize = data.ReadUInt32();
|
||||
|
||||
return directoryHeader7;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into an SGA section version 4
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <param name="majorVersion">SGA major version</param>
|
||||
/// <returns>Filled SGA section version 4 on success, null on error</returns>
|
||||
private static Section4 ParseSection4(Stream data)
|
||||
{
|
||||
Section4 section4 = new Section4();
|
||||
|
||||
byte[] section4Alias = data.ReadBytes(count: 64);
|
||||
section4.Alias = Encoding.ASCII.GetString(section4Alias).TrimEnd('\0');
|
||||
byte[] section4Name = data.ReadBytes(64);
|
||||
section4.Name = Encoding.ASCII.GetString(section4Name).TrimEnd('\0');
|
||||
section4.FolderStartIndex = data.ReadUInt16();
|
||||
section4.FolderEndIndex = data.ReadUInt16();
|
||||
section4.FileStartIndex = data.ReadUInt16();
|
||||
section4.FileEndIndex = data.ReadUInt16();
|
||||
section4.FolderRootIndex = data.ReadUInt16();
|
||||
|
||||
return section4;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into an SGA section version 5
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <param name="majorVersion">SGA major version</param>
|
||||
/// <returns>Filled SGA section version 5 on success, null on error</returns>
|
||||
private static Section5 ParseSection5(Stream data)
|
||||
{
|
||||
Section5 section5 = new Section5();
|
||||
|
||||
byte[] section5Alias = data.ReadBytes(count: 64);
|
||||
section5.Alias = Encoding.ASCII.GetString(section5Alias).TrimEnd('\0');
|
||||
byte[] section5Name = data.ReadBytes(64);
|
||||
section5.Name = Encoding.ASCII.GetString(section5Name).TrimEnd('\0');
|
||||
section5.FolderStartIndex = data.ReadUInt32();
|
||||
section5.FolderEndIndex = data.ReadUInt32();
|
||||
section5.FileStartIndex = data.ReadUInt32();
|
||||
section5.FileEndIndex = data.ReadUInt32();
|
||||
section5.FolderRootIndex = data.ReadUInt32();
|
||||
|
||||
return section5;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into an SGA folder version 4
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <param name="majorVersion">SGA major version</param>
|
||||
/// <returns>Filled SGA folder version 4 on success, null on error</returns>
|
||||
private static Folder4 ParseFolder4(Stream data)
|
||||
{
|
||||
Folder4 folder4 = new Folder4();
|
||||
|
||||
folder4.NameOffset = data.ReadUInt32();
|
||||
folder4.Name = null; // Read from string table
|
||||
folder4.FolderStartIndex = data.ReadUInt16();
|
||||
folder4.FolderEndIndex = data.ReadUInt16();
|
||||
folder4.FileStartIndex = data.ReadUInt16();
|
||||
folder4.FileEndIndex = data.ReadUInt16();
|
||||
|
||||
return folder4;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into an SGA folder version 5
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <param name="majorVersion">SGA major version</param>
|
||||
/// <returns>Filled SGA folder version 5 on success, null on error</returns>
|
||||
private static Folder5 ParseFolder5(Stream data)
|
||||
{
|
||||
Folder5 folder5 = new Folder5();
|
||||
|
||||
folder5.NameOffset = data.ReadUInt32();
|
||||
folder5.Name = null; // Read from string table
|
||||
folder5.FolderStartIndex = data.ReadUInt32();
|
||||
folder5.FolderEndIndex = data.ReadUInt32();
|
||||
folder5.FileStartIndex = data.ReadUInt32();
|
||||
folder5.FileEndIndex = data.ReadUInt32();
|
||||
|
||||
return folder5;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into an SGA file version 4
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <param name="majorVersion">SGA major version</param>
|
||||
/// <returns>Filled SGA file version 4 on success, null on error</returns>
|
||||
private static File4 ParseFile4(Stream data)
|
||||
{
|
||||
File4 file4 = new File4();
|
||||
|
||||
file4.NameOffset = data.ReadUInt32();
|
||||
file4.Name = null; // Read from string table
|
||||
file4.Offset = data.ReadUInt32();
|
||||
file4.SizeOnDisk = data.ReadUInt32();
|
||||
file4.Size = data.ReadUInt32();
|
||||
file4.TimeModified = data.ReadUInt32();
|
||||
file4.Dummy0 = data.ReadByteValue();
|
||||
file4.Type = data.ReadByteValue();
|
||||
|
||||
return file4;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into an SGA file version 6
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <param name="majorVersion">SGA major version</param>
|
||||
/// <returns>Filled SGA file version 6 on success, null on error</returns>
|
||||
private static File6 ParseFile6(Stream data)
|
||||
{
|
||||
File6 file6 = new File6();
|
||||
|
||||
file6.NameOffset = data.ReadUInt32();
|
||||
file6.Name = null; // Read from string table
|
||||
file6.Offset = data.ReadUInt32();
|
||||
file6.SizeOnDisk = data.ReadUInt32();
|
||||
file6.Size = data.ReadUInt32();
|
||||
file6.TimeModified = data.ReadUInt32();
|
||||
file6.Dummy0 = data.ReadByteValue();
|
||||
file6.Type = data.ReadByteValue();
|
||||
file6.CRC32 = data.ReadUInt32();
|
||||
|
||||
return file6;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into an SGA file version 7
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <param name="majorVersion">SGA major version</param>
|
||||
/// <returns>Filled SGA file version 7 on success, null on error</returns>
|
||||
private static File7 ParseFile7(Stream data)
|
||||
{
|
||||
File7 file7 = new File7();
|
||||
|
||||
file7.NameOffset = data.ReadUInt32();
|
||||
file7.Name = null; // Read from string table
|
||||
file7.Offset = data.ReadUInt32();
|
||||
file7.SizeOnDisk = data.ReadUInt32();
|
||||
file7.Size = data.ReadUInt32();
|
||||
file7.TimeModified = data.ReadUInt32();
|
||||
file7.Dummy0 = data.ReadByteValue();
|
||||
file7.Type = data.ReadByteValue();
|
||||
file7.CRC32 = data.ReadUInt32();
|
||||
file7.HashOffset = data.ReadUInt32();
|
||||
|
||||
return file7;
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
}
|
||||
@@ -1,141 +0,0 @@
|
||||
using System.IO;
|
||||
using System.Text;
|
||||
using SabreTools.IO;
|
||||
using SabreTools.Models.VBSP;
|
||||
using static SabreTools.Models.VBSP.Constants;
|
||||
|
||||
namespace BinaryObjectScanner.Builders
|
||||
{
|
||||
public static class VBSP
|
||||
{
|
||||
#region Byte Data
|
||||
|
||||
/// <summary>
|
||||
/// Parse a byte array into a Half-Life 2 Level
|
||||
/// </summary>
|
||||
/// <param name="data">Byte array to parse</param>
|
||||
/// <param name="offset">Offset into the byte array</param>
|
||||
/// <returns>Filled Half-Life 2 Level on success, null on error</returns>
|
||||
public static SabreTools.Models.VBSP.File ParseFile(byte[] data, int offset)
|
||||
{
|
||||
// If the data is invalid
|
||||
if (data == null)
|
||||
return null;
|
||||
|
||||
// If the offset is out of bounds
|
||||
if (offset < 0 || offset >= data.Length)
|
||||
return null;
|
||||
|
||||
// Create a memory stream and parse that
|
||||
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
|
||||
return ParseFile(dataStream);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Stream Data
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a Half-Life 2 Level
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled Half-Life 2 Level on success, null on error</returns>
|
||||
public static SabreTools.Models.VBSP.File ParseFile(Stream data)
|
||||
{
|
||||
// If the data is invalid
|
||||
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
|
||||
return null;
|
||||
|
||||
// If the offset is out of bounds
|
||||
if (data.Position < 0 || data.Position >= data.Length)
|
||||
return null;
|
||||
|
||||
// Cache the current offset
|
||||
long initialOffset = data.Position;
|
||||
|
||||
// Create a new Half-Life 2 Level to fill
|
||||
var file = new SabreTools.Models.VBSP.File();
|
||||
|
||||
#region Header
|
||||
|
||||
// Try to parse the header
|
||||
var header = ParseHeader(data);
|
||||
if (header == null)
|
||||
return null;
|
||||
|
||||
// Set the package header
|
||||
file.Header = header;
|
||||
|
||||
#endregion
|
||||
|
||||
return file;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a Half-Life 2 Level header
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled Half-Life 2 Level header on success, null on error</returns>
|
||||
private static Header ParseHeader(Stream data)
|
||||
{
|
||||
// TODO: Use marshalling here instead of building
|
||||
Header header = new Header();
|
||||
|
||||
byte[] signature = data.ReadBytes(4);
|
||||
header.Signature = Encoding.ASCII.GetString(signature);
|
||||
if (header.Signature != SignatureString)
|
||||
return null;
|
||||
|
||||
header.Version = data.ReadInt32();
|
||||
if ((header.Version < 19 || header.Version > 22) && header.Version != 0x00040014)
|
||||
return null;
|
||||
|
||||
header.Lumps = new Lump[HL_VBSP_LUMP_COUNT];
|
||||
for (int i = 0; i < HL_VBSP_LUMP_COUNT; i++)
|
||||
{
|
||||
header.Lumps[i] = ParseLump(data, header.Version);
|
||||
}
|
||||
|
||||
header.MapRevision = data.ReadInt32();
|
||||
|
||||
return header;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a Half-Life 2 Level lump
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <param name="version">VBSP version</param>
|
||||
/// <returns>Filled Half-Life 2 Level lump on success, null on error</returns>
|
||||
private static Lump ParseLump(Stream data, int version)
|
||||
{
|
||||
// TODO: Use marshalling here instead of building
|
||||
Lump lump = new Lump();
|
||||
|
||||
lump.Offset = data.ReadUInt32();
|
||||
lump.Length = data.ReadUInt32();
|
||||
lump.Version = data.ReadUInt32();
|
||||
lump.FourCC = new char[4];
|
||||
for (int i = 0; i < 4; i++)
|
||||
{
|
||||
lump.FourCC[i] = (char)data.ReadByte();
|
||||
}
|
||||
|
||||
// This block was commented out because test VBSPs with header
|
||||
// version 21 had the values in the "right" order already and
|
||||
// were causing decompression issues
|
||||
|
||||
//if (version >= 21 && version != 0x00040014)
|
||||
//{
|
||||
// uint temp = lump.Version;
|
||||
// lump.Version = lump.Offset;
|
||||
// lump.Offset = lump.Length;
|
||||
// lump.Length = temp;
|
||||
//}
|
||||
|
||||
return lump;
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
}
|
||||
@@ -1,318 +0,0 @@
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Text;
|
||||
using SabreTools.IO;
|
||||
using SabreTools.Models.VPK;
|
||||
using static SabreTools.Models.VPK.Constants;
|
||||
|
||||
namespace BinaryObjectScanner.Builders
|
||||
{
|
||||
public static class VPK
|
||||
{
|
||||
#region Byte Data
|
||||
|
||||
/// <summary>
|
||||
/// Parse a byte array into a Valve Package
|
||||
/// </summary>
|
||||
/// <param name="data">Byte array to parse</param>
|
||||
/// <param name="offset">Offset into the byte array</param>
|
||||
/// <returns>Filled Valve Package on success, null on error</returns>
|
||||
public static SabreTools.Models.VPK.File ParseFile(byte[] data, int offset)
|
||||
{
|
||||
// If the data is invalid
|
||||
if (data == null)
|
||||
return null;
|
||||
|
||||
// If the offset is out of bounds
|
||||
if (offset < 0 || offset >= data.Length)
|
||||
return null;
|
||||
|
||||
// Create a memory stream and parse that
|
||||
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
|
||||
return ParseFile(dataStream);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Stream Data
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a Valve Package
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled Valve Package on success, null on error</returns>
|
||||
public static SabreTools.Models.VPK.File ParseFile(Stream data)
|
||||
{
|
||||
// If the data is invalid
|
||||
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
|
||||
return null;
|
||||
|
||||
// If the offset is out of bounds
|
||||
if (data.Position < 0 || data.Position >= data.Length)
|
||||
return null;
|
||||
|
||||
// Cache the current offset
|
||||
long initialOffset = data.Position;
|
||||
|
||||
// Create a new Valve Package to fill
|
||||
var file = new SabreTools.Models.VPK.File();
|
||||
|
||||
#region Header
|
||||
|
||||
// Try to parse the header
|
||||
// The original version had no signature.
|
||||
var header = ParseHeader(data);
|
||||
|
||||
// Set the package header
|
||||
file.Header = header;
|
||||
|
||||
#endregion
|
||||
|
||||
#region Extended Header
|
||||
|
||||
if (header?.Version == 2)
|
||||
{
|
||||
// Try to parse the extended header
|
||||
var extendedHeader = ParseExtendedHeader(data);
|
||||
if (extendedHeader == null)
|
||||
return null;
|
||||
|
||||
// Set the package extended header
|
||||
file.ExtendedHeader = extendedHeader;
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Directory Items
|
||||
|
||||
// Create the directory items tree
|
||||
var directoryItems = ParseDirectoryItemTree(data);
|
||||
|
||||
// Set the directory items
|
||||
file.DirectoryItems = directoryItems;
|
||||
|
||||
#endregion
|
||||
|
||||
#region Archive Hashes
|
||||
|
||||
if (header?.Version == 2 && file.ExtendedHeader != null && file.ExtendedHeader.ArchiveHashLength > 0)
|
||||
{
|
||||
// Create the archive hashes list
|
||||
var archiveHashes = new List<ArchiveHash>();
|
||||
|
||||
// Cache the current offset
|
||||
initialOffset = data.Position;
|
||||
|
||||
// Try to parse the directory items
|
||||
while (data.Position < initialOffset + file.ExtendedHeader.ArchiveHashLength)
|
||||
{
|
||||
var archiveHash = ParseArchiveHash(data);
|
||||
archiveHashes.Add(archiveHash);
|
||||
}
|
||||
|
||||
file.ArchiveHashes = archiveHashes.ToArray();
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
return file;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a Valve Package header
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled Valve Package header on success, null on error</returns>
|
||||
private static Header ParseHeader(Stream data)
|
||||
{
|
||||
// TODO: Use marshalling here instead of building
|
||||
Header header = new Header();
|
||||
|
||||
header.Signature = data.ReadUInt32();
|
||||
if (header.Signature != SignatureUInt32)
|
||||
return null;
|
||||
|
||||
header.Version = data.ReadUInt32();
|
||||
if (header.Version > 2)
|
||||
return null;
|
||||
|
||||
header.DirectoryLength = data.ReadUInt32();
|
||||
|
||||
return header;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a Valve Package extended header
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled Valve Package extended header on success, null on error</returns>
|
||||
private static ExtendedHeader ParseExtendedHeader(Stream data)
|
||||
{
|
||||
// TODO: Use marshalling here instead of building
|
||||
ExtendedHeader extendedHeader = new ExtendedHeader();
|
||||
|
||||
extendedHeader.Dummy0 = data.ReadUInt32();
|
||||
extendedHeader.ArchiveHashLength = data.ReadUInt32();
|
||||
extendedHeader.ExtraLength = data.ReadUInt32();
|
||||
extendedHeader.Dummy1 = data.ReadUInt32();
|
||||
|
||||
return extendedHeader;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a Valve Package archive hash
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled Valve Package archive hash on success, null on error</returns>
|
||||
private static ArchiveHash ParseArchiveHash(Stream data)
|
||||
{
|
||||
// TODO: Use marshalling here instead of building
|
||||
ArchiveHash archiveHash = new ArchiveHash();
|
||||
|
||||
archiveHash.ArchiveIndex = data.ReadUInt32();
|
||||
archiveHash.ArchiveOffset = data.ReadUInt32();
|
||||
archiveHash.Length = data.ReadUInt32();
|
||||
archiveHash.Hash = data.ReadBytes(0x10);
|
||||
|
||||
return archiveHash;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a Valve Package directory item tree
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled Valve Package directory item tree on success, null on error</returns>
|
||||
private static DirectoryItem[] ParseDirectoryItemTree(Stream data)
|
||||
{
|
||||
// Create the directory items list
|
||||
var directoryItems = new List<DirectoryItem>();
|
||||
|
||||
while (true)
|
||||
{
|
||||
// Get the extension
|
||||
string extensionString = data.ReadString(Encoding.ASCII);
|
||||
if (string.IsNullOrEmpty(extensionString))
|
||||
break;
|
||||
|
||||
// Sanitize the extension
|
||||
for (int i = 0; i < 0x20; i++)
|
||||
{
|
||||
extensionString = extensionString.Replace($"{(char)i}", string.Empty);
|
||||
}
|
||||
|
||||
while (true)
|
||||
{
|
||||
// Get the path
|
||||
string pathString = data.ReadString(Encoding.ASCII);
|
||||
if (string.IsNullOrEmpty(pathString))
|
||||
break;
|
||||
|
||||
// Sanitize the path
|
||||
for (int i = 0; i < 0x20; i++)
|
||||
{
|
||||
pathString = pathString.Replace($"{(char)i}", string.Empty);
|
||||
}
|
||||
|
||||
while (true)
|
||||
{
|
||||
// Get the name
|
||||
string nameString = data.ReadString(Encoding.ASCII);
|
||||
if (string.IsNullOrEmpty(nameString))
|
||||
break;
|
||||
|
||||
// Sanitize the name
|
||||
for (int i = 0; i < 0x20; i++)
|
||||
{
|
||||
nameString = nameString.Replace($"{(char)i}", string.Empty);
|
||||
}
|
||||
|
||||
// Get the directory item
|
||||
var directoryItem = ParseDirectoryItem(data, extensionString, pathString, nameString);
|
||||
|
||||
// Add the directory item
|
||||
directoryItems.Add(directoryItem);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return directoryItems.ToArray();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a Valve Package directory item
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled Valve Package directory item on success, null on error</returns>
|
||||
private static DirectoryItem ParseDirectoryItem(Stream data, string extension, string path, string name)
|
||||
{
|
||||
DirectoryItem directoryItem = new DirectoryItem();
|
||||
|
||||
directoryItem.Extension = extension;
|
||||
directoryItem.Path = path;
|
||||
directoryItem.Name = name;
|
||||
|
||||
// Get the directory entry
|
||||
var directoryEntry = ParseDirectoryEntry(data);
|
||||
|
||||
// Set the directory entry
|
||||
directoryItem.DirectoryEntry = directoryEntry;
|
||||
|
||||
// Get the preload data pointer
|
||||
long preloadDataPointer = -1; int preloadDataLength = -1;
|
||||
if (directoryEntry.ArchiveIndex == HL_VPK_NO_ARCHIVE && directoryEntry.EntryLength > 0)
|
||||
{
|
||||
preloadDataPointer = directoryEntry.EntryOffset;
|
||||
preloadDataLength = (int)directoryEntry.EntryLength;
|
||||
}
|
||||
else if (directoryEntry.PreloadBytes > 0)
|
||||
{
|
||||
preloadDataPointer = data.Position;
|
||||
preloadDataLength = directoryEntry.PreloadBytes;
|
||||
}
|
||||
|
||||
// If we had a valid preload data pointer
|
||||
byte[] preloadData = null;
|
||||
if (preloadDataPointer >= 0 && preloadDataLength > 0)
|
||||
{
|
||||
// Cache the current offset
|
||||
long initialOffset = data.Position;
|
||||
|
||||
// Seek to the preload data offset
|
||||
data.Seek(preloadDataPointer, SeekOrigin.Begin);
|
||||
|
||||
// Read the preload data
|
||||
preloadData = data.ReadBytes(preloadDataLength);
|
||||
|
||||
// Seek back to the original offset
|
||||
data.Seek(initialOffset, SeekOrigin.Begin);
|
||||
}
|
||||
|
||||
// Set the preload data
|
||||
directoryItem.PreloadData = preloadData;
|
||||
|
||||
return directoryItem;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a Valve Package directory entry
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled Valve Package directory entry on success, null on error</returns>
|
||||
private static DirectoryEntry ParseDirectoryEntry(Stream data)
|
||||
{
|
||||
// TODO: Use marshalling here instead of building
|
||||
DirectoryEntry directoryEntry = new DirectoryEntry();
|
||||
|
||||
directoryEntry.CRC = data.ReadUInt32();
|
||||
directoryEntry.PreloadBytes = data.ReadUInt16();
|
||||
directoryEntry.ArchiveIndex = data.ReadUInt16();
|
||||
directoryEntry.EntryOffset = data.ReadUInt32();
|
||||
directoryEntry.EntryLength = data.ReadUInt32();
|
||||
directoryEntry.Dummy0 = data.ReadUInt16();
|
||||
|
||||
return directoryEntry;
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
}
|
||||
@@ -1,266 +0,0 @@
|
||||
using System.IO;
|
||||
using System.Text;
|
||||
using SabreTools.IO;
|
||||
using SabreTools.Models.WAD;
|
||||
using static SabreTools.Models.WAD.Constants;
|
||||
|
||||
namespace BinaryObjectScanner.Builders
|
||||
{
|
||||
public static class WAD
|
||||
{
|
||||
#region Byte Data
|
||||
|
||||
/// <summary>
|
||||
/// Parse a byte array into a Half-Life Texture Package
|
||||
/// </summary>
|
||||
/// <param name="data">Byte array to parse</param>
|
||||
/// <param name="offset">Offset into the byte array</param>
|
||||
/// <returns>Filled Half-Life Texture Package on success, null on error</returns>
|
||||
public static SabreTools.Models.WAD.File ParseFile(byte[] data, int offset)
|
||||
{
|
||||
// If the data is invalid
|
||||
if (data == null)
|
||||
return null;
|
||||
|
||||
// If the offset is out of bounds
|
||||
if (offset < 0 || offset >= data.Length)
|
||||
return null;
|
||||
|
||||
// Create a memory stream and parse that
|
||||
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
|
||||
return ParseFile(dataStream);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Stream Data
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a Half-Life Texture Package
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled Half-Life Texture Package on success, null on error</returns>
|
||||
public static SabreTools.Models.WAD.File ParseFile(Stream data)
|
||||
{
|
||||
// If the data is invalid
|
||||
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
|
||||
return null;
|
||||
|
||||
// If the offset is out of bounds
|
||||
if (data.Position < 0 || data.Position >= data.Length)
|
||||
return null;
|
||||
|
||||
// Cache the current offset
|
||||
long initialOffset = data.Position;
|
||||
|
||||
// Create a new Half-Life Texture Package to fill
|
||||
var file = new SabreTools.Models.WAD.File();
|
||||
|
||||
#region Header
|
||||
|
||||
// Try to parse the header
|
||||
var header = ParseHeader(data);
|
||||
if (header == null)
|
||||
return null;
|
||||
|
||||
// Set the package header
|
||||
file.Header = header;
|
||||
|
||||
#endregion
|
||||
|
||||
#region Lumps
|
||||
|
||||
// Get the lump offset
|
||||
uint lumpOffset = header.LumpOffset;
|
||||
if (lumpOffset < 0 || lumpOffset >= data.Length)
|
||||
return null;
|
||||
|
||||
// Seek to the lump offset
|
||||
data.Seek(lumpOffset, SeekOrigin.Begin);
|
||||
|
||||
// Create the lump array
|
||||
file.Lumps = new Lump[header.LumpCount];
|
||||
for (int i = 0; i < header.LumpCount; i++)
|
||||
{
|
||||
var lump = ParseLump(data);
|
||||
file.Lumps[i] = lump;
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Lump Infos
|
||||
|
||||
// Create the lump info array
|
||||
file.LumpInfos = new LumpInfo[header.LumpCount];
|
||||
for (int i = 0; i < header.LumpCount; i++)
|
||||
{
|
||||
var lump = file.Lumps[i];
|
||||
if (lump.Compression != 0)
|
||||
{
|
||||
file.LumpInfos[i] = null;
|
||||
continue;
|
||||
}
|
||||
|
||||
// Get the lump info offset
|
||||
uint lumpInfoOffset = lump.Offset;
|
||||
if (lumpInfoOffset < 0 || lumpInfoOffset >= data.Length)
|
||||
{
|
||||
file.LumpInfos[i] = null;
|
||||
continue;
|
||||
}
|
||||
|
||||
// Seek to the lump info offset
|
||||
data.Seek(lumpInfoOffset, SeekOrigin.Begin);
|
||||
|
||||
// Try to parse the lump info -- TODO: Do we ever set the mipmap level?
|
||||
var lumpInfo = ParseLumpInfo(data, lump.Type);
|
||||
file.LumpInfos[i] = lumpInfo;
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
return file;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a Half-Life Texture Package header
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled Half-Life Texture Package header on success, null on error</returns>
|
||||
private static Header ParseHeader(Stream data)
|
||||
{
|
||||
// TODO: Use marshalling here instead of building
|
||||
Header header = new Header();
|
||||
|
||||
byte[] signature = data.ReadBytes(4);
|
||||
header.Signature = Encoding.ASCII.GetString(signature);
|
||||
if (header.Signature != SignatureString)
|
||||
return null;
|
||||
|
||||
header.LumpCount = data.ReadUInt32();
|
||||
header.LumpOffset = data.ReadUInt32();
|
||||
|
||||
return header;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a Half-Life Texture Package lump
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled Half-Life Texture Package lump on success, null on error</returns>
|
||||
private static Lump ParseLump(Stream data)
|
||||
{
|
||||
// TODO: Use marshalling here instead of building
|
||||
Lump lump = new Lump();
|
||||
|
||||
lump.Offset = data.ReadUInt32();
|
||||
lump.DiskLength = data.ReadUInt32();
|
||||
lump.Length = data.ReadUInt32();
|
||||
lump.Type = data.ReadByteValue();
|
||||
lump.Compression = data.ReadByteValue();
|
||||
lump.Padding0 = data.ReadByteValue();
|
||||
lump.Padding1 = data.ReadByteValue();
|
||||
byte[] name = data.ReadBytes(16);
|
||||
lump.Name = Encoding.ASCII.GetString(name).TrimEnd('\0');
|
||||
|
||||
return lump;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a Half-Life Texture Package lump info
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <param name="type">Lump type</param>
|
||||
/// <param name="mipmap">Mipmap level</param>
|
||||
/// <returns>Filled Half-Life Texture Package lump info on success, null on error</returns>
|
||||
private static LumpInfo ParseLumpInfo(Stream data, byte type, uint mipmap = 0)
|
||||
{
|
||||
// TODO: Use marshalling here instead of building
|
||||
LumpInfo lumpInfo = new LumpInfo();
|
||||
|
||||
// Cache the initial offset
|
||||
long initialOffset = data.Position;
|
||||
|
||||
// Type 0x42 has no name, type 0x43 does. Are these flags?
|
||||
if (type == 0x42)
|
||||
{
|
||||
if (mipmap > 0)
|
||||
return null;
|
||||
|
||||
lumpInfo.Width = data.ReadUInt32();
|
||||
lumpInfo.Height = data.ReadUInt32();
|
||||
lumpInfo.PixelData = data.ReadBytes((int)(lumpInfo.Width * lumpInfo.Height));
|
||||
lumpInfo.PaletteSize = data.ReadUInt16();
|
||||
}
|
||||
else if (type == 0x43)
|
||||
{
|
||||
if (mipmap > 3)
|
||||
return null;
|
||||
|
||||
byte[] name = data.ReadBytes(16);
|
||||
lumpInfo.Name = Encoding.ASCII.GetString(name);
|
||||
lumpInfo.Width = data.ReadUInt32();
|
||||
lumpInfo.Height = data.ReadUInt32();
|
||||
lumpInfo.PixelOffset = data.ReadUInt32();
|
||||
_ = data.ReadBytes(12); // Unknown data
|
||||
|
||||
// Cache the current offset
|
||||
long currentOffset = data.Position;
|
||||
|
||||
// Seek to the pixel data
|
||||
data.Seek(initialOffset + lumpInfo.PixelOffset, SeekOrigin.Begin);
|
||||
|
||||
// Read the pixel data
|
||||
lumpInfo.PixelData = data.ReadBytes((int)(lumpInfo.Width * lumpInfo.Height));
|
||||
|
||||
// Seek back to the offset
|
||||
data.Seek(currentOffset, SeekOrigin.Begin);
|
||||
|
||||
uint pixelSize = lumpInfo.Width * lumpInfo.Height;
|
||||
|
||||
// Mipmap data -- TODO: How do we determine this during initial parsing?
|
||||
switch (mipmap)
|
||||
{
|
||||
case 1: _ = data.ReadBytes((int)pixelSize); break;
|
||||
case 2: _ = data.ReadBytes((int)(pixelSize + (pixelSize / 4))); break;
|
||||
case 3: _ = data.ReadBytes((int)(pixelSize + (pixelSize / 4) + (pixelSize / 16))); break;
|
||||
default: return null;
|
||||
}
|
||||
|
||||
_ = data.ReadBytes((int)(pixelSize + (pixelSize / 4) + (pixelSize / 16) + (pixelSize / 64))); // Pixel data
|
||||
lumpInfo.PaletteSize = data.ReadUInt16();
|
||||
lumpInfo.PaletteData = data.ReadBytes((int)lumpInfo.PaletteSize * 3);
|
||||
}
|
||||
else
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
// Adjust based on mipmap level
|
||||
switch (mipmap)
|
||||
{
|
||||
case 1:
|
||||
lumpInfo.Width /= 2;
|
||||
lumpInfo.Height /= 2;
|
||||
break;
|
||||
|
||||
case 2:
|
||||
lumpInfo.Width /= 4;
|
||||
lumpInfo.Height /= 4;
|
||||
break;
|
||||
|
||||
case 3:
|
||||
lumpInfo.Width /= 8;
|
||||
lumpInfo.Height /= 8;
|
||||
break;
|
||||
|
||||
default:
|
||||
return null;
|
||||
}
|
||||
|
||||
return lumpInfo;
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
}
|
||||
@@ -1,274 +0,0 @@
|
||||
using System.IO;
|
||||
using System.Text;
|
||||
using SabreTools.IO;
|
||||
using SabreTools.Models.XZP;
|
||||
using static SabreTools.Models.XZP.Constants;
|
||||
|
||||
namespace BinaryObjectScanner.Builders
|
||||
{
|
||||
public static class XZP
|
||||
{
|
||||
#region Byte Data
|
||||
|
||||
/// <summary>
|
||||
/// Parse a byte array into a XBox Package File
|
||||
/// </summary>
|
||||
/// <param name="data">Byte array to parse</param>
|
||||
/// <param name="offset">Offset into the byte array</param>
|
||||
/// <returns>Filled XBox Package File on success, null on error</returns>
|
||||
public static SabreTools.Models.XZP.File ParseFile(byte[] data, int offset)
|
||||
{
|
||||
// If the data is invalid
|
||||
if (data == null)
|
||||
return null;
|
||||
|
||||
// If the offset is out of bounds
|
||||
if (offset < 0 || offset >= data.Length)
|
||||
return null;
|
||||
|
||||
// Create a memory stream and parse that
|
||||
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
|
||||
return ParseFile(dataStream);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Stream Data
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a XBox Package File
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled XBox Package File on success, null on error</returns>
|
||||
public static SabreTools.Models.XZP.File ParseFile(Stream data)
|
||||
{
|
||||
// If the data is invalid
|
||||
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
|
||||
return null;
|
||||
|
||||
// If the offset is out of bounds
|
||||
if (data.Position < 0 || data.Position >= data.Length)
|
||||
return null;
|
||||
|
||||
// Cache the current offset
|
||||
long initialOffset = data.Position;
|
||||
|
||||
// Create a new XBox Package File to fill
|
||||
var file = new SabreTools.Models.XZP.File();
|
||||
|
||||
#region Header
|
||||
|
||||
// Try to parse the header
|
||||
var header = ParseHeader(data);
|
||||
if (header == null)
|
||||
return null;
|
||||
|
||||
// Set the package header
|
||||
file.Header = header;
|
||||
|
||||
#endregion
|
||||
|
||||
#region Directory Entries
|
||||
|
||||
// Create the directory entry array
|
||||
file.DirectoryEntries = new DirectoryEntry[header.DirectoryEntryCount];
|
||||
|
||||
// Try to parse the directory entries
|
||||
for (int i = 0; i < header.DirectoryEntryCount; i++)
|
||||
{
|
||||
var directoryEntry = ParseDirectoryEntry(data);
|
||||
file.DirectoryEntries[i] = directoryEntry;
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Preload Directory Entries
|
||||
|
||||
if (header.PreloadBytes > 0)
|
||||
{
|
||||
// Create the preload directory entry array
|
||||
file.PreloadDirectoryEntries = new DirectoryEntry[header.PreloadDirectoryEntryCount];
|
||||
|
||||
// Try to parse the preload directory entries
|
||||
for (int i = 0; i < header.PreloadDirectoryEntryCount; i++)
|
||||
{
|
||||
var directoryEntry = ParseDirectoryEntry(data);
|
||||
file.PreloadDirectoryEntries[i] = directoryEntry;
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Preload Directory Mappings
|
||||
|
||||
if (header.PreloadBytes > 0)
|
||||
{
|
||||
// Create the preload directory mapping array
|
||||
file.PreloadDirectoryMappings = new DirectoryMapping[header.PreloadDirectoryEntryCount];
|
||||
|
||||
// Try to parse the preload directory mappings
|
||||
for (int i = 0; i < header.PreloadDirectoryEntryCount; i++)
|
||||
{
|
||||
var directoryMapping = ParseDirectoryMapping(data);
|
||||
file.PreloadDirectoryMappings[i] = directoryMapping;
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Directory Items
|
||||
|
||||
if (header.DirectoryItemCount > 0)
|
||||
{
|
||||
// Get the directory item offset
|
||||
uint directoryItemOffset = header.DirectoryItemOffset;
|
||||
if (directoryItemOffset < 0 || directoryItemOffset >= data.Length)
|
||||
return null;
|
||||
|
||||
// Seek to the directory items
|
||||
data.Seek(directoryItemOffset, SeekOrigin.Begin);
|
||||
|
||||
// Create the directory item array
|
||||
file.DirectoryItems = new DirectoryItem[header.DirectoryItemCount];
|
||||
|
||||
// Try to parse the directory items
|
||||
for (int i = 0; i < header.DirectoryItemCount; i++)
|
||||
{
|
||||
var directoryItem = ParseDirectoryItem(data);
|
||||
file.DirectoryItems[i] = directoryItem;
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Footer
|
||||
|
||||
// Seek to the footer
|
||||
data.Seek(-8, SeekOrigin.End);
|
||||
|
||||
// Try to parse the footer
|
||||
var footer = ParseFooter(data);
|
||||
if (footer == null)
|
||||
return null;
|
||||
|
||||
// Set the package footer
|
||||
file.Footer = footer;
|
||||
|
||||
#endregion
|
||||
|
||||
return file;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a XBox Package File header
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled XBox Package File header on success, null on error</returns>
|
||||
private static Header ParseHeader(Stream data)
|
||||
{
|
||||
// TODO: Use marshalling here instead of building
|
||||
Header header = new Header();
|
||||
|
||||
byte[] signature = data.ReadBytes(4);
|
||||
header.Signature = Encoding.ASCII.GetString(signature);
|
||||
if (header.Signature != HeaderSignatureString)
|
||||
return null;
|
||||
|
||||
header.Version = data.ReadUInt32();
|
||||
if (header.Version != 6)
|
||||
return null;
|
||||
|
||||
header.PreloadDirectoryEntryCount = data.ReadUInt32();
|
||||
header.DirectoryEntryCount = data.ReadUInt32();
|
||||
header.PreloadBytes = data.ReadUInt32();
|
||||
header.HeaderLength = data.ReadUInt32();
|
||||
header.DirectoryItemCount = data.ReadUInt32();
|
||||
header.DirectoryItemOffset = data.ReadUInt32();
|
||||
header.DirectoryItemLength = data.ReadUInt32();
|
||||
|
||||
return header;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a XBox Package File directory entry
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled XBox Package File directory entry on success, null on error</returns>
|
||||
private static DirectoryEntry ParseDirectoryEntry(Stream data)
|
||||
{
|
||||
// TODO: Use marshalling here instead of building
|
||||
DirectoryEntry directoryEntry = new DirectoryEntry();
|
||||
|
||||
directoryEntry.FileNameCRC = data.ReadUInt32();
|
||||
directoryEntry.EntryLength = data.ReadUInt32();
|
||||
directoryEntry.EntryOffset = data.ReadUInt32();
|
||||
|
||||
return directoryEntry;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a XBox Package File directory mapping
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled XBox Package File directory mapping on success, null on error</returns>
|
||||
private static DirectoryMapping ParseDirectoryMapping(Stream data)
|
||||
{
|
||||
// TODO: Use marshalling here instead of building
|
||||
DirectoryMapping directoryMapping = new DirectoryMapping();
|
||||
|
||||
directoryMapping.PreloadDirectoryEntryIndex = data.ReadUInt16();
|
||||
|
||||
return directoryMapping;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a XBox Package File directory item
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled XBox Package File directory item on success, null on error</returns>
|
||||
private static DirectoryItem ParseDirectoryItem(Stream data)
|
||||
{
|
||||
// TODO: Use marshalling here instead of building
|
||||
DirectoryItem directoryItem = new DirectoryItem();
|
||||
|
||||
directoryItem.FileNameCRC = data.ReadUInt32();
|
||||
directoryItem.NameOffset = data.ReadUInt32();
|
||||
directoryItem.TimeCreated = data.ReadUInt32();
|
||||
|
||||
// Cache the current offset
|
||||
long currentPosition = data.Position;
|
||||
|
||||
// Seek to the name offset
|
||||
data.Seek(directoryItem.NameOffset, SeekOrigin.Begin);
|
||||
|
||||
// Read the name
|
||||
directoryItem.Name = data.ReadString(Encoding.ASCII);
|
||||
|
||||
// Seek back to the right position
|
||||
data.Seek(currentPosition, SeekOrigin.Begin);
|
||||
|
||||
return directoryItem;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a XBox Package File footer
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled XBox Package File footer on success, null on error</returns>
|
||||
private static Footer ParseFooter(Stream data)
|
||||
{
|
||||
// TODO: Use marshalling here instead of building
|
||||
Footer footer = new Footer();
|
||||
|
||||
footer.FileLength = data.ReadUInt32();
|
||||
byte[] signature = data.ReadBytes(4);
|
||||
footer.Signature = Encoding.ASCII.GetString(signature);
|
||||
if (footer.Signature != FooterSignatureString)
|
||||
return null;
|
||||
|
||||
return footer;
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
}
|
||||
@@ -72,7 +72,7 @@ namespace BinaryObjectScanner.Wrappers
|
||||
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
|
||||
return null;
|
||||
|
||||
var mediaKeyBlock = Builders.AACS.ParseMediaKeyBlock(data);
|
||||
var mediaKeyBlock = new SabreTools.Serialization.Streams.AACS().Deserialize(data);
|
||||
if (mediaKeyBlock == null)
|
||||
return null;
|
||||
|
||||
|
||||
@@ -89,7 +89,7 @@ namespace BinaryObjectScanner.Wrappers
|
||||
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
|
||||
return null;
|
||||
|
||||
var svm = Builders.BDPlus.ParseSVM(data);
|
||||
var svm = new SabreTools.Serialization.Streams.BDPlus().Deserialize(data);
|
||||
if (svm == null)
|
||||
return null;
|
||||
|
||||
|
||||
@@ -86,7 +86,7 @@ namespace BinaryObjectScanner.Wrappers
|
||||
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
|
||||
return null;
|
||||
|
||||
var archive = Builders.BFPK.ParseArchive(data);
|
||||
var archive = new SabreTools.Serialization.Streams.BFPK().Deserialize(data);
|
||||
if (archive == null)
|
||||
return null;
|
||||
|
||||
|
||||
@@ -98,7 +98,7 @@ namespace BinaryObjectScanner.Wrappers
|
||||
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
|
||||
return null;
|
||||
|
||||
var file = Builders.BSP.ParseFile(data);
|
||||
var file = new SabreTools.Serialization.Streams.BSP().Deserialize(data);
|
||||
if (file == null)
|
||||
return null;
|
||||
|
||||
|
||||
@@ -22,13 +22,13 @@
|
||||
|
||||
<ItemGroup>
|
||||
<ProjectReference Include="..\BinaryObjectScanner.ASN1\BinaryObjectScanner.ASN1.csproj" />
|
||||
<ProjectReference Include="..\BinaryObjectScanner.Builders\BinaryObjectScanner.Builders.csproj" />
|
||||
<ProjectReference Include="..\BinaryObjectScanner.Compression\BinaryObjectScanner.Compression.csproj" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<PackageReference Include="SabreTools.IO" Version="1.1.1" />
|
||||
<PackageReference Include="SabreTools.Models" Version="1.1.1" />
|
||||
<PackageReference Include="SabreTools.Serialization" Version="1.1.0" />
|
||||
<PackageReference Include="SharpCompress" Version="0.32.2" />
|
||||
<PackageReference Include="SharpZipLib" Version="1.4.1" />
|
||||
</ItemGroup>
|
||||
|
||||
@@ -186,7 +186,7 @@ namespace BinaryObjectScanner.Wrappers
|
||||
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
|
||||
return null;
|
||||
|
||||
var binary = Builders.CFB.ParseBinary(data);
|
||||
var binary = new SabreTools.Serialization.Streams.CFB().Deserialize(data);
|
||||
if (binary == null)
|
||||
return null;
|
||||
|
||||
|
||||
@@ -308,7 +308,7 @@ namespace BinaryObjectScanner.Wrappers
|
||||
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
|
||||
return null;
|
||||
|
||||
var archive = Builders.N3DS.ParseCIA(data);
|
||||
var archive = new SabreTools.Serialization.Streams.CIA().Deserialize(data);
|
||||
if (archive == null)
|
||||
return null;
|
||||
|
||||
|
||||
@@ -459,7 +459,7 @@ namespace BinaryObjectScanner.Wrappers
|
||||
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
|
||||
return null;
|
||||
|
||||
var file = Builders.GCF.ParseFile(data);
|
||||
var file = new SabreTools.Serialization.Streams.GCF().Deserialize(data);
|
||||
if (file == null)
|
||||
return null;
|
||||
|
||||
|
||||
@@ -286,7 +286,7 @@ namespace BinaryObjectScanner.Wrappers
|
||||
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
|
||||
return null;
|
||||
|
||||
var cabinet = Builders.InstallShieldCabinet.ParseCabinet(data);
|
||||
var cabinet = new SabreTools.Serialization.Streams.InstallShieldCabinet().Deserialize(data);
|
||||
if (cabinet == null)
|
||||
return null;
|
||||
|
||||
|
||||
@@ -338,7 +338,7 @@ namespace BinaryObjectScanner.Wrappers
|
||||
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
|
||||
return null;
|
||||
|
||||
var executable = Builders.LinearExecutable.ParseExecutable(data);
|
||||
var executable = new SabreTools.Serialization.Streams.LinearExecutable().Deserialize(data);
|
||||
if (executable == null)
|
||||
return null;
|
||||
|
||||
|
||||
@@ -136,7 +136,7 @@ namespace BinaryObjectScanner.Wrappers
|
||||
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
|
||||
return null;
|
||||
|
||||
var executable = Builders.MSDOS.ParseExecutable(data);
|
||||
var executable = new SabreTools.Serialization.Streams.MSDOS().Deserialize(data);
|
||||
if (executable == null)
|
||||
return null;
|
||||
|
||||
|
||||
@@ -147,7 +147,7 @@ namespace BinaryObjectScanner.Wrappers
|
||||
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
|
||||
return null;
|
||||
|
||||
var cabinet = Builders.MicrosoftCabinet.ParseCabinet(data);
|
||||
var cabinet = new SabreTools.Serialization.Streams.MicrosoftCabinet().Deserialize(data);
|
||||
if (cabinet == null)
|
||||
return null;
|
||||
|
||||
|
||||
@@ -269,7 +269,7 @@ namespace BinaryObjectScanner.Wrappers
|
||||
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
|
||||
return null;
|
||||
|
||||
var archive = Builders.N3DS.ParseCart(data);
|
||||
var archive = new SabreTools.Serialization.Streams.N3DS().Deserialize(data);
|
||||
if (archive == null)
|
||||
return null;
|
||||
|
||||
|
||||
@@ -246,7 +246,7 @@ namespace BinaryObjectScanner.Wrappers
|
||||
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
|
||||
return null;
|
||||
|
||||
var file = Builders.NCF.ParseFile(data);
|
||||
var file = new SabreTools.Serialization.Streams.NCF().Deserialize(data);
|
||||
if (file == null)
|
||||
return null;
|
||||
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Text;
|
||||
using static BinaryObjectScanner.Builders.Extensions;
|
||||
using static SabreTools.Serialization.Extensions;
|
||||
|
||||
namespace BinaryObjectScanner.Wrappers
|
||||
{
|
||||
@@ -264,7 +264,7 @@ namespace BinaryObjectScanner.Wrappers
|
||||
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
|
||||
return null;
|
||||
|
||||
var executable = Builders.NewExecutable.ParseExecutable(data);
|
||||
var executable = new SabreTools.Serialization.Streams.NewExecutable().Deserialize(data);
|
||||
if (executable == null)
|
||||
return null;
|
||||
|
||||
|
||||
@@ -364,7 +364,7 @@ namespace BinaryObjectScanner.Wrappers
|
||||
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
|
||||
return null;
|
||||
|
||||
var archive = Builders.Nitro.ParseCart(data);
|
||||
var archive = new SabreTools.Serialization.Streams.Nitro().Deserialize(data);
|
||||
if (archive == null)
|
||||
return null;
|
||||
|
||||
|
||||
@@ -90,7 +90,7 @@ namespace BinaryObjectScanner.Wrappers
|
||||
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
|
||||
return null;
|
||||
|
||||
var file = Builders.PAK.ParseFile(data);
|
||||
var file = new SabreTools.Serialization.Streams.PAK().Deserialize(data);
|
||||
if (file == null)
|
||||
return null;
|
||||
|
||||
|
||||
@@ -103,7 +103,7 @@ namespace BinaryObjectScanner.Wrappers
|
||||
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
|
||||
return null;
|
||||
|
||||
var archive = Builders.PFF.ParseArchive(data);
|
||||
var archive = new SabreTools.Serialization.Streams.PFF().Deserialize(data);
|
||||
if (archive == null)
|
||||
return null;
|
||||
|
||||
|
||||
@@ -270,7 +270,7 @@ namespace BinaryObjectScanner.Wrappers
|
||||
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
|
||||
return null;
|
||||
|
||||
var audioFile = Builders.PlayJ.ParseAudioFile(data);
|
||||
var audioFile = new SabreTools.Serialization.Streams.PlayJAudio().Deserialize(data);
|
||||
if (audioFile == null)
|
||||
return null;
|
||||
|
||||
|
||||
@@ -6,7 +6,7 @@ using System.Text;
|
||||
using System.Xml;
|
||||
using BinaryObjectScanner.ASN1;
|
||||
using SabreTools.IO;
|
||||
using static BinaryObjectScanner.Builders.Extensions;
|
||||
using static SabreTools.Serialization.Extensions;
|
||||
|
||||
namespace BinaryObjectScanner.Wrappers
|
||||
{
|
||||
@@ -1034,7 +1034,7 @@ namespace BinaryObjectScanner.Wrappers
|
||||
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
|
||||
return null;
|
||||
|
||||
var executable = Builders.PortableExecutable.ParseExecutable(data);
|
||||
var executable = new SabreTools.Serialization.Streams.PortableExecutable().Deserialize(data);
|
||||
if (executable == null)
|
||||
return null;
|
||||
|
||||
|
||||
@@ -96,7 +96,7 @@ namespace BinaryObjectScanner.Wrappers
|
||||
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
|
||||
return null;
|
||||
|
||||
var archive = Builders.Quantum.ParseArchive(data);
|
||||
var archive = new SabreTools.Serialization.Streams.Quantum().Deserialize(data);
|
||||
if (archive == null)
|
||||
return null;
|
||||
|
||||
|
||||
@@ -404,7 +404,7 @@ namespace BinaryObjectScanner.Wrappers
|
||||
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
|
||||
return null;
|
||||
|
||||
var file = Builders.SGA.ParseFile(data);
|
||||
var file = new SabreTools.Serialization.Streams.SGA().Deserialize(data);
|
||||
if (file == null)
|
||||
return null;
|
||||
|
||||
|
||||
@@ -83,7 +83,7 @@ namespace BinaryObjectScanner.Wrappers
|
||||
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
|
||||
return null;
|
||||
|
||||
var file = Builders.VBSP.ParseFile(data);
|
||||
var file = new SabreTools.Serialization.Streams.VBSP().Deserialize(data);
|
||||
if (file == null)
|
||||
return null;
|
||||
|
||||
|
||||
@@ -166,7 +166,7 @@ namespace BinaryObjectScanner.Wrappers
|
||||
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
|
||||
return null;
|
||||
|
||||
var file = Builders.VPK.ParseFile(data);
|
||||
var file = new SabreTools.Serialization.Streams.VPK().Deserialize(data);
|
||||
if (file == null)
|
||||
return null;
|
||||
|
||||
|
||||
@@ -97,7 +97,7 @@ namespace BinaryObjectScanner.Wrappers
|
||||
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
|
||||
return null;
|
||||
|
||||
var file = Builders.WAD.ParseFile(data);
|
||||
var file = new SabreTools.Serialization.Streams.WAD().Deserialize(data);
|
||||
if (file == null)
|
||||
return null;
|
||||
|
||||
|
||||
@@ -140,7 +140,7 @@ namespace BinaryObjectScanner.Wrappers
|
||||
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
|
||||
return null;
|
||||
|
||||
var file = Builders.XZP.ParseFile(data);
|
||||
var file = new SabreTools.Serialization.Streams.XZP().Deserialize(data);
|
||||
if (file == null)
|
||||
return null;
|
||||
|
||||
|
||||
@@ -16,8 +16,6 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Solution Items", "Solution
|
||||
README.md = README.md
|
||||
EndProjectSection
|
||||
EndProject
|
||||
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "BinaryObjectScanner.Builders", "BinaryObjectScanner.Builders\BinaryObjectScanner.Builders.csproj", "{7577733A-CC8D-4E7C-8B6D-FFC7EC1B3D07}"
|
||||
EndProject
|
||||
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "BinaryObjectScanner.Wrappers", "BinaryObjectScanner.Wrappers\BinaryObjectScanner.Wrappers.csproj", "{35BD489F-E58D-45DD-9929-DC4B32414750}"
|
||||
EndProject
|
||||
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "BinaryObjectScanner.Matching", "BinaryObjectScanner.Matching\BinaryObjectScanner.Matching.csproj", "{563BC37B-8E02-4178-B6FE-F3F6F65E0096}"
|
||||
@@ -54,10 +52,6 @@ Global
|
||||
{88735BA2-778D-4192-8EB2-FFF6843719E2}.Debug|Any CPU.Build.0 = Debug|Any CPU
|
||||
{88735BA2-778D-4192-8EB2-FFF6843719E2}.Release|Any CPU.ActiveCfg = Release|Any CPU
|
||||
{88735BA2-778D-4192-8EB2-FFF6843719E2}.Release|Any CPU.Build.0 = Release|Any CPU
|
||||
{7577733A-CC8D-4E7C-8B6D-FFC7EC1B3D07}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
|
||||
{7577733A-CC8D-4E7C-8B6D-FFC7EC1B3D07}.Debug|Any CPU.Build.0 = Debug|Any CPU
|
||||
{7577733A-CC8D-4E7C-8B6D-FFC7EC1B3D07}.Release|Any CPU.ActiveCfg = Release|Any CPU
|
||||
{7577733A-CC8D-4E7C-8B6D-FFC7EC1B3D07}.Release|Any CPU.Build.0 = Release|Any CPU
|
||||
{35BD489F-E58D-45DD-9929-DC4B32414750}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
|
||||
{35BD489F-E58D-45DD-9929-DC4B32414750}.Debug|Any CPU.Build.0 = Debug|Any CPU
|
||||
{35BD489F-E58D-45DD-9929-DC4B32414750}.Release|Any CPU.ActiveCfg = Release|Any CPU
|
||||
|
||||
@@ -44,10 +44,6 @@
|
||||
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
|
||||
<PrivateAssets>all</PrivateAssets>
|
||||
</ProjectReference>
|
||||
<ProjectReference Include="..\BinaryObjectScanner.Builders\BinaryObjectScanner.Builders.csproj">
|
||||
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
|
||||
<PrivateAssets>all</PrivateAssets>
|
||||
</ProjectReference>
|
||||
<ProjectReference Include="..\BinaryObjectScanner.Compression\BinaryObjectScanner.Compression.csproj">
|
||||
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
|
||||
<PrivateAssets>all</PrivateAssets>
|
||||
|
||||
@@ -9,7 +9,6 @@
|
||||
|
||||
<ItemGroup>
|
||||
<ProjectReference Include="..\BurnOutSharp\BurnOutSharp.csproj" />
|
||||
<ProjectReference Include="..\BinaryObjectScanner.Builders\BinaryObjectScanner.Builders.csproj" />
|
||||
<ProjectReference Include="..\BinaryObjectScanner.FileType\BinaryObjectScanner.FileType.csproj" />
|
||||
<ProjectReference Include="..\BinaryObjectScanner.Matching\BinaryObjectScanner.Matching.csproj" />
|
||||
<ProjectReference Include="..\BinaryObjectScanner.Utilities\BinaryObjectScanner.Utilities.csproj" />
|
||||
@@ -20,6 +19,7 @@
|
||||
<PackageReference Include="OpenMcdf" Version="2.2.1.12" />
|
||||
<PackageReference Include="SabreTools.IO" Version="1.1.1" />
|
||||
<PackageReference Include="SabreTools.Models" Version="1.1.1" />
|
||||
<PackageReference Include="SabreTools.Serialization" Version="1.1.0" />
|
||||
<PackageReference Include="UnshieldSharp" Version="1.6.9" />
|
||||
</ItemGroup>
|
||||
|
||||
|
||||
Reference in New Issue
Block a user