Use wrappers from Serialization (nw)

This commit is contained in:
Matt Nadareski
2023-09-16 00:44:22 -04:00
parent a52d45f7c2
commit a801e720b2
37 changed files with 1990 additions and 7696 deletions

View File

@@ -35,7 +35,7 @@
<ItemGroup>
<PackageReference Include="SabreTools.IO" Version="1.1.1" />
<PackageReference Include="SabreTools.Models" Version="1.1.1" />
<PackageReference Include="SabreTools.Models" Version="1.1.2" />
<PackageReference Include="SharpCompress" Version="0.32.2" />
<PackageReference Include="SharpZipLib" Version="1.4.1" />
</ItemGroup>

View File

@@ -26,8 +26,8 @@
<ItemGroup>
<PackageReference Include="SabreTools.ASN1" Version="1.1.0" />
<PackageReference Include="SabreTools.IO" Version="1.1.1" />
<PackageReference Include="SabreTools.Models" Version="1.1.1" />
<PackageReference Include="SabreTools.Serialization" Version="1.1.1" />
<PackageReference Include="SabreTools.Models" Version="1.1.2" />
<PackageReference Include="SabreTools.Serialization" Version="1.1.3" />
</ItemGroup>
</Project>

View File

@@ -1,108 +0,0 @@
using System.IO;
using System.Text;
using SabreTools.Models.AACS;
namespace BinaryObjectScanner.Wrappers
{
    /// <summary>
    /// Wrapper for a deserialized AACS media key block backed by a byte array or Stream
    /// </summary>
    public class AACSMediaKeyBlock : WrapperBase<MediaKeyBlock>
    {
        #region Descriptive Properties

        /// <inheritdoc/>
        public override string DescriptionString => "AACS Media Key Block";

        #endregion

        #region Constructors

        /// <inheritdoc/>
#if NET48
        public AACSMediaKeyBlock(MediaKeyBlock model, byte[] data, int offset)
#else
        public AACSMediaKeyBlock(MediaKeyBlock? model, byte[]? data, int offset)
#endif
            : base(model, data, offset)
        {
            // Construction is handled entirely by WrapperBase
        }

        /// <inheritdoc/>
#if NET48
        public AACSMediaKeyBlock(MediaKeyBlock model, Stream data)
#else
        public AACSMediaKeyBlock(MediaKeyBlock? model, Stream? data)
#endif
            : base(model, data)
        {
            // Construction is handled entirely by WrapperBase
        }

        /// <summary>
        /// Create an AACS media key block from a byte array and offset
        /// </summary>
        /// <param name="data">Byte array representing the archive</param>
        /// <param name="offset">Offset within the array to parse</param>
        /// <returns>An AACS media key block wrapper on success, null on failure</returns>
#if NET48
        public static AACSMediaKeyBlock Create(byte[] data, int offset)
#else
        public static AACSMediaKeyBlock? Create(byte[]? data, int offset)
#endif
        {
            // Reject a missing array or an offset outside of it
            if (data == null || offset < 0 || offset >= data.Length)
                return null;

            // Defer to the Stream-based factory
            var stream = new MemoryStream(data, offset, data.Length - offset);
            return Create(stream);
        }

        /// <summary>
        /// Create an AACS media key block from a Stream
        /// </summary>
        /// <param name="data">Stream representing the archive</param>
        /// <returns>An AACS media key block wrapper on success, null on failure</returns>
#if NET48
        public static AACSMediaKeyBlock Create(Stream data)
#else
        public static AACSMediaKeyBlock? Create(Stream? data)
#endif
        {
            // The stream must exist, be non-empty, and support seeking and reading
            if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
                return null;

            // Attempt to deserialize the model from the stream
            var model = new SabreTools.Serialization.Streams.AACS().Deserialize(data);
            if (model == null)
                return null;

            try
            {
                return new AACSMediaKeyBlock(model, data);
            }
            catch
            {
                // Any wrapper construction failure maps to null
                return null;
            }
        }

        #endregion

        #region Printing

        /// <inheritdoc/>
        public override StringBuilder PrettyPrint()
        {
            var sb = new StringBuilder();
            Printing.AACSMediaKeyBlock.Print(sb, this.Model);
            return sb;
        }

        #endregion
    }
}

View File

@@ -1,108 +0,0 @@
using System.IO;
using System.Text;
using SabreTools.Models.BDPlus;
namespace BinaryObjectScanner.Wrappers
{
    /// <summary>
    /// Wrapper for a deserialized BD+ SVM backed by a byte array or Stream
    /// </summary>
    public class BDPlusSVM : WrapperBase<SVM>
    {
        #region Descriptive Properties

        /// <inheritdoc/>
        public override string DescriptionString => "BD+ SVM";

        #endregion

        #region Constructors

        /// <inheritdoc/>
#if NET48
        public BDPlusSVM(SVM model, byte[] data, int offset)
#else
        public BDPlusSVM(SVM? model, byte[]? data, int offset)
#endif
            : base(model, data, offset)
        {
            // Construction is handled entirely by WrapperBase
        }

        /// <inheritdoc/>
#if NET48
        public BDPlusSVM(SVM model, Stream data)
#else
        public BDPlusSVM(SVM? model, Stream? data)
#endif
            : base(model, data)
        {
            // Construction is handled entirely by WrapperBase
        }

        /// <summary>
        /// Create a BD+ SVM from a byte array and offset
        /// </summary>
        /// <param name="data">Byte array representing the archive</param>
        /// <param name="offset">Offset within the array to parse</param>
        /// <returns>A BD+ SVM wrapper on success, null on failure</returns>
#if NET48
        public static BDPlusSVM Create(byte[] data, int offset)
#else
        public static BDPlusSVM? Create(byte[]? data, int offset)
#endif
        {
            // Reject a missing array or an offset outside of it
            if (data == null || offset < 0 || offset >= data.Length)
                return null;

            // Defer to the Stream-based factory
            var stream = new MemoryStream(data, offset, data.Length - offset);
            return Create(stream);
        }

        /// <summary>
        /// Create a BD+ SVM from a Stream
        /// </summary>
        /// <param name="data">Stream representing the archive</param>
        /// <returns>A BD+ SVM wrapper on success, null on failure</returns>
#if NET48
        public static BDPlusSVM Create(Stream data)
#else
        public static BDPlusSVM? Create(Stream? data)
#endif
        {
            // The stream must exist, be non-empty, and support seeking and reading
            if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
                return null;

            // Attempt to deserialize the model from the stream
            var model = new SabreTools.Serialization.Streams.BDPlus().Deserialize(data);
            if (model == null)
                return null;

            try
            {
                return new BDPlusSVM(model, data);
            }
            catch
            {
                // Any wrapper construction failure maps to null
                return null;
            }
        }

        #endregion

        #region Printing

        /// <inheritdoc/>
        public override StringBuilder PrettyPrint()
        {
            var sb = new StringBuilder();
            Printing.BDPlusSVM.Print(sb, this.Model);
            return sb;
        }

        #endregion
    }
}

View File

@@ -1,205 +0,0 @@
using System.IO;
using System.Text;
using SharpCompress.Compressors;
using SharpCompress.Compressors.Deflate;
namespace BinaryObjectScanner.Wrappers
{
    /// <summary>
    /// Wrapper around a parsed BFPK archive that supports pretty-printing and
    /// extraction of contained files (optionally zlib-compressed).
    /// </summary>
    public class BFPK : WrapperBase<SabreTools.Models.BFPK.Archive>
    {
        #region Descriptive Properties

        /// <inheritdoc/>
        public override string DescriptionString => "BFPK Archive";

        #endregion

        #region Constructors

        /// <inheritdoc/>
#if NET48
        public BFPK(SabreTools.Models.BFPK.Archive model, byte[] data, int offset)
#else
        public BFPK(SabreTools.Models.BFPK.Archive? model, byte[]? data, int offset)
#endif
            : base(model, data, offset)
        {
            // All logic is handled by the base class
        }

        /// <inheritdoc/>
#if NET48
        public BFPK(SabreTools.Models.BFPK.Archive model, Stream data)
#else
        public BFPK(SabreTools.Models.BFPK.Archive? model, Stream? data)
#endif
            : base(model, data)
        {
            // All logic is handled by the base class
        }

        /// <summary>
        /// Create a BFPK archive from a byte array and offset
        /// </summary>
        /// <param name="data">Byte array representing the archive</param>
        /// <param name="offset">Offset within the array to parse</param>
        /// <returns>A BFPK archive wrapper on success, null on failure</returns>
#if NET48
        public static BFPK Create(byte[] data, int offset)
#else
        public static BFPK? Create(byte[]? data, int offset)
#endif
        {
            // If the data is invalid
            if (data == null)
                return null;

            // If the offset is out of bounds
            if (offset < 0 || offset >= data.Length)
                return null;

            // Create a memory stream and use that
            MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
            return Create(dataStream);
        }

        /// <summary>
        /// Create a BFPK archive from a Stream
        /// </summary>
        /// <param name="data">Stream representing the archive</param>
        /// <returns>A BFPK archive wrapper on success, null on failure</returns>
#if NET48
        public static BFPK Create(Stream data)
#else
        public static BFPK? Create(Stream? data)
#endif
        {
            // If the data is invalid
            if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
                return null;

            // Attempt to deserialize the archive model from the stream
            var archive = new SabreTools.Serialization.Streams.BFPK().Deserialize(data);
            if (archive == null)
                return null;

            try
            {
                return new BFPK(archive, data);
            }
            catch
            {
                return null;
            }
        }

        #endregion

        #region Data

        /// <summary>
        /// Extract all files from the BFPK to an output directory
        /// </summary>
        /// <param name="outputDirectory">Output directory to write to</param>
        /// <returns>True if all files extracted, false otherwise</returns>
        public bool ExtractAll(string outputDirectory)
        {
            // If we have no files
            if (this.Model.Files == null || this.Model.Files.Length == 0)
                return false;

            // Loop through and extract all files to the output
            bool allExtracted = true;
            for (int i = 0; i < this.Model.Files.Length; i++)
            {
                allExtracted &= ExtractFile(i, outputDirectory);
            }

            return allExtracted;
        }

        /// <summary>
        /// Extract a file from the BFPK to an output directory by index
        /// </summary>
        /// <param name="index">File index to extract</param>
        /// <param name="outputDirectory">Output directory to write to</param>
        /// <returns>True if the file extracted, false otherwise</returns>
        public bool ExtractFile(int index, string outputDirectory)
        {
            // If we have no files
            if (this.Model.Files == null || this.Model.Files.Length == 0)
                return false;

            // If we have an invalid index
            if (index < 0 || index >= this.Model.Files.Length)
                return false;

            // Get the file information
            var file = this.Model.Files[index];
            if (file == null)
                return false;

            // Get the read index and length
            // NOTE(review): Offset + 4 presumably skips a 4-byte length prefix
            // preceding the payload -- confirm against the BFPK format.
            int offset = file.Offset + 4;
            int compressedSize = file.CompressedSize;

            // Some files can lack the length prefix; if the stated compressed
            // size exceeds the file length, read the uncompressed payload
            // directly from the original offset instead
            if (compressedSize > GetEndOfFile())
            {
                offset -= 4;
                compressedSize = file.UncompressedSize;
            }

            try
            {
                // Ensure the output directory exists
                Directory.CreateDirectory(outputDirectory);

                // Create the output path, falling back to a generated name
                string filePath = Path.Combine(outputDirectory, file.Name ?? $"file{index}");

                // File.Create truncates a preexisting file, so stale trailing
                // bytes from an earlier, longer extraction cannot survive
                // (File.OpenWrite does not truncate)
                using (FileStream fs = File.Create(filePath))
                {
                    // Read the data block
#if NET48
                    byte[] data = ReadFromDataSource(offset, compressedSize);
#else
                    byte[]? data = ReadFromDataSource(offset, compressedSize);
#endif
                    if (data == null)
                        return false;

                    // If we have uncompressed data, write it through directly
                    if (compressedSize == file.UncompressedSize)
                    {
                        fs.Write(data, 0, compressedSize);
                    }
                    else
                    {
                        // Inflate zlib-compressed data; dispose the stream
                        // chain so buffered output is flushed and released
                        using (MemoryStream ms = new MemoryStream(data))
                        using (ZlibStream zs = new ZlibStream(ms, CompressionMode.Decompress))
                        {
                            zs.CopyTo(fs);
                        }
                    }
                }

                return true;
            }
            catch
            {
                // Extraction is best-effort; any I/O or inflation error maps to false
                return false;
            }
        }

        #endregion

        #region Printing

        /// <inheritdoc/>
        public override StringBuilder PrettyPrint()
        {
            StringBuilder builder = new StringBuilder();
            Printing.BFPK.Print(builder, this.Model);
            return builder;
        }

        #endregion
    }
}

View File

@@ -1,389 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
using static SabreTools.Models.BSP.Constants;
namespace BinaryObjectScanner.Wrappers
{
    /// <summary>
    /// Wrapper around a parsed Half-Life BSP level that supports pretty-printing
    /// plus extraction of lumps and palettized textures (as 8bpp BMP files).
    /// </summary>
    public class BSP : WrapperBase<SabreTools.Models.BSP.File>
    {
        #region Descriptive Properties

        /// <inheritdoc/>
        public override string DescriptionString => "Half-Life Level (BSP)";

        #endregion

        #region Constructors

        /// <inheritdoc/>
#if NET48
        public BSP(SabreTools.Models.BSP.File model, byte[] data, int offset)
#else
        public BSP(SabreTools.Models.BSP.File? model, byte[]? data, int offset)
#endif
            : base(model, data, offset)
        {
            // All logic is handled by the base class
        }

        /// <inheritdoc/>
#if NET48
        public BSP(SabreTools.Models.BSP.File model, Stream data)
#else
        public BSP(SabreTools.Models.BSP.File? model, Stream? data)
#endif
            : base(model, data)
        {
            // All logic is handled by the base class
        }

        /// <summary>
        /// Create a BSP from a byte array and offset
        /// </summary>
        /// <param name="data">Byte array representing the BSP</param>
        /// <param name="offset">Offset within the array to parse</param>
        /// <returns>A BSP wrapper on success, null on failure</returns>
#if NET48
        public static BSP Create(byte[] data, int offset)
#else
        public static BSP? Create(byte[]? data, int offset)
#endif
        {
            // If the data is invalid
            if (data == null)
                return null;

            // If the offset is out of bounds
            if (offset < 0 || offset >= data.Length)
                return null;

            // Create a memory stream and use that
            MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
            return Create(dataStream);
        }

        /// <summary>
        /// Create a BSP from a Stream
        /// </summary>
        /// <param name="data">Stream representing the BSP</param>
        /// <returns>A BSP wrapper on success, null on failure</returns>
#if NET48
        public static BSP Create(Stream data)
#else
        public static BSP? Create(Stream? data)
#endif
        {
            // If the data is invalid
            if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
                return null;

            // Attempt to deserialize the file model from the stream
            var file = new SabreTools.Serialization.Streams.BSP().Deserialize(data);
            if (file == null)
                return null;

            try
            {
                return new BSP(file, data);
            }
            catch
            {
                return null;
            }
        }

        #endregion

        #region Printing

        /// <inheritdoc/>
        public override StringBuilder PrettyPrint()
        {
            StringBuilder builder = new StringBuilder();
            Printing.BSP.Print(builder, this.Model);
            return builder;
        }

        #endregion

        #region Extraction

        /// <summary>
        /// Extract all lumps from the BSP to an output directory
        /// </summary>
        /// <param name="outputDirectory">Output directory to write to</param>
        /// <returns>True if all lumps extracted, false otherwise</returns>
        public bool ExtractAllLumps(string outputDirectory)
        {
            // If we have no lumps
            if (this.Model.Lumps == null || this.Model.Lumps.Length == 0)
                return false;

            // Loop through and extract all lumps to the output
            bool allExtracted = true;
            for (int i = 0; i < this.Model.Lumps.Length; i++)
            {
                allExtracted &= ExtractLump(i, outputDirectory);
            }

            return allExtracted;
        }

        /// <summary>
        /// Extract a lump from the BSP to an output directory by index
        /// </summary>
        /// <param name="index">Lump index to extract</param>
        /// <param name="outputDirectory">Output directory to write to</param>
        /// <returns>True if the lump extracted, false otherwise</returns>
        public bool ExtractLump(int index, string outputDirectory)
        {
            // If we have no lumps
            if (this.Model.Lumps == null || this.Model.Lumps.Length == 0)
                return false;

            // If the lumps index is invalid
            if (index < 0 || index >= this.Model.Lumps.Length)
                return false;

            // Get the lump
            var lump = this.Model.Lumps[index];
            if (lump == null)
                return false;

            // Read the lump's raw data from the backing source
#if NET48
            byte[] data = ReadFromDataSource((int)lump.Offset, (int)lump.Length);
#else
            byte[]? data = ReadFromDataSource((int)lump.Offset, (int)lump.Length);
#endif
            if (data == null)
                return false;

            // Create the filename, using well-known names for special lumps
            string filename = $"lump_{index}.bin";
            switch (index)
            {
                case HL_BSP_LUMP_ENTITIES:
                    filename = "entities.ent";
                    break;
                case HL_BSP_LUMP_TEXTUREDATA:
                    filename = "texture_data.bin";
                    break;
            }

            // If we have an invalid output directory
            if (string.IsNullOrWhiteSpace(outputDirectory))
                return false;

            // Create the full output path
            filename = Path.Combine(outputDirectory, filename);

            // Ensure the output directory is created
#if NET48
            string directoryName = Path.GetDirectoryName(filename);
#else
            string? directoryName = Path.GetDirectoryName(filename);
#endif
            if (directoryName != null)
                Directory.CreateDirectory(directoryName);

            // Try to write the data
            try
            {
                // File.Create truncates a preexisting file, so stale trailing
                // bytes from an earlier, longer extraction cannot survive
                // (File.OpenWrite does not truncate)
                using (Stream fs = File.Create(filename))
                {
                    fs.Write(data, 0, data.Length);
                }
            }
            catch
            {
                return false;
            }

            return true;
        }

        /// <summary>
        /// Extract all textures from the BSP to an output directory
        /// </summary>
        /// <param name="outputDirectory">Output directory to write to</param>
        /// <returns>True if all textures extracted, false otherwise</returns>
        public bool ExtractAllTextures(string outputDirectory)
        {
            // If we have no textures
            // NOTE(review): this iterates TextureHeader.Offsets while
            // ExtractTexture indexes Model.Textures -- confirm both arrays are
            // always the same length.
            if (this.Model.TextureHeader?.Offsets == null || this.Model.TextureHeader.Offsets.Length == 0)
                return false;

            // Loop through and extract all textures to the output
            bool allExtracted = true;
            for (int i = 0; i < this.Model.TextureHeader.Offsets.Length; i++)
            {
                allExtracted &= ExtractTexture(i, outputDirectory);
            }

            return allExtracted;
        }

        /// <summary>
        /// Extract a texture from the BSP to an output directory by index
        /// </summary>
        /// <param name="index">Texture index to extract</param>
        /// <param name="outputDirectory">Output directory to write to</param>
        /// <returns>True if the texture extracted, false otherwise</returns>
        public bool ExtractTexture(int index, string outputDirectory)
        {
            // If we have no textures
            if (this.Model.Textures == null || this.Model.Textures.Length == 0)
                return false;

            // If the texture index is invalid
            if (index < 0 || index >= this.Model.Textures.Length)
                return false;

            // Get the texture
            var texture = this.Model.Textures[index];
            if (texture == null)
                return false;

            // Format the texture and palette as a BMP file
#if NET48
            byte[] data = CreateTextureData(texture);
#else
            byte[]? data = CreateTextureData(texture);
#endif
            if (data == null)
                return false;

            // Create the filename
            string filename = $"{texture.Name}.bmp";

            // If we have an invalid output directory
            if (string.IsNullOrWhiteSpace(outputDirectory))
                return false;

            // Create the full output path
            filename = Path.Combine(outputDirectory, filename);

            // Ensure the output directory is created
#if NET48
            string directoryName = Path.GetDirectoryName(filename);
#else
            string? directoryName = Path.GetDirectoryName(filename);
#endif
            if (directoryName != null)
                Directory.CreateDirectory(directoryName);

            // Try to write the data
            try
            {
                // File.Create truncates a preexisting file, so stale trailing
                // bytes from an earlier, longer extraction cannot survive
                using (Stream fs = File.Create(filename))
                {
                    fs.Write(data, 0, data.Length);
                }
            }
            catch
            {
                return false;
            }

            return true;
        }

        /// <summary>
        /// Create a bitmap from the texture and palette data
        /// </summary>
        /// <param name="texture">Texture object to format</param>
        /// <returns>Byte array representing the texture as a bitmap, null on failure</returns>
#if NET48
        private static byte[] CreateTextureData(SabreTools.Models.BSP.Texture texture)
#else
        private static byte[]? CreateTextureData(SabreTools.Models.BSP.Texture texture)
#endif
        {
            // If there's no palette data
            if (texture.PaletteData == null || texture.PaletteData.Length == 0)
                return null;

            // If there's no texture data
            if (texture.TextureData == null || texture.TextureData.Length == 0)
                return null;

            // Create the bitmap file header; Type is the little-endian "BM" magic
            var fileHeader = new SabreTools.Models.BMP.BITMAPFILEHEADER()
            {
                Type = ('M' << 8) | 'B',
                Size = 14 + 40 + (texture.PaletteSize * 4) + (texture.Width * texture.Height),
                OffBits = 14 + 40 + (texture.PaletteSize * 4),
            };

            // Create the bitmap info header for an 8bpp palettized image
            var infoHeader = new SabreTools.Models.BMP.BITMAPINFOHEADER
            {
                Size = 40,
                Width = (int)texture.Width,
                Height = (int)texture.Height,
                Planes = 1,
                BitCount = 8,
                SizeImage = 0,
                ClrUsed = texture.PaletteSize,
                ClrImportant = texture.PaletteSize,
            };

            // Reformat the RGB palette triples as the BGR0 quads BMP expects
            byte[] paletteData = new byte[texture.PaletteSize * 4];
            for (uint i = 0; i < texture.PaletteSize; i++)
            {
                paletteData[i * 4 + 0] = texture.PaletteData[i * 3 + 2];
                paletteData[i * 4 + 1] = texture.PaletteData[i * 3 + 1];
                paletteData[i * 4 + 2] = texture.PaletteData[i * 3 + 0];
                paletteData[i * 4 + 3] = 0;
            }

            // Reformat the pixel data; BMP stores rows bottom-up
            byte[] pixelData = new byte[texture.Width * texture.Height];
            for (uint i = 0; i < texture.Width; i++)
            {
                for (uint j = 0; j < texture.Height; j++)
                {
                    pixelData[i + ((texture.Height - 1 - j) * texture.Width)] = texture.TextureData[i + j * texture.Width];
                }
            }

            // Build the file data
            List<byte> buffer = new List<byte>();

            // Bitmap file header
            buffer.AddRange(BitConverter.GetBytes(fileHeader.Type));
            buffer.AddRange(BitConverter.GetBytes(fileHeader.Size));
            buffer.AddRange(BitConverter.GetBytes(fileHeader.Reserved1));
            buffer.AddRange(BitConverter.GetBytes(fileHeader.Reserved2));
            buffer.AddRange(BitConverter.GetBytes(fileHeader.OffBits));

            // Bitmap info header
            buffer.AddRange(BitConverter.GetBytes(infoHeader.Size));
            buffer.AddRange(BitConverter.GetBytes(infoHeader.Width));
            buffer.AddRange(BitConverter.GetBytes(infoHeader.Height));
            buffer.AddRange(BitConverter.GetBytes(infoHeader.Planes));
            buffer.AddRange(BitConverter.GetBytes(infoHeader.BitCount));
            buffer.AddRange(BitConverter.GetBytes(infoHeader.Compression));
            buffer.AddRange(BitConverter.GetBytes(infoHeader.SizeImage));
            buffer.AddRange(BitConverter.GetBytes(infoHeader.XPelsPerMeter));
            buffer.AddRange(BitConverter.GetBytes(infoHeader.YPelsPerMeter));
            buffer.AddRange(BitConverter.GetBytes(infoHeader.ClrUsed));
            buffer.AddRange(BitConverter.GetBytes(infoHeader.ClrImportant));

            // Palette data
            buffer.AddRange(paletteData);

            // Pixel data
            buffer.AddRange(pixelData);

            return buffer.ToArray();
        }

        #endregion
    }
}

View File

@@ -30,8 +30,8 @@
<ItemGroup>
<PackageReference Include="SabreTools.IO" Version="1.1.1" />
<PackageReference Include="SabreTools.Models" Version="1.1.1" />
<PackageReference Include="SabreTools.Serialization" Version="1.1.1" />
<PackageReference Include="SabreTools.Models" Version="1.1.2" />
<PackageReference Include="SabreTools.Serialization" Version="1.1.3" />
<PackageReference Include="SharpCompress" Version="0.32.2" />
<PackageReference Include="SharpZipLib" Version="1.4.1" />
</ItemGroup>

View File

@@ -1,365 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
namespace BinaryObjectScanner.Wrappers
{
    /// <summary>
    /// Wrapper around a parsed CFB (OLE2) container, exposing FAT and
    /// Mini FAT sector-chain traversal helpers over the backing data source.
    /// </summary>
    public class CFB : WrapperBase<SabreTools.Models.CFB.Binary>
    {
        #region Descriptive Properties

        /// <inheritdoc/>
        // NOTE(review): "Compact" looks like a typo for "Compound File Binary"
        // (the factory docs below say "Compound") -- confirm before changing
        // this user-visible string.
        public override string DescriptionString => "Compact File Binary";

        #endregion

        #region Extension Properties

        /// <summary>
        /// Normal sector size in bytes (2^SectorShift from the header)
        /// </summary>
#if NET48
        public long SectorSize => (long)Math.Pow(2, this.Model.Header.SectorShift);
#else
        public long SectorSize => (long)Math.Pow(2, this.Model.Header?.SectorShift ?? 0);
#endif

        /// <summary>
        /// Mini sector size in bytes (2^MiniSectorShift from the header)
        /// </summary>
#if NET48
        public long MiniSectorSize => (long)Math.Pow(2, this.Model.Header.MiniSectorShift);
#else
        public long MiniSectorSize => (long)Math.Pow(2, this.Model.Header?.MiniSectorShift ?? 0);
#endif

        #endregion

        #region Constructors

        /// <inheritdoc/>
#if NET48
        public CFB(SabreTools.Models.CFB.Binary model, byte[] data, int offset)
#else
        public CFB(SabreTools.Models.CFB.Binary? model, byte[]? data, int offset)
#endif
            : base(model, data, offset)
        {
            // All logic is handled by the base class
        }

        /// <inheritdoc/>
#if NET48
        public CFB(SabreTools.Models.CFB.Binary model, Stream data)
#else
        public CFB(SabreTools.Models.CFB.Binary? model, Stream? data)
#endif
            : base(model, data)
        {
            // All logic is handled by the base class
        }

        /// <summary>
        /// Create a Compound File Binary from a byte array and offset
        /// </summary>
        /// <param name="data">Byte array representing the archive</param>
        /// <param name="offset">Offset within the array to parse</param>
        /// <returns>A Compound File Binary wrapper on success, null on failure</returns>
#if NET48
        public static CFB Create(byte[] data, int offset)
#else
        public static CFB? Create(byte[]? data, int offset)
#endif
        {
            // If the data is invalid
            if (data == null)
                return null;

            // If the offset is out of bounds
            if (offset < 0 || offset >= data.Length)
                return null;

            // Create a memory stream and use that
            MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
            return Create(dataStream);
        }

        /// <summary>
        /// Create a Compound File Binary from a Stream
        /// </summary>
        /// <param name="data">Stream representing the archive</param>
        /// <returns>A Compound File Binary wrapper on success, null on failure</returns>
#if NET48
        public static CFB Create(Stream data)
#else
        public static CFB? Create(Stream? data)
#endif
        {
            // If the data is invalid
            if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
                return null;

            // Attempt to deserialize the binary model from the stream
            var binary = new SabreTools.Serialization.Streams.CFB().Deserialize(data);
            if (binary == null)
                return null;

            try
            {
                return new CFB(binary, data);
            }
            catch
            {
                return null;
            }
        }

        #endregion

        #region FAT Sector Data

        /// <summary>
        /// Get the ordered FAT sector chain for a given starting sector
        /// </summary>
        /// <param name="startingSector">Initial FAT sector</param>
        /// <returns>Ordered list of sector numbers, null on error</returns>
#if NET48
        public List<SabreTools.Models.CFB.SectorNumber> GetFATSectorChain(SabreTools.Models.CFB.SectorNumber startingSector)
#else
        public List<SabreTools.Models.CFB.SectorNumber?>? GetFATSectorChain(SabreTools.Models.CFB.SectorNumber? startingSector)
#endif
        {
            // If we have an invalid sector
#if NET48
            if (startingSector < 0 || this.Model.FATSectorNumbers == null || (long)startingSector >= this.Model.FATSectorNumbers.Length)
#else
            if (startingSector == null || startingSector < 0 || this.Model.FATSectorNumbers == null || (long)startingSector >= this.Model.FATSectorNumbers.Length)
#endif
                return null;

            // Setup the returned list, seeded with the starting sector
#if NET48
            var sectors = new List<SabreTools.Models.CFB.SectorNumber> { startingSector };
#else
            var sectors = new List<SabreTools.Models.CFB.SectorNumber?> { startingSector };
#endif

            // Walk the FAT, appending each successor until a terminator.
            // NOTE(review): there is no visited-sector guard, so a malformed
            // file whose FAT contains a cycle would loop forever -- consider
            // bounding the walk by FATSectorNumbers.Length.
            var lastSector = startingSector;
            while (true)
            {
#if NET6_0_OR_GREATER
                if (lastSector == null)
                    break;
#endif
                // Get the next sector from the lookup table
#if NET48
                var nextSector = this.Model.FATSectorNumbers[(uint)lastSector];
#else
                var nextSector = this.Model.FATSectorNumbers[(uint)lastSector!.Value];
#endif

                // If we have an end of chain or free sector
                if (nextSector == SabreTools.Models.CFB.SectorNumber.ENDOFCHAIN || nextSector == SabreTools.Models.CFB.SectorNumber.FREESECT)
                    break;

                // Add the next sector to the list and replace the last sector
                sectors.Add(nextSector);
                lastSector = nextSector;
            }

            return sectors;
        }

        /// <summary>
        /// Get the data for the FAT sector chain starting at a given starting sector
        /// </summary>
        /// <param name="startingSector">Initial FAT sector</param>
        /// <returns>Concatenated data of all sectors in the chain, null on error</returns>
#if NET48
        public byte[] GetFATSectorChainData(SabreTools.Models.CFB.SectorNumber startingSector)
#else
        public byte[]? GetFATSectorChainData(SabreTools.Models.CFB.SectorNumber startingSector)
#endif
        {
            // Get the sector chain first
            var sectorChain = GetFATSectorChain(startingSector);
            if (sectorChain == null)
                return null;

            // Sequentially read the sectors and concatenate their contents
            var data = new List<byte>();
            for (int i = 0; i < sectorChain.Count; i++)
            {
                // Try to get the sector data offset; a negative offset means
                // the sector number could not be converted
                int sectorDataOffset = (int)FATSectorToFileOffset(sectorChain[i]);
                if (sectorDataOffset < 0 || sectorDataOffset >= GetEndOfFile())
                    return null;

                // Try to read the sector data
                var sectorData = ReadFromDataSource(sectorDataOffset, (int)SectorSize);
                if (sectorData == null)
                    return null;

                // Add the sector data to the output
                data.AddRange(sectorData);
            }

            return data.ToArray();
        }

        /// <summary>
        /// Convert a FAT sector value to a byte offset
        /// </summary>
        /// <param name="sector">Sector to convert</param>
        /// <returns>File offset in bytes, -1 on error</returns>
#if NET48
        public long FATSectorToFileOffset(SabreTools.Models.CFB.SectorNumber sector)
#else
        public long FATSectorToFileOffset(SabreTools.Models.CFB.SectorNumber? sector)
#endif
        {
            // If we have an invalid sector number (above MAXREGSECT the value
            // is a reserved marker, not a real sector)
#if NET48
            if (sector > SabreTools.Models.CFB.SectorNumber.MAXREGSECT)
#else
            if (sector == null || sector > SabreTools.Models.CFB.SectorNumber.MAXREGSECT)
#endif
                return -1;

            // Convert based on the sector shift value; sector 0 starts after
            // the one-sector header, hence the +1
            return (long)(sector + 1) * SectorSize;
        }

        #endregion

        #region Mini FAT Sector Data

        /// <summary>
        /// Get the ordered Mini FAT sector chain for a given starting sector
        /// </summary>
        /// <param name="startingSector">Initial Mini FAT sector</param>
        /// <returns>Ordered list of sector numbers, null on error</returns>
#if NET48
        public List<SabreTools.Models.CFB.SectorNumber> GetMiniFATSectorChain(SabreTools.Models.CFB.SectorNumber startingSector)
#else
        public List<SabreTools.Models.CFB.SectorNumber?>? GetMiniFATSectorChain(SabreTools.Models.CFB.SectorNumber? startingSector)
#endif
        {
            // If we have an invalid sector
#if NET48
            if (startingSector < 0 || this.Model.MiniFATSectorNumbers == null || (long)startingSector >= this.Model.MiniFATSectorNumbers.Length)
#else
            if (startingSector == null || startingSector < 0 || this.Model.MiniFATSectorNumbers == null || (long)startingSector >= this.Model.MiniFATSectorNumbers.Length)
#endif
                return null;

            // Setup the returned list, seeded with the starting sector
#if NET48
            var sectors = new List<SabreTools.Models.CFB.SectorNumber> { startingSector };
#else
            var sectors = new List<SabreTools.Models.CFB.SectorNumber?> { startingSector };
#endif

            // Walk the Mini FAT, appending each successor until a terminator.
            // NOTE(review): same missing cycle guard as GetFATSectorChain.
            var lastSector = startingSector;
            while (true)
            {
#if NET6_0_OR_GREATER
                if (lastSector == null)
                    break;
#endif
                // Get the next sector from the lookup table
#if NET48
                var nextSector = this.Model.MiniFATSectorNumbers[(uint)lastSector];
#else
                var nextSector = this.Model.MiniFATSectorNumbers[(uint)lastSector!.Value];
#endif

                // If we have an end of chain or free sector
                if (nextSector == SabreTools.Models.CFB.SectorNumber.ENDOFCHAIN || nextSector == SabreTools.Models.CFB.SectorNumber.FREESECT)
                    break;

                // Add the next sector to the list and replace the last sector
                sectors.Add(nextSector);
                lastSector = nextSector;
            }

            return sectors;
        }

        /// <summary>
        /// Get the data for the Mini FAT sector chain starting at a given starting sector
        /// </summary>
        /// <param name="startingSector">Initial Mini FAT sector</param>
        /// <returns>Concatenated data of all mini sectors in the chain, null on error</returns>
#if NET48
        public byte[] GetMiniFATSectorChainData(SabreTools.Models.CFB.SectorNumber startingSector)
#else
        public byte[]? GetMiniFATSectorChainData(SabreTools.Models.CFB.SectorNumber startingSector)
#endif
        {
            // Get the sector chain first
            var sectorChain = GetMiniFATSectorChain(startingSector);
            if (sectorChain == null)
                return null;

            // Sequentially read the sectors and concatenate their contents
            var data = new List<byte>();
            for (int i = 0; i < sectorChain.Count; i++)
            {
                // Try to get the sector data offset; a negative offset means
                // the sector number could not be converted
                int sectorDataOffset = (int)MiniFATSectorToFileOffset(sectorChain[i]);
                if (sectorDataOffset < 0 || sectorDataOffset >= GetEndOfFile())
                    return null;

                // Try to read the sector data
                var sectorData = ReadFromDataSource(sectorDataOffset, (int)MiniSectorSize);
                if (sectorData == null)
                    return null;

                // Add the sector data to the output
                data.AddRange(sectorData);
            }

            return data.ToArray();
        }

        /// <summary>
        /// Convert a Mini FAT sector value to a byte offset
        /// </summary>
        /// <param name="sector">Sector to convert</param>
        /// <returns>File offset in bytes, -1 on error</returns>
#if NET48
        public long MiniFATSectorToFileOffset(SabreTools.Models.CFB.SectorNumber sector)
#else
        public long MiniFATSectorToFileOffset(SabreTools.Models.CFB.SectorNumber? sector)
#endif
        {
            // If we have an invalid sector number (above MAXREGSECT the value
            // is a reserved marker, not a real sector)
#if NET48
            if (sector > SabreTools.Models.CFB.SectorNumber.MAXREGSECT)
#else
            if (sector == null || sector > SabreTools.Models.CFB.SectorNumber.MAXREGSECT)
#endif
                return -1;

            // Convert based on the sector shift value
            return (long)(sector + 1) * MiniSectorSize;
        }

        #endregion

        #region Printing

        /// <inheritdoc/>
        public override StringBuilder PrettyPrint()
        {
            StringBuilder builder = new StringBuilder();
            Printing.CFB.Print(builder, this.Model);
            return builder;
        }

        #endregion
    }
}

View File

@@ -1,107 +0,0 @@
using System.IO;
using System.Text;
namespace BinaryObjectScanner.Wrappers
{
    /// <summary>
    /// Wrapper for a deserialized CTR Importable Archive backed by a byte array or Stream
    /// </summary>
    public class CIA : WrapperBase<SabreTools.Models.N3DS.CIA>
    {
        #region Descriptive Properties

        /// <inheritdoc/>
        public override string DescriptionString => "CTR Importable Archive (CIA)";

        #endregion

        #region Constructors

        /// <inheritdoc/>
#if NET48
        public CIA(SabreTools.Models.N3DS.CIA model, byte[] data, int offset)
#else
        public CIA(SabreTools.Models.N3DS.CIA? model, byte[]? data, int offset)
#endif
            : base(model, data, offset)
        {
            // Construction is handled entirely by WrapperBase
        }

        /// <inheritdoc/>
#if NET48
        public CIA(SabreTools.Models.N3DS.CIA model, Stream data)
#else
        public CIA(SabreTools.Models.N3DS.CIA? model, Stream? data)
#endif
            : base(model, data)
        {
            // Construction is handled entirely by WrapperBase
        }

        /// <summary>
        /// Create a CIA archive from a byte array and offset
        /// </summary>
        /// <param name="data">Byte array representing the archive</param>
        /// <param name="offset">Offset within the array to parse</param>
        /// <returns>A CIA archive wrapper on success, null on failure</returns>
#if NET48
        public static CIA Create(byte[] data, int offset)
#else
        public static CIA? Create(byte[]? data, int offset)
#endif
        {
            // Reject a missing array or an offset outside of it
            if (data == null || offset < 0 || offset >= data.Length)
                return null;

            // Defer to the Stream-based factory
            var stream = new MemoryStream(data, offset, data.Length - offset);
            return Create(stream);
        }

        /// <summary>
        /// Create a CIA archive from a Stream
        /// </summary>
        /// <param name="data">Stream representing the archive</param>
        /// <returns>A CIA archive wrapper on success, null on failure</returns>
#if NET48
        public static CIA Create(Stream data)
#else
        public static CIA? Create(Stream? data)
#endif
        {
            // The stream must exist, be non-empty, and support seeking and reading
            if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
                return null;

            // Attempt to deserialize the model from the stream
            var model = new SabreTools.Serialization.Streams.CIA().Deserialize(data);
            if (model == null)
                return null;

            try
            {
                return new CIA(model, data);
            }
            catch
            {
                // Any wrapper construction failure maps to null
                return null;
            }
        }

        #endregion

        #region Printing

        /// <inheritdoc/>
        public override StringBuilder PrettyPrint()
        {
            var sb = new StringBuilder();
            Printing.CIA.Print(sb, this.Model);
            return sb;
        }

        #endregion
    }
}

View File

@@ -1,82 +0,0 @@
#if NET6_0_OR_GREATER
using System;
using System.Reflection;
using System.Text.Json;
using System.Text.Json.Serialization;
namespace BinaryObjectScanner.Wrappers
{
/// <summary>
/// Serializer class for abstract classes
/// </summary>
/// <see href="https://stackoverflow.com/a/72775719"/>
internal class ConcreteAbstractSerializer : JsonConverterFactory
{
    // Only abstract types are routed through this factory
    public override bool CanConvert(Type typeToConvert) => typeToConvert.IsAbstract;

    class ConcreteAbstractSerializerOfType<TAbstract> : JsonConverter<TAbstract>
    {
        static ConcreteAbstractSerializerOfType()
        {
            // Guard against the generic being closed over a concrete type
            if (!typeof(TAbstract).IsAbstract && !typeof(TAbstract).IsInterface)
                throw new NotImplementedException(string.Format("Concrete class {0} is not supported", typeof(TAbstract)));
        }

        // Deserialization back to an abstract type is intentionally unsupported
        public override TAbstract? Read(ref System.Text.Json.Utf8JsonReader reader, Type typeToConvert, System.Text.Json.JsonSerializerOptions options) =>
            throw new NotImplementedException();

        // Serialize via object so the runtime (concrete) type's members are written
        public override void Write(System.Text.Json.Utf8JsonWriter writer, TAbstract value, System.Text.Json.JsonSerializerOptions options) =>
            JsonSerializer.Serialize<object>(writer, value!, options);
    }

    // Close the generic converter over the requested abstract type via reflection
    public override JsonConverter CreateConverter(Type type, JsonSerializerOptions options) =>
        (JsonConverter)Activator.CreateInstance(
            typeof(ConcreteAbstractSerializerOfType<>).MakeGenericType(new Type[] { type }),
            BindingFlags.Instance | BindingFlags.Public,
            binder: null,
            args: Array.Empty<object>(),
            culture: null).ThrowOnNull();
}
/// <summary>
/// Serializer class for interfaces
/// </summary>
/// <see href="https://stackoverflow.com/a/72775719"/>
internal class ConcreteInterfaceSerializer : JsonConverterFactory
{
    // Only interface types are routed through this factory
    public override bool CanConvert(Type typeToConvert) => typeToConvert.IsInterface;

    class ConcreteInterfaceSerializerOfType<TInterface> : JsonConverter<TInterface>
    {
        static ConcreteInterfaceSerializerOfType()
        {
            // Guard against the generic being closed over a concrete type
            if (!typeof(TInterface).IsAbstract && !typeof(TInterface).IsInterface)
                throw new NotImplementedException(string.Format("Concrete class {0} is not supported", typeof(TInterface)));
        }

        // Deserialization back to an interface type is intentionally unsupported
        public override TInterface? Read(ref System.Text.Json.Utf8JsonReader reader, Type typeToConvert, System.Text.Json.JsonSerializerOptions options) =>
            throw new NotImplementedException();

        // Serialize via object so the runtime (concrete) type's members are written
        public override void Write(System.Text.Json.Utf8JsonWriter writer, TInterface value, System.Text.Json.JsonSerializerOptions options) =>
            JsonSerializer.Serialize<object>(writer, value!, options);
    }

    // Close the generic converter over the requested interface type via reflection
    public override JsonConverter CreateConverter(Type type, JsonSerializerOptions options) =>
        (JsonConverter)Activator.CreateInstance(
            typeof(ConcreteInterfaceSerializerOfType<>).MakeGenericType(new Type[] { type }),
            BindingFlags.Instance | BindingFlags.Public,
            binder: null,
            args: Array.Empty<object>(),
            culture: null).ThrowOnNull();
}
/// <summary>
/// Extensions for generic object types
/// </summary>
/// <see href="https://stackoverflow.com/a/72775719"/>
internal static class ObjectExtensions
{
    /// <summary>
    /// Return <paramref name="value"/> if non-null; otherwise throw.
    /// </summary>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="value"/> is null</exception>
    // Fix: include the parameter name in the exception so callers get a
    // meaningful diagnostic instead of a bare ArgumentNullException.
    public static T ThrowOnNull<T>(this T? value) where T : class => value ?? throw new ArgumentNullException(nameof(value));
}
}
#endif

View File

@@ -1,23 +0,0 @@
namespace BinaryObjectScanner.Wrappers
{
/// <summary>
/// Location that the data originated from
/// </summary>
public enum DataSource
{
    /// <summary>
    /// Unknown origin / testing
    /// </summary>
    UNKNOWN = 0,

    /// <summary>
    /// Byte array with offset
    /// </summary>
    ByteArray = 1,

    /// <summary>
    /// Stream
    /// </summary>
    // NOTE(review): numeric values are explicit; keep them stable if they are
    // ever persisted or compared outside this assembly -- confirm usage.
    Stream = 2,
}
}

File diff suppressed because it is too large Load Diff

View File

@@ -1,416 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
namespace BinaryObjectScanner.Wrappers
{
public class GCF : WrapperBase<SabreTools.Models.GCF.File>
{
    #region Descriptive Properties

    /// <inheritdoc/>
    public override string DescriptionString => "Half-Life Game Cache File (GCF)";

    #endregion

    #region Extension Properties

    /// <summary>
    /// Set of all files and their information
    /// </summary>
    /// <remarks>
    /// Lazily built on first access from the directory, directory map, and
    /// block entry tables, then cached in <c>_files</c>. Returns null when any
    /// of the required model tables is missing.
    /// </remarks>
#if NET48
    public FileInfo[] Files
#else
    public FileInfo[]? Files
#endif
    {
        get
        {
            // Use the cached value if we have it
            if (_files != null)
                return _files;

            // If we don't have a required property
            if (this.Model.DirectoryEntries == null || this.Model.DirectoryMapEntries == null || this.Model.BlockEntries == null)
                return null;

            // Otherwise, scan and build the files
            var files = new List<FileInfo>();
            for (int i = 0; i < this.Model.DirectoryEntries.Length; i++)
            {
                // Get the directory entry and its parallel map entry
                var directoryEntry = this.Model.DirectoryEntries[i];
                var directoryMapEntry = this.Model.DirectoryMapEntries[i];
                if (directoryEntry == null || directoryMapEntry == null)
                    continue;

                // If we have a directory, skip for now
                if (!directoryEntry.DirectoryFlags.HasFlag(SabreTools.Models.GCF.HL_GCF_FLAG.HL_GCF_FLAG_FILE))
                    continue;

                // Otherwise, start building the file info
                var fileInfo = new FileInfo()
                {
                    Size = directoryEntry.ItemSize,
                    Encrypted = directoryEntry.DirectoryFlags.HasFlag(SabreTools.Models.GCF.HL_GCF_FLAG.HL_GCF_FLAG_ENCRYPTED),
                };

                var pathParts = new List<string> { directoryEntry.Name ?? string.Empty };
#if NET48
                var blockEntries = new List<SabreTools.Models.GCF.BlockEntry>();
#else
                var blockEntries = new List<SabreTools.Models.GCF.BlockEntry?>();
#endif

                // Traverse the parent tree; 0xFFFFFFFF marks the root
                uint index = directoryEntry.ParentIndex;
                while (index != 0xFFFFFFFF)
                {
                    var parentDirectoryEntry = this.Model.DirectoryEntries[index];
                    if (parentDirectoryEntry == null)
                        break;

                    pathParts.Add(parentDirectoryEntry.Name ?? string.Empty);
                    index = parentDirectoryEntry.ParentIndex;
                }

                // Traverse the block entries
                // NOTE(review): if DataBlockHeader is null the lifted uint
                // comparison below is always true, so the walk only stops on a
                // null block entry and may index out of range -- confirm the
                // header is guaranteed non-null here.
                index = directoryMapEntry.FirstBlockIndex;
                while (index != this.Model.DataBlockHeader?.BlockCount)
                {
                    var nextBlock = this.Model.BlockEntries[index];
                    if (nextBlock == null)
                        break;

                    blockEntries.Add(nextBlock);
                    index = nextBlock.NextBlockEntryIndex;
                }

                // Reverse the path parts because of traversal
                pathParts.Reverse();

                // Build the remaining file info
                fileInfo.Path = Path.Combine(pathParts.ToArray());
                fileInfo.BlockEntries = blockEntries.ToArray();

                // Add the file info and continue
                files.Add(fileInfo);
            }

            // Set and return the file infos
            _files = files.ToArray();
            return _files;
        }
    }

    /// <summary>
    /// Set of all data block offsets
    /// </summary>
    /// <remarks>
    /// Computed once as FirstBlockOffset + i * BlockSize for each block, then
    /// cached in <c>_dataBlockOffsets</c>.
    /// </remarks>
#if NET48
    public long[] DataBlockOffsets
#else
    public long[]? DataBlockOffsets
#endif
    {
        get
        {
            // Use the cached value if we have it
            if (_dataBlockOffsets != null)
                return _dataBlockOffsets;

#if NET6_0_OR_GREATER
            // If we don't have a block count, offset, or size
            // NOTE(review): this guard is compiled only on NET6+; the NET48
            // build dereferences DataBlockHeader below without a null check.
            if (this.Model.DataBlockHeader?.BlockCount == null || this.Model.DataBlockHeader?.FirstBlockOffset == null || this.Model.DataBlockHeader?.BlockSize == null)
                return null;
#endif

            // Otherwise, build the data block set
            _dataBlockOffsets = new long[this.Model.DataBlockHeader.BlockCount];
            for (int i = 0; i < this.Model.DataBlockHeader.BlockCount; i++)
            {
                long dataBlockOffset = this.Model.DataBlockHeader.FirstBlockOffset + (i * this.Model.DataBlockHeader.BlockSize);
                _dataBlockOffsets[i] = dataBlockOffset;
            }

            // Return the set of data blocks
            return _dataBlockOffsets;
        }
    }

    #endregion

    #region Instance Variables

    /// <summary>
    /// Set of all files and their information
    /// </summary>
    // Backing cache for the Files property
#if NET48
    private FileInfo[] _files = null;
#else
    private FileInfo[]? _files = null;
#endif

    /// <summary>
    /// Set of all data block offsets
    /// </summary>
    // Backing cache for the DataBlockOffsets property
#if NET48
    private long[] _dataBlockOffsets = null;
#else
    private long[]? _dataBlockOffsets = null;
#endif

    #endregion

    #region Constructors

    /// <inheritdoc/>
#if NET48
    public GCF(SabreTools.Models.GCF.File model, byte[] data, int offset)
#else
    public GCF(SabreTools.Models.GCF.File? model, byte[]? data, int offset)
#endif
        : base(model, data, offset)
    {
        // All logic is handled by the base class
    }

    /// <inheritdoc/>
#if NET48
    public GCF(SabreTools.Models.GCF.File model, Stream data)
#else
    public GCF(SabreTools.Models.GCF.File? model, Stream? data)
#endif
        : base(model, data)
    {
        // All logic is handled by the base class
    }

    /// <summary>
    /// Create a GCF from a byte array and offset
    /// </summary>
    /// <param name="data">Byte array representing the GCF</param>
    /// <param name="offset">Offset within the array to parse</param>
    /// <returns>A GCF wrapper on success, null on failure</returns>
#if NET48
    public static GCF Create(byte[] data, int offset)
#else
    public static GCF? Create(byte[]? data, int offset)
#endif
    {
        // If the data is invalid
        if (data == null)
            return null;

        // If the offset is out of bounds
        if (offset < 0 || offset >= data.Length)
            return null;

        // Create a memory stream and use that
        MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
        return Create(dataStream);
    }

    /// <summary>
    /// Create a GCF from a Stream
    /// </summary>
    /// <param name="data">Stream representing the GCF</param>
    /// <returns>A GCF wrapper on success, null on failure</returns>
#if NET48
    public static GCF Create(Stream data)
#else
    public static GCF? Create(Stream? data)
#endif
    {
        // If the data is invalid
        if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
            return null;

        // Deserialize the stream into a model; null means an unparseable GCF
        var file = new SabreTools.Serialization.Streams.GCF().Deserialize(data);
        if (file == null)
            return null;

        try
        {
            return new GCF(file, data);
        }
        catch
        {
            return null;
        }
    }

    #endregion

    #region Printing

    /// <inheritdoc/>
    public override StringBuilder PrettyPrint()
    {
        StringBuilder builder = new StringBuilder();
        Printing.GCF.Print(builder, this.Model);
        return builder;
    }

    #endregion

    #region Extraction

    /// <summary>
    /// Extract all files from the GCF to an output directory
    /// </summary>
    /// <param name="outputDirectory">Output directory to write to</param>
    /// <returns>True if all files extracted, false otherwise</returns>
    public bool ExtractAll(string outputDirectory)
    {
        // If we have no files
        if (Files == null || Files.Length == 0)
            return false;

        // Loop through and extract all files to the output
        bool allExtracted = true;
        for (int i = 0; i < Files.Length; i++)
        {
            allExtracted &= ExtractFile(i, outputDirectory);
        }

        return allExtracted;
    }

    /// <summary>
    /// Extract a file from the GCF to an output directory by index
    /// </summary>
    /// <param name="index">File index to extract</param>
    /// <param name="outputDirectory">Output directory to write to</param>
    /// <returns>True if the file extracted, false otherwise</returns>
    public bool ExtractFile(int index, string outputDirectory)
    {
        // If we have no files
        if (Files == null || Files.Length == 0 || DataBlockOffsets == null)
            return false;

        // If the files index is invalid
        if (index < 0 || index >= Files.Length)
            return false;

        // Get the file
        var file = Files[index];
        if (file?.BlockEntries == null || file.Size == 0)
            return false;

        // If the file is encrypted -- TODO: Revisit later
        if (file.Encrypted)
            return false;

        // Get all data block offsets needed for extraction
        // NOTE(review): if DataBlockHeader is null, BlockSize ?? 0 never
        // decrements blockEntrySize and this loop cannot terminate -- confirm
        // the header is guaranteed non-null by this point.
        var dataBlockOffsets = new List<long>();
        for (int i = 0; i < file.BlockEntries.Length; i++)
        {
            var blockEntry = file.BlockEntries[i];
            if (blockEntry == null)
                continue;

            uint dataBlockIndex = blockEntry.FirstDataBlockIndex;
            long blockEntrySize = blockEntry.FileDataSize;
            while (blockEntrySize > 0)
            {
                long dataBlockOffset = DataBlockOffsets[dataBlockIndex++];
                dataBlockOffsets.Add(dataBlockOffset);
                blockEntrySize -= this.Model.DataBlockHeader?.BlockSize ?? 0;
            }
        }

        // Create the filename
#if NET48
        string filename = file.Path;
#else
        string? filename = file.Path;
#endif

        // If we have an invalid output directory
        if (string.IsNullOrWhiteSpace(outputDirectory))
            return false;

        // Create the full output path
        filename = Path.Combine(outputDirectory, filename ?? $"file{index}");

        // Ensure the output directory is created
#if NET48
        string directoryName = Path.GetDirectoryName(filename);
#else
        string? directoryName = Path.GetDirectoryName(filename);
#endif
        if (directoryName != null)
            Directory.CreateDirectory(directoryName);

        // Try to write the data
        try
        {
            // Open the output file for writing
            using (Stream fs = File.OpenWrite(filename))
            {
                // Now read the data sequentially and write out while we have data left
                // NOTE(review): fileSize is never decremented in this loop, so
                // readSize is min(BlockSize, Size) every iteration and the final
                // partial block reads a full BlockSize -- confirm whether the
                // trailing bytes should be truncated to the remaining size.
                long fileSize = file.Size;
                for (int i = 0; i < dataBlockOffsets.Count; i++)
                {
                    int readSize = (int)Math.Min(this.Model.DataBlockHeader?.BlockSize ?? 0, fileSize);
#if NET48
                    byte[] data = ReadFromDataSource((int)dataBlockOffsets[i], readSize);
#else
                    byte[]? data = ReadFromDataSource((int)dataBlockOffsets[i], readSize);
#endif
                    if (data == null)
                        return false;

                    fs.Write(data, 0, data.Length);
                }
            }
        }
        catch
        {
            return false;
        }

        return true;
    }

    #endregion

    #region Helper Classes

    /// <summary>
    /// Class to contain all necessary file information
    /// </summary>
    public sealed class FileInfo
    {
        /// <summary>
        /// Full item path
        /// </summary>
#if NET48
        public string Path;
#else
        public string? Path;
#endif

        /// <summary>
        /// File size
        /// </summary>
        public uint Size;

        /// <summary>
        /// Indicates if the block is encrypted
        /// </summary>
        public bool Encrypted;

        /// <summary>
        /// Array of block entries
        /// </summary>
#if NET48
        public SabreTools.Models.GCF.BlockEntry[] BlockEntries;
#else
        public SabreTools.Models.GCF.BlockEntry?[]? BlockEntries;
#endif
    }

    #endregion
}
}

View File

@@ -1,24 +0,0 @@
using System.Text;
namespace BinaryObjectScanner.Wrappers
{
public interface IWrapper
{
    /// <summary>
    /// Get a human-readable description of the wrapper
    /// </summary>
    string Description();

    /// <summary>
    /// Export the item information as pretty-printed text
    /// </summary>
    StringBuilder PrettyPrint();

#if NET6_0_OR_GREATER
    /// <summary>
    /// Export the item information as JSON
    /// </summary>
    /// <remarks>Only compiled for NET6_0_OR_GREATER builds</remarks>
    string ExportJSON();
#endif
}
}

View File

@@ -1,134 +0,0 @@
using System.IO;
using System.Text;
namespace BinaryObjectScanner.Wrappers
{
public partial class InstallShieldCabinet : WrapperBase<SabreTools.Models.InstallShieldCabinet.Cabinet>
{
    #region Descriptive Properties

    /// <inheritdoc/>
    public override string DescriptionString => "InstallShield Cabinet";

    #endregion

    #region Extension Properties

    /// <summary>
    /// The major version of the cabinet
    /// </summary>
    public int MajorVersion
    {
        get
        {
            // Raw version value from the common header, 0 when absent
            uint raw = this.Model.CommonHeader?.Version ?? 0;
            uint tag = raw >> 24;

            uint version;
            if (tag == 1)
            {
                // Packed format: the major version lives in bits 12-15
                version = (raw >> 12) & 0x0F;
            }
            else if (tag == 2 || tag == 4)
            {
                // Decimal format: the low 16 bits hold (major * 100)
                version = raw & 0xFFFF;
                if (version != 0)
                    version /= 100;
            }
            else
            {
                // Any other tag: return the raw value unchanged
                version = raw;
            }

            return (int)version;
        }
    }

    #endregion

    #region Constructors

    /// <inheritdoc/>
#if NET48
    public InstallShieldCabinet(SabreTools.Models.InstallShieldCabinet.Cabinet model, byte[] data, int offset)
#else
    public InstallShieldCabinet(SabreTools.Models.InstallShieldCabinet.Cabinet? model, byte[]? data, int offset)
#endif
        : base(model, data, offset)
    {
        // Deferred entirely to the base wrapper
    }

    /// <inheritdoc/>
#if NET48
    public InstallShieldCabinet(SabreTools.Models.InstallShieldCabinet.Cabinet model, Stream data)
#else
    public InstallShieldCabinet(SabreTools.Models.InstallShieldCabinet.Cabinet? model, Stream? data)
#endif
        : base(model, data)
    {
        // Deferred entirely to the base wrapper
    }

    /// <summary>
    /// Create an InstallShield Cabinet from a byte array and offset
    /// </summary>
    /// <param name="data">Byte array representing the cabinet</param>
    /// <param name="offset">Offset within the array to parse</param>
    /// <returns>A cabinet wrapper on success, null on failure</returns>
#if NET48
    public static InstallShieldCabinet Create(byte[] data, int offset)
#else
    public static InstallShieldCabinet? Create(byte[]? data, int offset)
#endif
    {
        // Reject missing input
        if (data == null)
            return null;

        // Reject offsets that fall outside the array bounds
        if (offset < 0 || offset >= data.Length)
            return null;

        // Wrap the remaining bytes in a stream and reuse the stream path
        var stream = new MemoryStream(data, offset, data.Length - offset);
        return Create(stream);
    }

    /// <summary>
    /// Create a InstallShield Cabinet from a Stream
    /// </summary>
    /// <param name="data">Stream representing the cabinet</param>
    /// <returns>A cabinet wrapper on success, null on failure</returns>
#if NET48
    public static InstallShieldCabinet Create(Stream data)
#else
    public static InstallShieldCabinet? Create(Stream? data)
#endif
    {
        // The stream must exist, have content, and be seekable and readable
        if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
            return null;

        // Parse the stream into a cabinet model
        var model = new SabreTools.Serialization.Streams.InstallShieldCabinet().Deserialize(data);
        if (model == null)
            return null;

        // Wrap the model, guarding against constructor failures
        try
        {
            return new InstallShieldCabinet(model, data);
        }
        catch
        {
            return null;
        }
    }

    #endregion

    #region Printing

    /// <inheritdoc/>
    public override StringBuilder PrettyPrint()
    {
        var sb = new StringBuilder();
        Printing.InstallShieldCabinet.Print(sb, this.Model);
        return sb;
    }

    #endregion
}
}

View File

@@ -1,153 +0,0 @@
using System;
using System.IO;
using System.Text;
namespace BinaryObjectScanner.Wrappers
{
public class LinearExecutable : WrapperBase<SabreTools.Models.LinearExecutable.Executable>
{
    #region Descriptive Properties

    /// <inheritdoc/>
    public override string DescriptionString => "Linear Executable (LE/LX)";

    #endregion

    #region Constructors

    /// <inheritdoc/>
#if NET48
    public LinearExecutable(SabreTools.Models.LinearExecutable.Executable model, byte[] data, int offset)
#else
    public LinearExecutable(SabreTools.Models.LinearExecutable.Executable? model, byte[]? data, int offset)
#endif
        : base(model, data, offset)
    {
        // All logic is handled by the base class
    }

    /// <inheritdoc/>
#if NET48
    public LinearExecutable(SabreTools.Models.LinearExecutable.Executable model, Stream data)
#else
    public LinearExecutable(SabreTools.Models.LinearExecutable.Executable? model, Stream? data)
#endif
        : base(model, data)
    {
        // All logic is handled by the base class
    }

    /// <summary>
    /// Create an LE/LX executable from a byte array and offset
    /// </summary>
    /// <param name="data">Byte array representing the executable</param>
    /// <param name="offset">Offset within the array to parse</param>
    /// <returns>An LE/LX executable wrapper on success, null on failure</returns>
#if NET48
    public static LinearExecutable Create(byte[] data, int offset)
#else
    public static LinearExecutable? Create(byte[]? data, int offset)
#endif
    {
        // If the data is invalid
        if (data == null)
            return null;

        // If the offset is out of bounds
        if (offset < 0 || offset >= data.Length)
            return null;

        // Create a memory stream and use that
        MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
        return Create(dataStream);
    }

    /// <summary>
    /// Create an LE/LX executable from a Stream
    /// </summary>
    /// <param name="data">Stream representing the executable</param>
    /// <returns>An LE/LX executable wrapper on success, null on failure</returns>
#if NET48
    public static LinearExecutable Create(Stream data)
#else
    public static LinearExecutable? Create(Stream? data)
#endif
    {
        // If the data is invalid
        if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
            return null;

        // Deserialize the stream into a model; null means an unparseable executable
        var executable = new SabreTools.Serialization.Streams.LinearExecutable().Deserialize(data);
        if (executable == null)
            return null;

        try
        {
            return new LinearExecutable(executable, data);
        }
        catch
        {
            return null;
        }
    }

    #endregion

    #region Printing

    /// <inheritdoc/>
    public override StringBuilder PrettyPrint()
    {
        StringBuilder builder = new StringBuilder();
        Printing.LinearExecutable.Print(builder, this.Model);
        return builder;
    }

    #endregion

    #region REMOVE -- DO NOT USE

    /// <summary>
    /// Read an arbitrary range from the source
    /// </summary>
    /// <param name="rangeStart">The start of where to read data from, -1 means start of source</param>
    /// <param name="length">How many bytes to read, -1 means read until end</param>
    /// <returns>Byte array representing the range, null on error</returns>
    [Obsolete]
#if NET48
    public byte[] ReadArbitraryRange(int rangeStart = -1, int length = -1)
#else
    public byte[]? ReadArbitraryRange(int rangeStart = -1, int length = -1)
#endif
    {
        // If we have an unset range start, read from the start of the source
        if (rangeStart == -1)
            rangeStart = 0;

        // If we have an unset length, read the whole source
        if (length == -1)
        {
            // Resolve the total length from whichever backing store is in use
            switch (_dataSource)
            {
                case DataSource.ByteArray:
#if NET48
                    length = _byteArrayData.Length - _byteArrayOffset;
#else
                    length = _byteArrayData!.Length - _byteArrayOffset;
#endif
                    break;

                case DataSource.Stream:
#if NET48
                    length = (int)_streamData.Length;
#else
                    length = (int)_streamData!.Length;
#endif
                    break;

                // NOTE(review): DataSource.UNKNOWN leaves length at -1 and falls
                // through to ReadFromDataSource -- confirm that handles it.
            }
        }

        return ReadFromDataSource(rangeStart, length);
    }

    #endregion
}
}

View File

@@ -1,105 +0,0 @@
using System.IO;
using System.Text;
namespace BinaryObjectScanner.Wrappers
{
public class MSDOS : WrapperBase<SabreTools.Models.MSDOS.Executable>
{
    #region Descriptive Properties

    /// <inheritdoc/>
    public override string DescriptionString => "MS-DOS Executable";

    #endregion

    #region Constructors

    /// <inheritdoc/>
#if NET48
    public MSDOS(SabreTools.Models.MSDOS.Executable model, byte[] data, int offset)
#else
    public MSDOS(SabreTools.Models.MSDOS.Executable? model, byte[]? data, int offset)
#endif
        : base(model, data, offset)
    {
        // Deferred entirely to the base wrapper
    }

    /// <inheritdoc/>
#if NET48
    public MSDOS(SabreTools.Models.MSDOS.Executable model, Stream data)
#else
    public MSDOS(SabreTools.Models.MSDOS.Executable? model, Stream? data)
#endif
        : base(model, data)
    {
        // Deferred entirely to the base wrapper
    }

    /// <summary>
    /// Create an MS-DOS executable from a byte array and offset
    /// </summary>
    /// <param name="data">Byte array representing the executable</param>
    /// <param name="offset">Offset within the array to parse</param>
    /// <returns>An MS-DOS executable wrapper on success, null on failure</returns>
#if NET48
    public static MSDOS Create(byte[] data, int offset)
#else
    public static MSDOS? Create(byte[]? data, int offset)
#endif
    {
        // Reject missing input
        if (data == null)
            return null;

        // Reject offsets that fall outside the array bounds
        if (offset < 0 || offset >= data.Length)
            return null;

        // Wrap the remaining bytes in a stream and reuse the stream path
        var stream = new MemoryStream(data, offset, data.Length - offset);
        return Create(stream);
    }

    /// <summary>
    /// Create an MS-DOS executable from a Stream
    /// </summary>
    /// <param name="data">Stream representing the executable</param>
    /// <returns>An MS-DOS executable wrapper on success, null on failure</returns>
#if NET48
    public static MSDOS Create(Stream data)
#else
    public static MSDOS? Create(Stream? data)
#endif
    {
        // The stream must exist, have content, and be seekable and readable
        if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
            return null;

        // Parse the stream into an executable model
        var model = new SabreTools.Serialization.Streams.MSDOS().Deserialize(data);
        if (model == null)
            return null;

        // Wrap the model, guarding against constructor failures
        try
        {
            return new MSDOS(model, data);
        }
        catch
        {
            return null;
        }
    }

    #endregion

    #region Printing

    /// <inheritdoc/>
    public override StringBuilder PrettyPrint()
    {
        var sb = new StringBuilder();
        Printing.MSDOS.Print(sb, this.Model);
        return sb;
    }

    #endregion
}
}

View File

@@ -1,372 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
namespace BinaryObjectScanner.Wrappers
{
public partial class MicrosoftCabinet : WrapperBase<SabreTools.Models.MicrosoftCabinet.Cabinet>
{
#region Descriptive Properties
/// <inheritdoc/>
public override string DescriptionString => "Microsoft Cabinet";
#endregion
#region Constructors
/// <inheritdoc/>
#if NET48
public MicrosoftCabinet(SabreTools.Models.MicrosoftCabinet.Cabinet model, byte[] data, int offset)
#else
public MicrosoftCabinet(SabreTools.Models.MicrosoftCabinet.Cabinet? model, byte[]? data, int offset)
#endif
: base(model, data, offset)
{
// All logic is handled by the base class
}
/// <inheritdoc/>
#if NET48
public MicrosoftCabinet(SabreTools.Models.MicrosoftCabinet.Cabinet model, Stream data)
#else
public MicrosoftCabinet(SabreTools.Models.MicrosoftCabinet.Cabinet? model, Stream? data)
#endif
: base(model, data)
{
// All logic is handled by the base class
}/// <summary>
/// Create a Microsoft Cabinet from a byte array and offset
/// </summary>
/// <param name="data">Byte array representing the cabinet</param>
/// <param name="offset">Offset within the array to parse</param>
/// <returns>A cabinet wrapper on success, null on failure</returns>
#if NET48
public static MicrosoftCabinet Create(byte[] data, int offset)
#else
public static MicrosoftCabinet? Create(byte[]? data, int offset)
#endif
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and use that
MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
return Create(dataStream);
}
/// <summary>
/// Create a Microsoft Cabinet from a Stream
/// </summary>
/// <param name="data">Stream representing the cabinet</param>
/// <returns>A cabinet wrapper on success, null on failure</returns>
#if NET48
public static MicrosoftCabinet Create(Stream data)
#else
public static MicrosoftCabinet? Create(Stream? data)
#endif
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
return null;
var cabinet = new SabreTools.Serialization.Streams.MicrosoftCabinet().Deserialize(data);
if (cabinet == null)
return null;
try
{
return new MicrosoftCabinet(cabinet, data);
}
catch
{
return null;
}
}
#endregion
#region Checksumming
/// <summary>
/// The computation and verification of checksums found in CFDATA structure entries cabinet files is
/// done by using a function described by the following mathematical notation. When checksums are
/// not supplied by the cabinet file creating application, the checksum field is set to 0 (zero). Cabinet
/// extracting applications do not compute or verify the checksum if the field is set to 0 (zero).
/// </summary>
private static uint ChecksumData(byte[] data)
{
uint[] C = new uint[4]
{
S(data, 1, data.Length),
S(data, 2, data.Length),
S(data, 3, data.Length),
S(data, 4, data.Length),
};
return C[0] ^ C[1] ^ C[2] ^ C[3];
}
/// <summary>
/// Individual algorithmic step
/// </summary>
private static uint S(byte[] a, int b, int x)
{
int n = a.Length;
if (x < 4 && b > n % 4)
return 0;
else if (x < 4 && b <= n % 4)
return a[n - b + 1];
else // if (x >= 4)
return a[n - x + b] ^ S(a, b, x - 4);
}
#endregion
#region Folders
/// <summary>
/// Get the uncompressed data associated with a folder
/// </summary>
/// <param name="folderIndex">Folder index to check</param>
/// <returns>Byte array representing the data, null on error</returns>
/// <remarks>All but uncompressed are unimplemented</remarks>
#if NET48
public byte[] GetUncompressedData(int folderIndex)
#else
public byte[]? GetUncompressedData(int folderIndex)
#endif
{
// If we have an invalid folder index
if (folderIndex < 0 || this.Model.Folders == null || folderIndex >= this.Model.Folders.Length)
return null;
// Get the folder header
var folder = this.Model.Folders[folderIndex];
if (folder == null)
return null;
// If we have invalid data blocks
if (folder.DataBlocks == null || folder.DataBlocks.Length == 0)
return null;
// Setup LZX decompression
var lzx = new Compression.LZX.State();
Compression.LZX.Decompressor.Init(((ushort)folder.CompressionType >> 8) & 0x1f, lzx);
// Setup MS-ZIP decompression
Compression.MSZIP.State mszip = new Compression.MSZIP.State();
// Setup Quantum decompression
var qtm = new Compression.Quantum.State();
Compression.Quantum.Decompressor.InitState(qtm, folder);
List<byte> data = new List<byte>();
foreach (var dataBlock in folder.DataBlocks)
{
if (dataBlock == null)
continue;
#if NET48
byte[] decompressed = new byte[dataBlock.UncompressedSize];
#else
byte[]? decompressed = new byte[dataBlock.UncompressedSize];
#endif
switch (folder.CompressionType & SabreTools.Models.MicrosoftCabinet.CompressionType.MASK_TYPE)
{
case SabreTools.Models.MicrosoftCabinet.CompressionType.TYPE_NONE:
decompressed = dataBlock.CompressedData;
break;
case SabreTools.Models.MicrosoftCabinet.CompressionType.TYPE_MSZIP:
decompressed = new byte[SabreTools.Models.Compression.MSZIP.Constants.ZIPWSIZE];
Compression.MSZIP.Decompressor.Decompress(mszip, dataBlock.CompressedSize, dataBlock.CompressedData, dataBlock.UncompressedSize, decompressed);
Array.Resize(ref decompressed, dataBlock.UncompressedSize);
break;
case SabreTools.Models.MicrosoftCabinet.CompressionType.TYPE_QUANTUM:
Compression.Quantum.Decompressor.Decompress(qtm, dataBlock.CompressedSize, dataBlock.CompressedData, dataBlock.UncompressedSize, decompressed);
break;
case SabreTools.Models.MicrosoftCabinet.CompressionType.TYPE_LZX:
Compression.LZX.Decompressor.Decompress(state: lzx, dataBlock.CompressedSize, dataBlock.CompressedData, dataBlock.UncompressedSize, decompressed);
break;
default:
return null;
}
if (decompressed != null)
data.AddRange(decompressed);
}
return data.ToArray();
}
#endregion
#region Files
/// <summary>
/// Extract all files from the MS-CAB to an output directory
/// </summary>
/// <param name="outputDirectory">Output directory to write to</param>
/// <returns>True if all filez extracted, false otherwise</returns>
public bool ExtractAll(string outputDirectory)
{
// If we have no files
if (this.Model.Files == null || this.Model.Files.Length == 0)
return false;
// Loop through and extract all files to the output
bool allExtracted = true;
for (int i = 0; i < this.Model.Files.Length; i++)
{
allExtracted &= ExtractFile(i, outputDirectory);
}
return allExtracted;
}
/// <summary>
/// Extract a file from the MS-CAB to an output directory by index
/// </summary>
/// <param name="index">File index to extract</param>
/// <param name="outputDirectory">Output directory to write to</param>
/// <returns>True if the file extracted, false otherwise</returns>
public bool ExtractFile(int index, string outputDirectory)
{
// If we have an invalid file index
if (index < 0 || this.Model.Files == null || index >= this.Model.Files.Length)
return false;
// If we have an invalid output directory
if (string.IsNullOrWhiteSpace(outputDirectory))
return false;
// Ensure the directory exists
Directory.CreateDirectory(outputDirectory);
// Get the file header
var file = this.Model.Files[index];
if (file == null || file.FileSize == 0)
return false;
// Create the output filename
string fileName = Path.Combine(outputDirectory, file.Name ?? $"file{index}");
// Get the file data, if possible
#if NET48
byte[] fileData = GetFileData(index);
#else
byte[]? fileData = GetFileData(index);
#endif
if (fileData == null)
return false;
// Write the file data
using (FileStream fs = File.OpenWrite(fileName))
{
fs.Write(fileData, 0, fileData.Length);
}
return true;
}
/// <summary>
/// Get the DateTime for a particular file index
/// </summary>
/// <param name="fileIndex">File index to check</param>
/// <returns>DateTime representing the file time, null on error</returns>
public DateTime? GetDateTime(int fileIndex)
{
    // If we have an invalid file index
    if (fileIndex < 0 || this.Model.Files == null || fileIndex >= this.Model.Files.Length)
        return null;

    // Get the file header
    var file = this.Model.Files[fileIndex];
    if (file == null)
        return null;

    // If we have an invalid DateTime
    if (file.Date == 0 && file.Time == 0)
        return null;

    try
    {
        // Date property -- packed MS-DOS date: bits 9-15 year since 1980,
        // bits 5-8 month, bits 0-4 day
        int year = (file.Date >> 9) + 1980;
        int month = (file.Date >> 5) & 0x0F;
        int day = file.Date & 0x1F;

        // Time property -- packed MS-DOS time: bits 11-15 hour,
        // bits 5-10 minute, bits 0-4 seconds/2
        int hour = file.Time >> 11;
        int minute = (file.Time >> 5) & 0x3F;
        int second = (file.Time << 1) & 0x3E;

        return new DateTime(year, month, day, hour, minute, second);
    }
    catch
    {
        // Out-of-range packed values -- return null per the documented
        // "null on error" contract instead of a misleading DateTime.MinValue
        return null;
    }
}
/// <summary>
/// Get the uncompressed data associated with a file
/// </summary>
/// <param name="fileIndex">File index to check</param>
/// <returns>Byte array representing the data, null on error</returns>
#if NET48
public byte[] GetFileData(int fileIndex)
#else
public byte[]? GetFileData(int fileIndex)
#endif
{
    // If we have an invalid file index
    if (fileIndex < 0 || this.Model.Files == null || fileIndex >= this.Model.Files.Length)
        return null;

    // Get the file header
    var file = this.Model.Files[fileIndex];
    if (file == null || file.FileSize == 0)
        return null;

    // Get the parent folder data
#if NET48
    byte[] folderData = GetUncompressedData((int)file.FolderIndex);
#else
    byte[]? folderData = GetUncompressedData((int)file.FolderIndex);
#endif
    if (folderData == null || folderData.Length == 0)
        return null;

    // Validate that the folder data contains the full file before allocating
    // the output buffer
    if (folderData.Length < file.FolderStartOffset + file.FileSize)
        return null;

    // Get the segment that represents this file
    byte[] fileData = new byte[file.FileSize];
    Array.Copy(folderData, file.FolderStartOffset, fileData, 0, file.FileSize);
    return fileData;
}
#endregion
#region Printing
/// <inheritdoc/>
public override StringBuilder PrettyPrint()
{
    // Delegate rendering to the shared printing helper
    var sb = new StringBuilder();
    Printing.MicrosoftCabinet.Print(sb, this.Model);
    return sb;
}
#endregion
}
}

View File

@@ -1,107 +0,0 @@
using System.IO;
using System.Text;
namespace BinaryObjectScanner.Wrappers
{
public class N3DS : WrapperBase<SabreTools.Models.N3DS.Cart>
{
    #region Descriptive Properties

    /// <inheritdoc/>
    public override string DescriptionString => "Nintendo 3DS Cart Image";

    #endregion

    #region Constructors

    /// <inheritdoc/>
#if NET48
    public N3DS(SabreTools.Models.N3DS.Cart model, byte[] data, int offset)
#else
    public N3DS(SabreTools.Models.N3DS.Cart? model, byte[]? data, int offset)
#endif
        : base(model, data, offset)
    {
        // All logic is handled by the base class
    }

    /// <inheritdoc/>
#if NET48
    public N3DS(SabreTools.Models.N3DS.Cart model, Stream data)
#else
    public N3DS(SabreTools.Models.N3DS.Cart? model, Stream? data)
#endif
        : base(model, data)
    {
        // All logic is handled by the base class
    }

    /// <summary>
    /// Create a 3DS cart image from a byte array and offset
    /// </summary>
    /// <param name="data">Byte array representing the archive</param>
    /// <param name="offset">Offset within the array to parse</param>
    /// <returns>A 3DS cart image wrapper on success, null on failure</returns>
#if NET48
    public static N3DS Create(byte[] data, int offset)
#else
    public static N3DS? Create(byte[]? data, int offset)
#endif
    {
        // If the data is invalid
        if (data == null)
            return null;

        // If the offset is out of bounds
        if (offset < 0 || offset >= data.Length)
            return null;

        // Create a memory stream and use that
        MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
        return Create(dataStream);
    }

    /// <summary>
    /// Create a 3DS cart image from a Stream
    /// </summary>
    /// <param name="data">Stream representing the archive</param>
    /// <returns>A 3DS cart image wrapper on success, null on failure</returns>
#if NET48
    public static N3DS Create(Stream data)
#else
    public static N3DS? Create(Stream? data)
#endif
    {
        // If the data is invalid -- check CanSeek/CanRead before touching
        // Length, since Stream.Length throws on non-seekable streams
        if (data == null || !data.CanSeek || !data.CanRead || data.Length == 0)
            return null;

        var archive = new SabreTools.Serialization.Streams.N3DS().Deserialize(data);
        if (archive == null)
            return null;

        try
        {
            return new N3DS(archive, data);
        }
        catch
        {
            return null;
        }
    }

    #endregion

    #region Printing

    /// <inheritdoc/>
    public override StringBuilder PrettyPrint()
    {
        StringBuilder builder = new StringBuilder();
        Printing.N3DS.Print(builder, this.Model);
        return builder;
    }

    #endregion
}
}

View File

@@ -1,107 +0,0 @@
using System.IO;
using System.Text;
namespace BinaryObjectScanner.Wrappers
{
public class NCF : WrapperBase<SabreTools.Models.NCF.File>
{
    #region Descriptive Properties

    /// <inheritdoc/>
    public override string DescriptionString => "Half-Life No Cache File (NCF)";

    #endregion

    #region Constructors

    /// <inheritdoc/>
#if NET48
    public NCF(SabreTools.Models.NCF.File model, byte[] data, int offset)
#else
    public NCF(SabreTools.Models.NCF.File? model, byte[]? data, int offset)
#endif
        : base(model, data, offset)
    {
        // All logic is handled by the base class
    }

    /// <inheritdoc/>
#if NET48
    public NCF(SabreTools.Models.NCF.File model, Stream data)
#else
    public NCF(SabreTools.Models.NCF.File? model, Stream? data)
#endif
        : base(model, data)
    {
        // All logic is handled by the base class
    }

    /// <summary>
    /// Create an NCF from a byte array and offset
    /// </summary>
    /// <param name="data">Byte array representing the NCF</param>
    /// <param name="offset">Offset within the array to parse</param>
    /// <returns>An NCF wrapper on success, null on failure</returns>
#if NET48
    public static NCF Create(byte[] data, int offset)
#else
    public static NCF? Create(byte[]? data, int offset)
#endif
    {
        // If the data is invalid
        if (data == null)
            return null;

        // If the offset is out of bounds
        if (offset < 0 || offset >= data.Length)
            return null;

        // Create a memory stream and use that
        MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
        return Create(dataStream);
    }

    /// <summary>
    /// Create a NCF from a Stream
    /// </summary>
    /// <param name="data">Stream representing the NCF</param>
    /// <returns>An NCF wrapper on success, null on failure</returns>
#if NET48
    public static NCF Create(Stream data)
#else
    public static NCF? Create(Stream? data)
#endif
    {
        // If the data is invalid -- check CanSeek/CanRead before touching
        // Length, since Stream.Length throws on non-seekable streams
        if (data == null || !data.CanSeek || !data.CanRead || data.Length == 0)
            return null;

        var file = new SabreTools.Serialization.Streams.NCF().Deserialize(data);
        if (file == null)
            return null;

        try
        {
            return new NCF(file, data);
        }
        catch
        {
            return null;
        }
    }

    #endregion

    #region Printing

    /// <inheritdoc/>
    public override StringBuilder PrettyPrint()
    {
        StringBuilder builder = new StringBuilder();
        Printing.NCF.Print(builder, this.Model);
        return builder;
    }

    #endregion
}
}

View File

@@ -1,155 +0,0 @@
using System;
using System.IO;
using System.Text;
namespace BinaryObjectScanner.Wrappers
{
public class NewExecutable : WrapperBase<SabreTools.Models.NewExecutable.Executable>
{
    #region Descriptive Properties

    /// <inheritdoc/>
    public override string DescriptionString => "New Executable (NE)";

    #endregion

    #region Constructors

    /// <inheritdoc/>
#if NET48
    public NewExecutable(SabreTools.Models.NewExecutable.Executable model, byte[] data, int offset)
#else
    public NewExecutable(SabreTools.Models.NewExecutable.Executable? model, byte[]? data, int offset)
#endif
        : base(model, data, offset)
    {
        // All logic is handled by the base class
    }

    /// <inheritdoc/>
#if NET48
    public NewExecutable(SabreTools.Models.NewExecutable.Executable model, Stream data)
#else
    public NewExecutable(SabreTools.Models.NewExecutable.Executable? model, Stream? data)
#endif
        : base(model, data)
    {
        // All logic is handled by the base class
    }

    /// <summary>
    /// Create an NE executable from a byte array and offset
    /// </summary>
    /// <param name="data">Byte array representing the executable</param>
    /// <param name="offset">Offset within the array to parse</param>
    /// <returns>An NE executable wrapper on success, null on failure</returns>
#if NET48
    public static NewExecutable Create(byte[] data, int offset)
#else
    public static NewExecutable? Create(byte[]? data, int offset)
#endif
    {
        // If the data is invalid
        if (data == null)
            return null;

        // If the offset is out of bounds
        if (offset < 0 || offset >= data.Length)
            return null;

        // Create a memory stream and use that
        MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
        return Create(dataStream);
    }

    /// <summary>
    /// Create an NE executable from a Stream
    /// </summary>
    /// <param name="data">Stream representing the executable</param>
    /// <returns>An NE executable wrapper on success, null on failure</returns>
#if NET48
    public static NewExecutable Create(Stream data)
#else
    public static NewExecutable? Create(Stream? data)
#endif
    {
        // If the data is invalid -- check CanSeek/CanRead before touching
        // Length, since Stream.Length throws on non-seekable streams
        if (data == null || !data.CanSeek || !data.CanRead || data.Length == 0)
            return null;

        var executable = new SabreTools.Serialization.Streams.NewExecutable().Deserialize(data);
        if (executable == null)
            return null;

        try
        {
            return new NewExecutable(executable, data);
        }
        catch
        {
            return null;
        }
    }

    #endregion

    #region Printing

    /// <inheritdoc/>
    public override StringBuilder PrettyPrint()
    {
        StringBuilder builder = new StringBuilder();
        Printing.NewExecutable.Print(builder, this.Model);
        return builder;
    }

    #endregion

    #region REMOVE -- DO NOT USE

    /// <summary>
    /// Read an arbitrary range from the source
    /// </summary>
    /// <param name="rangeStart">The start of where to read data from, -1 means start of source</param>
    /// <param name="length">How many bytes to read, -1 means read until end</param>
    /// <returns>Byte array representing the range, null on error</returns>
    [Obsolete]
#if NET48
    public byte[] ReadArbitraryRange(int rangeStart = -1, int length = -1)
#else
    public byte[]? ReadArbitraryRange(int rangeStart = -1, int length = -1)
#endif
    {
        // If we have an unset range start, read from the start of the source
        if (rangeStart == -1)
            rangeStart = 0;

        // If we have an unset length, read the whole source
        if (length == -1)
        {
            switch (_dataSource)
            {
                case DataSource.ByteArray:
#if NET48
                    length = _byteArrayData.Length - _byteArrayOffset;
#else
                    length = _byteArrayData!.Length - _byteArrayOffset;
#endif
                    break;

                case DataSource.Stream:
#if NET48
                    length = (int)_streamData.Length;
#else
                    length = (int)_streamData!.Length;
#endif
                    break;
            }
        }

        return ReadFromDataSource(rangeStart, length);
    }

    #endregion
}
}

View File

@@ -1,107 +0,0 @@
using System.IO;
using System.Text;
namespace BinaryObjectScanner.Wrappers
{
public class Nitro : WrapperBase<SabreTools.Models.Nitro.Cart>
{
    #region Descriptive Properties

    /// <inheritdoc/>
    public override string DescriptionString => "Nintendo DS/DSi Cart Image";

    #endregion

    #region Constructors

    /// <inheritdoc/>
#if NET48
    public Nitro(SabreTools.Models.Nitro.Cart model, byte[] data, int offset)
#else
    public Nitro(SabreTools.Models.Nitro.Cart? model, byte[]? data, int offset)
#endif
        : base(model, data, offset)
    {
        // All logic is handled by the base class
    }

    /// <inheritdoc/>
#if NET48
    public Nitro(SabreTools.Models.Nitro.Cart model, Stream data)
#else
    public Nitro(SabreTools.Models.Nitro.Cart? model, Stream? data)
#endif
        : base(model, data)
    {
        // All logic is handled by the base class
    }

    /// <summary>
    /// Create a NDS cart image from a byte array and offset
    /// </summary>
    /// <param name="data">Byte array representing the archive</param>
    /// <param name="offset">Offset within the array to parse</param>
    /// <returns>A NDS cart image wrapper on success, null on failure</returns>
#if NET48
    public static Nitro Create(byte[] data, int offset)
#else
    public static Nitro? Create(byte[]? data, int offset)
#endif
    {
        // If the data is invalid
        if (data == null)
            return null;

        // If the offset is out of bounds
        if (offset < 0 || offset >= data.Length)
            return null;

        // Create a memory stream and use that
        MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
        return Create(dataStream);
    }

    /// <summary>
    /// Create a NDS cart image from a Stream
    /// </summary>
    /// <param name="data">Stream representing the archive</param>
    /// <returns>A NDS cart image wrapper on success, null on failure</returns>
#if NET48
    public static Nitro Create(Stream data)
#else
    public static Nitro? Create(Stream? data)
#endif
    {
        // If the data is invalid -- check CanSeek/CanRead before touching
        // Length, since Stream.Length throws on non-seekable streams
        if (data == null || !data.CanSeek || !data.CanRead || data.Length == 0)
            return null;

        var archive = new SabreTools.Serialization.Streams.Nitro().Deserialize(data);
        if (archive == null)
            return null;

        try
        {
            return new Nitro(archive, data);
        }
        catch
        {
            return null;
        }
    }

    #endregion

    #region Printing

    /// <inheritdoc/>
    public override StringBuilder PrettyPrint()
    {
        StringBuilder builder = new StringBuilder();
        Printing.Nitro.Print(builder, this.Model);
        return builder;
    }

    #endregion
}
}

View File

@@ -1,202 +0,0 @@
using System.IO;
using System.Text;
namespace BinaryObjectScanner.Wrappers
{
public class PAK : WrapperBase<SabreTools.Models.PAK.File>
{
    #region Descriptive Properties

    /// <inheritdoc/>
    public override string DescriptionString => "Half-Life Package File (PAK)";

    #endregion

    #region Constructors

    /// <inheritdoc/>
#if NET48
    public PAK(SabreTools.Models.PAK.File model, byte[] data, int offset)
#else
    public PAK(SabreTools.Models.PAK.File? model, byte[]? data, int offset)
#endif
        : base(model, data, offset)
    {
        // All logic is handled by the base class
    }

    /// <inheritdoc/>
#if NET48
    public PAK(SabreTools.Models.PAK.File model, Stream data)
#else
    public PAK(SabreTools.Models.PAK.File? model, Stream? data)
#endif
        : base(model, data)
    {
        // All logic is handled by the base class
    }

    /// <summary>
    /// Create a PAK from a byte array and offset
    /// </summary>
    /// <param name="data">Byte array representing the PAK</param>
    /// <param name="offset">Offset within the array to parse</param>
    /// <returns>A PAK wrapper on success, null on failure</returns>
#if NET48
    public static PAK Create(byte[] data, int offset)
#else
    public static PAK? Create(byte[]? data, int offset)
#endif
    {
        // If the data is invalid
        if (data == null)
            return null;

        // If the offset is out of bounds
        if (offset < 0 || offset >= data.Length)
            return null;

        // Create a memory stream and use that
        MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
        return Create(dataStream);
    }

    /// <summary>
    /// Create a PAK from a Stream
    /// </summary>
    /// <param name="data">Stream representing the PAK</param>
    /// <returns>A PAK wrapper on success, null on failure</returns>
#if NET48
    public static PAK Create(Stream data)
#else
    public static PAK? Create(Stream? data)
#endif
    {
        // If the data is invalid -- check CanSeek/CanRead before touching
        // Length, since Stream.Length throws on non-seekable streams
        if (data == null || !data.CanSeek || !data.CanRead || data.Length == 0)
            return null;

        var file = new SabreTools.Serialization.Streams.PAK().Deserialize(data);
        if (file == null)
            return null;

        try
        {
            return new PAK(file, data);
        }
        catch
        {
            return null;
        }
    }

    #endregion

    #region Printing

    /// <inheritdoc/>
    public override StringBuilder PrettyPrint()
    {
        StringBuilder builder = new StringBuilder();
        Printing.PAK.Print(builder, this.Model);
        return builder;
    }

    #endregion

    #region Extraction

    /// <summary>
    /// Extract all files from the PAK to an output directory
    /// </summary>
    /// <param name="outputDirectory">Output directory to write to</param>
    /// <returns>True if all files extracted, false otherwise</returns>
    public bool ExtractAll(string outputDirectory)
    {
        // If we have no directory items
        if (this.Model.DirectoryItems == null || this.Model.DirectoryItems.Length == 0)
            return false;

        // Loop through and extract all files to the output
        bool allExtracted = true;
        for (int i = 0; i < this.Model.DirectoryItems.Length; i++)
        {
            allExtracted &= ExtractFile(i, outputDirectory);
        }

        return allExtracted;
    }

    /// <summary>
    /// Extract a file from the PAK to an output directory by index
    /// </summary>
    /// <param name="index">File index to extract</param>
    /// <param name="outputDirectory">Output directory to write to</param>
    /// <returns>True if the file extracted, false otherwise</returns>
    public bool ExtractFile(int index, string outputDirectory)
    {
        // If we have no directory items
        if (this.Model.DirectoryItems == null || this.Model.DirectoryItems.Length == 0)
            return false;

        // If the directory item index is invalid
        if (index < 0 || index >= this.Model.DirectoryItems.Length)
            return false;

        // If we have an invalid output directory -- check before doing any reads
        if (string.IsNullOrWhiteSpace(outputDirectory))
            return false;

        // Get the directory item
        var directoryItem = this.Model.DirectoryItems[index];
        if (directoryItem == null)
            return false;

        // Read the item data
#if NET48
        byte[] data = ReadFromDataSource((int)directoryItem.ItemOffset, (int)directoryItem.ItemLength);
#else
        byte[]? data = ReadFromDataSource((int)directoryItem.ItemOffset, (int)directoryItem.ItemLength);
#endif
        if (data == null)
            return false;

        // Create the filename
#if NET48
        string filename = directoryItem.ItemName;
#else
        string? filename = directoryItem.ItemName;
#endif

        // Create the full output path
        filename = Path.Combine(outputDirectory, filename ?? $"file{index}");

        // Ensure the output directory is created -- item names may contain
        // subdirectory components
#if NET48
        string directoryName = Path.GetDirectoryName(filename);
#else
        string? directoryName = Path.GetDirectoryName(filename);
#endif
        if (directoryName != null)
            Directory.CreateDirectory(directoryName);

        // Try to write the data
        try
        {
            // File.Create truncates an existing file, unlike File.OpenWrite,
            // which would leave stale bytes past the new end
            using (Stream fs = File.Create(filename))
            {
                fs.Write(data, 0, data.Length);
            }
        }
        catch
        {
            return false;
        }

        return true;
    }

    #endregion
}
}

View File

@@ -1,185 +0,0 @@
using System.IO;
using System.Text;
namespace BinaryObjectScanner.Wrappers
{
public class PFF : WrapperBase<SabreTools.Models.PFF.Archive>
{
    #region Descriptive Properties

    /// <inheritdoc/>
    public override string DescriptionString => "NovaLogic Game Archive Format (PFF)";

    #endregion

    #region Constructors

    /// <inheritdoc/>
#if NET48
    public PFF(SabreTools.Models.PFF.Archive model, byte[] data, int offset)
#else
    public PFF(SabreTools.Models.PFF.Archive? model, byte[]? data, int offset)
#endif
        : base(model, data, offset)
    {
        // All logic is handled by the base class
    }

    /// <inheritdoc/>
#if NET48
    public PFF(SabreTools.Models.PFF.Archive model, Stream data)
#else
    public PFF(SabreTools.Models.PFF.Archive? model, Stream? data)
#endif
        : base(model, data)
    {
        // All logic is handled by the base class
    }

    /// <summary>
    /// Create a PFF archive from a byte array and offset
    /// </summary>
    /// <param name="data">Byte array representing the archive</param>
    /// <param name="offset">Offset within the array to parse</param>
    /// <returns>A PFF archive wrapper on success, null on failure</returns>
#if NET48
    public static PFF Create(byte[] data, int offset)
#else
    public static PFF? Create(byte[]? data, int offset)
#endif
    {
        // If the data is invalid
        if (data == null)
            return null;

        // If the offset is out of bounds
        if (offset < 0 || offset >= data.Length)
            return null;

        // Create a memory stream and use that
        MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
        return Create(dataStream);
    }

    /// <summary>
    /// Create a PFF archive from a Stream
    /// </summary>
    /// <param name="data">Stream representing the archive</param>
    /// <returns>A PFF archive wrapper on success, null on failure</returns>
#if NET48
    public static PFF Create(Stream data)
#else
    public static PFF? Create(Stream? data)
#endif
    {
        // If the data is invalid -- check CanSeek/CanRead before touching
        // Length, since Stream.Length throws on non-seekable streams
        if (data == null || !data.CanSeek || !data.CanRead || data.Length == 0)
            return null;

        var archive = new SabreTools.Serialization.Streams.PFF().Deserialize(data);
        if (archive == null)
            return null;

        try
        {
            return new PFF(archive, data);
        }
        catch
        {
            return null;
        }
    }

    #endregion

    #region Data

    /// <summary>
    /// Extract all segments from the PFF to an output directory
    /// </summary>
    /// <param name="outputDirectory">Output directory to write to</param>
    /// <returns>True if all segments extracted, false otherwise</returns>
    public bool ExtractAll(string outputDirectory)
    {
        // If we have no segments
        if (this.Model.Segments == null || this.Model.Segments.Length == 0)
            return false;

        // Loop through and extract all files to the output
        bool allExtracted = true;
        for (int i = 0; i < this.Model.Segments.Length; i++)
        {
            allExtracted &= ExtractSegment(i, outputDirectory);
        }

        return allExtracted;
    }

    /// <summary>
    /// Extract a segment from the PFF to an output directory by index
    /// </summary>
    /// <param name="index">Segment index to extract</param>
    /// <param name="outputDirectory">Output directory to write to</param>
    /// <returns>True if the segment extracted, false otherwise</returns>
    public bool ExtractSegment(int index, string outputDirectory)
    {
        // If we have no segments
        if (this.Model.Header?.NumberOfFiles == null || this.Model.Header.NumberOfFiles == 0 || this.Model.Segments == null || this.Model.Segments.Length == 0)
            return false;

        // If we have an invalid index
        if (index < 0 || index >= this.Model.Segments.Length)
            return false;

        // Get the segment information
        var file = this.Model.Segments[index];
        if (file == null)
            return false;

        // Get the read index and length
        int offset = (int)file.FileLocation;
        int size = (int)file.FileSize;

        // Read the data block first so a failed read does not leave an
        // empty output file behind
#if NET48
        byte[] data = ReadFromDataSource(offset, size);
#else
        byte[]? data = ReadFromDataSource(offset, size);
#endif
        if (data == null)
            return false;

        try
        {
            // Ensure the output directory exists
            Directory.CreateDirectory(outputDirectory);

            // Create the output path
            string filePath = Path.Combine(outputDirectory, file.FileName ?? $"file{index}");

            // File.Create truncates an existing file, unlike File.OpenWrite
            using (FileStream fs = File.Create(filePath))
            {
                // Write the data -- TODO: Compressed data?
                fs.Write(data, 0, data.Length);
            }

            return true;
        }
        catch
        {
            return false;
        }
    }

    #endregion

    #region Printing

    /// <inheritdoc/>
    public override StringBuilder PrettyPrint()
    {
        StringBuilder builder = new StringBuilder();
        Printing.PFF.Print(builder, this.Model);
        return builder;
    }

    #endregion
}
}

View File

@@ -1,107 +0,0 @@
using System.IO;
using System.Text;
namespace BinaryObjectScanner.Wrappers
{
public class PlayJAudioFile : WrapperBase<SabreTools.Models.PlayJ.AudioFile>
{
    #region Descriptive Properties

    /// <inheritdoc/>
    public override string DescriptionString => "PlayJ Audio File (PLJ)";

    #endregion

    #region Constructors

    /// <inheritdoc/>
#if NET48
    public PlayJAudioFile(SabreTools.Models.PlayJ.AudioFile model, byte[] data, int offset)
#else
    public PlayJAudioFile(SabreTools.Models.PlayJ.AudioFile? model, byte[]? data, int offset)
#endif
        : base(model, data, offset)
    {
        // All logic is handled by the base class
    }

    /// <inheritdoc/>
#if NET48
    public PlayJAudioFile(SabreTools.Models.PlayJ.AudioFile model, Stream data)
#else
    public PlayJAudioFile(SabreTools.Models.PlayJ.AudioFile? model, Stream? data)
#endif
        : base(model, data)
    {
        // All logic is handled by the base class
    }

    /// <summary>
    /// Create a PlayJ audio file from a byte array and offset
    /// </summary>
    /// <param name="data">Byte array representing the archive</param>
    /// <param name="offset">Offset within the array to parse</param>
    /// <returns>A PlayJ audio file wrapper on success, null on failure</returns>
#if NET48
    public static PlayJAudioFile Create(byte[] data, int offset)
#else
    public static PlayJAudioFile? Create(byte[]? data, int offset)
#endif
    {
        // If the data is invalid
        if (data == null)
            return null;

        // If the offset is out of bounds
        if (offset < 0 || offset >= data.Length)
            return null;

        // Create a memory stream and use that
        MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
        return Create(dataStream);
    }

    /// <summary>
    /// Create a PlayJ audio file from a Stream
    /// </summary>
    /// <param name="data">Stream representing the archive</param>
    /// <returns>A PlayJ audio file wrapper on success, null on failure</returns>
#if NET48
    public static PlayJAudioFile Create(Stream data)
#else
    public static PlayJAudioFile? Create(Stream? data)
#endif
    {
        // If the data is invalid -- check CanSeek/CanRead before touching
        // Length, since Stream.Length throws on non-seekable streams
        if (data == null || !data.CanSeek || !data.CanRead || data.Length == 0)
            return null;

        var audioFile = new SabreTools.Serialization.Streams.PlayJAudio().Deserialize(data);
        if (audioFile == null)
            return null;

        try
        {
            return new PlayJAudioFile(audioFile, data);
        }
        catch
        {
            return null;
        }
    }

    #endregion

    #region Printing

    /// <inheritdoc/>
    public override StringBuilder PrettyPrint()
    {
        StringBuilder builder = new StringBuilder();
        Printing.PlayJAudioFile.Print(builder, this.Model);
        return builder;
    }

    #endregion
}
}

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,298 @@
using System.Text;
using SabreTools.Serialization.Interfaces;
using SabreTools.Serialization.Wrappers;
namespace BinaryObjectScanner.Wrappers
{
/// <summary>
/// Extensions to allow for pretty printing
/// </summary>
public static class PrintExtensions
{
    /// <summary>
    /// Export the item information as pretty-printed text
    /// </summary>
    /// <param name="wrapper">Wrapper to render; unrecognized types produce an empty builder</param>
    public static StringBuilder PrettyPrint(this IWrapper wrapper)
    {
        // Dispatch on the concrete wrapper type
        switch (wrapper)
        {
            case AACSMediaKeyBlock item:
                return item.PrettyPrint();
            case BDPlusSVM item:
                return item.PrettyPrint();
            case BFPK item:
                return item.PrettyPrint();
            case BSP item:
                return item.PrettyPrint();
            case CFB item:
                return item.PrettyPrint();
            case CIA item:
                return item.PrettyPrint();
            case GCF item:
                return item.PrettyPrint();
            case InstallShieldCabinet item:
                return item.PrettyPrint();
            case LinearExecutable item:
                return item.PrettyPrint();
            case MicrosoftCabinet item:
                return item.PrettyPrint();
            case MSDOS item:
                return item.PrettyPrint();
            case N3DS item:
                return item.PrettyPrint();
            case NCF item:
                return item.PrettyPrint();
            case NewExecutable item:
                return item.PrettyPrint();
            case Nitro item:
                return item.PrettyPrint();
            case PAK item:
                return item.PrettyPrint();
            case PFF item:
                return item.PrettyPrint();
            case PlayJAudioFile item:
                return item.PrettyPrint();
            case PortableExecutable item:
                return item.PrettyPrint();
            case Quantum item:
                return item.PrettyPrint();
            case SGA item:
                return item.PrettyPrint();
            case VBSP item:
                return item.PrettyPrint();
            case VPK item:
                return item.PrettyPrint();
            case WAD item:
                return item.PrettyPrint();
            case XZP item:
                return item.PrettyPrint();
            default:
                return new StringBuilder();
        }
    }

    /// <summary>
    /// Render AACS media key block information as text
    /// </summary>
    private static StringBuilder PrettyPrint(this AACSMediaKeyBlock item)
    {
        var sb = new StringBuilder();
        Printing.AACSMediaKeyBlock.Print(sb, item.Model);
        return sb;
    }

    /// <summary>
    /// Render BD+ SVM information as text
    /// </summary>
    private static StringBuilder PrettyPrint(this BDPlusSVM item)
    {
        var sb = new StringBuilder();
        Printing.BDPlusSVM.Print(sb, item.Model);
        return sb;
    }

    /// <summary>
    /// Render BFPK archive information as text
    /// </summary>
    private static StringBuilder PrettyPrint(this BFPK item)
    {
        var sb = new StringBuilder();
        Printing.BFPK.Print(sb, item.Model);
        return sb;
    }

    /// <summary>
    /// Render BSP information as text
    /// </summary>
    private static StringBuilder PrettyPrint(this BSP item)
    {
        var sb = new StringBuilder();
        Printing.BSP.Print(sb, item.Model);
        return sb;
    }

    /// <summary>
    /// Render Compound File Binary information as text
    /// </summary>
    private static StringBuilder PrettyPrint(this CFB item)
    {
        var sb = new StringBuilder();
        Printing.CFB.Print(sb, item.Model);
        return sb;
    }

    /// <summary>
    /// Render CIA information as text
    /// </summary>
    private static StringBuilder PrettyPrint(this CIA item)
    {
        var sb = new StringBuilder();
        Printing.CIA.Print(sb, item.Model);
        return sb;
    }

    /// <summary>
    /// Render GCF information as text
    /// </summary>
    private static StringBuilder PrettyPrint(this GCF item)
    {
        var sb = new StringBuilder();
        Printing.GCF.Print(sb, item.Model);
        return sb;
    }

    /// <summary>
    /// Render InstallShield cabinet information as text
    /// </summary>
    private static StringBuilder PrettyPrint(this InstallShieldCabinet item)
    {
        var sb = new StringBuilder();
        Printing.InstallShieldCabinet.Print(sb, item.Model);
        return sb;
    }

    /// <summary>
    /// Render Linear Executable information as text
    /// </summary>
    private static StringBuilder PrettyPrint(this LinearExecutable item)
    {
        var sb = new StringBuilder();
        Printing.LinearExecutable.Print(sb, item.Model);
        return sb;
    }

    /// <summary>
    /// Render Microsoft cabinet information as text
    /// </summary>
    private static StringBuilder PrettyPrint(this MicrosoftCabinet item)
    {
        var sb = new StringBuilder();
        Printing.MicrosoftCabinet.Print(sb, item.Model);
        return sb;
    }

    /// <summary>
    /// Render MS-DOS executable information as text
    /// </summary>
    private static StringBuilder PrettyPrint(this MSDOS item)
    {
        var sb = new StringBuilder();
        Printing.MSDOS.Print(sb, item.Model);
        return sb;
    }

    /// <summary>
    /// Render Nintendo 3DS cart information as text
    /// </summary>
    private static StringBuilder PrettyPrint(this N3DS item)
    {
        var sb = new StringBuilder();
        Printing.N3DS.Print(sb, item.Model);
        return sb;
    }

    /// <summary>
    /// Render NCF information as text
    /// </summary>
    private static StringBuilder PrettyPrint(this NCF item)
    {
        var sb = new StringBuilder();
        Printing.NCF.Print(sb, item.Model);
        return sb;
    }

    /// <summary>
    /// Render New Executable information as text
    /// </summary>
    private static StringBuilder PrettyPrint(this NewExecutable item)
    {
        var sb = new StringBuilder();
        Printing.NewExecutable.Print(sb, item.Model);
        return sb;
    }

    /// <summary>
    /// Render Nintendo DS/DSi cart information as text
    /// </summary>
    private static StringBuilder PrettyPrint(this Nitro item)
    {
        var sb = new StringBuilder();
        Printing.Nitro.Print(sb, item.Model);
        return sb;
    }

    /// <summary>
    /// Render PAK information as text
    /// </summary>
    private static StringBuilder PrettyPrint(this PAK item)
    {
        var sb = new StringBuilder();
        Printing.PAK.Print(sb, item.Model);
        return sb;
    }

    /// <summary>
    /// Render PFF information as text
    /// </summary>
    private static StringBuilder PrettyPrint(this PFF item)
    {
        var sb = new StringBuilder();
        Printing.PFF.Print(sb, item.Model);
        return sb;
    }

    /// <summary>
    /// Render PlayJ audio file information as text
    /// </summary>
    private static StringBuilder PrettyPrint(this PlayJAudioFile item)
    {
        var sb = new StringBuilder();
        Printing.PlayJAudioFile.Print(sb, item.Model);
        return sb;
    }

    /// <summary>
    /// Render Portable Executable information as text
    /// </summary>
    private static StringBuilder PrettyPrint(this PortableExecutable item)
    {
        var sb = new StringBuilder();
        Printing.PortableExecutable.Print(sb, item.Model);
        return sb;
    }

    /// <summary>
    /// Render Quantum archive information as text
    /// </summary>
    private static StringBuilder PrettyPrint(this Quantum item)
    {
        var sb = new StringBuilder();
        Printing.Quantum.Print(sb, item.Model);
        return sb;
    }

    /// <summary>
    /// Render SGA information as text
    /// </summary>
    private static StringBuilder PrettyPrint(this SGA item)
    {
        var sb = new StringBuilder();
        Printing.SGA.Print(sb, item.Model);
        return sb;
    }

    /// <summary>
    /// Render VBSP information as text
    /// </summary>
    private static StringBuilder PrettyPrint(this VBSP item)
    {
        var sb = new StringBuilder();
        Printing.VBSP.Print(sb, item.Model);
        return sb;
    }

    /// <summary>
    /// Render VPK information as text
    /// </summary>
    private static StringBuilder PrettyPrint(this VPK item)
    {
        var sb = new StringBuilder();
        Printing.VPK.Print(sb, item.Model);
        return sb;
    }

    /// <summary>
    /// Render WAD information as text
    /// </summary>
    private static StringBuilder PrettyPrint(this WAD item)
    {
        var sb = new StringBuilder();
        Printing.WAD.Print(sb, item.Model);
        return sb;
    }

    /// <summary>
    /// Render XZP information as text
    /// </summary>
    private static StringBuilder PrettyPrint(this XZP item)
    {
        var sb = new StringBuilder();
        Printing.XZP.Print(sb, item.Model);
        return sb;
    }
}
}

View File

@@ -1,223 +0,0 @@
using System.IO;
using System.Text;
namespace BinaryObjectScanner.Wrappers
{
/// <summary>
/// Wrapper around a deserialized Quantum archive model, providing
/// creation helpers, (partial) extraction, and pretty-printing.
/// </summary>
public class Quantum : WrapperBase<SabreTools.Models.Quantum.Archive>
{
    #region Descriptive Properties

    /// <inheritdoc/>
    public override string DescriptionString => "Quantum Archive";

    #endregion

    #region Constructors

    /// <inheritdoc/>
#if NET48
    public Quantum(SabreTools.Models.Quantum.Archive model, byte[] data, int offset)
#else
    public Quantum(SabreTools.Models.Quantum.Archive? model, byte[]? data, int offset)
#endif
        : base(model, data, offset)
    {
        // All logic is handled by the base class
    }

    /// <inheritdoc/>
#if NET48
    public Quantum(SabreTools.Models.Quantum.Archive model, Stream data)
#else
    public Quantum(SabreTools.Models.Quantum.Archive? model, Stream? data)
#endif
        : base(model, data)
    {
        // All logic is handled by the base class
    }

    /// <summary>
    /// Create a Quantum archive from a byte array and offset
    /// </summary>
    /// <param name="data">Byte array representing the archive</param>
    /// <param name="offset">Offset within the array to parse</param>
    /// <returns>A Quantum archive wrapper on success, null on failure</returns>
#if NET48
    public static Quantum Create(byte[] data, int offset)
#else
    public static Quantum? Create(byte[]? data, int offset)
#endif
    {
        // If the data is invalid
        if (data == null)
            return null;

        // If the offset is out of bounds
        if (offset < 0 || offset >= data.Length)
            return null;

        // Create a memory stream and use that
        MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
        return Create(dataStream);
    }

    /// <summary>
    /// Create a Quantum archive from a Stream
    /// </summary>
    /// <param name="data">Stream representing the archive</param>
    /// <returns>A Quantum archive wrapper on success, null on failure</returns>
#if NET48
    public static Quantum Create(Stream data)
#else
    public static Quantum? Create(Stream? data)
#endif
    {
        // The stream must be non-empty, seekable, and readable for parsing
        if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
            return null;

        var archive = new SabreTools.Serialization.Streams.Quantum().Deserialize(data);
        if (archive == null)
            return null;

        try
        {
            return new Quantum(archive, data);
        }
        catch
        {
            return null;
        }
    }

    #endregion

    #region Data

    /// <summary>
    /// Extract all files from the Quantum archive to an output directory
    /// </summary>
    /// <param name="outputDirectory">Output directory to write to</param>
    /// <returns>True if all files extracted, false otherwise</returns>
    /// <remarks>
    /// Currently always returns false because <see cref="ExtractFile"/>
    /// does not yet implement decompression.
    /// </remarks>
    public bool ExtractAll(string outputDirectory)
    {
        // If we have no files
        if (this.Model.FileList == null || this.Model.FileList.Length == 0)
            return false;

        // Loop through and extract all files to the output
        bool allExtracted = true;
        for (int i = 0; i < this.Model.FileList.Length; i++)
        {
            allExtracted &= ExtractFile(i, outputDirectory);
        }

        return allExtracted;
    }

    /// <summary>
    /// Extract a file from the Quantum archive to an output directory by index
    /// </summary>
    /// <param name="index">File index to extract</param>
    /// <param name="outputDirectory">Output directory to write to</param>
    /// <returns>True if the file extracted, false otherwise</returns>
    /// <remarks>
    /// Decompression is not implemented yet, so this always returns false
    /// after reading the compressed payload. The commented-out code below
    /// sketches the intended implementation. The previously present
    /// unreachable <c>return true;</c> after the comment block (CS0162)
    /// has been removed.
    /// </remarks>
    public bool ExtractFile(int index, string outputDirectory)
    {
        // If we have no files
        if (this.Model.Header == null || this.Model.Header.FileCount == 0 || this.Model.FileList == null || this.Model.FileList.Length == 0)
            return false;

        // If we have an invalid index
        if (index < 0 || index >= this.Model.FileList.Length)
            return false;

        // Get the file information (used by the commented decompression code below)
        var fileDescriptor = this.Model.FileList[index];

        // Read the entire compressed data
        int compressedDataOffset = (int)this.Model.CompressedDataOffset;
        int compressedDataLength = GetEndOfFile() - compressedDataOffset;
#if NET48
        byte[] compressedData = ReadFromDataSource(compressedDataOffset, compressedDataLength);
#else
        byte[]? compressedData = ReadFromDataSource(compressedDataOffset, compressedDataLength);
#endif

        // TODO: Figure out decompression
        // - Single-file archives seem to work
        // - Single-file archives with files that span a window boundary seem to work
        // - The first files in each archive seem to work
        return false;

        // // Setup the decompression state
        // State state = new State();
        // Decompressor.InitState(state, TableSize, CompressionFlags);

        // // Decompress the entire array
        // int decompressedDataLength = (int)FileList.Sum(fd => fd.ExpandedFileSize);
        // byte[] decompressedData = new byte[decompressedDataLength];
        // Decompressor.Decompress(state, compressedData.Length, compressedData, decompressedData.Length, decompressedData);

        // // Read the data
        // int offset = (int)FileList.Take(index).Sum(fd => fd.ExpandedFileSize);
        // byte[] data = new byte[fileDescriptor.ExpandedFileSize];
        // Array.Copy(decompressedData, offset, data, 0, data.Length);

        // // Loop through all files before the current
        // for (int i = 0; i < index; i++)
        // {
        //     // Decompress the next block of data
        //     byte[] tempData = new byte[FileList[i].ExpandedFileSize];
        //     int lastRead = Decompressor.Decompress(state, compressedData.Length, compressedData, tempData.Length, tempData);
        //     compressedData = new ReadOnlySpan<byte>(compressedData, (lastRead), compressedData.Length - (lastRead)).ToArray();
        // }

        // // Read the data
        // byte[] data = new byte[fileDescriptor.ExpandedFileSize];
        // _ = Decompressor.Decompress(state, compressedData.Length, compressedData, data.Length, data);

        // // Create the filename
        // string filename = fileDescriptor.FileName;

        // // If we have an invalid output directory
        // if (string.IsNullOrWhiteSpace(outputDirectory))
        //     return false;

        // // Create the full output path
        // filename = Path.Combine(outputDirectory, filename);

        // // Ensure the output directory is created
        // Directory.CreateDirectory(Path.GetDirectoryName(filename));

        // // Try to write the data
        // try
        // {
        //     // Open the output file for writing
        //     using (Stream fs = File.OpenWrite(filename))
        //     {
        //         fs.Write(data, 0, data.Length);
        //     }
        // }
        // catch
        // {
        //     return false;
        // }
    }

    #endregion

    #region Printing

    /// <inheritdoc/>
    public override StringBuilder PrettyPrint()
    {
        StringBuilder builder = new StringBuilder();
        Printing.Quantum.Print(builder, this.Model);
        return builder;
    }

    #endregion
}
}

View File

@@ -1,352 +0,0 @@
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using ICSharpCode.SharpZipLib.Zip.Compression;
namespace BinaryObjectScanner.Wrappers
{
/// <summary>
/// Wrapper around a deserialized SGA model (Relic archive format),
/// providing creation helpers, extraction, and pretty-printing.
/// Supports SGA major versions 4 through 7.
/// </summary>
public class SGA : WrapperBase<SabreTools.Models.SGA.File>
{
    #region Descriptive Properties

    /// <inheritdoc/>
    public override string DescriptionString => "SGA";

    #endregion

    #region Constructors

    /// <inheritdoc/>
#if NET48
    public SGA(SabreTools.Models.SGA.File model, byte[] data, int offset)
#else
    public SGA(SabreTools.Models.SGA.File? model, byte[]? data, int offset)
#endif
        : base(model, data, offset)
    {
        // All logic is handled by the base class
    }

    /// <inheritdoc/>
#if NET48
    public SGA(SabreTools.Models.SGA.File model, Stream data)
#else
    public SGA(SabreTools.Models.SGA.File? model, Stream? data)
#endif
        : base(model, data)
    {
        // All logic is handled by the base class
    }

    /// <summary>
    /// Create an SGA from a byte array and offset
    /// </summary>
    /// <param name="data">Byte array representing the SGA</param>
    /// <param name="offset">Offset within the array to parse</param>
    /// <returns>An SGA wrapper on success, null on failure</returns>
#if NET48
    public static SGA Create(byte[] data, int offset)
#else
    public static SGA? Create(byte[]? data, int offset)
#endif
    {
        // If the data is invalid
        if (data == null)
            return null;

        // If the offset is out of bounds
        if (offset < 0 || offset >= data.Length)
            return null;

        // Create a memory stream and use that
        MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
        return Create(dataStream);
    }

    /// <summary>
    /// Create a SGA from a Stream
    /// </summary>
    /// <param name="data">Stream representing the SGA</param>
    /// <returns>An SGA wrapper on success, null on failure</returns>
#if NET48
    public static SGA Create(Stream data)
#else
    public static SGA? Create(Stream? data)
#endif
    {
        // The stream must be non-empty, seekable, and readable for parsing
        if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
            return null;

        var file = new SabreTools.Serialization.Streams.SGA().Deserialize(data);
        if (file == null)
            return null;

        try
        {
            return new SGA(file, data);
        }
        catch
        {
            return null;
        }
    }

    #endregion

    #region Printing

    /// <inheritdoc/>
    public override StringBuilder PrettyPrint()
    {
        StringBuilder builder = new StringBuilder();
        Printing.SGA.Print(builder, this.Model);
        return builder;
    }

    #endregion

    #region Extraction

    /// <summary>
    /// Extract all files from the SGA to an output directory
    /// </summary>
    /// <param name="outputDirectory">Output directory to write to</param>
    /// <returns>True if all files extracted, false otherwise</returns>
    public bool ExtractAll(string outputDirectory)
    {
        // Get the number of files; the directory type depends on the major version
        int filesLength;
        switch (this.Model.Header?.MajorVersion)
        {
            case 4: filesLength = (Model.Directory as SabreTools.Models.SGA.Directory4)?.Files?.Length ?? 0; break;
            case 5: filesLength = (Model.Directory as SabreTools.Models.SGA.Directory5)?.Files?.Length ?? 0; break;
            case 6: filesLength = (Model.Directory as SabreTools.Models.SGA.Directory6)?.Files?.Length ?? 0; break;
            case 7: filesLength = (Model.Directory as SabreTools.Models.SGA.Directory7)?.Files?.Length ?? 0; break;
            default: return false;
        }

        // If we have no files
        if (filesLength == 0)
            return false;

        // Loop through and extract all files to the output
        bool allExtracted = true;
        for (int i = 0; i < filesLength; i++)
        {
            allExtracted &= ExtractFile(i, outputDirectory);
        }

        return allExtracted;
    }

    /// <summary>
    /// Extract a file from the SGA to an output directory by index
    /// </summary>
    /// <param name="index">File index to extract</param>
    /// <param name="outputDirectory">Output directory to write to</param>
    /// <returns>True if the file extracted, false otherwise</returns>
    public bool ExtractFile(int index, string outputDirectory)
    {
        // Get the number of files; the directory type depends on the major version
        int filesLength;
        switch (this.Model.Header?.MajorVersion)
        {
            case 4: filesLength = (Model.Directory as SabreTools.Models.SGA.Directory4)?.Files?.Length ?? 0; break;
            case 5: filesLength = (Model.Directory as SabreTools.Models.SGA.Directory5)?.Files?.Length ?? 0; break;
            case 6: filesLength = (Model.Directory as SabreTools.Models.SGA.Directory6)?.Files?.Length ?? 0; break;
            case 7: filesLength = (Model.Directory as SabreTools.Models.SGA.Directory7)?.Files?.Length ?? 0; break;
            default: return false;
        }

        // If we have no files
        if (filesLength == 0)
            return false;

        // If the files index is invalid
        if (index < 0 || index >= filesLength)
            return false;

        // Get the file entry for the requested index
#if NET48
        object file;
#else
        object? file;
#endif
        switch (this.Model.Header?.MajorVersion)
        {
            case 4: file = (Model.Directory as SabreTools.Models.SGA.Directory4)?.Files?[index]; break;
            case 5: file = (Model.Directory as SabreTools.Models.SGA.Directory5)?.Files?[index]; break;
            case 6: file = (Model.Directory as SabreTools.Models.SGA.Directory6)?.Files?[index]; break;
            case 7: file = (Model.Directory as SabreTools.Models.SGA.Directory7)?.Files?[index]; break;
            default: return false;
        }

        if (file == null)
            return false;

        // Create the filename (v4 and v5 share the File4 layout)
#if NET48
        string filename;
#else
        string? filename;
#endif
        switch (this.Model.Header?.MajorVersion)
        {
            case 4:
            case 5: filename = (file as SabreTools.Models.SGA.File4)?.Name; break;
            case 6: filename = (file as SabreTools.Models.SGA.File6)?.Name; break;
            case 7: filename = (file as SabreTools.Models.SGA.File7)?.Name; break;
            default: return false;
        }

        // Loop through and get all parent directories
#if NET48
        var parentNames = new List<string> { filename };
#else
        var parentNames = new List<string?> { filename };
#endif

        // Get the parent directory: the folder whose file index range contains this index
#if NET48
        object folder;
#else
        object? folder;
#endif
        switch (this.Model.Header?.MajorVersion)
        {
#if NET48
            case 4: folder = (Model.Directory as SabreTools.Models.SGA.Directory4)?.Folders?.FirstOrDefault(f => index >= f.FileStartIndex && index <= f.FileEndIndex); break;
            case 5: folder = (Model.Directory as SabreTools.Models.SGA.Directory5)?.Folders?.FirstOrDefault(f => index >= f.FileStartIndex && index <= f.FileEndIndex); break;
            case 6: folder = (Model.Directory as SabreTools.Models.SGA.Directory6)?.Folders?.FirstOrDefault(f => index >= f.FileStartIndex && index <= f.FileEndIndex); break;
            case 7: folder = (Model.Directory as SabreTools.Models.SGA.Directory7)?.Folders?.FirstOrDefault(f => index >= f.FileStartIndex && index <= f.FileEndIndex); break;
#else
            case 4: folder = (Model.Directory as SabreTools.Models.SGA.Directory4)?.Folders?.FirstOrDefault(f => f != null && index >= f.FileStartIndex && index <= f.FileEndIndex); break;
            case 5: folder = (Model.Directory as SabreTools.Models.SGA.Directory5)?.Folders?.FirstOrDefault(f => f != null && index >= f.FileStartIndex && index <= f.FileEndIndex); break;
            case 6: folder = (Model.Directory as SabreTools.Models.SGA.Directory6)?.Folders?.FirstOrDefault(f => f != null && index >= f.FileStartIndex && index <= f.FileEndIndex); break;
            case 7: folder = (Model.Directory as SabreTools.Models.SGA.Directory7)?.Folders?.FirstOrDefault(f => f != null && index >= f.FileStartIndex && index <= f.FileEndIndex); break;
#endif
            default: return false;
        }

        // If we have a parent folder, prepend its name (v5+ share the Folder5 layout)
        if (folder != null)
        {
            switch (this.Model.Header?.MajorVersion)
            {
                case 4: parentNames.Add((folder as SabreTools.Models.SGA.Folder4)?.Name); break;
                case 5:
                case 6:
                case 7: parentNames.Add((folder as SabreTools.Models.SGA.Folder5)?.Name); break;
                default: return false;
            }
        }

        // TODO: Should the section name/alias be used in the path as well?

        // Reverse and assemble the filename
        parentNames.Reverse();
        filename = Path.Combine(parentNames.Cast<string>().ToArray());

        // Get the file offset
        long fileOffset;
        switch (this.Model.Header?.MajorVersion)
        {
            case 4:
            case 5: fileOffset = (file as SabreTools.Models.SGA.File4)?.Offset ?? 0; break;
            case 6: fileOffset = (file as SabreTools.Models.SGA.File6)?.Offset ?? 0; break;
            case 7: fileOffset = (file as SabreTools.Models.SGA.File7)?.Offset ?? 0; break;
            default: return false;
        }

        // Adjust the file offset by the header's file data offset
        // NOTE(review): v4/v5 use Header4 and v6/v7 use Header6 — presumably the
        // adjacent versions share a header layout; confirm against the SGA spec.
        switch (this.Model.Header?.MajorVersion)
        {
            case 4: fileOffset += (Model.Header as SabreTools.Models.SGA.Header4)?.FileDataOffset ?? 0; break;
            case 5: fileOffset += (Model.Header as SabreTools.Models.SGA.Header4)?.FileDataOffset ?? 0; break;
            case 6: fileOffset += (Model.Header as SabreTools.Models.SGA.Header6)?.FileDataOffset ?? 0; break;
            case 7: fileOffset += (Model.Header as SabreTools.Models.SGA.Header6)?.FileDataOffset ?? 0; break;
            default: return false;
        }

        // Get the on-disk (compressed) and expanded sizes
        long fileSize, outputFileSize;
        switch (this.Model.Header?.MajorVersion)
        {
            case 4:
            case 5:
                fileSize = (file as SabreTools.Models.SGA.File4)?.SizeOnDisk ?? 0;
                outputFileSize = (file as SabreTools.Models.SGA.File4)?.Size ?? 0;
                break;
            case 6:
                fileSize = (file as SabreTools.Models.SGA.File6)?.SizeOnDisk ?? 0;
                outputFileSize = (file as SabreTools.Models.SGA.File6)?.Size ?? 0;
                break;
            case 7:
                fileSize = (file as SabreTools.Models.SGA.File7)?.SizeOnDisk ?? 0;
                outputFileSize = (file as SabreTools.Models.SGA.File7)?.Size ?? 0;
                break;
            default: return false;
        }

        // Read the compressed data directly
#if NET48
        byte[] compressedData = ReadFromDataSource((int)fileOffset, (int)fileSize);
#else
        byte[]? compressedData = ReadFromDataSource((int)fileOffset, (int)fileSize);
#endif
        if (compressedData == null)
            return false;

        // If the compressed and uncompressed sizes match, the data is stored raw
        byte[] data;
        if (fileSize == outputFileSize)
        {
            data = compressedData;
        }
        else
        {
            // Decompress the zlib-deflated data
            data = new byte[outputFileSize];
            Inflater inflater = new Inflater();
            inflater.SetInput(compressedData);
            inflater.Inflate(data);
        }

        // If we have an invalid output directory
        if (string.IsNullOrWhiteSpace(outputDirectory))
            return false;

        // Create the full output path
        filename = Path.Combine(outputDirectory, filename);

        // Ensure the output directory is created
#if NET48
        string directoryName = Path.GetDirectoryName(filename);
#else
        string? directoryName = Path.GetDirectoryName(filename);
#endif
        if (directoryName != null)
            Directory.CreateDirectory(directoryName);

        // Try to write the data
        try
        {
            // Open the output file for writing
            using (Stream fs = File.OpenWrite(filename))
            {
                fs.Write(data, 0, data.Length);
            }
        }
        catch
        {
            return false;
        }

        // BUGFIX: previously returned false even after a successful write,
        // which made ExtractAll always report failure. Sibling wrappers
        // (VBSP, VPK, WAD) return true at this point.
        return true;
    }

    #endregion
}
}

View File

@@ -1,208 +0,0 @@
using System.IO;
using System.Text;
using static SabreTools.Models.VBSP.Constants;
namespace BinaryObjectScanner.Wrappers
{
/// <summary>
/// Wrapper around a deserialized VBSP (Half-Life 2 level) model,
/// providing creation helpers, lump extraction, and pretty-printing.
/// </summary>
public class VBSP : WrapperBase<SabreTools.Models.VBSP.File>
{
    #region Descriptive Properties

    /// <inheritdoc/>
    public override string DescriptionString => "Half-Life 2 Level (VBSP)";

    #endregion

    #region Constructors

    /// <inheritdoc/>
#if NET48
    public VBSP(SabreTools.Models.VBSP.File model, byte[] data, int offset)
#else
    public VBSP(SabreTools.Models.VBSP.File? model, byte[]? data, int offset)
#endif
        : base(model, data, offset)
    {
        // Everything is handled by WrapperBase
    }

    /// <inheritdoc/>
#if NET48
    public VBSP(SabreTools.Models.VBSP.File model, Stream data)
#else
    public VBSP(SabreTools.Models.VBSP.File? model, Stream? data)
#endif
        : base(model, data)
    {
        // Everything is handled by WrapperBase
    }

    /// <summary>
    /// Create a VBSP from a byte array and offset
    /// </summary>
    /// <param name="data">Byte array representing the VBSP</param>
    /// <param name="offset">Offset within the array to parse</param>
    /// <returns>A VBSP wrapper on success, null on failure</returns>
#if NET48
    public static VBSP Create(byte[] data, int offset)
#else
    public static VBSP? Create(byte[]? data, int offset)
#endif
    {
        // Reject missing input outright
        if (data == null)
            return null;

        // Reject an offset that does not fall inside the array
        if (offset < 0 || offset >= data.Length)
            return null;

        // Wrap the remaining bytes in a stream and delegate
        var wrapped = new MemoryStream(data, offset, data.Length - offset);
        return Create(wrapped);
    }

    /// <summary>
    /// Create a VBSP from a Stream
    /// </summary>
    /// <param name="data">Stream representing the VBSP</param>
    /// <returns>An VBSP wrapper on success, null on failure</returns>
#if NET48
    public static VBSP Create(Stream data)
#else
    public static VBSP? Create(Stream? data)
#endif
    {
        // The stream must be non-empty, seekable, and readable
        if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
            return null;

        var deserialized = new SabreTools.Serialization.Streams.VBSP().Deserialize(data);
        if (deserialized == null)
            return null;

        try
        {
            return new VBSP(deserialized, data);
        }
        catch
        {
            return null;
        }
    }

    #endregion

    #region Printing

    /// <inheritdoc/>
    public override StringBuilder PrettyPrint()
    {
        var sb = new StringBuilder();
        Printing.VBSP.Print(sb, this.Model);
        return sb;
    }

    #endregion

    #region Extraction

    /// <summary>
    /// Extract all lumps from the VBSP to an output directory
    /// </summary>
    /// <param name="outputDirectory">Output directory to write to</param>
    /// <returns>True if all lumps extracted, false otherwise</returns>
    public bool ExtractAllLumps(string outputDirectory)
    {
        // Nothing to do without a lump table
        var lumps = this.Model.Header?.Lumps;
        if (lumps == null || lumps.Length == 0)
            return false;

        // Attempt every lump; a single failure flips the overall result
        bool success = true;
        for (int lumpIndex = 0; lumpIndex < lumps.Length; lumpIndex++)
        {
            success &= ExtractLump(lumpIndex, outputDirectory);
        }

        return success;
    }

    /// <summary>
    /// Extract a lump from the VBSP to an output directory by index
    /// </summary>
    /// <param name="index">Lump index to extract</param>
    /// <param name="outputDirectory">Output directory to write to</param>
    /// <returns>True if the lump extracted, false otherwise</returns>
    public bool ExtractLump(int index, string outputDirectory)
    {
        // Nothing to do without a lump table
        var lumps = this.Model.Header?.Lumps;
        if (lumps == null || lumps.Length == 0)
            return false;

        // Bounds-check the requested index
        if (index < 0 || index >= lumps.Length)
            return false;

        // A missing table entry cannot be extracted
        var lump = lumps[index];
        if (lump == null)
            return false;

        // Pull the raw lump bytes out of the backing data source
#if NET48
        byte[] lumpData = ReadFromDataSource((int)lump.Offset, (int)lump.Length);
#else
        byte[]? lumpData = ReadFromDataSource((int)lump.Offset, (int)lump.Length);
#endif
        if (lumpData == null)
            return false;

        // Well-known lumps get dedicated names; everything else is generic
        string filename;
        if (index == HL_VBSP_LUMP_ENTITIES)
            filename = "entities.ent";
        else if (index == HL_VBSP_LUMP_PAKFILE)
            filename = "pakfile.zip";
        else
            filename = $"lump_{index}.bin";

        // A usable output directory is required
        if (string.IsNullOrWhiteSpace(outputDirectory))
            return false;

        // Build the final path and make sure its directory exists
        filename = Path.Combine(outputDirectory, filename);
#if NET48
        string directoryName = Path.GetDirectoryName(filename);
#else
        string? directoryName = Path.GetDirectoryName(filename);
#endif
        if (directoryName != null)
            Directory.CreateDirectory(directoryName);

        // Write the bytes; any I/O failure reports as a failed extraction
        try
        {
            using (Stream fs = File.OpenWrite(filename))
            {
                fs.Write(lumpData, 0, lumpData.Length);
            }
        }
        catch
        {
            return false;
        }

        return true;
    }

    #endregion
}
}

View File

@@ -1,338 +0,0 @@
using System.IO;
using System.Linq;
using System.Text;
using SabreTools.IO;
using static SabreTools.Models.VPK.Constants;
namespace BinaryObjectScanner.Wrappers
{
/// <summary>
/// Wrapper around a deserialized VPK (Valve Package) model. Supports
/// extraction of entries stored either inline (preload data) or in
/// companion "_NNN" archive files next to the "_dir" file.
/// </summary>
public class VPK : WrapperBase<SabreTools.Models.VPK.File>
{
    #region Descriptive Properties

    /// <inheritdoc/>
    public override string DescriptionString => "Valve Package File (VPK)";

    #endregion

    #region Extension Properties

    /// <summary>
    /// Array of archive filenames attached to the given VPK
    /// </summary>
    /// <remarks>
    /// Only resolvable when the wrapper was created from a FileStream whose
    /// name ends in "dir" (e.g. "pak01_dir.vpk"); returns null otherwise.
    /// The result is computed once and cached in <see cref="_archiveFilenames"/>.
    /// </remarks>
#if NET48
    public string[] ArchiveFilenames
#else
    public string[]? ArchiveFilenames
#endif
    {
        get
        {
            // Use the cached value if we have it
            if (_archiveFilenames != null)
                return _archiveFilenames;

            // If we don't have a source filename
            if (!(_streamData is FileStream fs) || string.IsNullOrWhiteSpace(fs.Name))
                return null;

            // If the filename is not the right format
            string extension = Path.GetExtension(fs.Name).TrimStart('.');
#if NET48
            string directoryName = Path.GetDirectoryName(fs.Name);
#else
            string? directoryName = Path.GetDirectoryName(fs.Name);
#endif
            string fileName = directoryName == null
                ? Path.GetFileNameWithoutExtension(fs.Name)
                : Path.Combine(directoryName, Path.GetFileNameWithoutExtension(fs.Name));
            if (fileName.Length < 3)
                return null;
            else if (fileName.Substring(fileName.Length - 3) != "dir")
                return null;

            // Get the archive count
            // NOTE(review): this uses the highest referenced archive index as the
            // count; if indices are 0-based, the archive with index == Max may be
            // unreachable (off by one) — confirm against real multi-part VPK sets.
            // NOTE(review): Max() throws InvalidOperationException when every
            // entry has ArchiveIndex == HL_VPK_NO_ARCHIVE — verify this cannot
            // occur, or that the caller tolerates the exception.
            int archiveCount = this.Model.DirectoryItems == null
                ? 0
                : this.Model.DirectoryItems
                    .Select(di => di?.DirectoryEntry)
                    .Select(de => de?.ArchiveIndex ?? 0)
                    .Where(ai => ai != HL_VPK_NO_ARCHIVE)
                    .Max();

            // Build the list of archive filenames to populate
            _archiveFilenames = new string[archiveCount];

            // Loop through and create the archive filenames
            for (int i = 0; i < archiveCount; i++)
            {
                // We need 5 digits to print a short, but we already have 3 for dir.
                // NOTE(review): the comment above mentions 5 digits but PadLeft(3)
                // produces 3-digit suffixes (_000) — confirm which width real VPK
                // archives use.
                string archiveFileName = $"{fileName.Substring(0, fileName.Length - 3)}{i.ToString().PadLeft(3, '0')}.{extension}";
                _archiveFilenames[i] = archiveFileName;
            }

            // Return the array
            return _archiveFilenames;
        }
    }

    #endregion

    #region Instance Variables

    /// <summary>
    /// Array of archive filenames attached to the given VPK
    /// </summary>
    // Backing cache for the ArchiveFilenames property; populated lazily.
#if NET48
    private string[] _archiveFilenames = null;
#else
    private string[]? _archiveFilenames = null;
#endif

    #endregion

    #region Constructors

    /// <inheritdoc/>
#if NET48
    public VPK(SabreTools.Models.VPK.File model, byte[] data, int offset)
#else
    public VPK(SabreTools.Models.VPK.File? model, byte[]? data, int offset)
#endif
        : base(model, data, offset)
    {
        // All logic is handled by the base class
    }

    /// <inheritdoc/>
#if NET48
    public VPK(SabreTools.Models.VPK.File model, Stream data)
#else
    public VPK(SabreTools.Models.VPK.File? model, Stream? data)
#endif
        : base(model, data)
    {
        // All logic is handled by the base class
    }

    /// <summary>
    /// Create a VPK from a byte array and offset
    /// </summary>
    /// <param name="data">Byte array representing the VPK</param>
    /// <param name="offset">Offset within the array to parse</param>
    /// <returns>A VPK wrapper on success, null on failure</returns>
#if NET48
    public static VPK Create(byte[] data, int offset)
#else
    public static VPK? Create(byte[]? data, int offset)
#endif
    {
        // If the data is invalid
        if (data == null)
            return null;

        // If the offset is out of bounds
        if (offset < 0 || offset >= data.Length)
            return null;

        // Create a memory stream and use that
        MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
        return Create(dataStream);
    }

    /// <summary>
    /// Create a VPK from a Stream
    /// </summary>
    /// <param name="data">Stream representing the VPK</param>
    /// <returns>A VPK wrapper on success, null on failure</returns>
#if NET48
    public static VPK Create(Stream data)
#else
    public static VPK? Create(Stream? data)
#endif
    {
        // If the data is invalid
        if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
            return null;

        var file = new SabreTools.Serialization.Streams.VPK().Deserialize(data);
        if (file == null)
            return null;

        try
        {
            return new VPK(file, data);
        }
        catch
        {
            return null;
        }
    }

    #endregion

    #region Printing

    /// <inheritdoc/>
    public override StringBuilder PrettyPrint()
    {
        StringBuilder builder = new StringBuilder();
        Printing.VPK.Print(builder, this.Model);
        return builder;
    }

    #endregion

    #region Extraction

    /// <summary>
    /// Extract all files from the VPK to an output directory
    /// </summary>
    /// <param name="outputDirectory">Output directory to write to</param>
    /// <returns>True if all files extracted, false otherwise</returns>
    public bool ExtractAll(string outputDirectory)
    {
        // If we have no directory items
        if (this.Model.DirectoryItems == null || this.Model.DirectoryItems.Length == 0)
            return false;

        // Loop through and extract all files to the output
        bool allExtracted = true;
        for (int i = 0; i < this.Model.DirectoryItems.Length; i++)
        {
            allExtracted &= ExtractFile(i, outputDirectory);
        }

        return allExtracted;
    }

    /// <summary>
    /// Extract a file from the VPK to an output directory by index
    /// </summary>
    /// <param name="index">File index to extract</param>
    /// <param name="outputDirectory">Output directory to write to</param>
    /// <returns>True if the file extracted, false otherwise</returns>
    public bool ExtractFile(int index, string outputDirectory)
    {
        // If we have no directory items
        if (this.Model.DirectoryItems == null || this.Model.DirectoryItems.Length == 0)
            return false;

        // If the directory item index is invalid
        if (index < 0 || index >= this.Model.DirectoryItems.Length)
            return false;

        // Get the directory item
        var directoryItem = this.Model.DirectoryItems[index];
        if (directoryItem?.DirectoryEntry == null)
            return false;

        // If we have an item with no archive, the payload is the inline preload data
#if NET48
        byte[] data;
#else
        byte[]? data;
#endif
        if (directoryItem.DirectoryEntry.ArchiveIndex == HL_VPK_NO_ARCHIVE)
        {
            if (directoryItem.PreloadData == null)
                return false;

            data = directoryItem.PreloadData;
        }
        else
        {
            // If we have invalid archives
            if (ArchiveFilenames == null || ArchiveFilenames.Length == 0)
                return false;

            // If we have an invalid index
            if (directoryItem.DirectoryEntry.ArchiveIndex < 0 || directoryItem.DirectoryEntry.ArchiveIndex >= ArchiveFilenames.Length)
                return false;

            // Get the archive filename
            string archiveFileName = ArchiveFilenames[directoryItem.DirectoryEntry.ArchiveIndex];
            if (string.IsNullOrWhiteSpace(archiveFileName))
                return false;

            // If the archive doesn't exist
            if (!File.Exists(archiveFileName))
                return false;

            // Try to open the archive
#if NET48
            Stream archiveStream = null;
#else
            Stream? archiveStream = null;
#endif
            try
            {
                // Open the archive
                archiveStream = File.OpenRead(archiveFileName);

                // Seek to the data
                archiveStream.Seek(directoryItem.DirectoryEntry.EntryOffset, SeekOrigin.Begin);

                // Read the directory item bytes
                data = archiveStream.ReadBytes((int)directoryItem.DirectoryEntry.EntryLength);
            }
            catch
            {
                return false;
            }
            finally
            {
                // Close the archive stream regardless of success
                archiveStream?.Close();
            }

            // If we have preload data, prepend it
            if (data != null && directoryItem.PreloadData != null)
                data = directoryItem.PreloadData.Concat(data).ToArray();
        }

        // If there is nothing to write out
        if (data == null)
            return false;

        // Create the filename
        string filename = $"{directoryItem.Name}.{directoryItem.Extension}";
        if (!string.IsNullOrWhiteSpace(directoryItem.Path))
            filename = Path.Combine(directoryItem.Path, filename);

        // If we have an invalid output directory
        if (string.IsNullOrWhiteSpace(outputDirectory))
            return false;

        // Create the full output path
        filename = Path.Combine(outputDirectory, filename);

        // Ensure the output directory is created
#if NET48
        string directoryName = Path.GetDirectoryName(filename);
#else
        string? directoryName = Path.GetDirectoryName(filename);
#endif
        if (directoryName != null)
            Directory.CreateDirectory(directoryName);

        // Try to write the data
        try
        {
            // Open the output file for writing
            using (Stream fs = File.OpenWrite(filename))
            {
                fs.Write(data, 0, data.Length);
            }
        }
        catch
        {
            return false;
        }

        return true;
    }

    #endregion
}
}

View File

@@ -1,198 +0,0 @@
using System.IO;
using System.Text;
namespace BinaryObjectScanner.Wrappers
{
/// <summary>
/// Wrapper around a deserialized WAD (Half-Life texture package) model,
/// providing creation helpers, lump extraction, and pretty-printing.
/// </summary>
public class WAD : WrapperBase<SabreTools.Models.WAD.File>
{
    #region Descriptive Properties

    /// <inheritdoc/>
    public override string DescriptionString => "Half-Life Texture Package File (WAD)";

    #endregion

    #region Constructors

    /// <inheritdoc/>
#if NET48
    public WAD(SabreTools.Models.WAD.File model, byte[] data, int offset)
#else
    public WAD(SabreTools.Models.WAD.File? model, byte[]? data, int offset)
#endif
        : base(model, data, offset)
    {
        // Everything is handled by WrapperBase
    }

    /// <inheritdoc/>
#if NET48
    public WAD(SabreTools.Models.WAD.File model, Stream data)
#else
    public WAD(SabreTools.Models.WAD.File? model, Stream? data)
#endif
        : base(model, data)
    {
        // Everything is handled by WrapperBase
    }

    /// <summary>
    /// Create a WAD from a byte array and offset
    /// </summary>
    /// <param name="data">Byte array representing the WAD</param>
    /// <param name="offset">Offset within the array to parse</param>
    /// <returns>A WAD wrapper on success, null on failure</returns>
#if NET48
    public static WAD Create(byte[] data, int offset)
#else
    public static WAD? Create(byte[]? data, int offset)
#endif
    {
        // Reject missing input outright
        if (data == null)
            return null;

        // Reject an offset that does not fall inside the array
        if (offset < 0 || offset >= data.Length)
            return null;

        // Wrap the remaining bytes in a stream and delegate
        var wrapped = new MemoryStream(data, offset, data.Length - offset);
        return Create(wrapped);
    }

    /// <summary>
    /// Create a WAD from a Stream
    /// </summary>
    /// <param name="data">Stream representing the WAD</param>
    /// <returns>An WAD wrapper on success, null on failure</returns>
#if NET48
    public static WAD Create(Stream data)
#else
    public static WAD? Create(Stream? data)
#endif
    {
        // The stream must be non-empty, seekable, and readable
        if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
            return null;

        var deserialized = new SabreTools.Serialization.Streams.WAD().Deserialize(data);
        if (deserialized == null)
            return null;

        try
        {
            return new WAD(deserialized, data);
        }
        catch
        {
            return null;
        }
    }

    #endregion

    #region Printing

    /// <inheritdoc/>
    public override StringBuilder PrettyPrint()
    {
        var sb = new StringBuilder();
        Printing.WAD.Print(sb, this.Model);
        return sb;
    }

    #endregion

    #region Extraction

    /// <summary>
    /// Extract all lumps from the WAD to an output directory
    /// </summary>
    /// <param name="outputDirectory">Output directory to write to</param>
    /// <returns>True if all lumps extracted, false otherwise</returns>
    public bool ExtractAllLumps(string outputDirectory)
    {
        // Nothing to do without a lump table
        var lumps = this.Model.Lumps;
        if (lumps == null || lumps.Length == 0)
            return false;

        // Attempt every lump; a single failure flips the overall result
        bool success = true;
        for (int lumpIndex = 0; lumpIndex < lumps.Length; lumpIndex++)
        {
            success &= ExtractLump(lumpIndex, outputDirectory);
        }

        return success;
    }

    /// <summary>
    /// Extract a lump from the WAD to an output directory by index
    /// </summary>
    /// <param name="index">Lump index to extract</param>
    /// <param name="outputDirectory">Output directory to write to</param>
    /// <returns>True if the lump extracted, false otherwise</returns>
    public bool ExtractLump(int index, string outputDirectory)
    {
        // Nothing to do without a lump table
        var lumps = this.Model.Lumps;
        if (lumps == null || lumps.Length == 0)
            return false;

        // Bounds-check the requested index
        if (index < 0 || index >= lumps.Length)
            return false;

        // A missing table entry cannot be extracted
        var lump = lumps[index];
        if (lump == null)
            return false;

        // Read the data -- TODO: Handle uncompressed lumps (see BSP.ExtractTexture)
#if NET48
        byte[] lumpData = ReadFromDataSource((int)lump.Offset, (int)lump.Length);
#else
        byte[]? lumpData = ReadFromDataSource((int)lump.Offset, (int)lump.Length);
#endif
        if (lumpData == null)
            return false;

        // Name the output after the lump itself
        string filename = $"{lump.Name}.lmp";

        // A usable output directory is required
        if (string.IsNullOrWhiteSpace(outputDirectory))
            return false;

        // Build the final path and make sure its directory exists
        filename = Path.Combine(outputDirectory, filename);
#if NET48
        string directoryName = Path.GetDirectoryName(filename);
#else
        string? directoryName = Path.GetDirectoryName(filename);
#endif
        if (directoryName != null)
            Directory.CreateDirectory(directoryName);

        // Write the bytes; any I/O failure reports as a failed extraction
        try
        {
            using (Stream fs = File.OpenWrite(filename))
            {
                fs.Write(lumpData, 0, lumpData.Length);
            }
        }
        catch
        {
            return false;
        }

        return true;
    }

    #endregion
}
}

View File

@@ -1,419 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using SabreTools.IO;
namespace BinaryObjectScanner.Wrappers
{
/// <summary>
/// Common base for all model wrappers: pairs a deserialized model with the
/// raw backing data (byte array or Stream) it was parsed from, and provides
/// range-checked read access to that data.
/// </summary>
public abstract class WrapperBase<T> : IWrapper
{
    #region Descriptive Properties

    /// <inheritdoc/>
    public string Description() => DescriptionString;

    /// <summary>
    /// Description of the object
    /// </summary>
    public abstract string DescriptionString { get; }

    #endregion

    #region Properties

    /// <summary>
    /// Internal model
    /// </summary>
#if NET48
    public T Model { get; private set; }
#else
    public T Model { get; init; }
#endif

    #endregion

    #region Instance Variables

    /// <summary>
    /// Source of the original data
    /// </summary>
    protected DataSource _dataSource = DataSource.UNKNOWN;

    /// <summary>
    /// Source byte array data
    /// </summary>
    /// <remarks>This is only populated if <see cref="_dataSource"/> is <see cref="DataSource.ByteArray"/></remarks>
#if NET48
    protected byte[] _byteArrayData = null;
#else
    protected byte[]? _byteArrayData = null;
#endif

    /// <summary>
    /// Source byte array data offset
    /// </summary>
    /// <remarks>This is only populated if <see cref="_dataSource"/> is <see cref="DataSource.ByteArray"/></remarks>
    protected int _byteArrayOffset = -1;

    /// <summary>
    /// Source Stream data
    /// </summary>
    /// <remarks>This is only populated if <see cref="_dataSource"/> is <see cref="DataSource.Stream"/></remarks>
#if NET48
    protected Stream _streamData = null;
#else
    protected Stream? _streamData = null;
#endif

#if NET6_0_OR_GREATER
    /// <summary>
    /// JSON serializer options for output printing
    /// </summary>
    /// <remarks>A fresh options instance is constructed on every access</remarks>
    protected System.Text.Json.JsonSerializerOptions _jsonSerializerOptions
    {
        get
        {
            var serializer = new System.Text.Json.JsonSerializerOptions { IncludeFields = true, WriteIndented = true };
            serializer.Converters.Add(new ConcreteAbstractSerializer());
            serializer.Converters.Add(new ConcreteInterfaceSerializer());
            serializer.Converters.Add(new System.Text.Json.Serialization.JsonStringEnumConverter());
            return serializer;
        }
    }
#endif

    #endregion

    #region Constructors

    /// <summary>
    /// Construct a new instance of the wrapper from a byte array
    /// </summary>
    /// <param name="model">Deserialized model to wrap; must not be null</param>
    /// <param name="data">Byte array the model was parsed from; must not be null</param>
    /// <param name="offset">Offset within <paramref name="data"/> where the parsed data begins; must be in range</param>
#if NET48
    protected WrapperBase(T model, byte[] data, int offset)
#else
    protected WrapperBase(T? model, byte[]? data, int offset)
#endif
    {
        if (model == null)
            throw new ArgumentNullException(nameof(model));
        if (data == null)
            throw new ArgumentNullException(nameof(data));
        if (offset < 0 || offset >= data.Length)
            throw new ArgumentOutOfRangeException(nameof(offset));

        this.Model = model;
        _dataSource = DataSource.ByteArray;
        _byteArrayData = data;
        _byteArrayOffset = offset;
    }

    /// <summary>
    /// Construct a new instance of the wrapper from a Stream
    /// </summary>
    /// <param name="model">Deserialized model to wrap; must not be null</param>
    /// <param name="data">Stream the model was parsed from; must be non-empty, seekable, and readable</param>
#if NET48
    protected WrapperBase(T model, Stream data)
#else
    protected WrapperBase(T? model, Stream? data)
#endif
    {
        if (model == null)
            throw new ArgumentNullException(nameof(model));
        if (data == null)
            throw new ArgumentNullException(nameof(data));
        if (data.Length == 0 || !data.CanSeek || !data.CanRead)
            throw new ArgumentOutOfRangeException(nameof(data));

        this.Model = model;
        _dataSource = DataSource.Stream;
        _streamData = data;
    }

    #endregion

    #region Data

    /// <summary>
    /// Validate the backing data source
    /// </summary>
    /// <returns>True if the data source is valid, false otherwise</returns>
    protected bool DataSourceIsValid()
    {
        switch (_dataSource)
        {
            // Byte array data requires both a valid array and offset
            case DataSource.ByteArray:
                return _byteArrayData != null && _byteArrayOffset >= 0;

            // Stream data requires both a valid stream
            case DataSource.Stream:
                return _streamData != null && _streamData.CanRead && _streamData.CanSeek;

            // Everything else is invalid
            case DataSource.UNKNOWN:
            default:
                return false;
        }
    }

    /// <summary>
    /// Check if a data segment is valid in the data source
    /// </summary>
    /// <param name="position">Position in the source</param>
    /// <param name="length">Length of the data to check</param>
    /// <returns>True if the positional data is valid, false otherwise</returns>
    protected bool SegmentValid(int position, int length)
    {
        // Validate the data source
        if (!DataSourceIsValid())
            return false;

        // If we have an invalid position
        if (position < 0 || position >= GetEndOfFile())
            return false;

        switch (_dataSource)
        {
            // Byte array positions are relative to the stored offset
            case DataSource.ByteArray:
#if NET48
                return _byteArrayOffset + position + length <= _byteArrayData.Length;
#else
                return _byteArrayOffset + position + length <= _byteArrayData!.Length;
#endif

            // Stream positions are absolute within the stream
            case DataSource.Stream:
#if NET48
                return position + length <= _streamData.Length;
#else
                return position + length <= _streamData!.Length;
#endif

            // Everything else is invalid
            case DataSource.UNKNOWN:
            default:
                return false;
        }
    }

    /// <summary>
    /// Read data from the source
    /// </summary>
    /// <param name="position">Position in the source to read from</param>
    /// <param name="length">Length of the requested data</param>
    /// <returns>Byte array containing the requested data, null on error</returns>
#if NET48
    protected byte[] ReadFromDataSource(int position, int length)
#else
    protected byte[]? ReadFromDataSource(int position, int length)
#endif
    {
        // Validate the data source
        if (!DataSourceIsValid())
            return null;

        // Validate the requested segment
        if (!SegmentValid(position, length))
            return null;

        // Read and return the data
#if NET48
        byte[] sectionData = null;
#else
        byte[]? sectionData = null;
#endif
        switch (_dataSource)
        {
            case DataSource.ByteArray:
                sectionData = new byte[length];
#if NET48
                Array.Copy(_byteArrayData, _byteArrayOffset + position, sectionData, 0, length);
#else
                Array.Copy(_byteArrayData!, _byteArrayOffset + position, sectionData, 0, length);
#endif
                break;

            case DataSource.Stream:
                // Save and restore the stream position so reads are side-effect free
#if NET48
                long currentLocation = _streamData.Position;
#else
                long currentLocation = _streamData!.Position;
#endif
                _streamData.Seek(position, SeekOrigin.Begin);
                sectionData = _streamData.ReadBytes(length);
                _streamData.Seek(currentLocation, SeekOrigin.Begin);
                break;
        }

        return sectionData;
    }

    /// <summary>
    /// Read string data from the source
    /// </summary>
    /// <param name="position">Position in the source to read from</param>
    /// <param name="length">Length of the requested data</param>
    /// <param name="charLimit">Number of characters needed to be a valid string</param>
    /// <returns>String list containing the requested data, null on error</returns>
    /// <remarks>
    /// Performs two passes over the same bytes: one for printable-ASCII runs and
    /// one for UTF-16LE runs whose high byte is 0x00. Results are deduplicated
    /// and sorted before being returned.
    /// </remarks>
#if NET48
    protected List<string> ReadStringsFromDataSource(int position, int length, int charLimit = 5)
#else
    protected List<string>? ReadStringsFromDataSource(int position, int length, int charLimit = 5)
#endif
    {
        // Read the data as a byte array first
#if NET48
        byte[] sourceData = ReadFromDataSource(position, length);
#else
        byte[]? sourceData = ReadFromDataSource(position, length);
#endif
        if (sourceData == null)
            return null;

        // If we have an invalid character limit, default to 5
        if (charLimit <= 0)
            charLimit = 5;

        // Create the string list to return
        var sourceStrings = new List<string>();

        // Setup cached data
        int sourceDataIndex = 0;
        string cachedString = string.Empty;

        // Check for ASCII strings
        while (sourceDataIndex < sourceData.Length)
        {
            // If we have a control character or an invalid byte
            if (sourceData[sourceDataIndex] < 0x20 || sourceData[sourceDataIndex] > 0x7F)
            {
                // If we have no cached string
                if (cachedString.Length == 0)
                {
                    sourceDataIndex++;
                    continue;
                }

                // If we have a cached string greater than the limit
                if (cachedString.Length >= charLimit)
                    sourceStrings.Add(cachedString);

                cachedString = string.Empty;
                sourceDataIndex++;
                continue;
            }

            // All other characters get read in
            cachedString += Encoding.ASCII.GetString(sourceData, sourceDataIndex, 1);
            sourceDataIndex++;
        }

        // If we have a cached string greater than the limit
        if (cachedString.Length >= charLimit)
            sourceStrings.Add(cachedString);

        // Reset cached data
        sourceDataIndex = 0;
        cachedString = string.Empty;

        // We are limiting the check for Unicode characters with a second byte of 0x00 for now

        // Check for Unicode strings
        while (sourceDataIndex < sourceData.Length)
        {
            // Unicode characters are always 2 bytes
            if (sourceDataIndex == sourceData.Length - 1)
                break;

            ushort ch = BitConverter.ToUInt16(sourceData, sourceDataIndex);

            // If we have a null terminator or "invalid" character
            // (any code unit whose high byte is nonzero is treated as a break)
            if (ch == 0x0000 || (ch & 0xFF00) != 0)
            {
                // If we have no cached string
                if (cachedString.Length == 0)
                {
                    sourceDataIndex += 2;
                    continue;
                }

                // If we have a cached string greater than the limit
                if (cachedString.Length >= charLimit)
                    sourceStrings.Add(cachedString);

                cachedString = string.Empty;
                sourceDataIndex += 2;
                continue;
            }

            // All other characters get read in
            cachedString += Encoding.Unicode.GetString(sourceData, sourceDataIndex, 2);
            sourceDataIndex += 2;
        }

        // If we have a cached string greater than the limit
        if (cachedString.Length >= charLimit)
            sourceStrings.Add(cachedString);

        // Deduplicate the string list for storage
        sourceStrings = sourceStrings.Distinct().OrderBy(s => s).ToList();

        // TODO: Complete implementation of string finding
        return sourceStrings;
    }

    /// <summary>
    /// Get the ending offset of the source
    /// </summary>
    /// <returns>Value greater than 0 for a valid end of file, -1 on error</returns>
    protected int GetEndOfFile()
    {
        // Validate the data source
        if (!DataSourceIsValid())
            return -1;

        // Return the effective endpoint
        switch (_dataSource)
        {
            case DataSource.ByteArray:
#if NET48
                return _byteArrayData.Length - _byteArrayOffset;
#else
                return _byteArrayData!.Length - _byteArrayOffset;
#endif

            case DataSource.Stream:
#if NET48
                return (int)_streamData.Length;
#else
                return (int)_streamData!.Length;
#endif

            case DataSource.UNKNOWN:
            default:
                return -1;
        }
    }

    #endregion

    #region Printing

    /// <summary>
    /// Export the item information as pretty-printed text
    /// </summary>
    public abstract StringBuilder PrettyPrint();

#if NET6_0_OR_GREATER
    /// <summary>
    /// Export the item information as JSON
    /// </summary>
    public string ExportJSON() => System.Text.Json.JsonSerializer.Serialize(Model, _jsonSerializerOptions);
#endif

    #endregion
}
}

View File

@@ -3,6 +3,8 @@ using System.IO;
using BinaryObjectScanner.Matching;
using BinaryObjectScanner.Utilities;
using SabreTools.IO;
using SabreTools.Serialization.Interfaces;
using SabreTools.Serialization.Wrappers;
namespace BinaryObjectScanner.Wrappers
{

View File

@@ -1,212 +0,0 @@
using System.IO;
using System.Linq;
using System.Text;
namespace BinaryObjectScanner.Wrappers
{
/// <summary>
/// Wrapper for Xbox Package Files (XZP), providing parsing and extraction
/// </summary>
public class XZP : WrapperBase<SabreTools.Models.XZP.File>
{
    #region Descriptive Properties

    /// <inheritdoc/>
    public override string DescriptionString => "Xbox Package File (XZP)";

    #endregion

    #region Constructors

    /// <inheritdoc/>
#if NET48
    public XZP(SabreTools.Models.XZP.File model, byte[] data, int offset)
#else
    public XZP(SabreTools.Models.XZP.File? model, byte[]? data, int offset)
#endif
        : base(model, data, offset)
    {
        // All logic is handled by the base class
    }

    /// <inheritdoc/>
#if NET48
    public XZP(SabreTools.Models.XZP.File model, Stream data)
#else
    public XZP(SabreTools.Models.XZP.File? model, Stream? data)
#endif
        : base(model, data)
    {
        // All logic is handled by the base class
    }

    /// <summary>
    /// Create a XZP from a byte array and offset
    /// </summary>
    /// <param name="data">Byte array representing the XZP</param>
    /// <param name="offset">Offset within the array to parse</param>
    /// <returns>A XZP wrapper on success, null on failure</returns>
#if NET48
    public static XZP Create(byte[] data, int offset)
#else
    public static XZP? Create(byte[]? data, int offset)
#endif
    {
        // If the data is invalid
        if (data == null)
            return null;

        // If the offset is out of bounds
        if (offset < 0 || offset >= data.Length)
            return null;

        // Create a memory stream and use that
        MemoryStream dataStream = new MemoryStream(data, offset, data.Length - offset);
        return Create(dataStream);
    }

    /// <summary>
    /// Create a XZP from a Stream
    /// </summary>
    /// <param name="data">Stream representing the XZP</param>
    /// <returns>A XZP wrapper on success, null on failure</returns>
#if NET48
    public static XZP Create(Stream data)
#else
    public static XZP? Create(Stream? data)
#endif
    {
        // If the data is invalid
        if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
            return null;

        // Deserialize the stream; a null model means the data was not a valid XZP
        var file = new SabreTools.Serialization.Streams.XZP().Deserialize(data);
        if (file == null)
            return null;

        try
        {
            return new XZP(file, data);
        }
        catch
        {
            return null;
        }
    }

    #endregion

    #region Printing

    /// <inheritdoc/>
    public override StringBuilder PrettyPrint()
    {
        StringBuilder builder = new StringBuilder();
        Printing.XZP.Print(builder, this.Model);
        return builder;
    }

    #endregion

    #region Extraction

    /// <summary>
    /// Extract all files from the XZP to an output directory
    /// </summary>
    /// <param name="outputDirectory">Output directory to write to</param>
    /// <returns>True if all files extracted, false otherwise</returns>
    public bool ExtractAll(string outputDirectory)
    {
        // If we have no directory entries
        if (this.Model.DirectoryEntries == null || this.Model.DirectoryEntries.Length == 0)
            return false;

        // Loop through and extract all files to the output
        bool allExtracted = true;
        for (int i = 0; i < this.Model.DirectoryEntries.Length; i++)
        {
            allExtracted &= ExtractFile(i, outputDirectory);
        }

        return allExtracted;
    }

    /// <summary>
    /// Extract a file from the XZP to an output directory by index
    /// </summary>
    /// <param name="index">File index to extract</param>
    /// <param name="outputDirectory">Output directory to write to</param>
    /// <returns>True if the file extracted, false otherwise</returns>
    public bool ExtractFile(int index, string outputDirectory)
    {
        // If we have an invalid output directory -- checked first, before any data is read
        if (string.IsNullOrWhiteSpace(outputDirectory))
            return false;

        // If we have no directory entries
        if (this.Model.DirectoryEntries == null || this.Model.DirectoryEntries.Length == 0)
            return false;

        // If we have no directory items
        if (this.Model.DirectoryItems == null || this.Model.DirectoryItems.Length == 0)
            return false;

        // If the directory entry index is invalid
        if (index < 0 || index >= this.Model.DirectoryEntries.Length)
            return false;

        // Get the directory entry
        var directoryEntry = this.Model.DirectoryEntries[index];
        if (directoryEntry == null)
            return false;

        // Get the associated directory item, matched by filename CRC
        var directoryItem = this.Model.DirectoryItems.Where(di => di?.FileNameCRC == directoryEntry.FileNameCRC).FirstOrDefault();
        if (directoryItem == null)
            return false;

        // Load the item data
#if NET48
        byte[] data = ReadFromDataSource((int)directoryEntry.EntryOffset, (int)directoryEntry.EntryLength);
#else
        byte[]? data = ReadFromDataSource((int)directoryEntry.EntryOffset, (int)directoryEntry.EntryLength);
#endif
        if (data == null)
            return false;

        // Create the filename, falling back to an index-based name when missing
#if NET48
        string filename = directoryItem.Name;
#else
        string? filename = directoryItem.Name;
#endif

        // Create the full output path
        filename = Path.Combine(outputDirectory, filename ?? $"file{index}");

        // Ensure the output directory is created
#if NET48
        string directoryName = Path.GetDirectoryName(filename);
#else
        string? directoryName = Path.GetDirectoryName(filename);
#endif
        if (directoryName != null)
            Directory.CreateDirectory(directoryName);

        // Try to write the data
        try
        {
            // File.Create truncates any pre-existing file; File.OpenWrite
            // (FileMode.OpenOrCreate) would leave stale trailing bytes when
            // overwriting a file that was previously longer
            using (Stream fs = File.Create(filename))
            {
                fs.Write(data, 0, data.Length);
            }
        }
        catch
        {
            return false;
        }

        return true;
    }

    #endregion
}
}

View File

@@ -21,7 +21,7 @@
</PropertyGroup>
<ItemGroup>
<PackageReference Include="SabreTools.Models" Version="1.1.1" />
<PackageReference Include="SabreTools.Models" Version="1.1.2" />
<PackageReference Include="System.Text.Encoding.CodePages" Version="7.0.0" />
<PackageReference Include="Teronis.MSBuild.Packaging.ProjectBuildInPackage" Version="1.0.0">
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>

View File

@@ -18,8 +18,8 @@
<ItemGroup>
<PackageReference Include="OpenMcdf" Version="2.2.1.12" />
<PackageReference Include="SabreTools.IO" Version="1.1.1" />
<PackageReference Include="SabreTools.Models" Version="1.1.1" />
<PackageReference Include="SabreTools.Serialization" Version="1.1.1" />
<PackageReference Include="SabreTools.Models" Version="1.1.2" />
<PackageReference Include="SabreTools.Serialization" Version="1.1.3" />
<PackageReference Include="UnshieldSharp" Version="1.6.9" />
</ItemGroup>