Compare commits

...

11 Commits
1.8.2 ... 1.8.3

Author          SHA1        Message                                 Date
Matt Nadareski  0bda1f4f88  Bump version                            2024-12-13 10:44:00 -05:00
Matt Nadareski  7d50e0e1c5  Fix filename map                        2024-12-11 21:17:39 -05:00
Matt Nadareski  224a4caab0  Add secondary link for ISAv3            2024-12-11 14:42:00 -05:00
Matt Nadareski  b4689da404  Add reference from UnshieldSharp        2024-12-11 14:39:27 -05:00
Matt Nadareski  af66657399  Slightly safer indexing                 2024-12-11 14:34:11 -05:00
Matt Nadareski  0f3e2d8275  Add 2 more extension properties         2024-12-11 14:25:46 -05:00
Matt Nadareski  d664b6defc  Use const for data offset               2024-12-11 14:18:32 -05:00
Matt Nadareski  adbf74a6e0  Add ISAv3 extraction                    2024-12-11 14:17:35 -05:00
Matt Nadareski  7eb401efed  Port obvious things from UnshieldSharp  2024-12-11 14:04:29 -05:00
Matt Nadareski  ba97381b99  Add more ISAv3 stuff                    2024-12-11 13:56:01 -05:00
Matt Nadareski  3de92de225  Add Compression package                 2024-12-11 13:31:54 -05:00
26 changed files with 1948 additions and 20 deletions

View File

@@ -9,7 +9,7 @@
<Nullable>enable</Nullable>
<SuppressTfmSupportBuildWarnings>true</SuppressTfmSupportBuildWarnings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<Version>1.8.2</Version>
<Version>1.8.3</Version>
</PropertyGroup>
<!-- Support All Frameworks -->

View File

@@ -0,0 +1,73 @@
using System.IO;
using System.Linq;
using SabreTools.Serialization.Deserializers;
using Xunit;
namespace SabreTools.Serialization.Test.Deserializers
{
public class LZKWAJTests
{
[Fact]
public void NullArray_Null()
{
byte[]? data = null;
int offset = 0;
var deserializer = new LZKWAJ();
var actual = deserializer.Deserialize(data, offset);
Assert.Null(actual);
}
[Fact]
public void EmptyArray_Null()
{
byte[]? data = [];
int offset = 0;
var deserializer = new LZKWAJ();
var actual = deserializer.Deserialize(data, offset);
Assert.Null(actual);
}
[Fact]
public void InvalidArray_Null()
{
byte[]? data = [.. Enumerable.Repeat<byte>(0xFF, 1024)];
int offset = 0;
var deserializer = new LZKWAJ();
var actual = deserializer.Deserialize(data, offset);
Assert.Null(actual);
}
[Fact]
public void NullStream_Null()
{
Stream? data = null;
var deserializer = new LZKWAJ();
var actual = deserializer.Deserialize(data);
Assert.Null(actual);
}
[Fact]
public void EmptyStream_Null()
{
Stream? data = new MemoryStream([]);
var deserializer = new LZKWAJ();
var actual = deserializer.Deserialize(data);
Assert.Null(actual);
}
[Fact]
public void InvalidStream_Null()
{
Stream? data = new MemoryStream([.. Enumerable.Repeat<byte>(0xFF, 1024)]);
var deserializer = new LZKWAJ();
var actual = deserializer.Deserialize(data);
Assert.Null(actual);
}
}
}

View File

@@ -0,0 +1,74 @@
using System.IO;
using System.Linq;
using SabreTools.Serialization.Deserializers;
using Xunit;
namespace SabreTools.Serialization.Test.Deserializers
{
public class LZQBasicTests
{
[Fact]
public void NullArray_Null()
{
byte[]? data = null;
int offset = 0;
var deserializer = new LZQBasic();
var actual = deserializer.Deserialize(data, offset);
Assert.Null(actual);
}
[Fact]
public void EmptyArray_Null()
{
byte[]? data = [];
int offset = 0;
var deserializer = new LZQBasic();
var actual = deserializer.Deserialize(data, offset);
Assert.Null(actual);
}
[Fact]
public void InvalidArray_Null()
{
byte[]? data = [.. Enumerable.Repeat<byte>(0xFF, 1024)];
int offset = 0;
var deserializer = new LZQBasic();
var actual = deserializer.Deserialize(data, offset);
Assert.Null(actual);
}
[Fact]
public void NullStream_Null()
{
Stream? data = null;
var deserializer = new LZQBasic();
var actual = deserializer.Deserialize(data);
Assert.Null(actual);
}
[Fact]
public void EmptyStream_Null()
{
Stream? data = new MemoryStream([]);
var deserializer = new LZQBasic();
var actual = deserializer.Deserialize(data);
Assert.Null(actual);
}
[Fact]
public void InvalidStream_Null()
{
Stream? data = new MemoryStream([.. Enumerable.Repeat<byte>(0xFF, 1024)]);
var deserializer = new LZQBasic();
var actual = deserializer.Deserialize(data);
Assert.Null(actual);
}
}
}

View File

@@ -0,0 +1,73 @@
using System.IO;
using System.Linq;
using SabreTools.Serialization.Deserializers;
using Xunit;
namespace SabreTools.Serialization.Test.Deserializers
{
public class LZSZDDTests
{
[Fact]
public void NullArray_Null()
{
byte[]? data = null;
int offset = 0;
var deserializer = new LZSZDD();
var actual = deserializer.Deserialize(data, offset);
Assert.Null(actual);
}
[Fact]
public void EmptyArray_Null()
{
byte[]? data = [];
int offset = 0;
var deserializer = new LZSZDD();
var actual = deserializer.Deserialize(data, offset);
Assert.Null(actual);
}
[Fact]
public void InvalidArray_Null()
{
byte[]? data = [.. Enumerable.Repeat<byte>(0xFF, 1024)];
int offset = 0;
var deserializer = new LZSZDD();
var actual = deserializer.Deserialize(data, offset);
Assert.Null(actual);
}
[Fact]
public void NullStream_Null()
{
Stream? data = null;
var deserializer = new LZSZDD();
var actual = deserializer.Deserialize(data);
Assert.Null(actual);
}
[Fact]
public void EmptyStream_Null()
{
Stream? data = new MemoryStream([]);
var deserializer = new LZSZDD();
var actual = deserializer.Deserialize(data);
Assert.Null(actual);
}
[Fact]
public void InvalidStream_Null()
{
Stream? data = new MemoryStream([.. Enumerable.Repeat<byte>(0xFF, 1024)]);
var deserializer = new LZSZDD();
var actual = deserializer.Deserialize(data);
Assert.Null(actual);
}
}
}

View File

@@ -28,7 +28,7 @@
</PackageReference>
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.12.0" />
<PackageReference Include="SabreTools.Hashing" Version="1.4.0" />
<PackageReference Include="SabreTools.Models" Version="1.5.6" />
<PackageReference Include="SabreTools.Models" Version="1.5.7" />
<PackageReference Include="xunit" Version="2.9.2" />
<PackageReference Include="xunit.runner.visualstudio" Version="2.8.2">
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>

View File

@@ -0,0 +1,61 @@
using System.IO;
using System.Linq;
using SabreTools.Serialization.Wrappers;
using Xunit;
namespace SabreTools.Serialization.Test.Wrappers
{
public class InstallShieldArchiveV3Tests
{
[Fact]
public void NullArray_Null()
{
byte[]? data = null;
int offset = 0;
var actual = InstallShieldArchiveV3.Create(data, offset);
Assert.Null(actual);
}
[Fact]
public void EmptyArray_Null()
{
byte[]? data = [];
int offset = 0;
var actual = InstallShieldArchiveV3.Create(data, offset);
Assert.Null(actual);
}
[Fact]
public void InvalidArray_Null()
{
byte[]? data = [.. Enumerable.Repeat<byte>(0xFF, 1024)];
int offset = 0;
var actual = InstallShieldArchiveV3.Create(data, offset);
Assert.Null(actual);
}
[Fact]
public void NullStream_Null()
{
Stream? data = null;
var actual = InstallShieldArchiveV3.Create(data);
Assert.Null(actual);
}
[Fact]
public void EmptyStream_Null()
{
Stream? data = new MemoryStream([]);
var actual = InstallShieldArchiveV3.Create(data);
Assert.Null(actual);
}
[Fact]
public void InvalidStream_Null()
{
Stream? data = new MemoryStream([.. Enumerable.Repeat<byte>(0xFF, 1024)]);
var actual = InstallShieldArchiveV3.Create(data);
Assert.Null(actual);
}
}
}

View File

@@ -0,0 +1,61 @@
using System.IO;
using System.Linq;
using SabreTools.Serialization.Wrappers;
using Xunit;
namespace SabreTools.Serialization.Test.Wrappers
{
public class LZKWAJTests
{
[Fact]
public void NullArray_Null()
{
byte[]? data = null;
int offset = 0;
var actual = LZKWAJ.Create(data, offset);
Assert.Null(actual);
}
[Fact]
public void EmptyArray_Null()
{
byte[]? data = [];
int offset = 0;
var actual = LZKWAJ.Create(data, offset);
Assert.Null(actual);
}
[Fact]
public void InvalidArray_Null()
{
byte[]? data = [.. Enumerable.Repeat<byte>(0xFF, 1024)];
int offset = 0;
var actual = LZKWAJ.Create(data, offset);
Assert.Null(actual);
}
[Fact]
public void NullStream_Null()
{
Stream? data = null;
var actual = LZKWAJ.Create(data);
Assert.Null(actual);
}
[Fact]
public void EmptyStream_Null()
{
Stream? data = new MemoryStream([]);
var actual = LZKWAJ.Create(data);
Assert.Null(actual);
}
[Fact]
public void InvalidStream_Null()
{
Stream? data = new MemoryStream([.. Enumerable.Repeat<byte>(0xFF, 1024)]);
var actual = LZKWAJ.Create(data);
Assert.Null(actual);
}
}
}

View File

@@ -0,0 +1,61 @@
using System.IO;
using System.Linq;
using SabreTools.Serialization.Wrappers;
using Xunit;
namespace SabreTools.Serialization.Test.Wrappers
{
public class LZQBasicTests
{
[Fact]
public void NullArray_Null()
{
byte[]? data = null;
int offset = 0;
var actual = LZQBasic.Create(data, offset);
Assert.Null(actual);
}
[Fact]
public void EmptyArray_Null()
{
byte[]? data = [];
int offset = 0;
var actual = LZQBasic.Create(data, offset);
Assert.Null(actual);
}
[Fact]
public void InvalidArray_Null()
{
byte[]? data = [.. Enumerable.Repeat<byte>(0xFF, 1024)];
int offset = 0;
var actual = LZQBasic.Create(data, offset);
Assert.Null(actual);
}
[Fact]
public void NullStream_Null()
{
Stream? data = null;
var actual = LZQBasic.Create(data);
Assert.Null(actual);
}
[Fact]
public void EmptyStream_Null()
{
Stream? data = new MemoryStream([]);
var actual = LZQBasic.Create(data);
Assert.Null(actual);
}
[Fact]
public void InvalidStream_Null()
{
Stream? data = new MemoryStream([.. Enumerable.Repeat<byte>(0xFF, 1024)]);
var actual = LZQBasic.Create(data);
Assert.Null(actual);
}
}
}

View File

@@ -0,0 +1,61 @@
using System.IO;
using System.Linq;
using SabreTools.Serialization.Wrappers;
using Xunit;
namespace SabreTools.Serialization.Test.Wrappers
{
public class LZSZDDTests
{
[Fact]
public void NullArray_Null()
{
byte[]? data = null;
int offset = 0;
var actual = LZSZDD.Create(data, offset);
Assert.Null(actual);
}
[Fact]
public void EmptyArray_Null()
{
byte[]? data = [];
int offset = 0;
var actual = LZSZDD.Create(data, offset);
Assert.Null(actual);
}
[Fact]
public void InvalidArray_Null()
{
byte[]? data = [.. Enumerable.Repeat<byte>(0xFF, 1024)];
int offset = 0;
var actual = LZSZDD.Create(data, offset);
Assert.Null(actual);
}
[Fact]
public void NullStream_Null()
{
Stream? data = null;
var actual = LZSZDD.Create(data);
Assert.Null(actual);
}
[Fact]
public void EmptyStream_Null()
{
Stream? data = new MemoryStream([]);
var actual = LZSZDD.Create(data);
Assert.Null(actual);
}
[Fact]
public void InvalidStream_Null()
{
Stream? data = new MemoryStream([.. Enumerable.Repeat<byte>(0xFF, 1024)]);
var actual = LZSZDD.Create(data);
Assert.Null(actual);
}
}
}

View File

@@ -0,0 +1,121 @@
using System.IO;
using System.Text;
using SabreTools.IO.Extensions;
using SabreTools.Models.LZ;
using static SabreTools.Models.LZ.Constants;
namespace SabreTools.Serialization.Deserializers
{
public class LZKWAJ : BaseBinaryDeserializer<KWAJFile>
{
/// <inheritdoc/>
public override KWAJFile? Deserialize(Stream? data)
{
// If the data is invalid
if (data == null || !data.CanRead)
return null;
try
{
// Cache the current offset
int initialOffset = (int)data.Position;
// Create a new file to fill
var file = new KWAJFile();
#region File Header
// Try to parse the header
var header = ParseHeader(data);
if (header == null)
return null;
// Set the header
file.Header = header;
#endregion
#region Extended Header
if (header.HeaderFlags != 0)
{
var extensions = new KWAJHeaderExtensions();
#if NET20 || NET35
if ((header.HeaderFlags & KWAJHeaderFlags.HasDecompressedLength) != 0)
extensions.DecompressedLength = data.ReadUInt32();
if ((header.HeaderFlags & KWAJHeaderFlags.HasUnknownFlag) != 0)
extensions.UnknownPurpose = data.ReadUInt16();
if ((header.HeaderFlags & KWAJHeaderFlags.HasPrefixedData) != 0)
{
extensions.UnknownDataLength = data.ReadUInt16();
extensions.UnknownData = data.ReadBytes((int)extensions.UnknownDataLength);
}
if ((header.HeaderFlags & KWAJHeaderFlags.HasFileName) != 0)
extensions.FileName = data.ReadNullTerminatedAnsiString();
if ((header.HeaderFlags & KWAJHeaderFlags.HasFileExtension) != 0)
extensions.FileExtension = data.ReadNullTerminatedAnsiString();
if ((header.HeaderFlags & KWAJHeaderFlags.HasPrefixedData) != 0)
{
extensions.ArbitraryTextLength = data.ReadUInt16();
extensions.ArbitraryText = data.ReadBytes((int)extensions.ArbitraryTextLength);
}
#else
if (header.HeaderFlags.HasFlag(KWAJHeaderFlags.HasDecompressedLength))
extensions.DecompressedLength = data.ReadUInt32();
if (header.HeaderFlags.HasFlag(KWAJHeaderFlags.HasUnknownFlag))
extensions.UnknownPurpose = data.ReadUInt16();
if (header.HeaderFlags.HasFlag(KWAJHeaderFlags.HasPrefixedData))
{
extensions.UnknownDataLength = data.ReadUInt16();
extensions.UnknownData = data.ReadBytes((int)extensions.UnknownDataLength);
}
if (header.HeaderFlags.HasFlag(KWAJHeaderFlags.HasFileName))
extensions.FileName = data.ReadNullTerminatedAnsiString();
if (header.HeaderFlags.HasFlag(KWAJHeaderFlags.HasFileExtension))
extensions.FileExtension = data.ReadNullTerminatedAnsiString();
if (header.HeaderFlags.HasFlag(KWAJHeaderFlags.HasPrefixedData))
{
extensions.ArbitraryTextLength = data.ReadUInt16();
extensions.ArbitraryText = data.ReadBytes((int)extensions.ArbitraryTextLength);
}
#endif
file.HeaderExtensions = extensions;
}
#endregion
return file;
}
catch
{
// Ignore the actual error
return null;
}
}
/// <summary>
/// Parse a Stream into a header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled header on success, null on error</returns>
private static KWAJHeader? ParseHeader(Stream data)
{
var header = new KWAJHeader();
header.Magic = data.ReadBytes(8);
if (Encoding.ASCII.GetString(header.Magic) != Encoding.ASCII.GetString(KWAJSignatureBytes))
return null;
header.CompressionType = (KWAJCompressionType)data.ReadUInt16();
if (header.CompressionType > KWAJCompressionType.MSZIP)
return null;
header.DataOffset = data.ReadUInt16();
header.HeaderFlags = (KWAJHeaderFlags)data.ReadUInt16();
return header;
}
}
}
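
Reviewer note: a minimal sketch of the 14-byte header this deserializer expects, in the same style as the tests above. The magic bytes are the ones the type detection below checks for; treating compression type 0 as "none" (and therefore at or below MSZIP), little-endian reads, and the byte-array overload delegating to the Stream overload are all assumptions.
byte[] data =
[
    0x4B, 0x57, 0x41, 0x4A, 0x88, 0xF0, 0x27, 0xD1, // KWAJ magic
    0x00, 0x00,                                     // compression type (assumed "none", <= MSZIP)
    0x0E, 0x00,                                     // data offset = 14, i.e. right after this header
    0x00, 0x00,                                     // header flags = 0, so no extended header
];
var file = new LZKWAJ().Deserialize(data, 0);
// Expected: file != null, file.Header populated, file.HeaderExtensions == null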

View File

@@ -0,0 +1,65 @@
using System.IO;
using System.Text;
using SabreTools.IO.Extensions;
using SabreTools.Models.LZ;
using static SabreTools.Models.LZ.Constants;
namespace SabreTools.Serialization.Deserializers
{
public class LZQBasic : BaseBinaryDeserializer<QBasicFile>
{
/// <inheritdoc/>
public override QBasicFile? Deserialize(Stream? data)
{
// If the data is invalid
if (data == null || !data.CanRead)
return null;
try
{
// Cache the current offset
int initialOffset = (int)data.Position;
// Create a new file to fill
var file = new QBasicFile();
#region File Header
// Try to parse the header
var header = ParseHeader(data);
if (header == null)
return null;
// Set the header
file.Header = header;
#endregion
return file;
}
catch
{
// Ignore the actual error
return null;
}
}
/// <summary>
/// Parse a Stream into a header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled header on success, null on error</returns>
private static QBasicHeader? ParseHeader(Stream data)
{
var header = new QBasicHeader();
header.Magic = data.ReadBytes(8);
if (Encoding.ASCII.GetString(header.Magic) != Encoding.ASCII.GetString(QBasicSignatureBytes))
return null;
header.RealLength = data.ReadUInt32();
return header;
}
}
}
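
Reviewer note: the QBasic-variant header is just the 8-byte magic plus a 32-bit real length, which is why the wrapper below skips 12 bytes before the payload. A sketch (magic taken from the detection code below; little-endian reads assumed):
byte[] data =
[
    0x53, 0x5A, 0x20, 0x88, 0xF0, 0x27, 0x33, 0xD1, // QBasic-variant magic
    0x00, 0x10, 0x00, 0x00,                         // real (uncompressed) length = 4096
];
var file = new LZQBasic().Deserialize(data, 0);
// Expected: file != null and file.Header.RealLength == 4096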

View File

@@ -0,0 +1,70 @@
using System.IO;
using System.Text;
using SabreTools.IO.Extensions;
using SabreTools.Models.LZ;
using static SabreTools.Models.LZ.Constants;
namespace SabreTools.Serialization.Deserializers
{
public class LZSZDD : BaseBinaryDeserializer<SZDDFile>
{
/// <inheritdoc/>
public override SZDDFile? Deserialize(Stream? data)
{
// If the data is invalid
if (data == null || !data.CanRead)
return null;
try
{
// Cache the current offset
int initialOffset = (int)data.Position;
// Create a new file to fill
var file = new SZDDFile();
#region File Header
// Try to parse the header
var header = ParseHeader(data);
if (header == null)
return null;
// Set the header
file.Header = header;
#endregion
return file;
}
catch
{
// Ignore the actual error
return null;
}
}
/// <summary>
/// Parse a Stream into a header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled header on success, null on error</returns>
private static SZDDHeader? ParseHeader(Stream data)
{
var header = new SZDDHeader();
header.Magic = data.ReadBytes(8);
if (Encoding.ASCII.GetString(header.Magic) != Encoding.ASCII.GetString(SZDDSignatureBytes))
return null;
header.CompressionType = (ExpandCompressionType)data.ReadByteValue();
if (header.CompressionType != ExpandCompressionType.A)
return null;
header.LastChar = (char)data.ReadByteValue();
header.RealLength = data.ReadUInt32();
return header;
}
}
}
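
Reviewer note: SZDD adds a one-byte compression mode and the last character of the original filename ahead of the real length, 14 bytes in total (matching the Length - 14 used by the wrapper below). Assumes ExpandCompressionType.A maps to the ASCII 'A' byte and little-endian reads:
byte[] data =
[
    0x53, 0x5A, 0x44, 0x44, 0x88, 0xF0, 0x27, 0x33, // SZDD magic
    0x41,                                           // compression type 'A' (assumed == ExpandCompressionType.A)
    (byte)'S',                                      // last character of the original file extension
    0x00, 0x10, 0x00, 0x00,                         // real (uncompressed) length = 4096
];
var file = new LZSZDD().Deserialize(data, 0);
// Expected: file != null, file.Header.LastChar == 'S', file.Header.RealLength == 4096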

View File

@@ -42,9 +42,13 @@ namespace SabreTools.Serialization
Wrapper.CHD item => item.PrettyPrint(),
Wrapper.CIA item => item.PrettyPrint(),
Wrapper.GCF item => item.PrettyPrint(),
Wrapper.InstallShieldArchiveV3 item => item.PrettyPrint(),
Wrapper.InstallShieldCabinet item => item.PrettyPrint(),
Wrapper.IRD item => item.PrettyPrint(),
Wrapper.LinearExecutable item => item.PrettyPrint(),
Wrapper.LZKWAJ item => item.PrettyPrint(),
Wrapper.LZQBasic item => item.PrettyPrint(),
Wrapper.LZSZDD item => item.PrettyPrint(),
Wrapper.MicrosoftCabinet item => item.PrettyPrint(),
Wrapper.MoPaQ item => item.PrettyPrint(),
Wrapper.MSDOS item => item.PrettyPrint(),
@@ -87,9 +91,13 @@ namespace SabreTools.Serialization
Wrapper.CHD item => item.ExportJSON(),
Wrapper.CIA item => item.ExportJSON(),
Wrapper.GCF item => item.ExportJSON(),
Wrapper.InstallShieldArchiveV3 item => item.ExportJSON(),
Wrapper.InstallShieldCabinet item => item.ExportJSON(),
Wrapper.IRD item => item.ExportJSON(),
Wrapper.LinearExecutable item => item.ExportJSON(),
Wrapper.LZKWAJ item => item.ExportJSON(),
Wrapper.LZQBasic item => item.ExportJSON(),
Wrapper.LZSZDD item => item.ExportJSON(),
Wrapper.MicrosoftCabinet item => item.ExportJSON(),
Wrapper.MoPaQ item => item.ExportJSON(),
Wrapper.MSDOS item => item.ExportJSON(),
@@ -199,6 +207,16 @@ namespace SabreTools.Serialization
return builder;
}
/// <summary>
/// Export the item information as pretty-printed text
/// </summary>
private static StringBuilder PrettyPrint(this Wrapper.InstallShieldArchiveV3 item)
{
var builder = new StringBuilder();
InstallShieldArchiveV3.Print(builder, item.Model);
return builder;
}
/// <summary>
/// Export the item information as pretty-printed text
/// </summary>
@@ -229,6 +247,36 @@ namespace SabreTools.Serialization
return builder;
}
/// <summary>
/// Export the item information as pretty-printed text
/// </summary>
private static StringBuilder PrettyPrint(this Wrapper.LZKWAJ item)
{
var builder = new StringBuilder();
LZKWAJ.Print(builder, item.Model);
return builder;
}
/// <summary>
/// Export the item information as pretty-printed text
/// </summary>
private static StringBuilder PrettyPrint(this Wrapper.LZQBasic item)
{
var builder = new StringBuilder();
LZQBasic.Print(builder, item.Model);
return builder;
}
/// <summary>
/// Export the item information as pretty-printed text
/// </summary>
private static StringBuilder PrettyPrint(this Wrapper.LZSZDD item)
{
var builder = new StringBuilder();
LZSZDD.Print(builder, item.Model);
return builder;
}
/// <summary>
/// Export the item information as pretty-printed text
/// </summary>

View File

@@ -0,0 +1,114 @@
using System.Text;
using SabreTools.Models.InstallShieldArchiveV3;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Printers
{
public class InstallShieldArchiveV3 : IPrinter<Archive>
{
/// <inheritdoc/>
public void PrintInformation(StringBuilder builder, Archive model)
=> Print(builder, model);
public static void Print(StringBuilder builder, Archive archive)
{
builder.AppendLine("InstallShield Archive V3 Information:");
builder.AppendLine("-------------------------");
builder.AppendLine();
Print(builder, archive.Header);
Print(builder, archive.Directories);
Print(builder, archive.Files);
}
private static void Print(StringBuilder builder, Header? header)
{
builder.AppendLine(" Header Information:");
builder.AppendLine(" -------------------------");
if (header == null)
{
builder.AppendLine(" No header");
builder.AppendLine();
return;
}
builder.AppendLine(header.Signature1, " Signature 1");
builder.AppendLine(header.Signature2, " Signature 2");
builder.AppendLine(header.Reserved0, " Reserved 0");
builder.AppendLine(header.IsMultivolume, " Is multivolume");
builder.AppendLine(header.FileCount, " File count");
builder.AppendLine(header.DateTime, " Datetime");
builder.AppendLine(header.CompressedSize, " Compressed size");
builder.AppendLine(header.UncompressedSize, " Uncompressed size");
builder.AppendLine(header.Reserved1, " Reserved 1");
builder.AppendLine(header.VolumeTotal, " Volume total");
builder.AppendLine(header.VolumeNumber, " Volume number");
builder.AppendLine(header.Reserved2, " Reserved 2");
builder.AppendLine(header.SplitBeginAddress, " Split begin address");
builder.AppendLine(header.SplitEndAddress, " Split end address");
builder.AppendLine(header.TocAddress, " TOC address");
builder.AppendLine(header.Reserved3, " Reserved 3");
builder.AppendLine(header.DirCount, " Dir count");
builder.AppendLine(header.Reserved4, " Reserved 4");
builder.AppendLine(header.Reserved5, " Reserved 5");
builder.AppendLine();
}
private static void Print(StringBuilder builder, Directory[]? entries)
{
builder.AppendLine(" Directories:");
builder.AppendLine(" -------------------------");
if (entries == null || entries.Length == 0)
{
builder.AppendLine(" No directories");
builder.AppendLine();
return;
}
for (int i = 0; i < entries.Length; i++)
{
var entry = entries[i];
builder.AppendLine($" Directory {i}");
builder.AppendLine(entry.FileCount, " File count");
builder.AppendLine(entry.ChunkSize, " Chunk size");
builder.AppendLine(entry.NameLength, " Name length");
builder.AppendLine(entry.Name, " Name");
}
builder.AppendLine();
}
private static void Print(StringBuilder builder, File[]? entries)
{
builder.AppendLine(" Files:");
builder.AppendLine(" -------------------------");
if (entries == null || entries.Length == 0)
{
builder.AppendLine(" No files");
builder.AppendLine();
return;
}
for (int i = 0; i < entries.Length; i++)
{
var entry = entries[i];
builder.AppendLine($" File {i}");
builder.AppendLine(entry.VolumeEnd, " Volume end");
builder.AppendLine(entry.Index, " Index");
builder.AppendLine(entry.UncompressedSize, " Uncompressed size");
builder.AppendLine(entry.CompressedSize, " Compressed size");
builder.AppendLine(entry.Offset, " Offset");
builder.AppendLine(entry.DateTime, " Datetime");
builder.AppendLine(entry.Reserved0, " Reserved 0");
builder.AppendLine(entry.ChunkSize, " Chunk size");
builder.AppendLine($" Attrib: {entry.Attrib} (0x{entry.Attrib:X})");
builder.AppendLine(entry.IsSplit, " Is split");
builder.AppendLine(entry.Reserved1, " Reserved 1");
builder.AppendLine(entry.VolumeStart, " Volume start");
builder.AppendLine(entry.Name, " Name");
}
builder.AppendLine();
}
}
}
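
Reviewer note: the printer can also be driven directly once a model exists; a short sketch, where archive is a hypothetical, already-deserialized Models.InstallShieldArchiveV3.Archive:
var builder = new System.Text.StringBuilder();
Printers.InstallShieldArchiveV3.Print(builder, archive);
System.Console.WriteLine(builder.ToString());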

View File

@@ -0,0 +1,63 @@
using System.Text;
using SabreTools.Models.LZ;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Printers
{
public class LZKWAJ : IPrinter<KWAJFile>
{
/// <inheritdoc/>
public void PrintInformation(StringBuilder builder, KWAJFile model)
=> Print(builder, model);
public static void Print(StringBuilder builder, KWAJFile file)
{
builder.AppendLine("LZ-compressed File, KWAJ Variant Information:");
builder.AppendLine("-------------------------");
builder.AppendLine();
Print(builder, file.Header);
Print(builder, file.HeaderExtensions);
}
private static void Print(StringBuilder builder, KWAJHeader? header)
{
builder.AppendLine(" Header Information:");
builder.AppendLine(" -------------------------");
if (header == null)
{
builder.AppendLine(" No header");
builder.AppendLine();
return;
}
builder.AppendLine(header.Magic, " Magic number");
builder.AppendLine($" Compression type: {header.CompressionType} (0x{header.CompressionType:X})");
builder.AppendLine(header.DataOffset, " Data offset");
builder.AppendLine($" Header flags: {header.HeaderFlags} (0x{header.HeaderFlags:X})");
builder.AppendLine();
}
private static void Print(StringBuilder builder, KWAJHeaderExtensions? header)
{
builder.AppendLine(" Header Extensions Information:");
builder.AppendLine(" -------------------------");
if (header == null)
{
builder.AppendLine(" No header extensions");
builder.AppendLine();
return;
}
builder.AppendLine(header.DecompressedLength, " Decompressed length");
builder.AppendLine(header.UnknownPurpose, " Unknown purpose");
builder.AppendLine(header.UnknownDataLength, " Unknown data length");
builder.AppendLine(header.UnknownData, " Unknown data");
builder.AppendLine(header.FileName, " File name");
builder.AppendLine(header.FileExtension, " File extension");
builder.AppendLine(header.ArbitraryTextLength, " Arbitrary text length");
builder.AppendLine(header.ArbitraryText, " Arbitrary text");
builder.AppendLine();
}
}
}

View File

@@ -0,0 +1,38 @@
using System.Text;
using SabreTools.Models.LZ;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Printers
{
public class LZQBasic : IPrinter<QBasicFile>
{
/// <inheritdoc/>
public void PrintInformation(StringBuilder builder, QBasicFile model)
=> Print(builder, model);
public static void Print(StringBuilder builder, QBasicFile file)
{
builder.AppendLine("LZ-compressed File, QBasic Variant Information:");
builder.AppendLine("-------------------------");
builder.AppendLine();
Print(builder, file.Header);
}
private static void Print(StringBuilder builder, QBasicHeader? header)
{
builder.AppendLine(" Header Information:");
builder.AppendLine(" -------------------------");
if (header == null)
{
builder.AppendLine(" No header");
builder.AppendLine();
return;
}
builder.AppendLine(header.Magic, " Magic number");
builder.AppendLine(header.RealLength, " Real length");
builder.AppendLine();
}
}
}

View File

@@ -0,0 +1,40 @@
using System.Text;
using SabreTools.Models.LZ;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Printers
{
public class LZSZDD : IPrinter<SZDDFile>
{
/// <inheritdoc/>
public void PrintInformation(StringBuilder builder, SZDDFile model)
=> Print(builder, model);
public static void Print(StringBuilder builder, SZDDFile file)
{
builder.AppendLine("LZ-compressed File, SZDD Variant Information:");
builder.AppendLine("-------------------------");
builder.AppendLine();
Print(builder, file.Header);
}
private static void Print(StringBuilder builder, SZDDHeader? header)
{
builder.AppendLine(" Header Information:");
builder.AppendLine(" -------------------------");
if (header == null)
{
builder.AppendLine(" No header");
builder.AppendLine();
return;
}
builder.AppendLine(header.Magic, " Magic number");
builder.AppendLine($" Compression type: {header.CompressionType} (0x{header.CompressionType:X})");
builder.AppendLine(header.LastChar, " Last char");
builder.AppendLine(header.RealLength, " Real length");
builder.AppendLine();
}
}
}

View File

@@ -10,7 +10,7 @@
<Nullable>enable</Nullable>
<SuppressTfmSupportBuildWarnings>true</SuppressTfmSupportBuildWarnings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<Version>1.8.2</Version>
<Version>1.8.3</Version>
<!-- Package Properties -->
<Authors>Matt Nadareski</Authors>
@@ -31,9 +31,10 @@
<ItemGroup>
<PackageReference Include="Newtonsoft.Json" Version="13.0.3" />
<PackageReference Include="SabreTools.ASN1" Version="1.5.0" />
<PackageReference Include="SabreTools.Compression" Version="0.6.2" />
<PackageReference Include="SabreTools.Hashing" Version="1.4.0" />
<PackageReference Include="SabreTools.IO" Version="1.6.1" />
<PackageReference Include="SabreTools.Models" Version="1.5.6" />
<PackageReference Include="SabreTools.Models" Version="1.5.7" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,287 @@
using System.Collections.Generic;
using System.IO;
using SabreTools.Compression.Blast;
using SabreTools.Models.InstallShieldArchiveV3;
namespace SabreTools.Serialization.Wrappers
{
/// <remarks>
/// Reference (de)compressor: https://www.sac.sk/download/pack/icomp95.zip
/// </remarks>
/// <see href="https://github.com/wfr/unshieldv3"/>
public partial class InstallShieldArchiveV3 : WrapperBase<Archive>
{
#region Descriptive Properties
/// <inheritdoc/>
public override string DescriptionString => "InstallShield Archive V3";
#endregion
#region Extension Properties
/// <inheritdoc cref="Header.DirCount"/>
public ushort DirCount => Model.Header?.DirCount ?? 0;
/// <inheritdoc cref="Header.FileCount"/>
public ushort FileCount => Model.Header?.FileCount ?? 0;
/// <inheritdoc cref="Archive.Directories"/>
public Models.InstallShieldArchiveV3.Directory[] Directories => Model.Directories ?? [];
/// <inheritdoc cref="Archive.Files"/>
public Models.InstallShieldArchiveV3.File[] Files => Model.Files ?? [];
/// <summary>
/// Map of all files to their parent directories by index
/// </summary>
public Dictionary<int, int> FileDirMap
{
get
{
// Return the prebuilt map
if (_fileDirMap != null)
return _fileDirMap;
// Build the file map
_fileDirMap = [];
int fileId = 0;
for (int i = 0; i < Directories.Length; i++)
{
var dir = Directories[i];
for (int j = 0; j < dir.FileCount; j++)
{
_fileDirMap[fileId++] = i;
}
}
return _fileDirMap;
}
}
private Dictionary<int, int>? _fileDirMap = null;
/// <summary>
/// Map of all files found in the archive
/// </summary>
public Dictionary<string, Models.InstallShieldArchiveV3.File> FileNameMap
{
get
{
// Return the prebuilt map
if (_fileNameMap != null)
return _fileNameMap;
// Build the file map
_fileNameMap = [];
for (int fileIndex = 0; fileIndex < Files.Length; fileIndex++)
{
// Get the current file
var file = Files[fileIndex];
// Get the parent directory
int dirIndex = FileDirMap[fileIndex];
if (dirIndex < 0 || dirIndex >= DirCount)
continue;
// Create the filename
string filename = Path.Combine(
Directories[dirIndex]?.Name ?? $"dir_{dirIndex}",
file.Name ?? $"file_{fileIndex}"
);
// Add to the map
_fileNameMap[filename] = file;
}
return _fileNameMap;
}
}
private Dictionary<string, Models.InstallShieldArchiveV3.File>? _fileNameMap = null;
/// <summary>
/// Data offset for all archives
/// </summary>
private const uint DataStart = 255;
#endregion
#region Constructors
/// <inheritdoc/>
public InstallShieldArchiveV3(Archive? model, byte[]? data, int offset)
: base(model, data, offset)
{
// All logic is handled by the base class
}
/// <inheritdoc/>
public InstallShieldArchiveV3(Archive? model, Stream? data)
: base(model, data)
{
// All logic is handled by the base class
}
/// <summary>
/// Create an InstallShield Archive V3 from a byte array and offset
/// </summary>
/// <param name="data">Byte array representing the archive</param>
/// <param name="offset">Offset within the array to parse</param>
/// <returns>An archive wrapper on success, null on failure</returns>
public static InstallShieldArchiveV3? Create(byte[]? data, int offset)
{
// If the data is invalid
if (data == null || data.Length == 0)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and use that
var dataStream = new MemoryStream(data, offset, data.Length - offset);
return Create(dataStream);
}
/// <summary>
/// Create an InstallShield Archive V3 from a Stream
/// </summary>
/// <param name="data">Stream representing the archive</param>
/// <returns>An archive wrapper on success, null on failure</returns>
public static InstallShieldArchiveV3? Create(Stream? data)
{
// If the data is invalid
if (data == null || !data.CanRead)
return null;
try
{
var archive = Deserializers.InstallShieldArchiveV3.DeserializeStream(data);
if (archive == null)
return null;
return new InstallShieldArchiveV3(archive, data);
}
catch
{
return null;
}
}
#endregion
#region Extraction
/// <summary>
/// Extract all files from the ISAv3 to an output directory
/// </summary>
/// <param name="outputDirectory">Output directory to write to</param>
/// <returns>True if all files extracted, false otherwise</returns>
public bool ExtractAll(string outputDirectory)
{
// Get the file count
int fileCount = Files.Length;
if (fileCount == 0)
return false;
// Loop through and extract all files to the output
bool allExtracted = true;
for (int i = 0; i < fileCount; i++)
{
allExtracted &= ExtractFile(i, outputDirectory);
}
return allExtracted;
}
/// <summary>
/// Extract a file from the ISAv3 to an output directory by index
/// </summary>
/// <param name="index">File index to extract</param>
/// <param name="outputDirectory">Output directory to write to</param>
/// <returns>True if the file extracted, false otherwise</returns>
public bool ExtractFile(int index, string outputDirectory)
{
// If the files index is invalid
if (index < 0 || index >= FileCount)
return false;
// Get the file
var file = Files[index];
if (file == null)
return false;
// Create the filename
var filename = file.Name;
if (filename == null)
return false;
// Get the directory index
int dirIndex = FileDirMap[index];
if (dirIndex < 0 || dirIndex >= DirCount)
return false;
// Get the directory name
var dirName = Directories[dirIndex].Name;
if (dirName != null)
filename = Path.Combine(dirName, filename);
// Get and adjust the file offset
long fileOffset = file.Offset + DataStart;
if (fileOffset < 0 || fileOffset >= Length)
return false;
// Get the file sizes
long fileSize = file.CompressedSize;
long outputFileSize = file.UncompressedSize;
// Read the compressed data directly
var compressedData = ReadFromDataSource((int)fileOffset, (int)fileSize);
if (compressedData == null)
return false;
// If the compressed and uncompressed sizes match
byte[] data;
if (fileSize == outputFileSize)
{
data = compressedData;
}
else
{
// Decompress the data
var decomp = Decompressor.Create();
var outData = new MemoryStream();
decomp.CopyTo(compressedData, outData);
data = outData.ToArray();
}
// If we have an invalid output directory
if (string.IsNullOrEmpty(outputDirectory))
return false;
// Create the full output path
filename = Path.Combine(outputDirectory, filename);
// Ensure the output directory is created
var directoryName = Path.GetDirectoryName(filename);
if (directoryName != null)
System.IO.Directory.CreateDirectory(directoryName);
// Try to write the data
try
{
// Open the output file for writing
using Stream fs = System.IO.File.OpenWrite(filename);
fs.Write(data, 0, data.Length);
}
catch
{
return false;
}
return true;
}
#endregion
}
}
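
Reviewer note: a usage sketch for the new wrapper (the archive path is hypothetical). Create returns null for anything it cannot parse, so a single null check covers the error cases:
using var stream = System.IO.File.OpenRead("SETUP.Z"); // hypothetical ISAv3 archive
var archive = InstallShieldArchiveV3.Create(stream);
if (archive != null)
{
    System.Console.WriteLine($"{archive.FileCount} files in {archive.DirCount} directories");
    foreach (var kvp in archive.FileNameMap)
        System.Console.WriteLine(kvp.Key);
    archive.ExtractAll("output");
}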

View File

@@ -0,0 +1,139 @@
using System.IO;
using SabreTools.Compression.SZDD;
using SabreTools.Models.LZ;
namespace SabreTools.Serialization.Wrappers
{
public class LZKWAJ : WrapperBase<KWAJFile>
{
#region Descriptive Properties
/// <inheritdoc/>
public override string DescriptionString => "LZ-compressed file, KWAJ variant";
#endregion
#region Constructors
/// <inheritdoc/>
public LZKWAJ(KWAJFile? model, byte[]? data, int offset)
: base(model, data, offset)
{
// All logic is handled by the base class
}
/// <inheritdoc/>
public LZKWAJ(KWAJFile? model, Stream? data)
: base(model, data)
{
// All logic is handled by the base class
}
/// <summary>
/// Create an LZ (KWAJ variant) from a byte array and offset
/// </summary>
/// <param name="data">Byte array representing the LZ (KWAJ variant)</param>
/// <param name="offset">Offset within the array to parse</param>
/// <returns>An LZ (KWAJ variant) wrapper on success, null on failure</returns>
public static LZKWAJ? Create(byte[]? data, int offset)
{
// If the data is invalid
if (data == null || data.Length == 0)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and use that
var dataStream = new MemoryStream(data, offset, data.Length - offset);
return Create(dataStream);
}
/// <summary>
/// Create an LZ (KWAJ variant) from a Stream
/// </summary>
/// <param name="data">Stream representing the LZ (KWAJ variant)</param>
/// <returns>An LZ (KWAJ variant) wrapper on success, null on failure</returns>
public static LZKWAJ? Create(Stream? data)
{
// If the data is invalid
if (data == null || !data.CanRead)
return null;
try
{
var file = Deserializers.LZKWAJ.DeserializeStream(data);
if (file == null)
return null;
return new LZKWAJ(file, data);
}
catch
{
return null;
}
}
#endregion
#region Extraction
/// <summary>
/// Extract the contents to an output directory
/// </summary>
/// <param name="outputDirectory">Output directory to write to</param>
/// <returns>True if the contents extracted, false otherwise</returns>
public bool Extract(string outputDirectory)
{
// Get the length of the compressed data
long compressedSize = Length - Model.Header!.DataOffset;
if (compressedSize < Model.Header.DataOffset)
return false;
// Read in the data as an array
byte[]? contents = ReadFromDataSource(Model.Header.DataOffset, (int)compressedSize);
if (contents == null)
return false;
// Get the decompressor
var decompressor = Decompressor.CreateKWAJ(contents, Model.Header!.CompressionType);
if (decompressor == null)
return false;
// If we have an invalid output directory
if (string.IsNullOrEmpty(outputDirectory))
return false;
// Create the full output path
string filename = "tempfile";
if (Model.HeaderExtensions?.FileName != null)
filename = Model.HeaderExtensions.FileName;
if (Model.HeaderExtensions?.FileExtension != null)
filename += $".{Model.HeaderExtensions.FileExtension}";
filename = Path.Combine(outputDirectory, filename);
// Ensure the output directory is created
var directoryName = Path.GetDirectoryName(filename);
if (directoryName != null)
Directory.CreateDirectory(directoryName);
// Try to write the data
try
{
// Open the output file for writing
using Stream fs = File.OpenWrite(filename);
decompressor.CopyTo(fs);
}
catch
{
return false;
}
return true;
}
#endregion
}
}
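
Reviewer note: usage mirrors the other wrappers. KWAJ can carry the original name in its extended header, so Extract only needs an output directory; when no name is stored it falls back to "tempfile" plus any stored extension. Paths are hypothetical:
using var stream = System.IO.File.OpenRead("README.TX_"); // hypothetical KWAJ-compressed file
var kwaj = LZKWAJ.Create(stream);
if (kwaj != null)
    kwaj.Extract("output");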

View File

@@ -0,0 +1,134 @@
using System.IO;
using SabreTools.Compression.SZDD;
using SabreTools.Models.LZ;
namespace SabreTools.Serialization.Wrappers
{
public class LZQBasic : WrapperBase<QBasicFile>
{
#region Descriptive Properties
/// <inheritdoc/>
public override string DescriptionString => "LZ-compressed file, QBasic variant";
#endregion
#region Constructors
/// <inheritdoc/>
public LZQBasic(QBasicFile? model, byte[]? data, int offset)
: base(model, data, offset)
{
// All logic is handled by the base class
}
/// <inheritdoc/>
public LZQBasic(QBasicFile? model, Stream? data)
: base(model, data)
{
// All logic is handled by the base class
}
/// <summary>
/// Create an LZ (QBasic variant) from a byte array and offset
/// </summary>
/// <param name="data">Byte array representing the LZ (QBasic variant)</param>
/// <param name="offset">Offset within the array to parse</param>
/// <returns>An LZ (QBasic variant) wrapper on success, null on failure</returns>
public static LZQBasic? Create(byte[]? data, int offset)
{
// If the data is invalid
if (data == null || data.Length == 0)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and use that
var dataStream = new MemoryStream(data, offset, data.Length - offset);
return Create(dataStream);
}
/// <summary>
/// Create an LZ (QBasic variant) from a Stream
/// </summary>
/// <param name="data">Stream representing the LZ (QBasic variant)</param>
/// <returns>An LZ (QBasic variant) wrapper on success, null on failure</returns>
public static LZQBasic? Create(Stream? data)
{
// If the data is invalid
if (data == null || !data.CanRead)
return null;
try
{
var file = Deserializers.LZQBasic.DeserializeStream(data);
if (file == null)
return null;
return new LZQBasic(file, data);
}
catch
{
return null;
}
}
#endregion
#region Extraction
/// <summary>
/// Extract the contents to an output directory
/// </summary>
/// <param name="outputDirectory">Output directory to write to</param>
/// <returns>True if the contents extracted, false otherwise</returns>
public bool Extract(string outputDirectory)
{
// Get the length of the compressed data
long compressedSize = Length - 12;
if (compressedSize < 12)
return false;
// Read in the data as an array
byte[]? contents = ReadFromDataSource(12, (int)compressedSize);
if (contents == null)
return false;
// Get the decompressor
var decompressor = Decompressor.CreateQBasic(contents);
if (decompressor == null)
return false;
// If we have an invalid output directory
if (string.IsNullOrEmpty(outputDirectory))
return false;
// Create the full output path
string filename = Path.Combine(outputDirectory, "tempfile.bin");
// Ensure the output directory is created
var directoryName = Path.GetDirectoryName(filename);
if (directoryName != null)
Directory.CreateDirectory(directoryName);
// Try to write the data
try
{
// Open the output file for writing
using Stream fs = File.OpenWrite(filename);
decompressor.CopyTo(fs);
}
catch
{
return false;
}
return true;
}
#endregion
}
}

View File

@@ -0,0 +1,166 @@
using System.IO;
using SabreTools.Compression.SZDD;
using SabreTools.Models.LZ;
namespace SabreTools.Serialization.Wrappers
{
public class LZSZDD : WrapperBase<SZDDFile>
{
#region Descriptive Properties
/// <inheritdoc/>
public override string DescriptionString => "LZ-compressed file, SZDD variant";
#endregion
#region Constructors
/// <inheritdoc/>
public LZSZDD(SZDDFile? model, byte[]? data, int offset)
: base(model, data, offset)
{
// All logic is handled by the base class
}
/// <inheritdoc/>
public LZSZDD(SZDDFile? model, Stream? data)
: base(model, data)
{
// All logic is handled by the base class
}
/// <summary>
/// Create an LZ (SZDD variant) from a byte array and offset
/// </summary>
/// <param name="data">Byte array representing the LZ (SZDD variant)</param>
/// <param name="offset">Offset within the array to parse</param>
/// <returns>An LZ (SZDD variant) wrapper on success, null on failure</returns>
public static LZSZDD? Create(byte[]? data, int offset)
{
// If the data is invalid
if (data == null || data.Length == 0)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and use that
var dataStream = new MemoryStream(data, offset, data.Length - offset);
return Create(dataStream);
}
/// <summary>
/// Create an LZ (SZDD variant) from a Stream
/// </summary>
/// <param name="data">Stream representing the LZ (SZDD variant)</param>
/// <returns>An LZ (SZDD variant) wrapper on success, null on failure</returns>
public static LZSZDD? Create(Stream? data)
{
// If the data is invalid
if (data == null || !data.CanRead)
return null;
try
{
var file = Deserializers.LZSZDD.DeserializeStream(data);
if (file == null)
return null;
return new LZSZDD(file, data);
}
catch
{
return null;
}
}
#endregion
#region Extraction
/// <summary>
/// Extract the contents to an output directory
/// </summary>
/// <param name="filename">Original filename to use as a base</param>
/// <param name="outputDirectory">Output directory to write to</param>
/// <returns>True if the contents extracted, false otherwise</returns>
public bool Extract(string filename, string outputDirectory)
{
// Get the length of the compressed data
long compressedSize = Length - 14;
if (compressedSize < 14)
return false;
// Read in the data as an array
byte[]? contents = ReadFromDataSource(14, (int)compressedSize);
if (contents == null)
return false;
// Get the decompressor
var decompressor = Decompressor.CreateSZDD(contents);
if (decompressor == null)
return false;
// Create the output file
filename = GetExpandedName(filename).TrimEnd('\0');
// If we have an invalid output directory
if (string.IsNullOrEmpty(outputDirectory))
return false;
// Create the full output path
filename = Path.Combine(outputDirectory, filename);
// Ensure the output directory is created
var directoryName = Path.GetDirectoryName(filename);
if (directoryName != null)
Directory.CreateDirectory(directoryName);
// Try to write the data
try
{
// Open the output file for writing
using Stream fs = File.OpenWrite(filename);
decompressor.CopyTo(fs);
}
catch
{
return false;
}
return true;
}
/// <summary>
/// Get the full name of the input file
/// </summary>
private string GetExpandedName(string input)
{
// If the extension is missing
string extension = Path.GetExtension(input).TrimStart('.');
if (string.IsNullOrEmpty(extension))
return Path.GetFileNameWithoutExtension(input);
// If the extension is a single character
if (extension.Length == 1)
{
if (extension == "_" || extension == "$")
return $"{Path.GetFileNameWithoutExtension(input)}.{char.ToLower(Model.Header!.LastChar)}";
return Path.GetFileNameWithoutExtension(input);
}
// If the extension isn't formatted
if (!extension.EndsWith("_"))
return Path.GetFileNameWithoutExtension(input);
// Handle replacing characters
char c = (char.IsUpper(input[0]) ? char.ToUpper(Model.Header!.LastChar) : char.ToLower(Model.Header!.LastChar));
string text2 = extension.Substring(0, extension.Length - 1) + c;
return Path.GetFileNameWithoutExtension(input) + "." + text2;
}
#endregion
}
}
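
Reviewer note: SZDD files do not store the original name, so Extract takes the on-disk filename and rebuilds the extension from the header's LastChar (the trailing-underscore convention). Paths are hypothetical:
using var stream = System.IO.File.OpenRead("DRIVER.SY_"); // hypothetical SZDD-compressed file
var szdd = LZSZDD.Create(stream);
if (szdd != null)
    szdd.Extract("DRIVER.SY_", "output"); // the trailing '_' in the extension is replaced using LastChar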

View File

@@ -1,5 +1,7 @@
using System;
using System.Collections.Generic;
using System.IO;
using SabreTools.Compression.zlib;
using SabreTools.Models.SGA;
namespace SabreTools.Serialization.Wrappers
@@ -120,6 +122,148 @@ namespace SabreTools.Serialization.Wrappers
#endregion
#region Extraction
/// <summary>
/// Extract all files from the SGA to an output directory
/// </summary>
/// <param name="outputDirectory">Output directory to write to</param>
/// <returns>True if all files extracted, false otherwise</returns>
public bool ExtractAll(string outputDirectory)
{
// Get the file count
int fileCount = FileCount;
if (fileCount == 0)
return false;
// Loop through and extract all files to the output
bool allExtracted = true;
for (int i = 0; i < fileCount; i++)
{
allExtracted &= ExtractFile(i, outputDirectory);
}
return allExtracted;
}
/// <summary>
/// Extract a file from the SGA to an output directory by index
/// </summary>
/// <param name="index">File index to extract</param>
/// <param name="outputDirectory">Output directory to write to</param>
/// <returns>True if the file extracted, false otherwise</returns>
public bool ExtractFile(int index, string outputDirectory)
{
// Get the file count
int fileCount = FileCount;
if (fileCount == 0)
return false;
// If the files index is invalid
if (index < 0 || index >= fileCount)
return false;
// Create the filename
var filename = GetFileName(index);
if (filename == null)
return false;
// Loop through and get all parent directories
var parentNames = new List<string> { filename };
// Get the parent directory
string? folderName = GetParentName(index);
if (folderName != null)
parentNames.Add(folderName);
// TODO: Should the section name/alias be used in the path as well?
// Reverse and assemble the filename
parentNames.Reverse();
#if NET20 || NET35
filename = parentNames[0];
for (int i = 1; i < parentNames.Count; i++)
{
filename = Path.Combine(filename, parentNames[i]);
}
#else
filename = Path.Combine([.. parentNames]);
#endif
// Get and adjust the file offset
long fileOffset = GetFileOffset(index);
fileOffset += FileDataOffset;
if (fileOffset < 0)
return false;
// Get the file sizes
long fileSize = GetCompressedSize(index);
long outputFileSize = GetUncompressedSize(index);
// Read the compressed data directly
var compressedData = ReadFromDataSource((int)fileOffset, (int)fileSize);
if (compressedData == null)
return false;
// If the compressed and uncompressed sizes match
byte[] data;
if (fileSize == outputFileSize)
{
data = compressedData;
}
else
{
// Inflate the data into the buffer
var zstream = new ZLib.z_stream_s();
data = new byte[outputFileSize];
unsafe
{
fixed (byte* payloadPtr = compressedData)
fixed (byte* dataPtr = data)
{
zstream.next_in = payloadPtr;
zstream.avail_in = (uint)compressedData.Length;
zstream.total_in = (uint)compressedData.Length;
zstream.next_out = dataPtr;
zstream.avail_out = (uint)data.Length;
zstream.total_out = 0;
ZLib.inflateInit_(zstream, ZLib.zlibVersion(), compressedData.Length);
int zret = ZLib.inflate(zstream, 1);
ZLib.inflateEnd(zstream);
}
}
}
// If we have an invalid output directory
if (string.IsNullOrEmpty(outputDirectory))
return false;
// Create the full output path
filename = Path.Combine(outputDirectory, filename);
// Ensure the output directory is created
var directoryName = Path.GetDirectoryName(filename);
if (directoryName != null)
System.IO.Directory.CreateDirectory(directoryName);
// Try to write the data
try
{
// Open the output file for writing
using Stream fs = System.IO.File.OpenWrite(filename);
fs.Write(data, 0, data.Length);
}
catch
{
return false;
}
return true;
}
#endregion
#region File
/// <summary>

View File

@@ -45,6 +45,24 @@ namespace SabreTools.Serialization.Wrappers
/// </summary>
public T Model { get; private set; }
/// <summary>
/// Length of the underlying data
/// </summary>
public long Length
{
get
{
return _dataSource switch
{
DataSource.ByteArray => _byteArrayData!.Length - _byteArrayOffset,
DataSource.Stream => _streamData!.Length,
// Everything else is invalid
_ => -1,
};
}
}
#endregion
#region Instance Variables

View File

@@ -28,11 +28,13 @@ namespace SabreTools.Serialization.Wrappers
WrapperType.GCF => GCF.Create(data),
WrapperType.GZIP => null,// TODO: Implement wrapper
WrapperType.IniFile => null,// TODO: Implement wrapper
WrapperType.InstallShieldArchiveV3 => null,// TODO: Implement wrapper
WrapperType.InstallShieldArchiveV3 => InstallShieldArchiveV3.Create(data),
WrapperType.InstallShieldCAB => InstallShieldCabinet.Create(data),
WrapperType.LDSCRYPT => null,// TODO: Implement wrapper
WrapperType.LZKWAJ => LZKWAJ.Create(data),
WrapperType.LZQBasic => LZQBasic.Create(data),
WrapperType.LZSZDD => LZSZDD.Create(data),
WrapperType.MicrosoftCAB => MicrosoftCabinet.Create(data),
WrapperType.MicrosoftLZ => null,// TODO: Implement wrapper
WrapperType.MoPaQ => MoPaQ.Create(data),
WrapperType.N3DS => N3DS.Create(data),
WrapperType.NCF => NCF.Create(data),
@@ -330,6 +332,19 @@ namespace SabreTools.Serialization.Wrappers
#endregion
#region LZ
if (magic.StartsWith([0x4B, 0x57, 0x41, 0x4A, 0x88, 0xF0, 0x27, 0xD1]))
return WrapperType.LZKWAJ;
if (magic.StartsWith([0x53, 0x5A, 0x20, 0x88, 0xF0, 0x27, 0x33, 0xD1]))
return WrapperType.LZQBasic;
if (magic.StartsWith([0x53, 0x5A, 0x44, 0x44, 0x88, 0xF0, 0x27, 0x33]))
return WrapperType.LZSZDD;
#endregion
#region MicrosoftCAB
if (magic.StartsWith([0x4d, 0x53, 0x43, 0x46]))
@@ -339,13 +354,6 @@ namespace SabreTools.Serialization.Wrappers
#endregion
#region MicrosoftLZ
if (magic.StartsWith([0x53, 0x5a, 0x44, 0x44, 0x88, 0xf0, 0x27, 0x33]))
return WrapperType.MicrosoftLZ;
#endregion
#region MoPaQ
if (magic.StartsWith([0x4d, 0x50, 0x51, 0x1a]))

View File

@@ -77,7 +77,6 @@ namespace SabreTools.Serialization.Wrappers
/// <summary>
/// InstallShield archive v3
/// </summary>
/// <remarks>Currently has no IWrapper implementation</remarks>
InstallShieldArchiveV3,
/// <summary>
@@ -91,17 +90,26 @@ namespace SabreTools.Serialization.Wrappers
/// <remarks>Currently has no IWrapper implementation</remarks>
LDSCRYPT,
/// <summary>
/// LZ-compressed file, KWAJ variant
/// </summary>
LZKWAJ,
/// <summary>
/// LZ-compressed file, QBasic variant
/// </summary>
LZQBasic,
/// <summary>
/// LZ-compressed file, SZDD variant
/// </summary>
LZSZDD,
/// <summary>
/// Microsoft cabinet file
/// </summary>
MicrosoftCAB,
/// <summary>
/// Microsoft LZ-compressed file
/// </summary>
/// <remarks>Currently has no IWrapper implementation</remarks>
MicrosoftLZ,
/// <summary>
/// MPQ game data archive
/// </summary>