Compare commits

..

28 Commits
2.0.0 ... 2.0.1

Author SHA1 Message Date
Matt Nadareski
e029fa4833 Skip warning around GC.SharpCompress inclusion 2025-10-05 17:02:44 -04:00
Matt Nadareski
2c3f229a6a Bump version 2025-10-05 16:59:33 -04:00
Matt Nadareski
3558d3532c Do not update offset on name offset 2025-10-02 13:33:51 -04:00
Matt Nadareski
ad5314dc22 Minor tweak 2025-10-01 20:08:33 -04:00
Matt Nadareski
eaa5bb5662 Sections can't be null 2025-10-01 20:06:05 -04:00
Matt Nadareski
fcdc703595 Update readme to be accurate again 2025-09-30 20:58:52 -04:00
Matt Nadareski
ef9fa562ab More BZip documenting 2025-09-30 20:35:40 -04:00
Matt Nadareski
ac285c48fe Start documenting BZip in code 2025-09-30 20:25:18 -04:00
Matt Nadareski
e57ad65210 Migrate to GrindCore fork of SharpCompress 2025-09-30 19:52:14 -04:00
Matt Nadareski
0fc3a30422 Print a couple more XZ fields 2025-09-30 14:09:40 -04:00
Matt Nadareski
49f6704694 Add initial XZ printer 2025-09-30 14:05:34 -04:00
Matt Nadareski
6df712c538 Add XZ parsing to wrapper 2025-09-30 13:56:21 -04:00
Matt Nadareski
bda3076a30 Further XZ parsing fixes 2025-09-30 13:56:11 -04:00
Matt Nadareski
89e8e7c706 Fix more XZ parsing; use read-from-end 2025-09-30 13:22:26 -04:00
Matt Nadareski
c10835d221 Start fixing XZ parsing 2025-09-30 13:08:53 -04:00
Matt Nadareski
a6801350ea Fix issue with XZ enum 2025-09-30 12:48:12 -04:00
Matt Nadareski
c7a5a62041 Require exact versions for build 2025-09-30 11:09:37 -04:00
Matt Nadareski
749b35e5cb Slight tweak to CHD v5 model 2025-09-30 10:59:44 -04:00
Matt Nadareski
3c520d33eb Add XZ reader, fix some minor issues 2025-09-30 09:47:19 -04:00
Matt Nadareski
635170a051 Add reference for BZ2 2025-09-29 23:38:40 -04:00
Matt Nadareski
9619311d11 Store variable-length numbers as ulong 2025-09-29 23:32:58 -04:00
Matt Nadareski
4bee14835c Fix last couple of commits 2025-09-29 23:12:23 -04:00
Matt Nadareski
f44059e16a Add XZ variable length helper methods 2025-09-29 23:07:05 -04:00
Matt Nadareski
bfb206a06d Add XZ models 2025-09-29 22:56:53 -04:00
Matt Nadareski
6b7b05eb31 Port some extensions for GZip from ST 2025-09-29 22:04:10 -04:00
Matt Nadareski
a9a2a04332 Store raw extras field for odd formatting 2025-09-29 21:49:20 -04:00
Matt Nadareski
50459645dd Add a couple GZ constants 2025-09-29 21:39:37 -04:00
Matt Nadareski
564386038f Try using Environment instead of compiler flags 2025-09-29 12:29:14 -04:00
39 changed files with 1199 additions and 73 deletions

View File

@@ -10,7 +10,7 @@
<Nullable>enable</Nullable>
<SuppressTfmSupportBuildWarnings>true</SuppressTfmSupportBuildWarnings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<Version>2.0.0</Version>
<Version>2.0.1</Version>
</PropertyGroup>
<!-- Support All Frameworks -->
@@ -27,14 +27,6 @@
<TargetFrameworks>net6.0;net7.0;net8.0;net9.0</TargetFrameworks>
</PropertyGroup>
<!-- Set a build flag for Windows specifically -->
<PropertyGroup Condition="'$(RuntimeIdentifier)'=='win-x86'">
<DefineConstants>$(DefineConstants);WINX86</DefineConstants>
</PropertyGroup>
<PropertyGroup Condition="'$(RuntimeIdentifier)'=='win-x64'">
<DefineConstants>$(DefineConstants);WINX64</DefineConstants>
</PropertyGroup>
<!-- These are needed for dealing with native Windows DLLs -->
<ItemGroup Condition="'$(RuntimeIdentifier)'=='win-x86'">
<ContentWithTargetPath Include="..\SabreTools.Serialization\runtimes\win-x86\native\CascLib.dll">
@@ -66,7 +58,7 @@
</ItemGroup>
<ItemGroup>
<PackageReference Include="SabreTools.IO" Version="1.7.5" />
<PackageReference Include="SabreTools.IO" Version="[1.7.5]" />
<PackageReference Include="System.Text.Encoding.CodePages" Version="9.0.9" Condition="!$(TargetFramework.StartsWith(`net2`)) AND !$(TargetFramework.StartsWith(`net3`)) AND !$(TargetFramework.StartsWith(`net40`)) AND !$(TargetFramework.StartsWith(`net452`))" />
</ItemGroup>

View File

@@ -10,7 +10,7 @@
<Nullable>enable</Nullable>
<SuppressTfmSupportBuildWarnings>true</SuppressTfmSupportBuildWarnings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<Version>2.0.0</Version>
<Version>2.0.1</Version>
</PropertyGroup>
<!-- Support All Frameworks -->
@@ -32,8 +32,8 @@
</ItemGroup>
<ItemGroup>
<PackageReference Include="SabreTools.IO" Version="1.7.5" />
<PackageReference Include="SabreTools.Hashing" Version="1.5.0" />
<PackageReference Include="SabreTools.IO" Version="[1.7.5]" />
<PackageReference Include="SabreTools.Hashing" Version="[1.5.0]" />
</ItemGroup>
</Project>

View File

@@ -8,7 +8,7 @@ Find the link to the Nuget package [here](https://www.nuget.org/packages/SabreTo
The following non-project libraries (or ports thereof) are used for file handling:
- [SharpCompress](https://github.com/adamhathcock/sharpcompress) - Common archive format extraction
- [GrindCore.SharpCompress](https://github.com/Nanook/GrindCore.SharpCompress) - Common archive format extraction
- [StormLibSharp](https://github.com/robpaveza/stormlibsharp) - MoPaQ extraction [Unused in .NET Framework 2.0/3.5/4.0 and non-Windows builds due to Windows-specific libraries]
The following projects have influenced this library:
@@ -66,7 +66,7 @@ Options:
| InstallShield CAB | |
| Microsoft cabinet file | Does not support LZX or Quantum compression |
| Microsoft LZ-compressed files | KWAJ, QBasic, and SZDD variants |
| MoPaQ game data archive (MPQ) | Currently not working. Windows only. .NET Framework 4.5.2 and above |
| MoPaQ game data archive (MPQ) | Windows only. .NET Framework 4.5.2 and above |
| New Executable | Embedded archives and executables in the overlay and Wise installer |
| NovaLogic Game Archive Format (PFF) | |
| PKZIP and derived files (ZIP, etc.) | .NET Framework 4.6.2 and greater |

View File

@@ -0,0 +1,73 @@
using System.IO;
using System.Linq;
using SabreTools.Serialization.Readers;
using Xunit;

namespace SabreTools.Serialization.Test.Readers
{
    /// <summary>
    /// Guard-clause tests for the XZ reader: null, empty, and garbage
    /// inputs must all deserialize to a null archive.
    /// </summary>
    public class XZTests
    {
        [Fact]
        public void NullArray_Null()
        {
            byte[]? data = null;

            var actual = new XZ().Deserialize(data, 0);

            Assert.Null(actual);
        }

        [Fact]
        public void EmptyArray_Null()
        {
            byte[]? data = [];

            var actual = new XZ().Deserialize(data, 0);

            Assert.Null(actual);
        }

        [Fact]
        public void InvalidArray_Null()
        {
            var actual = new XZ().Deserialize(Garbage(1024), 0);

            Assert.Null(actual);
        }

        [Fact]
        public void NullStream_Null()
        {
            Stream? data = null;

            var actual = new XZ().Deserialize(data);

            Assert.Null(actual);
        }

        [Fact]
        public void EmptyStream_Null()
        {
            using var data = new MemoryStream([]);

            var actual = new XZ().Deserialize(data);

            Assert.Null(actual);
        }

        [Fact]
        public void InvalidStream_Null()
        {
            using var data = new MemoryStream(Garbage(1024));

            var actual = new XZ().Deserialize(data);

            Assert.Null(actual);
        }

        /// <summary>Build a buffer of 0xFF bytes that can never be a valid XZ stream</summary>
        private static byte[] Garbage(int count)
            => [.. Enumerable.Repeat<byte>(0xFF, count)];
    }
}

View File

@@ -27,7 +27,7 @@
<PrivateAssets>all</PrivateAssets>
</PackageReference>
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.1" />
<PackageReference Include="SabreTools.Hashing" Version="1.5.0" />
<PackageReference Include="SabreTools.Hashing" Version="[1.5.0]" />
<PackageReference Include="xunit" Version="2.9.3" />
<PackageReference Include="xunit.runner.visualstudio" Version="3.1.4">
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>

View File

@@ -0,0 +1,62 @@
using System.Collections.Generic;

namespace SabreTools.Data.Extensions
{
    /// <summary>
    /// Helpers for the XZ "multibyte integer" (variable-length) encoding
    /// </summary>
    /// <see href="https://tukaani.org/xz/xz-file-format.txt"/>
    public static class XZ
    {
        /// <summary>
        /// Decode a value from a variable-length integer
        /// </summary>
        /// <param name="value">Value to decode</param>
        /// <param name="maxSize">Maximum number of bytes to parse</param>
        /// <param name="length">Number of bytes parsed; 0 on failure</param>
        /// <returns>UInt64 representing the decoded integer, 0 on failure</returns>
        /// <see href="https://tukaani.org/xz/xz-file-format.txt"/>
        public static ulong DecodeVariableLength(this byte[] value, int maxSize, out int length)
        {
            length = 0;
            if (maxSize <= 0)
                return 0;

            // A 63-bit value never needs more than 9 bytes (ceil(63 / 7))
            if (maxSize > 9)
                maxSize = 9;

            // Low 7 bits of each byte carry data; the high bit marks continuation
            ulong output = (ulong)(value[0] & 0x7F);
            int i = 0;
            while ((value[i++] & 0x80) != 0)
            {
                // Overlong encodings and non-minimal trailing 0x00 bytes are
                // invalid per the spec's decode() reference implementation
                if (i >= maxSize || value[i] == 0x00)
                    return 0;

                output |= (ulong)(value[i] & 0x7F) << (i * 7);
            }

            length = i;
            return output;
        }

        /// <summary>
        /// Encode a value to a variable-length integer
        /// </summary>
        /// <param name="value">Value to encode</param>
        /// <returns>Byte array representing the encoded integer; empty if the value is out of range</returns>
        /// <see href="https://tukaani.org/xz/xz-file-format.txt"/>
        public static byte[] EncodeVariableLength(this ulong value)
        {
            // The spec limits encodable values to UINT64_MAX / 2 (63 bits).
            // The previous `long.MaxValue / 2` bound wrongly rejected valid
            // values in [2^62, 2^63 - 1].
            if (value > ulong.MaxValue / 2)
                return [];

            var output = new List<byte>();
            while (value >= 0x80)
            {
                // Emit 7 data bits with the continuation bit set
                output.Add((byte)(value | 0x80));
                value >>= 7;
            }

            output.Add((byte)value);
            return [.. output];
        }
    }
}

View File

@@ -1,10 +1,18 @@
namespace SabreTools.Data.Models.BZip2
{
    /// <summary>
    /// BZip2 compressed stream: a fixed header, one or more compressed
    /// blocks (not yet modeled), and a footer
    /// </summary>
    /// <see href="https://github.com/dsnet/compress/blob/master/doc/bzip2-format.pdf"/>
    public class Archive
    {
        /// <summary>
        /// Stream header
        /// </summary>
        public Header? Header { get; set; }

        // TODO: Implement remaining structures
        // NOTE(review): the compressed blocks between header and footer are
        // not represented here yet; see the started Block model

        /// <summary>
        /// Stream footer
        /// </summary>
        public Footer? Footer { get; set; }
    }
}

View File

@@ -0,0 +1,12 @@
namespace SabreTools.Data.Models.BZip2
{
    /// <summary>
    /// Single compressed block within a BZip2 stream
    /// </summary>
    /// <see href="https://github.com/dsnet/compress/blob/master/doc/bzip2-format.pdf"/>
    public class Block
    {
        /// <summary>
        /// Block header
        /// </summary>
        public BlockHeader? Header { get; set; }

        // TODO: Implement remaining structures (trees, selectors, symbol data)
    }
}

View File

@@ -0,0 +1,37 @@
namespace SabreTools.Data.Models.BZip2
{
    /// <summary>
    /// Header of a single BZip2 block
    /// </summary>
    public class BlockHeader
    {
        /// <summary>
        /// A 48-bit integer value 31 41 59 26 53 59, which
        /// is the binary-coded decimal representation of
        /// pi. It is used to differentiate the block
        /// from the footer.
        /// </summary>
        /// <remarks>This may not be byte-aligned</remarks>
        public byte[]? Magic { get; set; }

        /// <summary>
        /// The CRC-32 checksum of the uncompressed data contained
        /// in the block data (not yet modeled here). It is the same
        /// checksum used in GZip, but is slightly different due to the
        /// bit-packing differences.
        /// </summary>
        public uint Crc32 { get; set; }

        /// <summary>
        /// Should be 0. Previous versions of BZip2 allowed
        /// the input data to be randomized to avoid
        /// pathological strings from causing the runtime
        /// to be exponential.
        /// </summary>
        /// <remarks>Actually a 1-bit value</remarks>
        public byte Randomized { get; set; }

        /// <summary>
        /// Contains the origin pointer used in the BWT stage
        /// </summary>
        /// <remarks>Actually a 24-bit value</remarks>
        public uint OrigPtr { get; set; }
    }
}

View File

@@ -0,0 +1,26 @@
namespace SabreTools.Data.Models.BZip2
{
    /// <summary>
    /// Huffman-tree metadata for a BZip2 block (HUFF stage)
    /// </summary>
    public class BlockTrees
    {
        // TODO: Implement SymMap

        /// <summary>
        /// Indicates the number of Huffman trees used in
        /// the HUFF stage. It must be between 2 and 6.
        /// </summary>
        /// <remarks>Actually a 3-bit value</remarks>
        public byte NumTrees { get; set; }

        /// <summary>
        /// Indicates the number of selectors used in the
        /// HUFF stage. There must be at least 1 selector
        /// defined.
        /// </summary>
        /// <remarks>Actually a 15-bit value</remarks>
        public ushort NumSels { get; set; }

        // TODO: Implement Selectors
        // TODO: Implement Trees
    }
}

View File

@@ -6,4 +6,4 @@ namespace SabreTools.Data.Models.BZip2
public const string SignatureString = "BZh";
}
}
}

View File

@@ -0,0 +1,27 @@
namespace SabreTools.Data.Models.BZip2
{
    /// <summary>
    /// Footer (end-of-stream marker) of a BZip2 stream
    /// </summary>
    public class Footer
    {
        /// <summary>
        /// A 48-bit integer value 17 72 45 38 50 90, which
        /// is the binary-coded decimal representation of
        /// sqrt(pi). It is used to differentiate the footer
        /// from a block.
        /// </summary>
        /// <remarks>This may not be byte-aligned</remarks>
        public byte[]? Magic { get; set; }

        /// <summary>
        /// Contains a custom checksum computed using each of
        /// the Block CRCs.
        /// </summary>
        /// <remarks>This may not be byte-aligned</remarks>
        public uint Checksum { get; set; }

        /// <summary>
        /// Used to align the bit-stream to the next byte-aligned
        /// edge and will contain between 0 and 7 bits.
        /// </summary>
        public byte Padding { get; set; }
    }
}

View File

@@ -0,0 +1,25 @@
namespace SabreTools.Data.Models.BZip2
{
    /// <summary>
    /// Fixed 3-byte header at the start of a BZip2 stream ("BZ" + version + level)
    /// </summary>
    public class Header
    {
        /// <summary>
        /// "BZ"
        /// </summary>
        public string? Signature { get; set; }

        /// <summary>
        /// Version byte
        /// </summary>
        /// <remarks>
        /// '0' indicates a BZ1 file
        /// 'h' indicates a BZ2 file
        /// </remarks>
        public byte Version { get; set; }

        /// <summary>
        /// ASCII value of the compression level
        /// </summary>
        /// <remarks>Valid values between '1' and '9'</remarks>
        public byte Level { get; set; }
    }
}

View File

@@ -9,7 +9,8 @@ namespace SabreTools.Data.Models.CHD
/// <summary>
/// Which custom compressors are used?
/// </summary>
public CodecType[] Compressors { get; set; } = new CodecType[4];
/// <remarks>There should be 4 entries</remarks>
public CodecType[] Compressors { get; set; }
/// <summary>
/// Logical size of the data (in bytes)

View File

@@ -6,5 +6,9 @@ namespace SabreTools.Data.Models.GZIP
public const byte ID1 = 0x1F;
public const byte ID2 = 0x8B;
public static readonly byte[] SignatureBytes = [0x1F, 0x8B];
public static readonly byte[] TorrentGZHeader = [0x1F, 0x8B, 0x08, 0x04, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x1C, 0x00];
}
}

View File

@@ -66,6 +66,17 @@ namespace SabreTools.Data.Models.GZIP
/// the header, with total length XLEN bytes. It consists of a
/// series of subfields, each of the form <see cref="ExtraFieldData"/>.
/// </summary>
/// <remarks>This is the raw version of <see cref="ExtraField"/></remarks>
public byte[] ExtraFieldBytes { get; set; }
/// <summary>
/// Extra field
///
/// If the FLG.FEXTRA bit is set, an "extra field" is present in
/// the header, with total length XLEN bytes. It consists of a
/// series of subfields, each of the form <see cref="ExtraFieldData"/>.
/// </summary>
/// <remarks>This is the processed version of <see cref="ExtraFieldBytes"/></remarks>
public ExtraFieldData[]? ExtraField { get; set; }
/// <summary>

View File

@@ -26,6 +26,7 @@ Not all of this information was able to be gathered directly from the files in q
| [IBM Documentation](https://www.ibm.com/docs/en) | TAR |
| [IETF](https://www.ietf.org/) | GZIP |
| [Independent Commodore Library](https://petlibrary.tripod.com/) | PKZIP |
| [Joe Tsai](https://github.com/dsnet/compress/blob/master/doc/bzip2-format.pdf) | BZip2 |
| [Ladislav Zezula](http://zezula.net/en/tools/main.html) | MoPaQ |
| [libaacs](https://code.videolan.org/videolan/libaacs/) | AACS |
| [libbdplus](https://github.com/mwgoldsmith/bdplus) | BD+ |
@@ -45,6 +46,7 @@ Not all of this information was able to be gathered directly from the files in q
| [Technical Committee T10](https://www.t10.org/) | PIC |
| [The Go tools for Windows + Assembler](https://www.godevtool.com/) | PortableExecutable |
| [The Whole Half-Life](https://twhl.info/wiki/page/Specification:_WAD3) | WAD3 |
| [Tukaani](https://tukaani.org/xz/format.html) | XZ |
| [Unshield](https://github.com/twogood/unshield) | InstallShieldCabinet |
| [unshieldv3](https://github.com/wfr/unshieldv3) | InstallShieldArchiveV3 |
| [Valve Developer Community](https://developer.valvesoftware.com/wiki/Main_Page) | BSP, VPK |

View File

@@ -1,10 +1,26 @@
namespace SabreTools.Data.Models.XZ
{
    /// <summary>
    /// Single XZ stream: header, blocks, index, and footer, in on-disk order
    /// </summary>
    /// <see href="https://tukaani.org/xz/xz-file-format.txt"/>
    public class Archive
    {
        /// <summary>
        /// Pre-blocks header
        /// </summary>
        public Header? Header { get; set; }

        /// <summary>
        /// Sequence of 0 or more blocks
        /// </summary>
        public Block[]? Blocks { get; set; }

        /// <summary>
        /// Index structure (one record per block)
        /// </summary>
        public Index? Index { get; set; }

        /// <summary>
        /// Post-blocks footer
        /// </summary>
        public Footer? Footer { get; set; }
    }
}

View File

@@ -0,0 +1,80 @@
namespace SabreTools.Data.Models.XZ
{
    /// <summary>
    /// Represents a single compressed block in the stream
    /// </summary>
    public class Block
    {
        /// <summary>
        /// Size of the header
        /// </summary>
        /// <remarks>
        /// The real header size can be calculated by the following:
        /// (HeaderSize + 1) * 4
        /// </remarks>
        public byte HeaderSize { get; set; }

        /// <summary>
        /// The Block Flags field is a bit field
        /// </summary>
        public BlockFlags Flags { get; set; }

        /// <summary>
        /// Size of the compressed data
        /// Present if <see cref="BlockFlags.CompressedSize"/> is set.
        /// </summary>
        /// <remarks>Stored as a variable-length integer</remarks>
        public ulong CompressedSize { get; set; }

        /// <summary>
        /// Size of the block after decompression
        /// Present if <see cref="BlockFlags.UncompressedSize"/> is set.
        /// </summary>
        /// <remarks>Stored as a variable-length integer</remarks>
        public ulong UncompressedSize { get; set; }

        /// <summary>
        /// List of filter flags
        /// </summary>
        /// <remarks>
        /// The number of filter flags is given by the first two
        /// bits of <see cref="Flags"/>
        /// </remarks>
        public FilterFlag[]? FilterFlags { get; set; }

        /// <summary>
        /// This field contains as many null bytes as are needed to make
        /// the Block Header have the size specified in Block Header Size.
        /// </summary>
        public byte[]? HeaderPadding { get; set; }

        /// <summary>
        /// The CRC32 is calculated over everything in the Block Header
        /// field except the CRC32 field itself. It is stored as an
        /// unsigned 32-bit little endian integer.
        /// </summary>
        public uint Crc32 { get; set; }

        /// <summary>
        /// The format of Compressed Data depends on Block Flags and List
        /// of Filter Flags
        /// </summary>
        public byte[]? CompressedData { get; set; }

        /// <summary>
        /// Block Padding MUST contain 0-3 null bytes to make the size of
        /// the Block a multiple of four bytes. This can be needed when
        /// the size of Compressed Data is not a multiple of four.
        /// </summary>
        public byte[]? BlockPadding { get; set; }

        /// <summary>
        /// The type and size of the Check field depends on which bits
        /// are set in the Stream Flags field.
        ///
        /// The Check, when used, is calculated from the original
        /// uncompressed data.
        /// </summary>
        public byte[]? Check { get; set; }
    }
}

View File

@@ -2,6 +2,8 @@ namespace SabreTools.Data.Models.XZ
{
public static class Constants
{
public static readonly byte[] SignatureBytes = [0xFD, 0x37, 0x7A, 0x58, 0x5A, 0x00];
public static readonly byte[] HeaderSignatureBytes = [0xFD, 0x37, 0x7A, 0x58, 0x5A, 0x00];
public static readonly byte[] FooterSignatureBytes = [0x59, 0x5A];
}
}
}

View File

@@ -0,0 +1,47 @@
using System;

namespace SabreTools.Data.Models.XZ
{
    /// <summary>
    /// Block header flags: bits 0-1 encode the filter count minus one,
    /// bits 6-7 mark the optional size fields, bits 2-5 are reserved
    /// </summary>
    [Flags]
    public enum BlockFlags : byte
    {
        #region Bits 0-1 - Number of filters

        OneFilter = 0x00,
        TwoFilters = 0x01,

        /// <remarks>
        /// Misspelled member retained for source compatibility with
        /// existing callers; prefer <see cref="ThreeFilters"/>
        /// </remarks>
        ThreeFiltrs = 0x02,

        /// <summary>
        /// Correctly-spelled synonym for <see cref="ThreeFiltrs"/>
        /// (same underlying value)
        /// </summary>
        ThreeFilters = 0x02,

        FourFilters = 0x03,

        #endregion

        /// <summary>
        /// Compressed size field present
        /// </summary>
        CompressedSize = 0x40,

        /// <summary>
        /// Uncompressed size field present
        /// </summary>
        UncompressedSize = 0x80,
    }

    /// <summary>
    /// Stream flags read as a 16-bit little-endian value: the first byte on
    /// disk is always null, so the check type occupies the high byte here
    /// </summary>
    public enum HeaderFlags : ushort
    {
        /// <summary>No integrity check</summary>
        None = 0x0000,

        /// <summary>CRC-32 check (4 bytes)</summary>
        Crc32 = 0x0100,

        Reserved0x02 = 0x0200,
        Reserved0x03 = 0x0300,

        /// <summary>CRC-64 check (8 bytes)</summary>
        Crc64 = 0x0400,

        Reserved0x05 = 0x0500,
        Reserved0x06 = 0x0600,
        Reserved0x07 = 0x0700,
        Reserved0x08 = 0x0800,
        Reserved0x09 = 0x0900,

        /// <summary>SHA-256 check (32 bytes)</summary>
        Sha256 = 0x0A00,

        Reserved0x0B = 0x0B00,
        Reserved0x0C = 0x0C00,
        Reserved0x0D = 0x0D00,
        Reserved0x0E = 0x0E00,
        Reserved0x0F = 0x0F00,
    }
}

View File

@@ -0,0 +1,23 @@
namespace SabreTools.Data.Models.XZ
{
    /// <summary>
    /// Single entry in the list of filter flags within an XZ block header
    /// </summary>
    public class FilterFlag
    {
        /// <summary>
        /// Filter ID
        /// </summary>
        /// <remarks>Stored as a variable-length integer</remarks>
        public ulong FilterID { get; set; }

        /// <summary>
        /// Size of the <see cref="Properties"/> field in bytes
        /// </summary>
        /// <remarks>Stored as a variable-length integer</remarks>
        public ulong SizeOfProperties { get; set; }

        /// <summary>
        /// Properties of the filter whose length is given by
        /// <see cref="SizeOfProperties"/>
        /// </summary>
        public byte[]? Properties { get; set; }
    }
}

View File

@@ -0,0 +1,39 @@
namespace SabreTools.Data.Models.XZ
{
    /// <summary>
    /// Represents the post-block data in the stream
    /// </summary>
    public class Footer
    {
        /// <summary>
        /// The CRC32 is calculated from the Backward Size and Stream Flags
        /// fields. It is stored as an unsigned 32-bit little endian
        /// integer.
        /// </summary>
        public uint Crc32 { get; set; }

        /// <summary>
        /// Backward Size is stored as a 32-bit little endian integer,
        /// which indicates the size of the Index field as multiple of
        /// four bytes, minimum value being four bytes.
        /// </summary>
        /// <remarks>
        /// The real index size can be calculated by the following:
        /// (BackwardSize + 1) * 4
        /// </remarks>
        public uint BackwardSize { get; set; }

        /// <summary>
        /// This is a copy of the Stream Flags field from the Stream
        /// Header. The information stored to Stream Flags is needed
        /// when parsing the Stream backwards.
        /// </summary>
        public HeaderFlags Flags { get; set; }

        /// <summary>
        /// Footer magic number ("YZ")
        /// </summary>
        /// <remarks>2 bytes</remarks>
        public byte[]? Signature { get; set; }
    }
}

View File

@@ -0,0 +1,27 @@
namespace SabreTools.Data.Models.XZ
{
    /// <summary>
    /// Represents the pre-block data in the stream
    /// </summary>
    public class Header
    {
        /// <summary>
        /// Header magic number (0xFD, '7', 'z', 'X', 'Z', 0x00)
        /// </summary>
        /// <remarks>6 bytes</remarks>
        public byte[]? Signature { get; set; }

        /// <summary>
        /// The first byte of Stream Flags is always a null byte. In the
        /// future, this byte may be used to indicate a new Stream version
        /// or other Stream properties. The second byte carries the check
        /// type, which lands in the high byte of this 16-bit value.
        /// </summary>
        public HeaderFlags Flags { get; set; }

        /// <summary>
        /// The CRC32 is calculated from the Stream Flags field. It is
        /// stored as an unsigned 32-bit little endian integer.
        /// </summary>
        public uint Crc32 { get; set; }
    }
}

View File

@@ -0,0 +1,36 @@
namespace SabreTools.Data.Models.XZ
{
    /// <summary>
    /// Index of block records, located between the last block and the footer
    /// </summary>
    public class Index
    {
        /// <summary>
        /// The value of Index Indicator is always 0x00
        /// </summary>
        public byte IndexIndicator { get; set; }

        /// <summary>
        /// This field indicates how many Records there are in the List
        /// of Records field, and thus how many Blocks there are in the
        /// Stream
        /// </summary>
        /// <remarks>Stored as a variable-length integer</remarks>
        public ulong NumberOfRecords { get; set; }

        /// <summary>
        /// One record per block
        /// </summary>
        public Record[]? Records { get; set; }

        /// <summary>
        /// This field MUST contain 0-3 null bytes to pad the Index to
        /// a multiple of four bytes.
        /// </summary>
        public byte[]? Padding { get; set; }

        /// <summary>
        /// The CRC32 is calculated over everything in the Index field
        /// except the CRC32 field itself. The CRC32 is stored as an
        /// unsigned 32-bit little endian integer.
        /// </summary>
        public uint Crc32 { get; set; }
    }
}

View File

@@ -0,0 +1,17 @@
namespace SabreTools.Data.Models.XZ
{
    /// <summary>
    /// Single index record describing one block's sizes
    /// </summary>
    public class Record
    {
        /// <summary>
        /// Unpadded size of the block
        /// </summary>
        /// <remarks>Stored as a variable-length integer</remarks>
        public ulong UnpaddedSize { get; set; }

        /// <summary>
        /// Uncompressed size of the block
        /// </summary>
        /// <remarks>Stored as a variable-length integer</remarks>
        public ulong UncompressedSize { get; set; }
    }
}

View File

@@ -75,6 +75,7 @@ namespace SabreTools.Serialization
Wrapper.WiseSectionHeader item => item.PrettyPrint(),
Wrapper.XeMID item => item.PrettyPrint(),
Wrapper.XMID item => item.PrettyPrint(),
Wrapper.XZ item => item.PrettyPrint(),
Wrapper.XZP item => item.PrettyPrint(),
_ => null,
};
@@ -130,6 +131,7 @@ namespace SabreTools.Serialization
Wrapper.WiseSectionHeader item => item.ExportJSON(),
Wrapper.XeMID item => item.ExportJSON(),
Wrapper.XMID item => item.ExportJSON(),
Wrapper.XZ item => item.ExportJSON(),
Wrapper.XZP item => item.ExportJSON(),
_ => string.Empty,
};
@@ -558,6 +560,16 @@ namespace SabreTools.Serialization
return builder;
}
/// <summary>
/// Export the item information as pretty-printed text
/// </summary>
private static StringBuilder PrettyPrint(this Wrapper.XZ item)
{
var builder = new StringBuilder();
XZ.Print(builder, item.Model);
return builder;
}
/// <summary>
/// Export the item information as pretty-printed text
/// </summary>

View File

@@ -0,0 +1,133 @@
using System.Text;
using SabreTools.Data.Models.XZ;

namespace SabreTools.Data.Printers
{
    /// <summary>
    /// Pretty-printer for parsed XZ archives
    /// </summary>
    public class XZ : IPrinter<Archive>
    {
        /// <inheritdoc/>
        public void PrintInformation(StringBuilder builder, Archive model)
            => Print(builder, model);

        /// <summary>
        /// Append a human-readable dump of the archive (header, blocks,
        /// index, footer) to <paramref name="builder"/>
        /// </summary>
        public static void Print(StringBuilder builder, Archive archive)
        {
            builder.AppendLine("xz Information:");
            builder.AppendLine("-------------------------");
            builder.AppendLine();

            // Each section prints its own banner and handles null input itself
            Print(builder, archive.Header);
            Print(builder, archive.Blocks);
            Print(builder, archive.Index);
            Print(builder, archive.Footer);
        }

        // NOTE(review): the two-argument AppendLine(value, label) calls below
        // appear to be project-local StringBuilder extensions — labeled value
        // formatting is assumed to live there; confirm against the extension
        // definitions.

        /// <summary>Print the stream header section</summary>
        private static void Print(StringBuilder builder, Header? header)
        {
            builder.AppendLine(" Header Information:");
            builder.AppendLine(" -------------------------");
            if (header == null)
            {
                builder.AppendLine(" No header");
                builder.AppendLine();
                return;
            }

            builder.AppendLine(header.Signature, " Signature");
            builder.AppendLine($" Flags: {header.Flags} (0x{(ushort)header.Flags:X4})");
            builder.AppendLine(header.Crc32, " CRC-32");
            builder.AppendLine();
        }

        /// <summary>Print one entry per parsed block</summary>
        private static void Print(StringBuilder builder, Block[]? blocks)
        {
            builder.AppendLine(" Blocks Information:");
            builder.AppendLine(" -------------------------");
            if (blocks == null || blocks.Length == 0)
            {
                builder.AppendLine(" No blocks");
                builder.AppendLine();
                return;
            }

            for (int i = 0; i < blocks.Length; i++)
            {
                var block = blocks[i];
                builder.AppendLine($" Block {i}:");
                builder.AppendLine(block.HeaderSize, " Header size");
                builder.AppendLine($" Flags: {block.Flags} (0x{(byte)block.Flags:X2})");
                builder.AppendLine(block.CompressedSize, " Compressed size");
                builder.AppendLine(block.UncompressedSize, " Uncompressed size");
                // TODO: Print filter flags
                builder.AppendLine(block.HeaderPadding, " Header padding");
                builder.AppendLine(block.Crc32, " CRC-32");
                // Only the length is printed; the raw data would be unreadable
                if (block.CompressedData == null)
                    builder.AppendLine(" Compressed data length: [NULL]");
                else
                    builder.AppendLine(block.CompressedData.Length, " Compressed data length");
                builder.AppendLine(block.BlockPadding, " Block padding");
                builder.AppendLine(block.Check, " Check");
            }

            builder.AppendLine();
        }

        /// <summary>Print the index section, including its records</summary>
        private static void Print(StringBuilder builder, Index? index)
        {
            builder.AppendLine(" Index Information:");
            builder.AppendLine(" -------------------------");
            if (index == null)
            {
                builder.AppendLine(" No index");
                builder.AppendLine();
                return;
            }

            builder.AppendLine(index.IndexIndicator, " Index indicator");
            builder.AppendLine(index.NumberOfRecords, " Number of records");
            Print(builder, index.Records);
            builder.AppendLine(index.Padding, " Padding");
            builder.AppendLine(index.Crc32, " CRC-32");
            builder.AppendLine();
        }

        /// <summary>Print one entry per index record</summary>
        private static void Print(StringBuilder builder, Record[]? records)
        {
            builder.AppendLine(" Records Information:");
            builder.AppendLine(" -------------------------");
            if (records == null || records.Length == 0)
            {
                builder.AppendLine(" No records");
                builder.AppendLine();
                return;
            }

            for (int i = 0; i < records.Length; i++)
            {
                var record = records[i];
                builder.AppendLine($" Block {i}:");
                builder.AppendLine(record.UnpaddedSize, " Unpadded size");
                builder.AppendLine(record.UncompressedSize, " Uncompressed size");
            }
        }

        /// <summary>Print the stream footer section</summary>
        private static void Print(StringBuilder builder, Footer? footer)
        {
            builder.AppendLine(" Footer Information:");
            builder.AppendLine(" -------------------------");
            if (footer == null)
            {
                builder.AppendLine(" No footer");
                builder.AppendLine();
                return;
            }

            builder.AppendLine(footer.Crc32, " CRC-32");
            builder.AppendLine(footer.BackwardSize, " Backward size");
            builder.AppendLine($" Flags: {footer.Flags} (0x{(ushort)footer.Flags:X4})");
            builder.AppendLine(footer.Signature, " Signature");
            builder.AppendLine();
        }
    }
}

View File

@@ -85,6 +85,10 @@ namespace SabreTools.Serialization.Readers
// Cache the current position
long currentPosition = data.Position;
// Read the raw data first
obj.ExtraFieldBytes = data.ReadBytes(obj.ExtraLength);
data.Seek(currentPosition, SeekOrigin.Begin);
List<ExtraFieldData> extraFields = [];
while (data.Position < currentPosition + obj.ExtraLength)
{

View File

@@ -106,6 +106,10 @@ namespace SabreTools.Serialization.Readers
#endregion
// Cache the overlay offset
long endOfSectionData = optionalHeader?.SizeOfHeaders ?? 0;
Array.ForEach(pex.SectionTable, s => endOfSectionData += s.SizeOfRawData);
#region Symbol Table and String Table
offset = initialOffset + fileHeader.PointerToSymbolTable;
@@ -326,7 +330,9 @@ namespace SabreTools.Serialization.Readers
#region Hidden Resources
// If we have not used up the full size, parse the remaining chunk as a single resource
if (pex.ResourceDirectoryTable?.Entries != null && tableOffset < tableSize)
if (pex.ResourceDirectoryTable?.Entries != null
&& tableOffset < tableSize
&& (offset + tableOffset) != endOfSectionData)
{
// Resize the entry array to accomodate one more
var localEntries = pex.ResourceDirectoryTable.Entries;
@@ -1581,8 +1587,8 @@ namespace SabreTools.Serialization.Readers
// Read the name from the offset, if needed
if (nameEntry && obj.Entries[i].NameOffset > 0 && obj.Entries[i].NameOffset < tableData.Length)
{
offset = (int)obj.Entries[i].NameOffset;
obj.Entries[i].Name = ParseResourceDirectoryString(tableData, ref offset);
int nameOffset = (int)obj.Entries[i].NameOffset;
obj.Entries[i].Name = ParseResourceDirectoryString(tableData, ref nameOffset);
}
}

View File

@@ -0,0 +1,303 @@
using System;
using System.IO;
using SabreTools.Data.Extensions;
using SabreTools.Data.Models.XZ;
using SabreTools.IO.Extensions;
using static SabreTools.Data.Models.XZ.Constants;
namespace SabreTools.Serialization.Readers
{
public class XZ : BaseBinaryReader<Archive>
{
        /// <inheritdoc/>
        /// <remarks>
        /// Parse order: header first, then the footer (read from the last 12
        /// bytes), then the index located via the footer's backward size, and
        /// finally the blocks, sized using the index records.
        /// NOTE(review): this assumes a single XZ stream with no trailing
        /// stream padding; multi-stream files would need extra handling —
        /// confirm against intended inputs.
        /// </remarks>
        public override Archive? Deserialize(Stream? data)
        {
            // If the data is invalid
            if (data == null || !data.CanRead)
                return null;

            try
            {
                // Cache the current offset
                // NOTE(review): initialOffset is currently unused; confirm
                // whether it was intended for relative seeks before removing
                long initialOffset = data.Position;

                // Create a new archive to fill
                var archive = new Archive();

                #region Header

                // Try to parse the header and validate the 6-byte magic
                var header = ParseHeader(data);
                if (!header.Signature.EqualsExactly(HeaderSignatureBytes))
                    return null;

                // Set the stream header
                archive.Header = header;

                // Cache the current offset
                long endOfHeader = data.Position;

                #endregion

                #region Footer

                // Seek to the start of the footer (fixed 12 bytes from the end)
                data.Seek(-12, SeekOrigin.End);

                // Cache the current offset
                long startOfFooter = data.Position;

                // Try to parse the footer and validate the "YZ" magic
                var footer = ParseFooter(data);
                if (!footer.Signature.EqualsExactly(FooterSignatureBytes))
                    return null;

                // Set the footer
                archive.Footer = footer;

                #endregion

                #region Index

                // Seek to the start of the index; the footer encodes the
                // index size as (BackwardSize + 1) * 4 bytes
                long indexOffset = startOfFooter - ((footer.BackwardSize + 1) * 4);
                data.Seek(indexOffset, SeekOrigin.Begin);

                // Try to parse the index; the indicator byte must be 0x00
                var index = ParseIndex(data);
                if (index.IndexIndicator != 0x00)
                    return null;
                if (index.Records == null)
                    return null;

                // Set the index
                archive.Index = index;

                #endregion

                #region Blocks

                // Seek to the start of the blocks
                data.Seek(endOfHeader, SeekOrigin.Begin);

                // Create the block array; the index holds one record per block
                int blockCount = index.Records.Length;
                archive.Blocks = new Block[blockCount];

                // Try to parse the blocks
                for (int i = 0; i < archive.Blocks.Length; i++)
                {
                    // Get the record for this block
                    var record = index.Records[i];

                    // Try to parse the block
                    archive.Blocks[i] = ParseBlock(data, header.Flags, record.UnpaddedSize);
                }

                #endregion

                return archive;
            }
            catch
            {
                // Ignore the actual error
                return null;
            }
        }
/// <summary>
/// Parse a Stream into a Header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Header on success, null on error</returns>
public static Header ParseHeader(Stream data)
{
var obj = new Header();
obj.Signature = data.ReadBytes(6);
obj.Flags = (HeaderFlags)data.ReadUInt16LittleEndian();
obj.Crc32 = data.ReadUInt32LittleEndian();
return obj;
}
/// <summary>
/// Parse a Stream into a Block
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="headerFlags">HeaderFlags used to determine the size of the check field</param>
/// <param name="unpaddedSize">Unpadded data size from the index</param>
/// <returns>Filled Block on success, null on error</returns>
public static Block ParseBlock(Stream data, HeaderFlags headerFlags, ulong unpaddedSize)
{
    // Cache the current offset
    long currentOffset = data.Position;

    // Determine the size of the check field from the stream flags
    int checkSize = 0;
    if (headerFlags == HeaderFlags.Crc32)
        checkSize = 4;
    else if (headerFlags == HeaderFlags.Crc64)
        checkSize = 8;
    else if (headerFlags == HeaderFlags.Sha256)
        checkSize = 32;

    var obj = new Block();

    // The encoded header size is (real size / 4) - 1
    obj.HeaderSize = data.ReadByteValue();
    int realHeaderSize = (obj.HeaderSize + 1) * 4;

    obj.Flags = (BlockFlags)data.ReadByteValue();

    // Compressed and uncompressed sizes are only stored when flagged
#if NET20 || NET35
    if ((obj.Flags & BlockFlags.CompressedSize) != 0)
#else
    if (obj.Flags.HasFlag(BlockFlags.CompressedSize))
#endif
        obj.CompressedSize = ParseVariableLength(data);

#if NET20 || NET35
    if ((obj.Flags & BlockFlags.UncompressedSize) != 0)
#else
    if (obj.Flags.HasFlag(BlockFlags.UncompressedSize))
#endif
        obj.UncompressedSize = ParseVariableLength(data);

    // The low two bits of the flags encode the filter count minus one
    int filterCount = ((byte)obj.Flags & 0x03) + 1;

    // Try to parse the filters
    obj.FilterFlags = new FilterFlag[filterCount];
    for (int i = 0; i < obj.FilterFlags.Length; i++)
    {
        obj.FilterFlags[i] = ParseFilterFlag(data);
    }

    // Parse the header padding as needed, adjusting for the CRC-32 size
    int paddingLength = realHeaderSize - (int)(data.Position - currentOffset) - 4;
    if (paddingLength >= 0)
        obj.HeaderPadding = data.ReadBytes(paddingLength);

    obj.Crc32 = data.ReadUInt32LittleEndian();

    // Determine the compressed size, deriving it from the unpadded size
    // when the block header does not store it explicitly
    ulong compressedSize = obj.CompressedSize != 0
        ? obj.CompressedSize
        : unpaddedSize - (ulong)(realHeaderSize + checkSize);

    // Guard against sizes that cannot be represented as an Int32 instead
    // of truncating; TODO: how to handle blocks larger than Int32.MaxValue?
    if (compressedSize > 0 && compressedSize <= int.MaxValue)
        obj.CompressedData = data.ReadBytes((int)compressedSize);

    // Block padding aligns the block to a multiple of 4 bytes; when the
    // unpadded size is already aligned there is no padding to read.
    paddingLength = (int)((4 - (unpaddedSize % 4)) % 4);
    if (paddingLength > 0)
        obj.BlockPadding = data.ReadBytes(paddingLength);

    // Read the check value as needed
    obj.Check = data.ReadBytes(checkSize);

    return obj;
}
/// <summary>
/// Parse a Stream into a FilterFlag
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled FilterFlag on success, null on error</returns>
public static FilterFlag ParseFilterFlag(Stream data)
{
    // The filter ID and property size are XZ variable-length integers
    var flag = new FilterFlag
    {
        FilterID = ParseVariableLength(data),
        SizeOfProperties = ParseVariableLength(data),
    };

    // The raw property bytes immediately follow their size field
    flag.Properties = data.ReadBytes((int)flag.SizeOfProperties);
    return flag;
}
/// <summary>
/// Parse a Stream into a Index
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Index on success, null on error</returns>
public static Data.Models.XZ.Index ParseIndex(Stream data)
{
    // Cache the current offset
    long currentOffset = data.Position;

    var obj = new Data.Models.XZ.Index();

    obj.IndexIndicator = data.ReadByteValue();
    obj.NumberOfRecords = ParseVariableLength(data);

    // One record per block in the stream
    obj.Records = new Record[obj.NumberOfRecords];
    for (int i = 0; i < obj.Records.Length; i++)
    {
        obj.Records[i] = ParseRecord(data);
    }

    // Index padding aligns the index to a multiple of 4 bytes; when the
    // index is already aligned there is no padding to read.
    int paddingLength = (4 - (int)((data.Position - currentOffset) % 4)) % 4;
    if (paddingLength > 0)
        obj.Padding = data.ReadBytes(paddingLength);

    obj.Crc32 = data.ReadUInt32LittleEndian();
    return obj;
}
/// <summary>
/// Parse a Stream into a Record
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Record on success, null on error</returns>
public static Record ParseRecord(Stream data)
{
    // Both fields are XZ variable-length integers; read order matters
    var record = new Record
    {
        UnpaddedSize = ParseVariableLength(data),
        UncompressedSize = ParseVariableLength(data),
    };
    return record;
}
/// <summary>
/// Parse a Stream into a Footer
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Footer on success, null on error</returns>
public static Footer ParseFooter(Stream data)
{
    // Read the fixed 12-byte stream footer fields in on-disk order:
    // CRC-32, backward size, stream flags, then the 2-byte magic
    var footer = new Footer
    {
        Crc32 = data.ReadUInt32LittleEndian(),
        BackwardSize = data.ReadUInt32LittleEndian(),
        Flags = (HeaderFlags)data.ReadUInt16LittleEndian(),
        Signature = data.ReadBytes(2),
    };
    return footer;
}
/// <summary>
/// Parse a variable-length number from the stream
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Decoded variable-length value</returns>
private static ulong ParseVariableLength(Stream data)
{
    // Remember where decoding started so the stream can be rewound precisely
    long startOffset = data.Position;

    // An XZ variable-length integer occupies at most 9 bytes; do not read
    // past the end of the stream
    int available = (int)Math.Min(data.Length - data.Position, 9);
    byte[] buffer = data.ReadBytes(available);

    // Decode the value, then reposition the stream just past the bytes
    // that were actually consumed by the decoder
    ulong value = buffer.DecodeVariableLength(available, out int consumed);
    data.Seek(startOffset + consumed, SeekOrigin.Begin);
    return value;
}
}
}

View File

@@ -14,7 +14,8 @@
<SuppressTfmSupportBuildWarnings>true</SuppressTfmSupportBuildWarnings>
<SymbolPackageFormat>snupkg</SymbolPackageFormat>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<Version>2.0.0</Version>
<Version>2.0.1</Version>
<WarningsNotAsErrors>NU5104</WarningsNotAsErrors>
<!-- Package Properties -->
<Authors>Matt Nadareski</Authors>
@@ -42,26 +43,19 @@
<TargetFrameworks>net6.0;net7.0;net8.0;net9.0</TargetFrameworks>
</PropertyGroup>
<!-- Set a build flag for Windows specifically -->
<PropertyGroup Condition="'$(RuntimeIdentifier)'=='win-x86'">
<DefineConstants>$(DefineConstants);WINX86</DefineConstants>
</PropertyGroup>
<PropertyGroup Condition="'$(RuntimeIdentifier)'=='win-x64'">
<DefineConstants>$(DefineConstants);WINX64</DefineConstants>
</PropertyGroup>
<!-- Exclude certain parts of external modules for by default -->
<PropertyGroup>
<DefaultItemExcludes>
$(DefaultItemExcludes);
**\AssemblyInfo.cs;
_EXTERNAL\stormlibsharp\lib\**;
_EXTERNAL\stormlibsharp\src\CascLibSharp\**;
_EXTERNAL\stormlibsharp\src\TestConsole\**
</DefaultItemExcludes>
</PropertyGroup>
<!-- Exclude all external modules for .NET Framework 2.0, .NET Framework 3.5, or non-Windows builds -->
<PropertyGroup Condition="$(TargetFramework.StartsWith(`net2`)) OR $(TargetFramework.StartsWith(`net3`)) OR !($(RuntimeIdentifier.StartsWith(`win-x86`)) OR $(RuntimeIdentifier.StartsWith(`win-x64`)))">
<PropertyGroup Condition="$(TargetFramework.StartsWith(`net2`)) OR $(TargetFramework.StartsWith(`net3`))">
<DefaultItemExcludes>
$(DefaultItemExcludes);
_EXTERNAL\**
@@ -85,11 +79,11 @@
</ItemGroup>
<ItemGroup>
<PackageReference Include="GrindCore.SharpCompress" Version="0.40.4-alpha" Condition="!$(TargetFramework.StartsWith(`net2`)) AND !$(TargetFramework.StartsWith(`net3`)) AND !$(TargetFramework.StartsWith(`net40`)) AND !$(TargetFramework.StartsWith(`net452`))" />
<PackageReference Include="NetLegacySupport.Numerics" Version="1.0.1" Condition="$(TargetFramework.StartsWith(`net2`)) OR $(TargetFramework.StartsWith(`net3`))" />
<PackageReference Include="Newtonsoft.Json" Version="13.0.4" />
<PackageReference Include="SabreTools.Hashing" Version="1.5.0" />
<PackageReference Include="SabreTools.IO" Version="1.7.5" />
<PackageReference Include="SharpCompress" Version="0.40.0" Condition="!$(TargetFramework.StartsWith(`net2`)) AND !$(TargetFramework.StartsWith(`net3`)) AND !$(TargetFramework.StartsWith(`net40`)) AND !$(TargetFramework.StartsWith(`net452`))" />
</ItemGroup>
<PackageReference Include="SabreTools.Hashing" Version="[1.5.0]" />
<PackageReference Include="SabreTools.IO" Version="[1.7.5]" />
</ItemGroup>
</Project>

View File

@@ -295,7 +295,7 @@ namespace SabreTools.Serialization
#region GZip
if (magic.StartsWith(new byte[] { Data.Models.GZIP.Constants.ID1, Data.Models.GZIP.Constants.ID2 }))
if (magic.StartsWith(Data.Models.GZIP.Constants.SignatureBytes))
return WrapperType.GZip;
if (extension.Equals("gz", StringComparison.OrdinalIgnoreCase))
@@ -788,7 +788,7 @@ namespace SabreTools.Serialization
#region XZ
if (magic.StartsWith(Data.Models.XZ.Constants.SignatureBytes))
if (magic.StartsWith(Data.Models.XZ.Constants.HeaderSignatureBytes))
return WrapperType.XZ;
if (extension.Equals("xz", StringComparison.OrdinalIgnoreCase))

View File

@@ -1,5 +1,6 @@
using System.IO;
using SabreTools.Data.Models.GZIP;
using SabreTools.IO.Extensions;
namespace SabreTools.Serialization.Wrappers
{
@@ -14,6 +15,60 @@ namespace SabreTools.Serialization.Wrappers
#region Extension Properties
/// <summary>
/// Content CRC-32 as stored in the extra field
/// </summary>
/// <remarks>Only guaranteed for Torrent GZip format</remarks>
public byte[]? ContentCrc32
{
get
{
// Only valid for Torrent GZip
if (Header == null || !IsTorrentGZip)
return null;
// CRC-32 is the second packed field
int extraIndex = 0x10;
return Header.ExtraFieldBytes.ReadBytes(ref extraIndex, 0x04);
}
}
/// <summary>
/// Content MD5 as stored in the extra field
/// </summary>
/// <remarks>Only guaranteed for Torrent GZip format</remarks>
public byte[]? ContentMd5
{
get
{
// Only valid for Torrent GZip
if (Header == null || !IsTorrentGZip)
return null;
// MD5 is the first packed field
int extraIndex = 0x00;
return Header.ExtraFieldBytes.ReadBytes(ref extraIndex, 0x10);
}
}
/// <summary>
/// Content size as stored in the extra field
/// </summary>
/// <remarks>Only guaranteed for Torrent GZip format</remarks>
public ulong ContentSize
{
get
{
// Only valid for Torrent GZip
if (Header == null || !IsTorrentGZip)
return 0;
// MD5 is the first packed field
int extraIndex = 0x00;
return Header.ExtraFieldBytes.ReadUInt64LittleEndian(ref extraIndex);
}
}
/// <summary>
/// Offset to the compressed data
/// </summary>
@@ -54,6 +109,51 @@ namespace SabreTools.Serialization.Wrappers
/// <inheritdoc cref="Archive.Header"/>
public Header? Header => Model.Header;
/// <summary>
/// Indicates if the archive is in the standard
/// "Torrent GZip" format. This format is used by
/// some programs to store extended hashes in the
/// header while maintaining the format otherwise.
/// </summary>
public bool IsTorrentGZip
{
get
{
// If the header is invalid
if (Header == null)
return false;
// Torrent GZip uses normal deflate, not GZIP deflate
if (Header.CompressionMethod != CompressionMethod.Deflate)
return false;
// Only the extra field should be present
if (Header.Flags != Flags.FEXTRA)
return false;
// The modification should be 0x00000000, but some implementations
// do not set this correctly, so it is skipped.
// No extra flags are set
if (Header.ExtraFlags != 0x00)
return false;
// The OS should be FAT, regardless of the original platform, but
// some implementations do not set this correctly, so it is skipped.
// The extra field is non-standard, using the following format:
// - 0x00-0x0F - MD5 hash of the internal file
// - 0x10-0x13 - CRC-32 checksum of the internal file
// - 0x14-0x1B - Little-endian file size of the internal file
if (Header.ExtraLength != 0x1C)
return false;
if (Header.ExtraFieldBytes == null || Header.ExtraFieldBytes.Length != 0x1C)
return false;
return true;
}
}
/// <inheritdoc cref="Archive.Trailer"/>
public Trailer? Trailer => Model.Trailer;

View File

@@ -1,5 +1,5 @@
using System;
#if (NET452_OR_GREATER || NETCOREAPP) && (WINX86 || WINX64)
#if NET452_OR_GREATER || NETCOREAPP
using System.IO;
using StormLibSharp;
#endif
@@ -11,9 +11,16 @@ namespace SabreTools.Serialization.Wrappers
/// <inheritdoc/>
public bool Extract(string outputDirectory, bool includeDebug)
{
#if (NET452_OR_GREATER || NETCOREAPP) && (WINX86 || WINX64)
#if NET452_OR_GREATER || NETCOREAPP
try
{
// Limit use to Windows only
if (Environment.OSVersion.Platform != PlatformID.Win32NT)
{
Console.WriteLine("Extraction is not supported for this operating system!");
return false;
}
if (Filename == null || !File.Exists(Filename))
return false;

View File

@@ -132,7 +132,7 @@ namespace SabreTools.Serialization.Wrappers
extension = "xml";
break;
}
else if (overlaySample.StartsWith(Data.Models.XZ.Constants.SignatureBytes))
else if (overlaySample.StartsWith(Data.Models.XZ.Constants.HeaderSignatureBytes))
{
extension = "xz";
break;

View File

@@ -271,7 +271,7 @@ namespace SabreTools.Serialization.Wrappers
extension = "xml";
break;
}
else if (overlaySample.StartsWith(Data.Models.XZ.Constants.SignatureBytes))
else if (overlaySample.StartsWith(Data.Models.XZ.Constants.HeaderSignatureBytes))
{
extension = "xz";
break;
@@ -474,7 +474,7 @@ namespace SabreTools.Serialization.Wrappers
extension = "xml";
break;
}
else if (resourceSample.StartsWith(Data.Models.XZ.Constants.SignatureBytes))
else if (resourceSample.StartsWith(Data.Models.XZ.Constants.HeaderSignatureBytes))
{
extension = "xz";
break;

View File

@@ -90,7 +90,7 @@ namespace SabreTools.Serialization.Wrappers
// If the entry point matches with the start of a section, use that
int entryPointSection = FindEntryPointSectionIndex();
if (entryPointSection >= 0 && OptionalHeader.AddressOfEntryPoint == SectionTable[entryPointSection]?.VirtualAddress)
if (entryPointSection >= 0 && OptionalHeader.AddressOfEntryPoint == SectionTable[entryPointSection].VirtualAddress)
{
_entryPointData = GetSectionData(entryPointSection) ?? [];
return _entryPointData;
@@ -143,15 +143,15 @@ namespace SabreTools.Serialization.Wrappers
// Populate the raw header padding data based on the source
uint headerStartAddress = Stub.Header.NewExeHeaderAddr;
uint firstSectionAddress = uint.MaxValue;
foreach (var s in SectionTable)
foreach (var section in SectionTable)
{
if (s == null || s.PointerToRawData == 0)
if (section.PointerToRawData == 0)
continue;
if (s.PointerToRawData < headerStartAddress)
if (section.PointerToRawData < headerStartAddress)
continue;
if (s.PointerToRawData < firstSectionAddress)
firstSectionAddress = s.PointerToRawData;
if (section.PointerToRawData < firstSectionAddress)
firstSectionAddress = section.PointerToRawData;
}
// Check if the header length is more than 0 before reading data
@@ -328,15 +328,7 @@ namespace SabreTools.Serialization.Wrappers
// Search through all sections and find the furthest a section goes
long endOfSectionData = OptionalHeader.SizeOfHeaders;
foreach (var section in SectionTable)
{
// If we have an invalid section
if (section == null)
continue;
// Add the raw data size
endOfSectionData += section.SizeOfRawData;
}
Array.ForEach(SectionTable, s => endOfSectionData += s.SizeOfRawData);
// If we didn't find the end of section data
if (endOfSectionData <= 0)
@@ -527,11 +519,8 @@ namespace SabreTools.Serialization.Wrappers
_sectionNames = new string[SectionTable.Length];
for (int i = 0; i < _sectionNames.Length; i++)
{
var section = SectionTable[i];
if (section == null)
continue;
// TODO: Handle long section names with leading `/`
var section = SectionTable[i];
byte[]? sectionNameBytes = section.Name;
if (sectionNameBytes != null)
{
@@ -2046,9 +2035,6 @@ namespace SabreTools.Serialization.Wrappers
// Get the section data from the table
var section = SectionTable[index];
if (section == null)
return null;
uint address = section.VirtualAddress.ConvertVirtualAddress(SectionTable);
if (address == 0)
return null;

View File

@@ -73,7 +73,21 @@ namespace SabreTools.Serialization.Wrappers
if (data == null || !data.CanRead)
return null;
return new XZ(new Archive(), data);
try
{
// Cache the current offset
long currentOffset = data.Position;
var model = new Readers.XZ().Deserialize(data);
if (model == null)
return null;
return new XZ(model, data, currentOffset);
}
catch
{
return null;
}
}
#endregion