Compare commits

...

36 Commits
2.0.0 ... 2.0.2

Author SHA1 Message Date
Matt Nadareski
1d9e12183f Bump version 2025-10-07 12:42:06 -04:00
Matt Nadareski
aaa8bbe709 This was inconsistent too 2025-10-07 11:25:35 -04:00
Matt Nadareski
805d1b9ad8 Not sure why this part was inconsistent 2025-10-07 11:21:39 -04:00
Matt Nadareski
d24d3e5adb Remove now-unused constants 2025-10-07 10:48:31 -04:00
Matt Nadareski
d3a7d552c3 Use main feature pattern with InfoPrint 2025-10-07 10:20:57 -04:00
Matt Nadareski
9f1c5e2bd2 Use main feature pattern with ExtractionTool 2025-10-07 10:04:19 -04:00
Matt Nadareski
1ec4ea8354 Update packages 2025-10-07 09:37:42 -04:00
Matt Nadareski
e4fab52489 Use CommandLine library for executables 2025-10-06 09:32:01 -04:00
Matt Nadareski
e029fa4833 Skip warning around GC.SharpCompress inclusion 2025-10-05 17:02:44 -04:00
Matt Nadareski
2c3f229a6a Bump version 2025-10-05 16:59:33 -04:00
Matt Nadareski
3558d3532c Do not update offset on name offset 2025-10-02 13:33:51 -04:00
Matt Nadareski
ad5314dc22 Minor tweak 2025-10-01 20:08:33 -04:00
Matt Nadareski
eaa5bb5662 Sections can't be null 2025-10-01 20:06:05 -04:00
Matt Nadareski
fcdc703595 Update readme to be accurate again 2025-09-30 20:58:52 -04:00
Matt Nadareski
ef9fa562ab More BZip documenting 2025-09-30 20:35:40 -04:00
Matt Nadareski
ac285c48fe Start documenting BZip in code 2025-09-30 20:25:18 -04:00
Matt Nadareski
e57ad65210 Migrate to GrindCore fork of SharpCompress 2025-09-30 19:52:14 -04:00
Matt Nadareski
0fc3a30422 Print a couple more XZ fields 2025-09-30 14:09:40 -04:00
Matt Nadareski
49f6704694 Add initial XZ printer 2025-09-30 14:05:34 -04:00
Matt Nadareski
6df712c538 Add XZ parsing to wrapper 2025-09-30 13:56:21 -04:00
Matt Nadareski
bda3076a30 Further XZ parsing fixes 2025-09-30 13:56:11 -04:00
Matt Nadareski
89e8e7c706 Fix more XZ parsing; use read-from-end 2025-09-30 13:22:26 -04:00
Matt Nadareski
c10835d221 Start fixing XZ parsing 2025-09-30 13:08:53 -04:00
Matt Nadareski
a6801350ea Fix issue with XZ enum 2025-09-30 12:48:12 -04:00
Matt Nadareski
c7a5a62041 Require exact versions for build 2025-09-30 11:09:37 -04:00
Matt Nadareski
749b35e5cb Slight tweak to CHD v5 model 2025-09-30 10:59:44 -04:00
Matt Nadareski
3c520d33eb Add XZ reader, fix some minor issues 2025-09-30 09:47:19 -04:00
Matt Nadareski
635170a051 Add reference for BZ2 2025-09-29 23:38:40 -04:00
Matt Nadareski
9619311d11 Store variable-length numbers as ulong 2025-09-29 23:32:58 -04:00
Matt Nadareski
4bee14835c Fix last couple of commits 2025-09-29 23:12:23 -04:00
Matt Nadareski
f44059e16a Add XZ variable length helper methods 2025-09-29 23:07:05 -04:00
Matt Nadareski
bfb206a06d Add XZ models 2025-09-29 22:56:53 -04:00
Matt Nadareski
6b7b05eb31 Port some extensions for GZip from ST 2025-09-29 22:04:10 -04:00
Matt Nadareski
a9a2a04332 Store raw extras field for odd formatting 2025-09-29 21:49:20 -04:00
Matt Nadareski
50459645dd Add a couple GZ constants 2025-09-29 21:39:37 -04:00
Matt Nadareski
564386038f Try using Environment instead of compiler flags 2025-09-29 12:29:14 -04:00
45 changed files with 1915 additions and 756 deletions

View File

@@ -10,7 +10,7 @@
<Nullable>enable</Nullable>
<SuppressTfmSupportBuildWarnings>true</SuppressTfmSupportBuildWarnings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<Version>2.0.0</Version>
<Version>2.0.2</Version>
</PropertyGroup>
<!-- Support All Frameworks -->
@@ -27,14 +27,6 @@
<TargetFrameworks>net6.0;net7.0;net8.0;net9.0</TargetFrameworks>
</PropertyGroup>
<!-- Set a build flag for Windows specifically -->
<PropertyGroup Condition="'$(RuntimeIdentifier)'=='win-x86'">
<DefineConstants>$(DefineConstants);WINX86</DefineConstants>
</PropertyGroup>
<PropertyGroup Condition="'$(RuntimeIdentifier)'=='win-x64'">
<DefineConstants>$(DefineConstants);WINX64</DefineConstants>
</PropertyGroup>
<!-- These are needed for dealing with native Windows DLLs -->
<ItemGroup Condition="'$(RuntimeIdentifier)'=='win-x86'">
<ContentWithTargetPath Include="..\SabreTools.Serialization\runtimes\win-x86\native\CascLib.dll">
@@ -66,7 +58,8 @@
</ItemGroup>
<ItemGroup>
<PackageReference Include="SabreTools.IO" Version="1.7.5" />
<PackageReference Include="SabreTools.CommandLine" Version="[1.3.2]" />
<PackageReference Include="SabreTools.IO" Version="[1.7.6]" />
<PackageReference Include="System.Text.Encoding.CodePages" Version="9.0.9" Condition="!$(TargetFramework.StartsWith(`net2`)) AND !$(TargetFramework.StartsWith(`net3`)) AND !$(TargetFramework.StartsWith(`net40`)) AND !$(TargetFramework.StartsWith(`net452`))" />
</ItemGroup>

View File

@@ -0,0 +1,325 @@
using System;
using System.IO;
using SabreTools.CommandLine;
using SabreTools.CommandLine.Inputs;
using SabreTools.IO.Extensions;
using SabreTools.Serialization;
using SabreTools.Serialization.Wrappers;
namespace ExtractionTool.Features
{
internal sealed class MainFeature : Feature
{
#region Feature Definition
public const string DisplayName = "main";
/// <remarks>Flags are unused</remarks>
private static readonly string[] _flags = [];
/// <remarks>Description is unused</remarks>
private const string _description = "";
#endregion
#region Inputs
private const string _debugName = "debug";
internal readonly FlagInput DebugInput = new(_debugName, ["-d", "--debug"], "Enable debug mode");
private const string _outputPathName = "output-path";
internal readonly StringInput OutputPathInput = new(_outputPathName, ["-o", "--outdir"], "Set output path for extraction (required)");
#endregion
#region Properties
/// <summary>
/// Enable debug output for relevant operations
/// </summary>
public bool Debug { get; private set; }
/// <summary>
/// Output path for archive extraction
/// </summary>
public string OutputPath { get; private set; } = string.Empty;
#endregion
public MainFeature()
: base(DisplayName, _flags, _description)
{
RequiresInputs = true;
Add(DebugInput);
Add(OutputPathInput);
}
/// <inheritdoc/>
public override bool Execute()
{
// Get the options from the arguments
Debug = GetBoolean(_debugName);
OutputPath = GetString(_outputPathName) ?? string.Empty;
// Validate the output path
if (!ValidateExtractionPath())
return false;
// Loop through the input paths
for (int i = 0; i < Inputs.Count; i++)
{
string arg = Inputs[i];
ExtractPath(arg);
}
return true;
}
/// <inheritdoc/>
public override bool VerifyInputs() => Inputs.Count > 0;
/// <summary>
/// Wrapper to extract data for a single path
/// </summary>
/// <param name="path">File or directory path</param>
private void ExtractPath(string path)
{
// Normalize by getting the full path
path = Path.GetFullPath(path);
Console.WriteLine($"Checking possible path: {path}");
// Check if the file or directory exists
if (File.Exists(path))
{
ExtractFile(path);
}
else if (Directory.Exists(path))
{
foreach (string file in path.SafeEnumerateFiles("*", SearchOption.AllDirectories))
{
ExtractFile(file);
}
}
else
{
Console.WriteLine($"{path} does not exist, skipping...");
}
}
/// <summary>
/// Print information for a single file, if possible
/// </summary>
/// <param name="path">File path</param>
private void ExtractFile(string file)
{
Console.WriteLine($"Attempting to extract all files from {file}");
using Stream stream = File.Open(file, FileMode.Open, FileAccess.Read, FileShare.ReadWrite);
// Get the extension for certain checks
string extension = Path.GetExtension(file).ToLower().TrimStart('.');
// Get the first 16 bytes for matching
byte[] magic = new byte[16];
try
{
int read = stream.Read(magic, 0, 16);
stream.Seek(0, SeekOrigin.Begin);
}
catch (Exception ex)
{
if (Debug) Console.Error.WriteLine(ex);
return;
}
// Get the file type
WrapperType ft = WrapperFactory.GetFileType(magic, extension);
var wrapper = WrapperFactory.CreateWrapper(ft, stream);
// Create the output directory
Directory.CreateDirectory(OutputPath);
// Print the preamble
Console.WriteLine($"Attempting to extract from '{wrapper?.Description() ?? "UNKNOWN"}'");
Console.WriteLine();
switch (wrapper)
{
// 7-zip
case SevenZip sz:
sz.Extract(OutputPath, Debug);
break;
// BFPK archive
case BFPK bfpk:
bfpk.Extract(OutputPath, Debug);
break;
// BSP
case BSP bsp:
bsp.Extract(OutputPath, Debug);
break;
// bzip2
case BZip2 bzip2:
bzip2.Extract(OutputPath, Debug);
break;
// CFB
case CFB cfb:
cfb.Extract(OutputPath, Debug);
break;
// GCF
case GCF gcf:
gcf.Extract(OutputPath, Debug);
break;
// gzip
case GZip gzip:
gzip.Extract(OutputPath, Debug);
break;
// InstallShield Archive V3 (Z)
case InstallShieldArchiveV3 isv3:
isv3.Extract(OutputPath, Debug);
break;
// IS-CAB archive
case InstallShieldCabinet iscab:
iscab.Extract(OutputPath, Debug);
break;
// LZ-compressed file, KWAJ variant
case LZKWAJ kwaj:
kwaj.Extract(OutputPath, Debug);
break;
// LZ-compressed file, QBasic variant
case LZQBasic qbasic:
qbasic.Extract(OutputPath, Debug);
break;
// LZ-compressed file, SZDD variant
case LZSZDD szdd:
szdd.Extract(OutputPath, Debug);
break;
// Microsoft Cabinet archive
case MicrosoftCabinet mscab:
mscab.Extract(OutputPath, Debug);
break;
// MoPaQ (MPQ) archive
case MoPaQ mpq:
mpq.Extract(OutputPath, Debug);
break;
// New Executable
case NewExecutable nex:
nex.Extract(OutputPath, Debug);
break;
// PAK
case PAK pak:
pak.Extract(OutputPath, Debug);
break;
// PFF
case PFF pff:
pff.Extract(OutputPath, Debug);
break;
// PKZIP
case PKZIP pkzip:
pkzip.Extract(OutputPath, Debug);
break;
// Portable Executable
case PortableExecutable pex:
pex.Extract(OutputPath, Debug);
break;
// Quantum
case Quantum quantum:
quantum.Extract(OutputPath, Debug);
break;
// RAR
case RAR rar:
rar.Extract(OutputPath, Debug);
break;
// SGA
case SGA sga:
sga.Extract(OutputPath, Debug);
break;
// Tape Archive
case TapeArchive tar:
tar.Extract(OutputPath, Debug);
break;
// VBSP
case VBSP vbsp:
vbsp.Extract(OutputPath, Debug);
break;
// VPK
case VPK vpk:
vpk.Extract(OutputPath, Debug);
break;
// WAD3
case WAD3 wad:
wad.Extract(OutputPath, Debug);
break;
// xz
case XZ xz:
xz.Extract(OutputPath, Debug);
break;
// XZP
case XZP xzp:
xzp.Extract(OutputPath, Debug);
break;
// Everything else
default:
Console.WriteLine("Not a supported extractable file format, skipping...");
Console.WriteLine();
break;
}
}
/// <summary>
/// Validate the extraction path
/// </summary>
private bool ValidateExtractionPath()
{
// Null or empty output path
if (string.IsNullOrEmpty(OutputPath))
{
Console.WriteLine("Output directory required for extraction!");
Console.WriteLine();
return false;
}
// Malformed output path or invalid location
try
{
OutputPath = Path.GetFullPath(OutputPath);
Directory.CreateDirectory(OutputPath);
}
catch
{
Console.WriteLine("Output directory could not be created!");
Console.WriteLine();
return false;
}
return true;
}
}
}

View File

@@ -1,129 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
namespace ExtractionTool
{
/// <summary>
/// Set of options for the test executable
/// </summary>
internal sealed class Options
{
#region Properties
/// <summary>
/// Enable debug output for relevant operations
/// </summary>
public bool Debug { get; private set; } = false;
/// <summary>
/// Set of input paths to use for operations
/// </summary>
public List<string> InputPaths { get; private set; } = [];
/// <summary>
/// Output path for archive extraction
/// </summary>
public string OutputPath { get; private set; } = string.Empty;
#endregion
/// <summary>
/// Parse commandline arguments into an Options object
/// </summary>
public static Options? ParseOptions(string[] args)
{
// If we have invalid arguments
if (args == null || args.Length == 0)
return null;
// Create an Options object
var options = new Options();
// Parse the options and paths
for (int index = 0; index < args.Length; index++)
{
string arg = args[index];
switch (arg)
{
case "-?":
case "-h":
case "--help":
return null;
case "-d":
case "--debug":
options.Debug = true;
break;
case "-o":
case "--outdir":
options.OutputPath = index + 1 < args.Length ? args[++index] : string.Empty;
break;
default:
options.InputPaths.Add(arg);
break;
}
}
// Validate we have any input paths to work on
if (options.InputPaths.Count == 0)
{
Console.WriteLine("At least one path is required!");
return null;
}
// Validate the output path
bool validPath = ValidateExtractionPath(options);
if (!validPath)
return null;
return options;
}
/// <summary>
/// Display help text
/// </summary>
public static void DisplayHelp()
{
Console.WriteLine("Extraction Tool");
Console.WriteLine();
Console.WriteLine("ExtractionTool.exe <options> file|directory ...");
Console.WriteLine();
Console.WriteLine("Options:");
Console.WriteLine("-?, -h, --help Display this help text and quit");
Console.WriteLine("-d, --debug Enable debug mode");
Console.WriteLine("-o, --outdir [PATH] Set output path for extraction (required)");
}
/// <summary>
/// Validate the extraction path
/// </summary>
private static bool ValidateExtractionPath(Options options)
{
// Null or empty output path
if (string.IsNullOrEmpty(options.OutputPath))
{
Console.WriteLine("Output directory required for extraction!");
Console.WriteLine();
return false;
}
// Malformed output path or invalid location
try
{
options.OutputPath = Path.GetFullPath(options.OutputPath);
Directory.CreateDirectory(options.OutputPath);
}
catch
{
Console.WriteLine("Output directory could not be created!");
Console.WriteLine();
return false;
}
return true;
}
}
}

View File

@@ -1,250 +1,79 @@
using System;
using System.IO;
using SabreTools.IO.Extensions;
using SabreTools.Serialization;
using SabreTools.Serialization.Wrappers;
using System.Collections.Generic;
using ExtractionTool.Features;
using SabreTools.CommandLine;
using SabreTools.CommandLine.Features;
namespace ExtractionTool
{
class Program
public static class Program
{
static void Main(string[] args)
public static void Main(string[] args)
{
#if NET462_OR_GREATER || NETCOREAPP
// Register the codepages
System.Text.Encoding.RegisterProvider(System.Text.CodePagesEncodingProvider.Instance);
#endif
// Get the options from the arguments
var options = Options.ParseOptions(args);
// Create the command set
var mainFeature = new MainFeature();
var commandSet = CreateCommands(mainFeature);
// If we have an invalid state
if (options == null)
// If we have no args, show the help and quit
if (args == null || args.Length == 0)
{
Options.DisplayHelp();
commandSet.OutputAllHelp();
return;
}
// Loop through the input paths
foreach (string inputPath in options.InputPaths)
// Cache the first argument and starting index
string featureName = args[0];
// Try processing the standalone arguments
var topLevel = commandSet.GetTopLevel(featureName);
switch (topLevel)
{
ExtractPath(inputPath, options.OutputPath, options.Debug);
}
}
// Standalone Options
case Help help: help.ProcessArgs(args, 0, commandSet); return;
/// <summary>
/// Wrapper to extract data for a single path
/// </summary>
/// <param name="path">File or directory path</param>
/// <param name="outputDirectory">Output directory path</param>
/// <param name="includeDebug">Enable including debug information</param>
private static void ExtractPath(string path, string outputDirectory, bool includeDebug)
{
// Normalize by getting the full path
path = Path.GetFullPath(path);
Console.WriteLine($"Checking possible path: {path}");
// Check if the file or directory exists
if (File.Exists(path))
{
ExtractFile(path, outputDirectory, includeDebug);
}
else if (Directory.Exists(path))
{
foreach (string file in IOExtensions.SafeEnumerateFiles(path, "*", SearchOption.AllDirectories))
{
ExtractFile(file, outputDirectory, includeDebug);
}
}
else
{
Console.WriteLine($"{path} does not exist, skipping...");
}
}
/// <summary>
/// Print information for a single file, if possible
/// </summary>
private static void ExtractFile(string file, string outputDirectory, bool includeDebug)
{
Console.WriteLine($"Attempting to extract all files from {file}");
using Stream stream = File.Open(file, FileMode.Open, FileAccess.Read, FileShare.ReadWrite);
// Get the extension for certain checks
string extension = Path.GetExtension(file).ToLower().TrimStart('.');
// Get the first 16 bytes for matching
byte[] magic = new byte[16];
try
{
int read = stream.Read(magic, 0, 16);
stream.Seek(0, SeekOrigin.Begin);
}
catch (Exception ex)
{
if (includeDebug) Console.Error.WriteLine(ex);
return;
}
// Get the file type
WrapperType ft = WrapperFactory.GetFileType(magic, extension);
var wrapper = WrapperFactory.CreateWrapper(ft, stream);
// Create the output directory
Directory.CreateDirectory(outputDirectory);
// Print the preamble
Console.WriteLine($"Attempting to extract from '{wrapper?.Description() ?? "UNKNOWN"}'");
Console.WriteLine();
switch (wrapper)
{
// 7-zip
case SevenZip sz:
sz.Extract(outputDirectory, includeDebug);
break;
// BFPK archive
case BFPK bfpk:
bfpk.Extract(outputDirectory, includeDebug);
break;
// BSP
case BSP bsp:
bsp.Extract(outputDirectory, includeDebug);
break;
// bzip2
case BZip2 bzip2:
bzip2.Extract(outputDirectory, includeDebug);
break;
// CFB
case CFB cfb:
cfb.Extract(outputDirectory, includeDebug);
break;
// GCF
case GCF gcf:
gcf.Extract(outputDirectory, includeDebug);
break;
// gzip
case GZip gzip:
gzip.Extract(outputDirectory, includeDebug);
break;
// InstallShield Archive V3 (Z)
case InstallShieldArchiveV3 isv3:
isv3.Extract(outputDirectory, includeDebug);
break;
// IS-CAB archive
case InstallShieldCabinet iscab:
iscab.Extract(outputDirectory, includeDebug);
break;
// LZ-compressed file, KWAJ variant
case LZKWAJ kwaj:
kwaj.Extract(outputDirectory, includeDebug);
break;
// LZ-compressed file, QBasic variant
case LZQBasic qbasic:
qbasic.Extract(outputDirectory, includeDebug);
break;
// LZ-compressed file, SZDD variant
case LZSZDD szdd:
szdd.Extract(outputDirectory, includeDebug);
break;
// Microsoft Cabinet archive
case MicrosoftCabinet mscab:
mscab.Extract(outputDirectory, includeDebug);
break;
// MoPaQ (MPQ) archive
case MoPaQ mpq:
mpq.Extract(outputDirectory, includeDebug);
break;
// New Executable
case NewExecutable nex:
nex.Extract(outputDirectory, includeDebug);
break;
// PAK
case PAK pak:
pak.Extract(outputDirectory, includeDebug);
break;
// PFF
case PFF pff:
pff.Extract(outputDirectory, includeDebug);
break;
// PKZIP
case PKZIP pkzip:
pkzip.Extract(outputDirectory, includeDebug);
break;
// Portable Executable
case PortableExecutable pex:
pex.Extract(outputDirectory, includeDebug);
break;
// Quantum
case Quantum quantum:
quantum.Extract(outputDirectory, includeDebug);
break;
// RAR
case RAR rar:
rar.Extract(outputDirectory, includeDebug);
break;
// SGA
case SGA sga:
sga.Extract(outputDirectory, includeDebug);
break;
// Tape Archive
case TapeArchive tar:
tar.Extract(outputDirectory, includeDebug);
break;
// VBSP
case VBSP vbsp:
vbsp.Extract(outputDirectory, includeDebug);
break;
// VPK
case VPK vpk:
vpk.Extract(outputDirectory, includeDebug);
break;
// WAD3
case WAD3 wad:
wad.Extract(outputDirectory, includeDebug);
break;
// xz
case XZ xz:
xz.Extract(outputDirectory, includeDebug);
break;
// XZP
case XZP xzp:
xzp.Extract(outputDirectory, includeDebug);
break;
// Everything else
// Default Behavior
default:
Console.WriteLine("Not a supported extractable file format, skipping...");
Console.WriteLine();
if (!mainFeature.ProcessArgs(args, 0))
{
commandSet.OutputAllHelp();
return;
}
else if (!mainFeature.VerifyInputs())
{
Console.Error.WriteLine("At least one input is required");
commandSet.OutputAllHelp();
return;
}
mainFeature.Execute();
break;
}
}
/// <summary>
/// Create the command set for the program
/// </summary>
private static CommandSet CreateCommands(MainFeature mainFeature)
{
List<string> header = [
"Extraction Tool",
string.Empty,
"ExtractionTool <options> file|directory ...",
string.Empty,
];
var commandSet = new CommandSet(header);
commandSet.Add(new Help(["-?", "-h", "--help"]));
commandSet.Add(mainFeature.DebugInput);
commandSet.Add(mainFeature.OutputPathInput);
return commandSet;
}
}
}

View File

@@ -0,0 +1,279 @@
using System;
using System.IO;
using System.Text;
using SabreTools.CommandLine;
using SabreTools.CommandLine.Inputs;
using SabreTools.Hashing;
using SabreTools.IO.Extensions;
using SabreTools.Serialization;
using SabreTools.Serialization.Wrappers;
namespace InfoPrint.Features
{
internal sealed class MainFeature : Feature
{
#region Feature Definition
public const string DisplayName = "main";
/// <remarks>Flags are unused</remarks>
private static readonly string[] _flags = [];
/// <remarks>Description is unused</remarks>
private const string _description = "";
#endregion
#region Inputs
private const string _debugName = "debug";
internal readonly FlagInput DebugInput = new(_debugName, ["-d", "--debug"], "Enable debug mode");
private const string _fileOnlyName = "file-only";
internal readonly FlagInput FileOnlyInput = new(_fileOnlyName, ["-f", "--file"], "Print to file only");
private const string _hashName = "hash";
internal readonly FlagInput HashInput = new(_hashName, ["-c", "--hash"], "Output file hashes");
#if NETCOREAPP
private const string _jsonName = "json";
internal readonly FlagInput JsonInput = new(_jsonName, ["-j", "--json"], "Print info as JSON");
#endif
#endregion
/// <summary>
/// Enable debug output for relevant operations
/// </summary>
public bool Debug { get; private set; }
/// <summary>
/// Output information to file only, skip printing to console
/// </summary>
public bool FileOnly { get; private set; }
/// <summary>
/// Print external file hashes
/// </summary>
public bool Hash { get; private set; }
#if NETCOREAPP
/// <summary>
/// Enable JSON output
/// </summary>
public bool Json { get; private set; }
#endif
public MainFeature()
: base(DisplayName, _flags, _description)
{
RequiresInputs = true;
Add(DebugInput);
Add(HashInput);
Add(FileOnlyInput);
#if NETCOREAPP
Add(JsonInput);
#endif
}
/// <inheritdoc/>
public override bool Execute()
{
// Get the options from the arguments
Debug = GetBoolean(_debugName);
Hash = GetBoolean(_hashName);
FileOnly = GetBoolean(_fileOnlyName);
#if NETCOREAPP
Json = GetBoolean(_jsonName);
#endif
// Loop through the input paths
for (int i = 0; i < Inputs.Count; i++)
{
string arg = Inputs[i];
PrintPathInfo(arg);
}
return true;
}
/// <inheritdoc/>
public override bool VerifyInputs() => Inputs.Count > 0;
/// <summary>
/// Wrapper to print information for a single path
/// </summary>
/// <param name="path">File or directory path</param>
private void PrintPathInfo(string path)
{
Console.WriteLine($"Checking possible path: {path}");
// Check if the file or directory exists
if (File.Exists(path))
{
PrintFileInfo(path);
}
else if (Directory.Exists(path))
{
foreach (string file in path.SafeEnumerateFiles("*", SearchOption.AllDirectories))
{
PrintFileInfo(file);
}
}
else
{
Console.WriteLine($"{path} does not exist, skipping...");
}
}
/// <summary>
/// Print information for a single file, if possible
/// </summary>
/// <param name="file">File path</param>
private void PrintFileInfo(string file)
{
Console.WriteLine($"Attempting to print info for {file}");
// Get the base info output name
string filenameBase = $"info-{DateTime.Now:yyyy-MM-dd_HHmmss.ffff}";
// If we have the hash flag
if (Hash)
{
var hashBuilder = PrintHashInfo(file);
if (hashBuilder != null)
{
// Create the output data
string hashData = hashBuilder.ToString();
// Write the output data
using var hsw = new StreamWriter(File.OpenWrite($"{filenameBase}.hashes"));
hsw.WriteLine(hashData);
hsw.Flush();
}
}
try
{
using Stream stream = File.Open(file, FileMode.Open, FileAccess.Read, FileShare.ReadWrite);
// Read the first 8 bytes
byte[]? magic = stream.ReadBytes(8);
stream.Seek(0, SeekOrigin.Begin);
// Get the file type
string extension = Path.GetExtension(file).TrimStart('.');
WrapperType ft = WrapperFactory.GetFileType(magic ?? [], extension);
// Print out the file format
Console.WriteLine($"File format found: {ft}");
// Setup the wrapper to print
var wrapper = WrapperFactory.CreateWrapper(ft, stream);
// If we don't have a wrapper
if (wrapper == null)
{
Console.WriteLine($"Either {ft} is not supported or something went wrong during parsing!");
Console.WriteLine();
return;
}
#if NETCOREAPP
// If we have the JSON flag
if (Json)
{
// Create the output data
string serializedData = wrapper.ExportJSON();
// Write the output data
using var jsw = new StreamWriter(File.OpenWrite($"{filenameBase}.json"));
jsw.WriteLine(serializedData);
jsw.Flush();
}
#endif
// Create the output data
var builder = wrapper.ExportStringBuilder();
if (builder == null)
{
Console.WriteLine("No item information could be generated");
return;
}
// Only print to console if enabled
if (!FileOnly)
Console.WriteLine(builder);
using var sw = new StreamWriter(File.OpenWrite($"{filenameBase}.txt"));
sw.WriteLine(file);
sw.WriteLine();
sw.WriteLine(builder.ToString());
sw.Flush();
}
catch (Exception ex)
{
Console.WriteLine(Debug ? ex : "[Exception opening file, please try again]");
Console.WriteLine();
}
}
/// <summary>
/// Print hash information for a single file, if possible
/// </summary>
/// <param name="file">File path</param>
/// <returns>StringBuilder representing the hash information, if possible</returns>
private StringBuilder? PrintHashInfo(string file)
{
// Ignore missing files
if (!File.Exists(file))
return null;
Console.WriteLine($"Attempting to hash {file}, this may take a while...");
try
{
// Get all file hashes for flexibility
var hashes = HashTool.GetFileHashes(file);
if (hashes == null)
{
if (Debug) Console.WriteLine($"Hashes for {file} could not be retrieved");
return null;
}
// Output subset of available hashes
var builder = new StringBuilder();
if (hashes.TryGetValue(HashType.CRC16, out string? crc16) && crc16 != null)
builder.AppendLine($"CRC-16 checksum: {crc16}");
if (hashes.TryGetValue(HashType.CRC32, out string? crc32) && crc32 != null)
builder.AppendLine($"CRC-32 checksum: {crc32}");
if (hashes.TryGetValue(HashType.MD2, out string? md2) && md2 != null)
builder.AppendLine($"MD2 hash: {md2}");
if (hashes.TryGetValue(HashType.MD4, out string? md4) && md4 != null)
builder.AppendLine($"MD4 hash: {md4}");
if (hashes.TryGetValue(HashType.MD5, out string? md5) && md5 != null)
builder.AppendLine($"MD5 hash: {md5}");
if (hashes.TryGetValue(HashType.RIPEMD128, out string? ripemd128) && ripemd128 != null)
builder.AppendLine($"RIPEMD-128 hash: {ripemd128}");
if (hashes.TryGetValue(HashType.RIPEMD160, out string? ripemd160) && ripemd160 != null)
builder.AppendLine($"RIPEMD-160 hash: {ripemd160}");
if (hashes.TryGetValue(HashType.SHA1, out string? sha1) && sha1 != null)
builder.AppendLine($"SHA-1 hash: {sha1}");
if (hashes.TryGetValue(HashType.SHA256, out string? sha256) && sha256 != null)
builder.AppendLine($"SHA-256 hash: {sha256}");
if (hashes.TryGetValue(HashType.SHA384, out string? sha384) && sha384 != null)
builder.AppendLine($"SHA-384 hash: {sha384}");
if (hashes.TryGetValue(HashType.SHA512, out string? sha512) && sha512 != null)
builder.AppendLine($"SHA-512 hash: {sha512}");
return builder;
}
catch (Exception ex)
{
Console.WriteLine(Debug ? ex : "[Exception opening file, please try again]");
return null;
}
}
}
}

View File

@@ -10,7 +10,7 @@
<Nullable>enable</Nullable>
<SuppressTfmSupportBuildWarnings>true</SuppressTfmSupportBuildWarnings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<Version>2.0.0</Version>
<Version>2.0.2</Version>
</PropertyGroup>
<!-- Support All Frameworks -->
@@ -32,8 +32,9 @@
</ItemGroup>
<ItemGroup>
<PackageReference Include="SabreTools.IO" Version="1.7.5" />
<PackageReference Include="SabreTools.Hashing" Version="1.5.0" />
<PackageReference Include="SabreTools.CommandLine" Version="[1.3.2]" />
<PackageReference Include="SabreTools.Hashing" Version="[1.5.1]" />
<PackageReference Include="SabreTools.IO" Version="[1.7.6]" />
</ItemGroup>
</Project>

View File

@@ -1,141 +0,0 @@
using System;
using System.Collections.Generic;
namespace InfoPrint
{
/// <summary>
/// Set of options for the test executable
/// </summary>
internal sealed class Options
{
#region Properties
/// <summary>
/// Enable debug output for relevant operations
/// </summary>
public bool Debug { get; private set; } = false;
/// <summary>
/// Output information to file only, skip printing to console
/// </summary>
public bool FileOnly { get; private set; } = false;
/// <summary>
/// Print external file hashes
/// </summary>
public bool Hash { get; private set; } = false;
/// <summary>
/// Set of input paths to use for operations
/// </summary>
public List<string> InputPaths { get; private set; } = [];
#if NETCOREAPP
/// <summary>
/// Enable JSON output
/// </summary>
public bool Json { get; private set; } = false;
#endif
#endregion
/// <summary>
/// Parse commandline arguments into an Options object
/// </summary>
public static Options? ParseOptions(string[] args)
{
// If we have invalid arguments
if (args == null || args.Length == 0)
return null;
// Create an Options object
var options = new Options();
// Parse the features
int index = 0;
for (; index < args.Length; index++)
{
string arg = args[index];
bool featureFound = false;
switch (arg)
{
case "-?":
case "-h":
case "--help":
return null;
default:
break;
}
// If the flag wasn't a feature
if (!featureFound)
break;
}
// Parse the options and paths
for (; index < args.Length; index++)
{
string arg = args[index];
switch (arg)
{
case "-d":
case "--debug":
options.Debug = true;
break;
case "-c":
case "--hash":
options.Hash = true;
break;
case "-f":
case "--file":
options.FileOnly = true;
break;
case "-j":
case "--json":
#if NETCOREAPP
options.Json = true;
#else
Console.WriteLine("JSON output not available in .NET Framework");
#endif
break;
default:
options.InputPaths.Add(arg);
break;
}
}
// Validate we have any input paths to work on
if (options.InputPaths.Count == 0)
{
Console.WriteLine("At least one path is required!");
return null;
}
return options;
}
/// <summary>
/// Display help text
/// </summary>
public static void DisplayHelp()
{
Console.WriteLine("Information Printing Program");
Console.WriteLine();
Console.WriteLine("InfoPrint <options> file|directory ...");
Console.WriteLine();
Console.WriteLine("Options:");
Console.WriteLine("-?, -h, --help Display this help text and quit");
Console.WriteLine("-d, --debug Enable debug mode");
Console.WriteLine("-c, --hash Output file hashes");
Console.WriteLine("-f, --file Print to file only");
#if NETCOREAPP
Console.WriteLine("-j, --json Print info as JSON");
#endif
}
}
}

View File

@@ -1,10 +1,8 @@
using System;
using System.IO;
using System.Text;
using SabreTools.Hashing;
using SabreTools.IO.Extensions;
using SabreTools.Serialization;
using SabreTools.Serialization.Wrappers;
using System.Collections.Generic;
using InfoPrint.Features;
using SabreTools.CommandLine;
using SabreTools.CommandLine.Features;
namespace InfoPrint
{
@@ -12,199 +10,69 @@ namespace InfoPrint
{
public static void Main(string[] args)
{
// Get the options from the arguments
var options = Options.ParseOptions(args);
// Create the command set
var mainFeature = new MainFeature();
var commandSet = CreateCommands(mainFeature);
// If we have an invalid state
if (options == null)
// If we have no args, show the help and quit
if (args == null || args.Length == 0)
{
Options.DisplayHelp();
commandSet.OutputAllHelp();
return;
}
// Loop through the input paths
foreach (string inputPath in options.InputPaths)
// Cache the first argument and starting index
string featureName = args[0];
// Try processing the standalone arguments
var topLevel = commandSet.GetTopLevel(featureName);
switch (topLevel)
{
PrintPathInfo(inputPath, options);
// Standalone Options
case Help help: help.ProcessArgs(args, 0, commandSet); return;
// Default Behavior
default:
if (!mainFeature.ProcessArgs(args, 0))
{
commandSet.OutputAllHelp();
return;
}
else if (!mainFeature.VerifyInputs())
{
Console.Error.WriteLine("At least one input is required");
commandSet.OutputAllHelp();
return;
}
mainFeature.Execute();
break;
}
}
/// <summary>
/// Wrapper to print information for a single path
/// Create the command set for the program
/// </summary>
/// <param name="path">File or directory path</param>
/// <param name="options">User-defined options</param>
private static void PrintPathInfo(string path, Options options)
private static CommandSet CreateCommands(MainFeature mainFeature)
{
Console.WriteLine($"Checking possible path: {path}");
List<string> header = [
"Information Printing Program",
string.Empty,
"InfoPrint <options> file|directory ...",
string.Empty,
];
// Check if the file or directory exists
if (File.Exists(path))
{
PrintFileInfo(path, options);
}
else if (Directory.Exists(path))
{
foreach (string file in IOExtensions.SafeEnumerateFiles(path, "*", SearchOption.AllDirectories))
{
PrintFileInfo(file, options);
}
}
else
{
Console.WriteLine($"{path} does not exist, skipping...");
}
}
/// <summary>
/// Print information for a single file, if possible
/// </summary>
/// <param name="file">File path</param>
/// <param name="options">User-defined options</param>
private static void PrintFileInfo(string file, Options options)
{
Console.WriteLine($"Attempting to print info for {file}");
// Get the base info output name
string filenameBase = $"info-{DateTime.Now:yyyy-MM-dd_HHmmss.ffff}";
// If we have the hash flag
if (options.Hash)
{
var hashBuilder = PrintHashInfo(file, options.Debug);
if (hashBuilder != null)
{
// Create the output data
string hashData = hashBuilder.ToString();
// Write the output data
using var hsw = new StreamWriter(File.OpenWrite($"{filenameBase}.hashes"));
hsw.WriteLine(hashData);
hsw.Flush();
}
}
try
{
using Stream stream = File.Open(file, FileMode.Open, FileAccess.Read, FileShare.ReadWrite);
// Read the first 8 bytes
byte[]? magic = stream.ReadBytes(8);
stream.Seek(0, SeekOrigin.Begin);
// Get the file type
string extension = Path.GetExtension(file).TrimStart('.');
WrapperType ft = WrapperFactory.GetFileType(magic ?? [], extension);
// Print out the file format
Console.WriteLine($"File format found: {ft}");
// Setup the wrapper to print
var wrapper = WrapperFactory.CreateWrapper(ft, stream);
// If we don't have a wrapper
if (wrapper == null)
{
Console.WriteLine($"Either {ft} is not supported or something went wrong during parsing!");
Console.WriteLine();
return;
}
var commandSet = new CommandSet(header);
commandSet.Add(new Help(["-?", "-h", "--help"]));
commandSet.Add(mainFeature.DebugInput);
commandSet.Add(mainFeature.HashInput);
commandSet.Add(mainFeature.FileOnlyInput);
#if NETCOREAPP
// If we have the JSON flag
if (options.Json)
{
// Create the output data
string serializedData = wrapper.ExportJSON();
// Write the output data
using var jsw = new StreamWriter(File.OpenWrite($"{filenameBase}.json"));
jsw.WriteLine(serializedData);
jsw.Flush();
}
commandSet.Add(mainFeature.JsonInput);
#endif
// Create the output data
var builder = wrapper.ExportStringBuilder();
if (builder == null)
{
Console.WriteLine("No item information could be generated");
return;
}
// Only print to console if enabled
if (!options.FileOnly)
Console.WriteLine(builder);
using var sw = new StreamWriter(File.OpenWrite($"{filenameBase}.txt"));
sw.WriteLine(file);
sw.WriteLine();
sw.WriteLine(builder.ToString());
sw.Flush();
}
catch (Exception ex)
{
Console.WriteLine(options.Debug ? ex : "[Exception opening file, please try again]");
Console.WriteLine();
}
}
/// <summary>
/// Print hash information for a single file, if possible
/// </summary>
/// <param name="file">File path</param>
/// <param name="debug">Enable debug output</param>
/// <returns>StringBuilder representing the hash information, if possible</returns>
private static StringBuilder? PrintHashInfo(string file, bool debug)
{
// Ignore missing files
if (!File.Exists(file))
return null;
Console.WriteLine($"Attempting to hash {file}, this may take a while...");
try
{
// Get all file hashes for flexibility
var hashes = HashTool.GetFileHashes(file);
if (hashes == null)
{
if (debug) Console.WriteLine($"Hashes for {file} could not be retrieved");
return null;
}
// Output subset of available hashes
var builder = new StringBuilder();
if (hashes.TryGetValue(HashType.CRC16, out string? crc16) && crc16 != null)
builder.AppendLine($"CRC-16 checksum: {crc16}");
if (hashes.TryGetValue(HashType.CRC32, out string? crc32) && crc32 != null)
builder.AppendLine($"CRC-32 checksum: {crc32}");
if (hashes.TryGetValue(HashType.MD2, out string? md2) && md2 != null)
builder.AppendLine($"MD2 hash: {md2}");
if (hashes.TryGetValue(HashType.MD4, out string? md4) && md4 != null)
builder.AppendLine($"MD4 hash: {md4}");
if (hashes.TryGetValue(HashType.MD5, out string? md5) && md5 != null)
builder.AppendLine($"MD5 hash: {md5}");
if (hashes.TryGetValue(HashType.RIPEMD128, out string? ripemd128) && ripemd128 != null)
builder.AppendLine($"RIPEMD-128 hash: {ripemd128}");
if (hashes.TryGetValue(HashType.RIPEMD160, out string? ripemd160) && ripemd160 != null)
builder.AppendLine($"RIPEMD-160 hash: {ripemd160}");
if (hashes.TryGetValue(HashType.SHA1, out string? sha1) && sha1 != null)
builder.AppendLine($"SHA-1 hash: {sha1}");
if (hashes.TryGetValue(HashType.SHA256, out string? sha256) && sha256 != null)
builder.AppendLine($"SHA-256 hash: {sha256}");
if (hashes.TryGetValue(HashType.SHA384, out string? sha384) && sha384 != null)
builder.AppendLine($"SHA-384 hash: {sha384}");
if (hashes.TryGetValue(HashType.SHA512, out string? sha512) && sha512 != null)
builder.AppendLine($"SHA-512 hash: {sha512}");
return builder;
}
catch (Exception ex)
{
Console.WriteLine(debug ? ex : "[Exception opening file, please try again]");
return null;
}
return commandSet;
}
}
}

View File

@@ -8,7 +8,7 @@ Find the link to the Nuget package [here](https://www.nuget.org/packages/SabreTo
The following non-project libraries (or ports thereof) are used for file handling:
- [SharpCompress](https://github.com/adamhathcock/sharpcompress) - Common archive format extraction
- [GrindCore.SharpCompress](https://github.com/Nanook/GrindCore.SharpCompress) - Common archive format extraction
- [StormLibSharp](https://github.com/robpaveza/stormlibsharp) - MoPaQ extraction [Unused in .NET Framework 2.0/3.5/4.0 and non-Windows builds due to Windows-specific libraries]
The following projects have influenced this library:
@@ -30,7 +30,7 @@ For the latest WIP build here: [Rolling Release](https://github.com/SabreTools/S
InfoPrint <options> file|directory ...
Options:
-?, -h, --help Display this help text and quit
-?, -h, --help Display this help text
-d, --debug Enable debug mode
-c, --hash Output file hashes
-f, --file Print to file only
@@ -42,10 +42,10 @@ Options:
**ExtractionTool** is a reference implementation for the extraction features of the library, packaged as a standalone executable for all supported platforms. It will attempt to detect and extract many supported file types. See the table below for supported extraction functionality.
```text
ExtractionTool.exe <options> file|directory ...
ExtractionTool <options> file|directory ...
Options:
-?, -h, --help Display this help text and quit
-?, -h, --help Display this help text
-d, --debug Enable debug mode
-o, --outdir [PATH] Set output path for extraction (required)
```
@@ -66,7 +66,7 @@ Options:
| InstallShield CAB | |
| Microsoft cabinet file | Does not support LZX or Quantum compression |
| Microsoft LZ-compressed files | KWAJ, QBasic, and SZDD variants |
| MoPaQ game data archive (MPQ) | Currently not working. Windows only. .NET Framework 4.5.2 and above |
| MoPaQ game data archive (MPQ) | Windows only. .NET Framework 4.5.2 and above |
| New Executable | Embedded archives and executables in the overlay and Wise installer |
| NovaLogic Game Archive Format (PFF) | |
| PKZIP and derived files (ZIP, etc.) | .NET Framework 4.6.2 and greater |

View File

@@ -0,0 +1,73 @@
using System.IO;
using System.Linq;
using SabreTools.Serialization.Readers;
using Xunit;
namespace SabreTools.Serialization.Test.Readers
{
public class XZTests
{
[Fact]
public void NullArray_Null()
{
byte[]? data = null;
int offset = 0;
var deserializer = new XZ();
var actual = deserializer.Deserialize(data, offset);
Assert.Null(actual);
}
[Fact]
public void EmptyArray_Null()
{
byte[]? data = [];
int offset = 0;
var deserializer = new XZ();
var actual = deserializer.Deserialize(data, offset);
Assert.Null(actual);
}
[Fact]
public void InvalidArray_Null()
{
byte[]? data = [.. Enumerable.Repeat<byte>(0xFF, 1024)];
int offset = 0;
var deserializer = new XZ();
var actual = deserializer.Deserialize(data, offset);
Assert.Null(actual);
}
[Fact]
public void NullStream_Null()
{
Stream? data = null;
var deserializer = new XZ();
var actual = deserializer.Deserialize(data);
Assert.Null(actual);
}
[Fact]
public void EmptyStream_Null()
{
Stream? data = new MemoryStream([]);
var deserializer = new XZ();
var actual = deserializer.Deserialize(data);
Assert.Null(actual);
}
[Fact]
public void InvalidStream_Null()
{
Stream? data = new MemoryStream([.. Enumerable.Repeat<byte>(0xFF, 1024)]);
var deserializer = new XZ();
var actual = deserializer.Deserialize(data);
Assert.Null(actual);
}
}
}

View File

@@ -26,10 +26,10 @@
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PrivateAssets>all</PrivateAssets>
</PackageReference>
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.1" />
<PackageReference Include="SabreTools.Hashing" Version="1.5.0" />
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="18.0.0" />
<PackageReference Include="SabreTools.Hashing" Version="[1.5.1]" />
<PackageReference Include="xunit" Version="2.9.3" />
<PackageReference Include="xunit.runner.visualstudio" Version="3.1.4">
<PackageReference Include="xunit.runner.visualstudio" Version="3.1.5">
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PrivateAssets>all</PrivateAssets>
</PackageReference>

View File

@@ -0,0 +1,62 @@
using System.Collections.Generic;
namespace SabreTools.Data.Extensions
{
public static class XZ
{
/// <summary>
/// Decode a value from a variable-length integer
/// </summary>
/// <param name="value">Value to decode</param>
/// <param name="maxSize">Maximum number of bytes to parse</param>
/// <param name="length">Number of bytes parsed</param>
/// <returns>UInt64 representing the decoded integer</returns>
/// <see href="https://tukaani.org/xz/xz-file-format.txt"/>
public static ulong DecodeVariableLength(this byte[] value, int maxSize, out int length)
{
length = 0;
if (maxSize <= 0)
return 0;
if (maxSize > 9)
maxSize = 9;
ulong output = (ulong)(value[0] & 0x7F);
int i = 0;
while ((value[i++] & 0x80) != 0)
{
if (i >= maxSize || value[i] == 0x00)
return 0;
output |= (ulong)(value[i] & 0x7F) << (i * 7);
}
length = i;
return output;
}
/// <summary>
/// Encode a value to a variable-length integer
/// </summary>
/// <param name="value">Value to encode</param>
/// <returns>Byte array representing the encoded integer</returns>
/// <see href="https://tukaani.org/xz/xz-file-format.txt"/>
public static byte[] EncodeVariableLength(this ulong value)
{
if (value > long.MaxValue / 2)
return [];
var output = new List<byte>();
while (value >= 0x80)
{
output.Add((byte)(value | 0x80));
value >>= 7;
}
output.Add((byte)value);
return [.. output];
}
}
}
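A minimal round-trip sketch of how these helpers fit together (the literal values are only illustrative; the byte layout follows the XZ file format document linked above):
using SabreTools.Data.Extensions;
ulong original = 0x4000;
byte[] encoded = original.EncodeVariableLength();   // yields [0x80, 0x80, 0x01]
// Decode back, learning how many bytes the value occupied
ulong decoded = encoded.DecodeVariableLength(encoded.Length, out int length);
// decoded == 0x4000 and length == 3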

View File

@@ -1,10 +1,18 @@
namespace SabreTools.Data.Models.BZip2
{
/// <summary>
/// This is a placeholder model for future work
/// </summary>
/// <see href="https://github.com/dsnet/compress/blob/master/doc/bzip2-format.pdf"/>
public class Archive
{
/// <summary>
/// Stream header
/// </summary>
public Header? Header { get; set; }
// TODO: Implement remaining structures
/// <summary>
/// Stream footer
/// </summary>
public Footer? Footer { get; set; }
}
}

View File

@@ -0,0 +1,12 @@
namespace SabreTools.Data.Models.BZip2
{
public class Block
{
/// <summary>
/// Block header
/// </summary>
public BlockHeader? Header { get; set; }
// TODO: Implement remaining structures
}
}

View File

@@ -0,0 +1,37 @@
namespace SabreTools.Data.Models.BZip2
{
public class BlockHeader
{
/// <summary>
/// A 48-bit integer value 31 41 59 26 53 59, which
/// is the binary-coded decimal representation of
/// pi. It is used to differentiate the block
/// from the footer.
/// </summary>
/// <remarks>This may not be byte-aligned</remarks>
public byte[]? Magic { get; set; }
/// <summary>
/// The CRC-32 checksum of the uncompressed data contained
/// in <see cref="BlockData"/>. It uses the same CRC-32
/// polynomial as GZip, but the computed value differs due to
/// bit-packing differences.
/// </summary>
public uint Crc32 { get; set; }
/// <summary>
/// Should be 0. Previous versions of BZip2 allowed
/// the input data to be randomized to prevent
/// pathological strings from causing the runtime
/// to be exponential.
/// </summary>
/// <remarks>Actually a 1-bit value</remarks>
public byte Randomized { get; set; }
/// <summary>
/// Contains the origin pointer used in the BWT stage
/// </summary>
/// <remarks>Actually a 24-bit value</remarks>
public uint OrigPtr { get; set; }
}
}

View File

@@ -0,0 +1,26 @@
namespace SabreTools.Data.Models.BZip2
{
public class BlockTrees
{
// TODO: Implement SymMap
/// <summary>
/// Indicates the number of Huffman trees used in
/// the HUFF stage. It must be between 2 and 6.
/// </summary>
/// <remarks>Actually a 3-bit value</remarks>
public byte NumTrees { get; set; }
/// <summary>
/// Indicates the number of selectors used in the
/// HUFF stage. There must be at least 1 selector
/// defined.
/// </summary>
/// <remarks>Actually a 15-bit value</remarks>
public ushort NumSels { get; set; }
// TODO: Implement Selectors
// TODO: Implement Trees
}
}

View File

@@ -6,4 +6,4 @@ namespace SabreTools.Data.Models.BZip2
public const string SignatureString = "BZh";
}
}
}

View File

@@ -0,0 +1,27 @@
namespace SabreTools.Data.Models.BZip2
{
public class Footer
{
/// <summary>
/// A 48-bit integer value 17 72 45 38 50 90, which
/// is the binary-coded decimal representation of
/// sqrt(pi). It is used to differentiate the footer
/// from a block.
/// </summary>
/// <remarks>This may not be byte-aligned</remarks>
public byte[]? Magic { get; set; }
/// <summary>
/// Contains a custom checksum computed using each of
/// the Block CRCs.
/// </summary>
/// <remarks>This may not be byte-aligned</remarks>
public uint Checksum { get; set; }
/// <summary>
/// Used to align the bit-stream to the next byte-aligned
/// edge and will contain between 0 and 7 bits.
/// </summary>
public byte Padding { get; set; }
}
}
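As a rough sketch of how that combined checksum is conventionally produced in bzip2 (rotate the running value left by one bit, then XOR in each block's CRC-32; blockCrcs is an assumed local holding the per-block values in stream order):
uint streamCrc = 0;
foreach (uint blockCrc in blockCrcs)
{
    // Rotate left by one bit, then fold in the next block CRC
    streamCrc = (streamCrc << 1) | (streamCrc >> 31);
    streamCrc ^= blockCrc;
}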

View File

@@ -0,0 +1,25 @@
namespace SabreTools.Data.Models.BZip2
{
public class Header
{
/// <summary>
/// "BZ"
/// </summary>
public string? Signature { get; set; }
/// <summary>
/// Version byte
/// </summary>
/// <remarks>
/// '0' indicates a BZ1 file
/// 'h' indicates a BZ2 file
/// </remarks>
public byte Version { get; set; }
/// <summary>
/// ASCII value of the compression level
/// </summary>
/// <remarks>Valid values between '1' and '9'</remarks>
public byte Level { get; set; }
}
}

View File

@@ -9,7 +9,8 @@ namespace SabreTools.Data.Models.CHD
/// <summary>
/// Which custom compressors are used?
/// </summary>
public CodecType[] Compressors { get; set; } = new CodecType[4];
/// <remarks>There should be 4 entries</remarks>
public CodecType[] Compressors { get; set; }
/// <summary>
/// Logical size of the data (in bytes)

View File

@@ -6,5 +6,9 @@ namespace SabreTools.Data.Models.GZIP
public const byte ID1 = 0x1F;
public const byte ID2 = 0x8B;
public static readonly byte[] SignatureBytes = [0x1F, 0x8B];
public static readonly byte[] TorrentGZHeader = [0x1F, 0x8B, 0x08, 0x04, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x1C, 0x00];
}
}

View File

@@ -66,6 +66,17 @@ namespace SabreTools.Data.Models.GZIP
/// the header, with total length XLEN bytes. It consists of a
/// series of subfields, each of the form <see cref="ExtraFieldData"/>.
/// </summary>
/// <remarks>This is the raw version of <see cref="ExtraField"/></remarks>
public byte[] ExtraFieldBytes { get; set; }
/// <summary>
/// Extra field
///
/// If the FLG.FEXTRA bit is set, an "extra field" is present in
/// the header, with total length XLEN bytes. It consists of a
/// series of subfields, each of the form <see cref="ExtraFieldData"/>.
/// </summary>
/// <remarks>This is the processed version of <see cref="ExtraFieldBytes"/></remarks>
public ExtraFieldData[]? ExtraField { get; set; }
/// <summary>

View File

@@ -26,6 +26,7 @@ Not all of this information was able to be gathered directly from the files in q
| [IBM Documentation](https://www.ibm.com/docs/en) | TAR |
| [IETF](https://www.ietf.org/) | GZIP |
| [Independent Commodore Library](https://petlibrary.tripod.com/) | PKZIP |
| [Joe Tsai](https://github.com/dsnet/compress/blob/master/doc/bzip2-format.pdf) | BZip2 |
| [Ladislav Zezula](http://zezula.net/en/tools/main.html) | MoPaQ |
| [libaacs](https://code.videolan.org/videolan/libaacs/) | AACS |
| [libbdplus](https://github.com/mwgoldsmith/bdplus) | BD+ |
@@ -45,6 +46,7 @@ Not all of this information was able to be gathered directly from the files in q
| [Technical Committee T10](https://www.t10.org/) | PIC |
| [The Go tools for Windows + Assembler](https://www.godevtool.com/) | PortableExecutable |
| [The Whole Half-Life](https://twhl.info/wiki/page/Specification:_WAD3) | WAD3 |
| [Tukaani](https://tukaani.org/xz/format.html) | XZ |
| [Unshield](https://github.com/twogood/unshield) | InstallShieldCabinet |
| [unshieldv3](https://github.com/wfr/unshieldv3) | InstallShieldArchiveV3 |
| [Valve Developer Community](https://developer.valvesoftware.com/wiki/Main_Page) | BSP, VPK |

View File

@@ -1,10 +1,26 @@
namespace SabreTools.Data.Models.XZ
{
/// <summary>
/// This is a placeholder model for future work
/// </summary>
/// <see href="https://tukaani.org/xz/xz-file-format.txt"/>
public class Archive
{
/// <summary>
/// Pre-blocks header
/// </summary>
public Header? Header { get; set; }
/// <summary>
/// Sequence of 0 or more blocks
/// </summary>
public Block[]? Blocks { get; set; }
/// <summary>
/// Index structure
/// </summary>
public Index? Index { get; set; }
/// <summary>
/// Post-blocks footer
/// </summary>
public Footer? Footer { get; set; }
}
}

View File

@@ -0,0 +1,80 @@
namespace SabreTools.Data.Models.XZ
{
/// <summary>
/// Represents a single compressed block in the stream
/// </summary>
public class Block
{
/// <summary>
/// Size of the header
/// </summary>
/// <remarks>
/// The real header size can be calculated by the following:
/// (HeaderSize + 1) * 4
/// </remarks>
public byte HeaderSize { get; set; }
/// <summary>
/// The Block Flags field is a bit field
/// </summary>
public BlockFlags Flags { get; set; }
/// <summary>
/// Size of the compressed data
/// Present if <see cref="BlockFlags.CompressedSize"/> is set.
/// </summary>
/// <remarks>Stored as a variable-length integer</remarks>
public ulong CompressedSize { get; set; }
/// <summary>
/// Size of the block after decompression
/// Present if <see cref="BlockFlags.UncompressedSize"/> is set.
/// </summary>
/// <remarks>Stored as a variable-length integer</remarks>
public ulong UncompressedSize { get; set; }
/// <summary>
/// List of filter flags
/// </summary>
/// <remarks>
/// The number of filter flags is given by the first two
/// bits of <see cref="Flags"/>
/// </remarks>
public FilterFlag[]? FilterFlags { get; set; }
/// <summary>
/// This field contains as many null bytes as are needed to make
/// the Block Header have the size specified in Block Header Size.
/// </summary>
public byte[]? HeaderPadding { get; set; }
/// <summary>
/// The CRC32 is calculated over everything in the Block Header
/// field except the CRC32 field itself. It is stored as an
/// unsigned 32-bit little endian integer.
/// </summary>
public uint Crc32 { get; set; }
/// <summary>
/// The format of Compressed Data depends on Block Flags and List
/// of Filter Flags
/// </summary>
public byte[]? CompressedData { get; set; }
/// <summary>
/// Block Padding MUST contain 0-3 null bytes to make the size of
/// the Block a multiple of four bytes. This can be needed when
/// the size of Compressed Data is not a multiple of four.
/// </summary>
public byte[]? BlockPadding { get; set; }
/// <summary>
/// The type and size of the Check field depends on which bits
/// are set in the Stream Flags field.
///
/// The Check, when used, is calculated from the original
/// uncompressed data.
/// </summary>
public byte[]? Check { get; set; }
}
}
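A small illustration of the HeaderSize remark above, assuming block is a populated instance of this model:
// A stored HeaderSize byte of 0x09 means the on-disk Block Header
// occupies (0x09 + 1) * 4 = 40 bytes
int realHeaderSize = (block.HeaderSize + 1) * 4;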

View File

@@ -2,6 +2,8 @@ namespace SabreTools.Data.Models.XZ
{
public static class Constants
{
public static readonly byte[] SignatureBytes = [0xFD, 0x37, 0x7A, 0x58, 0x5A, 0x00];
public static readonly byte[] HeaderSignatureBytes = [0xFD, 0x37, 0x7A, 0x58, 0x5A, 0x00];
public static readonly byte[] FooterSignatureBytes = [0x59, 0x5A];
}
}
}

View File

@@ -0,0 +1,47 @@
using System;
namespace SabreTools.Data.Models.XZ
{
[Flags]
public enum BlockFlags : byte
{
#region Bits 0-1 - Number of filters
OneFilter = 0x00,
TwoFilters = 0x01,
ThreeFilters = 0x02,
FourFilters = 0x03,
#endregion
/// <summary>
/// Compressed size field present
/// </summary>
CompressedSize = 0x40,
/// <summary>
/// Uncompressed size field present
/// </summary>
UncompressedSize = 0x80,
}
public enum HeaderFlags : ushort
{
None = 0x0000,
Crc32 = 0x0100,
Reserved0x02 = 0x0200,
Reserved0x03 = 0x0300,
Crc64 = 0x0400,
Reserved0x05 = 0x0500,
Reserved0x06 = 0x0600,
Reserved0x07 = 0x0700,
Reserved0x08 = 0x0800,
Reserved0x09 = 0x0900,
Sha256 = 0x0A00,
Reserved0x0B = 0x0B00,
Reserved0x0C = 0x0C00,
Reserved0x0D = 0x0D00,
Reserved0x0E = 0x0E00,
Reserved0x0F = 0x0F00,
}
}

View File

@@ -0,0 +1,23 @@
namespace SabreTools.Data.Models.XZ
{
public class FilterFlag
{
/// <summary>
/// Filter ID
/// </summary>
/// <remarks>Stored as a variable-length integer</remarks>
public ulong FilterID { get; set; }
/// <summary>
/// Size of the filter properties
/// </summary>
/// <remarks>Stored as a variable-length integer</remarks>
public ulong SizeOfProperties { get; set; }
/// <summary>
/// Properties of the filter whose length is given by
/// <see cref="SizeOfProperties"/>
/// </summary>
public byte[]? Properties { get; set; }
}
}

View File

@@ -0,0 +1,39 @@
namespace SabreTools.Data.Models.XZ
{
/// <summary>
/// Represents the post-block data in the stream
/// </summary>
public class Footer
{
/// <summary>
/// The CRC32 is calculated from the Backward Size and Stream Flags
/// fields. It is stored as an unsigned 32-bit little endian
/// integer.
/// </summary>
public uint Crc32 { get; set; }
/// <summary>
/// Backward Size is stored as a 32-bit little endian integer,
/// which indicates the size of the Index field as a multiple of
/// four bytes, minimum value being four bytes.
/// </summary>
/// <remarks>
/// The real index size can be calculated by the following:
/// (BackwardSize + 1) * 4
/// </remarks>
public uint BackwardSize { get; set; }
/// <summary>
/// This is a copy of the Stream Flags field from the Stream
/// Header. The information stored to Stream Flags is needed
/// when parsing the Stream backwards.
/// </summary>
public HeaderFlags Flags { get; set; }
/// <summary>
/// Footer magic number ("YZ")
/// </summary>
/// <remarks>2 bytes</remarks>
public byte[]? Signature { get; set; }
}
}

View File

@@ -0,0 +1,27 @@
namespace SabreTools.Data.Models.XZ
{
/// <summary>
/// Represents the pre-block data in the stream
/// </summary>
public class Header
{
/// <summary>
/// Header magic number (0xFD, '7', 'z', 'X', 'Z', 0x00)
/// </summary>
/// <remarks>6 bytes</remarks>
public byte[]? Signature { get; set; }
/// <summary>
/// The first byte of Stream Flags is always a null byte. In the
/// future, this byte may be used to indicate a new Stream version
/// or other Stream properties.
/// </summary>
public HeaderFlags Flags { get; set; }
/// <summary>
/// The CRC32 is calculated from the Stream Flags field. It is
/// stored as an unsigned 32-bit little endian integer.
/// </summary>
public uint Crc32 { get; set; }
}
}

View File

@@ -0,0 +1,36 @@
namespace SabreTools.Data.Models.XZ
{
public class Index
{
/// <summary>
/// The value of Index Indicator is always 0x00
/// </summary>
public byte IndexIndicator { get; set; }
/// <summary>
/// This field indicates how many Records there are in the List
/// of Records field, and thus how many Blocks there are in the
/// Stream
/// </summary>
/// <remarks>Stored as a variable-length integer</remarks>
public ulong NumberOfRecords { get; set; }
/// <summary>
/// One record per block
/// </summary>
public Record[]? Records { get; set; }
/// <summary>
/// This field MUST contain 0-3 null bytes to pad the Index to
/// a multiple of four bytes.
/// </summary>
public byte[]? Padding { get; set; }
/// <summary>
/// The CRC32 is calculated over everything in the Index field
/// except the CRC32 field itself. The CRC32 is stored as an
/// unsigned 32-bit little endian integer.
/// </summary>
public uint Crc32 { get; set; }
}
}

View File

@@ -0,0 +1,17 @@
namespace SabreTools.Data.Models.XZ
{
public class Record
{
/// <summary>
/// Unpadded size of the block
/// </summary>
/// <remarks>Stored as a variable-length integer</remarks>
public ulong UnpaddedSize { get; set; }
/// <summary>
/// Uncompressed size of the block
/// </summary>
/// <remarks>Stored as a variable-length integer</remarks>
public ulong UncompressedSize { get; set; }
}
}
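Because there is one Record per Block, the Index alone is enough to locate every block without touching the compressed data; a rough sketch, assuming blocks begin immediately after the 12-byte Stream Header and that each block's on-disk size is its unpadded size rounded up to a 4-byte boundary (the alignment the Block Padding field provides):

    // Walk block offsets using only the Index records
    static void PrintBlockOffsets(SabreTools.Data.Models.XZ.Record[] records)
    {
        long offset = 12; // end of the Stream Header
        foreach (var record in records)
        {
            System.Console.WriteLine($"Block at 0x{offset:X}, unpadded size {record.UnpaddedSize}");
            // Round the unpadded size up to the next multiple of four
            offset += (long)((record.UnpaddedSize + 3) & ~3UL);
        }
    }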

View File

@@ -75,6 +75,7 @@ namespace SabreTools.Serialization
Wrapper.WiseSectionHeader item => item.PrettyPrint(),
Wrapper.XeMID item => item.PrettyPrint(),
Wrapper.XMID item => item.PrettyPrint(),
Wrapper.XZ item => item.PrettyPrint(),
Wrapper.XZP item => item.PrettyPrint(),
_ => null,
};
@@ -130,6 +131,7 @@ namespace SabreTools.Serialization
Wrapper.WiseSectionHeader item => item.ExportJSON(),
Wrapper.XeMID item => item.ExportJSON(),
Wrapper.XMID item => item.ExportJSON(),
Wrapper.XZ item => item.ExportJSON(),
Wrapper.XZP item => item.ExportJSON(),
_ => string.Empty,
};
@@ -558,6 +560,16 @@ namespace SabreTools.Serialization
return builder;
}
/// <summary>
/// Export the item information as pretty-printed text
/// </summary>
private static StringBuilder PrettyPrint(this Wrapper.XZ item)
{
var builder = new StringBuilder();
XZ.Print(builder, item.Model);
return builder;
}
/// <summary>
/// Export the item information as pretty-printed text
/// </summary>

View File

@@ -0,0 +1,133 @@
using System.Text;
using SabreTools.Data.Models.XZ;
namespace SabreTools.Data.Printers
{
public class XZ : IPrinter<Archive>
{
/// <inheritdoc/>
public void PrintInformation(StringBuilder builder, Archive model)
=> Print(builder, model);
public static void Print(StringBuilder builder, Archive archive)
{
builder.AppendLine("xz Information:");
builder.AppendLine("-------------------------");
builder.AppendLine();
Print(builder, archive.Header);
Print(builder, archive.Blocks);
Print(builder, archive.Index);
Print(builder, archive.Footer);
}
private static void Print(StringBuilder builder, Header? header)
{
builder.AppendLine(" Header Information:");
builder.AppendLine(" -------------------------");
if (header == null)
{
builder.AppendLine(" No header");
builder.AppendLine();
return;
}
builder.AppendLine(header.Signature, " Signature");
builder.AppendLine($" Flags: {header.Flags} (0x{(ushort)header.Flags:X4})");
builder.AppendLine(header.Crc32, " CRC-32");
builder.AppendLine();
}
private static void Print(StringBuilder builder, Block[]? blocks)
{
builder.AppendLine(" Blocks Information:");
builder.AppendLine(" -------------------------");
if (blocks == null || blocks.Length == 0)
{
builder.AppendLine(" No blocks");
builder.AppendLine();
return;
}
for (int i = 0; i < blocks.Length; i++)
{
var block = blocks[i];
builder.AppendLine($" Block {i}:");
builder.AppendLine(block.HeaderSize, " Header size");
builder.AppendLine($" Flags: {block.Flags} (0x{(byte)block.Flags:X2})");
builder.AppendLine(block.CompressedSize, " Compressed size");
builder.AppendLine(block.UncompressedSize, " Uncompressed size");
// TODO: Print filter flags
builder.AppendLine(block.HeaderPadding, " Header padding");
builder.AppendLine(block.Crc32, " CRC-32");
if (block.CompressedData == null)
builder.AppendLine(" Compressed data length: [NULL]");
else
builder.AppendLine(block.CompressedData.Length, " Compressed data length");
builder.AppendLine(block.BlockPadding, " Block padding");
builder.AppendLine(block.Check, " Check");
}
builder.AppendLine();
}
private static void Print(StringBuilder builder, Index? index)
{
builder.AppendLine(" Index Information:");
builder.AppendLine(" -------------------------");
if (index == null)
{
builder.AppendLine(" No index");
builder.AppendLine();
return;
}
builder.AppendLine(index.IndexIndicator, " Index indicator");
builder.AppendLine(index.NumberOfRecords, " Number of records");
Print(builder, index.Records);
builder.AppendLine(index.Padding, " Padding");
builder.AppendLine(index.Crc32, " CRC-32");
builder.AppendLine();
}
private static void Print(StringBuilder builder, Record[]? records)
{
builder.AppendLine(" Records Information:");
builder.AppendLine(" -------------------------");
if (records == null || records.Length == 0)
{
builder.AppendLine(" No records");
builder.AppendLine();
return;
}
for (int i = 0; i < records.Length; i++)
{
var record = records[i];
builder.AppendLine($" Block {i}:");
builder.AppendLine(record.UnpaddedSize, " Unpadded size");
builder.AppendLine(record.UncompressedSize, " Uncompressed size");
}
}
private static void Print(StringBuilder builder, Footer? footer)
{
builder.AppendLine(" Footer Information:");
builder.AppendLine(" -------------------------");
if (footer == null)
{
builder.AppendLine(" No footer");
builder.AppendLine();
return;
}
builder.AppendLine(footer.Crc32, " CRC-32");
builder.AppendLine(footer.BackwardSize, " Backward size");
builder.AppendLine($" Flags: {footer.Flags} (0x{(ushort)footer.Flags:X4})");
builder.AppendLine(footer.Signature, " Signature");
builder.AppendLine();
}
}
}
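Typical use of this printer, assuming archive is an Archive model already produced by the XZ reader:

    // Pretty-print a parsed XZ archive to the console
    var builder = new System.Text.StringBuilder();
    SabreTools.Data.Printers.XZ.Print(builder, archive);
    System.Console.Write(builder.ToString());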

View File

@@ -85,6 +85,10 @@ namespace SabreTools.Serialization.Readers
// Cache the current position
long currentPosition = data.Position;
// Read the raw data first
obj.ExtraFieldBytes = data.ReadBytes(obj.ExtraLength);
data.Seek(currentPosition, SeekOrigin.Begin);
List<ExtraFieldData> extraFields = [];
while (data.Position < currentPosition + obj.ExtraLength)
{

View File

@@ -106,6 +106,10 @@ namespace SabreTools.Serialization.Readers
#endregion
// Cache the overlay offset
long endOfSectionData = optionalHeader?.SizeOfHeaders ?? 0;
Array.ForEach(pex.SectionTable, s => endOfSectionData += s.SizeOfRawData);
#region Symbol Table and String Table
offset = initialOffset + fileHeader.PointerToSymbolTable;
@@ -326,7 +330,9 @@ namespace SabreTools.Serialization.Readers
#region Hidden Resources
// If we have not used up the full size, parse the remaining chunk as a single resource
if (pex.ResourceDirectoryTable?.Entries != null && tableOffset < tableSize)
if (pex.ResourceDirectoryTable?.Entries != null
&& tableOffset < tableSize
&& (offset + tableOffset) != endOfSectionData)
{
// Resize the entry array to accommodate one more
var localEntries = pex.ResourceDirectoryTable.Entries;
@@ -1581,8 +1587,8 @@ namespace SabreTools.Serialization.Readers
// Read the name from the offset, if needed
if (nameEntry && obj.Entries[i].NameOffset > 0 && obj.Entries[i].NameOffset < tableData.Length)
{
offset = (int)obj.Entries[i].NameOffset;
obj.Entries[i].Name = ParseResourceDirectoryString(tableData, ref offset);
int nameOffset = (int)obj.Entries[i].NameOffset;
obj.Entries[i].Name = ParseResourceDirectoryString(tableData, ref nameOffset);
}
}

View File

@@ -0,0 +1,303 @@
using System;
using System.IO;
using SabreTools.Data.Extensions;
using SabreTools.Data.Models.XZ;
using SabreTools.IO.Extensions;
using static SabreTools.Data.Models.XZ.Constants;
namespace SabreTools.Serialization.Readers
{
public class XZ : BaseBinaryReader<Archive>
{
/// <inheritdoc/>
public override Archive? Deserialize(Stream? data)
{
// If the data is invalid
if (data == null || !data.CanRead)
return null;
try
{
// Cache the current offset
long initialOffset = data.Position;
// Create a new archive to fill
var archive = new Archive();
#region Header
// Try to parse the header
var header = ParseHeader(data);
if (!header.Signature.EqualsExactly(HeaderSignatureBytes))
return null;
// Set the stream header
archive.Header = header;
// Cache the current offset
long endOfHeader = data.Position;
#endregion
#region Footer
// Seek to the start of the footer
data.Seek(-12, SeekOrigin.End);
// Cache the current offset
long startOfFooter = data.Position;
// Try to parse the footer
var footer = ParseFooter(data);
if (!footer.Signature.EqualsExactly(FooterSignatureBytes))
return null;
// Set the footer
archive.Footer = footer;
#endregion
#region Index
// Seek to the start of the index
long indexOffset = startOfFooter - ((footer.BackwardSize + 1) * 4);
data.Seek(indexOffset, SeekOrigin.Begin);
// Try to parse the index
var index = ParseIndex(data);
if (index.IndexIndicator != 0x00)
return null;
if (index.Records == null)
return null;
// Set the index
archive.Index = index;
#endregion
#region Blocks
// Seek to the start of the blocks
data.Seek(endOfHeader, SeekOrigin.Begin);
// Create the block array
int blockCount = index.Records.Length;
archive.Blocks = new Block[blockCount];
// Try to parse the blocks
for (int i = 0; i < archive.Blocks.Length; i++)
{
// Get the record for this block
var record = index.Records[i];
// Try to parse the block
archive.Blocks[i] = ParseBlock(data, header.Flags, record.UnpaddedSize);
}
#endregion
return archive;
}
catch
{
// Ignore the actual error
return null;
}
}
/// <summary>
/// Parse a Stream into a Header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Header on success, null on error</returns>
public static Header ParseHeader(Stream data)
{
var obj = new Header();
obj.Signature = data.ReadBytes(6);
obj.Flags = (HeaderFlags)data.ReadUInt16LittleEndian();
obj.Crc32 = data.ReadUInt32LittleEndian();
return obj;
}
/// <summary>
/// Parse a Stream into a Block
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="headerFlags">HeaderFlags to for determining the check value</param>
/// <param name="unpaddedSize">Unpadded data size from the index</param>
/// <returns>Filled Block on success, null on error</returns>
public static Block ParseBlock(Stream data, HeaderFlags headerFlags, ulong unpaddedSize)
{
// Cache the current offset
long currentOffset = data.Position;
// Determine the size of the check field
int checkSize = 0;
if (headerFlags == HeaderFlags.Crc32)
checkSize = 4;
else if (headerFlags == HeaderFlags.Crc64)
checkSize = 8;
else if (headerFlags == HeaderFlags.Sha256)
checkSize = 32;
var obj = new Block();
obj.HeaderSize = data.ReadByteValue();
int realHeaderSize = (obj.HeaderSize + 1) * 4;
obj.Flags = (BlockFlags)data.ReadByteValue();
#if NET20 || NET35
if ((obj.Flags & BlockFlags.CompressedSize) != 0)
#else
if (obj.Flags.HasFlag(BlockFlags.CompressedSize))
#endif
obj.CompressedSize = ParseVariableLength(data);
#if NET20 || NET35
if ((obj.Flags & BlockFlags.UncompressedSize) != 0)
#else
if (obj.Flags.HasFlag(BlockFlags.UncompressedSize))
#endif
obj.UncompressedSize = ParseVariableLength(data);
// Determine the number of filters to read
int filterCount = ((byte)obj.Flags & 0x03) + 1;
// Try to parse the filters
obj.FilterFlags = new FilterFlag[filterCount];
for (int i = 0; i < obj.FilterFlags.Length; i++)
{
obj.FilterFlags[i] = ParseFilterFlag(data);
}
// Parse the padding as needed, adjusting for CRC size
int paddingLength = realHeaderSize - (int)(data.Position - currentOffset) - 4;
if (paddingLength >= 0)
obj.HeaderPadding = data.ReadBytes(paddingLength);
obj.Crc32 = data.ReadUInt32LittleEndian();
// Determine the compressed size
ulong compressedSize = obj.CompressedSize != 0
? obj.CompressedSize
: unpaddedSize - (ulong)(realHeaderSize + checkSize);
// TODO: How to handle large blocks?
if ((int)compressedSize > 0)
obj.CompressedData = data.ReadBytes((int)compressedSize);
// Parse the padding as needed
paddingLength = 4 - (int)(unpaddedSize % 4);
if (paddingLength >= 0)
obj.BlockPadding = data.ReadBytes(paddingLength);
// Read the Check as needed
obj.Check = data.ReadBytes(checkSize);
return obj;
}
/// <summary>
/// Parse a Stream into a FilterFlag
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled FilterFlag on success, null on error</returns>
public static FilterFlag ParseFilterFlag(Stream data)
{
var obj = new FilterFlag();
obj.FilterID = ParseVariableLength(data);
obj.SizeOfProperties = ParseVariableLength(data);
obj.Properties = data.ReadBytes((int)obj.SizeOfProperties);
return obj;
}
/// <summary>
/// Parse a Stream into an Index
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Index on success, null on error</returns>
public static Data.Models.XZ.Index ParseIndex(Stream data)
{
// Cache the current offset
long currentOffset = data.Position;
var obj = new Data.Models.XZ.Index();
obj.IndexIndicator = data.ReadByteValue();
obj.NumberOfRecords = ParseVariableLength(data);
obj.Records = new Record[obj.NumberOfRecords];
for (int i = 0; i < obj.Records.Length; i++)
{
obj.Records[i] = ParseRecord(data);
}
// Parse the padding as needed
int paddingLength = 4 - (int)(data.Position - currentOffset) % 4;
if (paddingLength >= 0)
obj.Padding = data.ReadBytes(paddingLength);
obj.Crc32 = data.ReadUInt32LittleEndian();
return obj;
}
/// <summary>
/// Parse a Stream into a Record
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Record on success, null on error</returns>
public static Record ParseRecord(Stream data)
{
var obj = new Record();
obj.UnpaddedSize = ParseVariableLength(data);
obj.UncompressedSize = ParseVariableLength(data);
return obj;
}
/// <summary>
/// Parse a Stream into a Footer
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Footer on success, null on error</returns>
public static Footer ParseFooter(Stream data)
{
var obj = new Footer();
obj.Crc32 = data.ReadUInt32LittleEndian();
obj.BackwardSize = data.ReadUInt32LittleEndian();
obj.Flags = (HeaderFlags)data.ReadUInt16LittleEndian();
obj.Signature = data.ReadBytes(2);
return obj;
}
/// <summary>
/// Parse a variable-length number from the stream
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Decoded variable-length value</returns>
private static ulong ParseVariableLength(Stream data)
{
// Cache the current offset
long currentOffset = data.Position;
// Read up to 9 bytes for decoding
int byteCount = (int)Math.Min(data.Length - data.Position, 9);
byte[] encoded = data.ReadBytes(byteCount);
// Attempt to decode the value
ulong output = encoded.DecodeVariableLength(byteCount, out int length);
// Seek the actual length processed and return
data.Seek(currentOffset + length, SeekOrigin.Begin);
return output;
}
}
}
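ParseVariableLength above hands the raw bytes to a DecodeVariableLength extension; for reference, the XZ multibyte integer encoding stores seven data bits per byte with the high bit as a continuation flag, so a standalone decoder (a sketch of the encoding from the xz file format specification, not the extension's actual implementation) looks roughly like:

    // Decode an XZ variable-length integer: 7 data bits per byte,
    // high bit set means another byte follows (at most 9 bytes total)
    static ulong DecodeVli(byte[] buffer, out int length)
    {
        ulong value = 0;
        length = 0;
        while (length < buffer.Length && length < 9)
        {
            byte b = buffer[length];
            value |= (ulong)(b & 0x7F) << (7 * length);
            length++;
            if ((b & 0x80) == 0)
                break;
        }
        return value;
    }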

View File

@@ -14,7 +14,8 @@
<SuppressTfmSupportBuildWarnings>true</SuppressTfmSupportBuildWarnings>
<SymbolPackageFormat>snupkg</SymbolPackageFormat>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<Version>2.0.0</Version>
<Version>2.0.2</Version>
<WarningsNotAsErrors>NU5104</WarningsNotAsErrors>
<!-- Package Properties -->
<Authors>Matt Nadareski</Authors>
@@ -42,26 +43,19 @@
<TargetFrameworks>net6.0;net7.0;net8.0;net9.0</TargetFrameworks>
</PropertyGroup>
<!-- Set a build flag for Windows specifically -->
<PropertyGroup Condition="'$(RuntimeIdentifier)'=='win-x86'">
<DefineConstants>$(DefineConstants);WINX86</DefineConstants>
</PropertyGroup>
<PropertyGroup Condition="'$(RuntimeIdentifier)'=='win-x64'">
<DefineConstants>$(DefineConstants);WINX64</DefineConstants>
</PropertyGroup>
<!-- Exclude certain parts of external modules for by default -->
<PropertyGroup>
<DefaultItemExcludes>
$(DefaultItemExcludes);
**\AssemblyInfo.cs;
_EXTERNAL\stormlibsharp\lib\**;
_EXTERNAL\stormlibsharp\src\CascLibSharp\**;
_EXTERNAL\stormlibsharp\src\TestConsole\**
</DefaultItemExcludes>
</PropertyGroup>
<!-- Exclude all external modules for .NET Framework 2.0, .NET Framework 3.5, or non-Windows builds -->
<PropertyGroup Condition="$(TargetFramework.StartsWith(`net2`)) OR $(TargetFramework.StartsWith(`net3`)) OR !($(RuntimeIdentifier.StartsWith(`win-x86`)) OR $(RuntimeIdentifier.StartsWith(`win-x64`)))">
<PropertyGroup Condition="$(TargetFramework.StartsWith(`net2`)) OR $(TargetFramework.StartsWith(`net3`))">
<DefaultItemExcludes>
$(DefaultItemExcludes);
_EXTERNAL\**
@@ -85,11 +79,11 @@
</ItemGroup>
<ItemGroup>
<PackageReference Include="GrindCore.SharpCompress" Version="0.40.4-alpha" Condition="!$(TargetFramework.StartsWith(`net2`)) AND !$(TargetFramework.StartsWith(`net3`)) AND !$(TargetFramework.StartsWith(`net40`)) AND !$(TargetFramework.StartsWith(`net452`))" />
<PackageReference Include="NetLegacySupport.Numerics" Version="1.0.1" Condition="$(TargetFramework.StartsWith(`net2`)) OR $(TargetFramework.StartsWith(`net3`))" />
<PackageReference Include="Newtonsoft.Json" Version="13.0.4" />
<PackageReference Include="SabreTools.Hashing" Version="1.5.0" />
<PackageReference Include="SabreTools.IO" Version="1.7.5" />
<PackageReference Include="SharpCompress" Version="0.40.0" Condition="!$(TargetFramework.StartsWith(`net2`)) AND !$(TargetFramework.StartsWith(`net3`)) AND !$(TargetFramework.StartsWith(`net40`)) AND !$(TargetFramework.StartsWith(`net452`))" />
</ItemGroup>
<PackageReference Include="SabreTools.Hashing" Version="[1.5.1]" />
<PackageReference Include="SabreTools.IO" Version="[1.7.6]" />
</ItemGroup>
</Project>

View File

@@ -295,7 +295,7 @@ namespace SabreTools.Serialization
#region GZip
if (magic.StartsWith(new byte[] { Data.Models.GZIP.Constants.ID1, Data.Models.GZIP.Constants.ID2 }))
if (magic.StartsWith(Data.Models.GZIP.Constants.SignatureBytes))
return WrapperType.GZip;
if (extension.Equals("gz", StringComparison.OrdinalIgnoreCase))
@@ -788,7 +788,7 @@ namespace SabreTools.Serialization
#region XZ
if (magic.StartsWith(Data.Models.XZ.Constants.SignatureBytes))
if (magic.StartsWith(Data.Models.XZ.Constants.HeaderSignatureBytes))
return WrapperType.XZ;
if (extension.Equals("xz", StringComparison.OrdinalIgnoreCase))

View File

@@ -1,5 +1,6 @@
using System.IO;
using SabreTools.Data.Models.GZIP;
using SabreTools.IO.Extensions;
namespace SabreTools.Serialization.Wrappers
{
@@ -14,6 +15,60 @@ namespace SabreTools.Serialization.Wrappers
#region Extension Properties
/// <summary>
/// Content CRC-32 as stored in the extra field
/// </summary>
/// <remarks>Only guaranteed for Torrent GZip format</remarks>
public byte[]? ContentCrc32
{
get
{
// Only valid for Torrent GZip
if (Header == null || !IsTorrentGZip)
return null;
// CRC-32 is the second packed field
int extraIndex = 0x10;
return Header.ExtraFieldBytes.ReadBytes(ref extraIndex, 0x04);
}
}
/// <summary>
/// Content MD5 as stored in the extra field
/// </summary>
/// <remarks>Only guaranteed for Torrent GZip format</remarks>
public byte[]? ContentMd5
{
get
{
// Only valid for Torrent GZip
if (Header == null || !IsTorrentGZip)
return null;
// MD5 is the first packed field
int extraIndex = 0x00;
return Header.ExtraFieldBytes.ReadBytes(ref extraIndex, 0x10);
}
}
/// <summary>
/// Content size as stored in the extra field
/// </summary>
/// <remarks>Only guaranteed for Torrent GZip format</remarks>
public ulong ContentSize
{
get
{
// Only valid for Torrent GZip
if (Header == null || !IsTorrentGZip)
return 0;
// Size is the third packed field
int extraIndex = 0x14;
return Header.ExtraFieldBytes.ReadUInt64LittleEndian(ref extraIndex);
}
}
/// <summary>
/// Offset to the compressed data
/// </summary>
@@ -54,6 +109,51 @@ namespace SabreTools.Serialization.Wrappers
/// <inheritdoc cref="Archive.Header"/>
public Header? Header => Model.Header;
/// <summary>
/// Indicates if the archive is in the standard
/// "Torrent GZip" format. This format is used by
/// some programs to store extended hashes in the
/// header while maintaining the format otherwise.
/// </summary>
public bool IsTorrentGZip
{
get
{
// If the header is invalid
if (Header == null)
return false;
// Torrent GZip uses normal deflate, not GZIP deflate
if (Header.CompressionMethod != CompressionMethod.Deflate)
return false;
// Only the extra field should be present
if (Header.Flags != Flags.FEXTRA)
return false;
// The modification time should be 0x00000000, but some implementations
// do not set this correctly, so it is skipped.
// No extra flags are set
if (Header.ExtraFlags != 0x00)
return false;
// The OS should be FAT, regardless of the original platform, but
// some implementations do not set this correctly, so it is skipped.
// The extra field is non-standard, using the following format:
// - 0x00-0x0F - MD5 hash of the internal file
// - 0x10-0x13 - CRC-32 checksum of the internal file
// - 0x14-0x1B - Little-endian file size of the internal file
if (Header.ExtraLength != 0x1C)
return false;
if (Header.ExtraFieldBytes == null || Header.ExtraFieldBytes.Length != 0x1C)
return false;
return true;
}
}
/// <inheritdoc cref="Archive.Trailer"/>
public Trailer? Trailer => Model.Trailer;
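With the layout documented in IsTorrentGZip, pulling the packed values back out is just the three properties above; a short usage sketch, assuming gzip is an instance of this wrapper:

    // Read the packed Torrent GZip fields from the extra field
    if (gzip.IsTorrentGZip)
    {
        byte[]? md5 = gzip.ContentMd5;     // bytes 0x00-0x0F
        byte[]? crc32 = gzip.ContentCrc32; // bytes 0x10-0x13
        ulong size = gzip.ContentSize;     // bytes 0x14-0x1B, little-endian
    }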

View File

@@ -1,5 +1,5 @@
using System;
#if (NET452_OR_GREATER || NETCOREAPP) && (WINX86 || WINX64)
#if NET452_OR_GREATER || NETCOREAPP
using System.IO;
using StormLibSharp;
#endif
@@ -11,9 +11,16 @@ namespace SabreTools.Serialization.Wrappers
/// <inheritdoc/>
public bool Extract(string outputDirectory, bool includeDebug)
{
#if (NET452_OR_GREATER || NETCOREAPP) && (WINX86 || WINX64)
#if NET452_OR_GREATER || NETCOREAPP
try
{
// Limit use to Windows only
if (Environment.OSVersion.Platform != PlatformID.Win32NT)
{
Console.WriteLine("Extraction is not supported for this operating system!");
return false;
}
if (Filename == null || !File.Exists(Filename))
return false;

View File

@@ -132,7 +132,7 @@ namespace SabreTools.Serialization.Wrappers
extension = "xml";
break;
}
else if (overlaySample.StartsWith(Data.Models.XZ.Constants.SignatureBytes))
else if (overlaySample.StartsWith(Data.Models.XZ.Constants.HeaderSignatureBytes))
{
extension = "xz";
break;

View File

@@ -271,7 +271,7 @@ namespace SabreTools.Serialization.Wrappers
extension = "xml";
break;
}
else if (overlaySample.StartsWith(Data.Models.XZ.Constants.SignatureBytes))
else if (overlaySample.StartsWith(Data.Models.XZ.Constants.HeaderSignatureBytes))
{
extension = "xz";
break;
@@ -474,7 +474,7 @@ namespace SabreTools.Serialization.Wrappers
extension = "xml";
break;
}
else if (resourceSample.StartsWith(Data.Models.XZ.Constants.SignatureBytes))
else if (resourceSample.StartsWith(Data.Models.XZ.Constants.HeaderSignatureBytes))
{
extension = "xz";
break;

View File

@@ -90,7 +90,7 @@ namespace SabreTools.Serialization.Wrappers
// If the entry point matches with the start of a section, use that
int entryPointSection = FindEntryPointSectionIndex();
if (entryPointSection >= 0 && OptionalHeader.AddressOfEntryPoint == SectionTable[entryPointSection]?.VirtualAddress)
if (entryPointSection >= 0 && OptionalHeader.AddressOfEntryPoint == SectionTable[entryPointSection].VirtualAddress)
{
_entryPointData = GetSectionData(entryPointSection) ?? [];
return _entryPointData;
@@ -143,15 +143,15 @@ namespace SabreTools.Serialization.Wrappers
// Populate the raw header padding data based on the source
uint headerStartAddress = Stub.Header.NewExeHeaderAddr;
uint firstSectionAddress = uint.MaxValue;
foreach (var s in SectionTable)
foreach (var section in SectionTable)
{
if (s == null || s.PointerToRawData == 0)
if (section.PointerToRawData == 0)
continue;
if (s.PointerToRawData < headerStartAddress)
if (section.PointerToRawData < headerStartAddress)
continue;
if (s.PointerToRawData < firstSectionAddress)
firstSectionAddress = s.PointerToRawData;
if (section.PointerToRawData < firstSectionAddress)
firstSectionAddress = section.PointerToRawData;
}
// Check if the header length is more than 0 before reading data
@@ -328,15 +328,7 @@ namespace SabreTools.Serialization.Wrappers
// Search through all sections and find the furthest a section goes
long endOfSectionData = OptionalHeader.SizeOfHeaders;
foreach (var section in SectionTable)
{
// If we have an invalid section
if (section == null)
continue;
// Add the raw data size
endOfSectionData += section.SizeOfRawData;
}
Array.ForEach(SectionTable, s => endOfSectionData += s.SizeOfRawData);
// If we didn't find the end of section data
if (endOfSectionData <= 0)
@@ -527,11 +519,8 @@ namespace SabreTools.Serialization.Wrappers
_sectionNames = new string[SectionTable.Length];
for (int i = 0; i < _sectionNames.Length; i++)
{
var section = SectionTable[i];
if (section == null)
continue;
// TODO: Handle long section names with leading `/`
var section = SectionTable[i];
byte[]? sectionNameBytes = section.Name;
if (sectionNameBytes != null)
{
@@ -2046,9 +2035,6 @@ namespace SabreTools.Serialization.Wrappers
// Get the section data from the table
var section = SectionTable[index];
if (section == null)
return null;
uint address = section.VirtualAddress.ConvertVirtualAddress(SectionTable);
if (address == 0)
return null;

View File

@@ -73,7 +73,21 @@ namespace SabreTools.Serialization.Wrappers
if (data == null || !data.CanRead)
return null;
return new XZ(new Archive(), data);
try
{
// Cache the current offset
long currentOffset = data.Position;
var model = new Readers.XZ().Deserialize(data);
if (model == null)
return null;
return new XZ(model, data, currentOffset);
}
catch
{
return null;
}
}
#endregion
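With Create now running the reader, wiring a file up for inspection follows the same pattern as the other wrappers; a short sketch, assuming this hunk modifies the static Create(Stream) factory and using an illustrative file path:

    // Open an .xz file and wrap it for inspection
    using var stream = System.IO.File.OpenRead("sample.xz");
    var xz = SabreTools.Serialization.Wrappers.XZ.Create(stream);
    if (xz != null)
        System.Console.WriteLine($"Parsed {xz.Model.Blocks?.Length ?? 0} block(s)");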