Compare commits

...

66 Commits
1.6.0 ... 1.6.9

Author SHA1 Message Date
Matt Nadareski
fe95b894d7 Bump version 2024-10-31 15:23:59 -04:00
Matt Nadareski
38a2712a8f Fake readable compressor names 2024-10-31 13:51:29 -04:00
Matt Nadareski
d1ea091574 Remove "press enter" on failure 2024-10-31 13:49:08 -04:00
Matt Nadareski
6bc812fc2f Fix formatting for CHD printing 2024-10-31 13:38:42 -04:00
Matt Nadareski
61b89fbd72 Fix typo in N3DS 2024-10-31 12:10:53 -04:00
Matt Nadareski
a2c065bdf2 Add CHD to factory 2024-10-31 12:09:36 -04:00
Matt Nadareski
88479f674b Add CHD printer 2024-10-31 12:06:25 -04:00
Matt Nadareski
5edbacde74 Add CHD printer 2024-10-31 12:03:34 -04:00
Matt Nadareski
67fc51224b Fix lack of ValueTuple in switch 2024-10-31 11:51:14 -04:00
Matt Nadareski
101f3294b4 Add CHD wrapper 2024-10-31 11:47:58 -04:00
Matt Nadareski
6c5622f732 Add CHD header deserialization 2024-10-31 11:40:50 -04:00
Matt Nadareski
f2a6fe1445 Update Models to 1.4.11 2024-10-31 11:34:45 -04:00
Matt Nadareski
b0b593443f Update packages 2024-10-24 17:27:55 -04:00
Matt Nadareski
9b05185add Fix old .NET compatibility 2024-10-14 00:20:02 -04:00
Matt Nadareski
17316da536 Port numerous extensions from NDecrypt 2024-10-14 00:15:14 -04:00
Matt Nadareski
f3ca4dd989 Port logic from UnshieldSharp 2024-10-03 11:14:41 -04:00
Matt Nadareski
e2b7bdac8c Temporary fix for IS-CAB file group parsing 2024-10-03 02:51:37 -04:00
Matt Nadareski
f86f6dc438 Bump version 2024-10-01 14:08:18 -04:00
Matt Nadareski
2bac0ed505 Update packages 2024-10-01 14:06:53 -04:00
Matt Nadareski
ae4078bb7f Fix inconsistencies in build and publish 2024-08-08 20:17:42 -04:00
Matt Nadareski
afaffbd9a2 Fix 3DS serialization and printing 2024-08-08 19:46:05 -04:00
TheRogueArchivist
b878e59e2e Fix typo in PortableExecutable Printer (#11) 2024-07-12 11:08:50 -04:00
Matt Nadareski
4bb3f625dd Make PE debug table parsing safer 2024-06-20 11:23:28 -04:00
Matt Nadareski
b7978cafa5 Bump version 2024-06-13 11:12:40 -04:00
Matt Nadareski
17f376c76f Remove all instances of this. 2024-06-05 22:49:27 -04:00
Matt Nadareski
2774fdf158 Clean up enumerables and namespace use 2024-06-05 22:48:42 -04:00
Matt Nadareski
11081efcb0 Make PE header reading even saferer 2024-06-05 22:22:22 -04:00
TheRogueArchivist
1b412c3027 Add header length safeguards to PortableExecutable wrapper (#9) 2024-06-05 22:19:35 -04:00
Matt Nadareski
73ec66e627 Fix ISv3 deserialization 2024-06-03 11:55:12 -04:00
Matt Nadareski
4ae4cd80b1 Bump version 2024-05-30 21:07:04 -04:00
Matt Nadareski
6eb27c66fc Merge pull request #8 from TheRogueArchivist/streamdatalock
Add lock for reading data from stream
2024-05-30 12:30:33 -04:00
TheRogueArchivist
f96fd17fd3 Add lock for reading data from stream 2024-05-27 15:36:04 -06:00
Matt Nadareski
c255a2494d Fix IS-CAB file group name parsing 2024-05-18 21:27:09 -04:00
Matt Nadareski
86a9846300 Bump version 2024-05-15 15:10:58 -04:00
Matt Nadareski
db877d253c Update Models, fix build 2024-05-15 14:59:55 -04:00
Matt Nadareski
0acf1e3b08 Handle bounds-defying reads 2024-05-15 13:38:44 -04:00
Matt Nadareski
362ed3a9b6 Protect against odd end-of-stream issues 2024-05-15 13:08:51 -04:00
Matt Nadareski
758878a229 Bump version 2024-05-15 12:02:21 -04:00
Matt Nadareski
ffb6dfc333 Update packages 2024-05-13 16:29:53 -04:00
Matt Nadareski
66da74e00a Fix resource table issues with NE 2024-05-12 11:46:05 -04:00
Matt Nadareski
d41a0045cb Fix input paths for test program 2024-05-09 21:54:30 -04:00
Matt Nadareski
b65629ba0e Combine magic and extension checks; helps with complex situations 2024-05-09 21:34:58 -04:00
Matt Nadareski
9518e6d1a0 Unicode (UTF-16) not UTF-8 2024-05-08 12:09:11 -04:00
Matt Nadareski
4f374ee885 Only read resources that are valid 2024-05-08 12:02:48 -04:00
Matt Nadareski
afa239056e Handle future model fix 2024-05-07 08:55:54 -04:00
Matt Nadareski
886825af11 Bump version 2024-05-07 05:17:06 -04:00
Matt Nadareski
198de925aa Update IO 2024-05-07 05:13:30 -04:00
Matt Nadareski
3f7b71e9a5 Bump version 2024-05-06 22:23:45 -04:00
Matt Nadareski
95baaf8603 Update SabreTools.IO 2024-05-06 22:12:14 -04:00
Matt Nadareski
3673264bab Bump version 2024-04-28 19:37:10 -04:00
Matt Nadareski
64fb5a6b63 Update SabreTools.IO 2024-04-28 19:32:06 -04:00
Matt Nadareski
e9c959ccdb Update SabreTools.IO 2024-04-28 17:39:30 -04:00
Matt Nadareski
4b7487e92e More rudimentary ZIP64 fixes 2024-04-28 00:24:35 -04:00
Matt Nadareski
52dbcffd8e Add shortcut if any other valid PKZIP blocks found 2024-04-27 23:57:32 -04:00
Matt Nadareski
24ae354bc2 Fix an indicator for ZIP64 2024-04-27 23:50:03 -04:00
Matt Nadareski
b30b91fd91 Remove redunant fix in StringBuilderExtensions 2024-04-27 23:48:55 -04:00
Matt Nadareski
efb63afc74 Fix PKZIP data printing 2024-04-27 23:45:33 -04:00
Matt Nadareski
16706f7169 Force writing values with proper width 2024-04-27 23:42:37 -04:00
Matt Nadareski
d7c32676b5 Add PKZIP printer implementation 2024-04-27 23:40:02 -04:00
Matt Nadareski
c8c45446bc Add PKZIP archive extra data record parsing 2024-04-27 23:01:50 -04:00
Matt Nadareski
f4de2e27d7 Notes cleanup 2024-04-27 22:49:09 -04:00
Matt Nadareski
970fcbd93b Add PKZIP shell wrapper 2024-04-27 22:45:49 -04:00
Matt Nadareski
57d1cd7f1e Initial code for PKZIP deserialization 2024-04-27 22:41:22 -04:00
Matt Nadareski
522fc372fa Fix instance of wrong extension 2024-04-27 22:23:33 -04:00
Matt Nadareski
7141690fcb Add override for compression handling 2024-04-27 22:04:52 -04:00
Matt Nadareski
c7d9177e68 Allow decompression to be skipped 2024-04-27 22:04:38 -04:00
45 changed files with 3203 additions and 913 deletions

328
.gitignore vendored
View File

@@ -1,15 +1,7 @@
*.swp
*.*~
project.lock.json
.DS_Store
*.pyc
nupkg/
# Visual Studio Code
.vscode
# Rider
.idea
## Ignore Visual Studio temporary files, build results, and
## files generated by popular Visual Studio add-ons.
##
## Get latest from https://github.com/github/gitignore/blob/master/VisualStudio.gitignore
# User-specific files
*.suo
@@ -17,6 +9,9 @@ nupkg/
*.userosscache
*.sln.docstates
# User-specific files (MonoDevelop/Xamarin Studio)
*.userprefs
# Build results
[Dd]ebug/
[Dd]ebugPublic/
@@ -24,15 +19,312 @@ nupkg/
[Rr]eleases/
x64/
x86/
build/
bld/
[Bb]in/
[Oo]bj/
[Oo]ut/
msbuild.log
msbuild.err
msbuild.wrn
[Ll]og/
# Visual Studio 2015
# Visual Studio 2015/2017 cache/options directory
.vs/
# Uncomment if you have tasks that create the project's static files in wwwroot
#wwwroot/
# Visual Studio 2017 auto generated files
Generated\ Files/
# MSTest test Results
[Tt]est[Rr]esult*/
[Bb]uild[Ll]og.*
# NUNIT
*.VisualState.xml
TestResult.xml
# Build Results of an ATL Project
[Dd]ebugPS/
[Rr]eleasePS/
dlldata.c
# Benchmark Results
BenchmarkDotNet.Artifacts/
# .NET Core
project.lock.json
project.fragment.lock.json
artifacts/
**/Properties/launchSettings.json
# StyleCop
StyleCopReport.xml
# Files built by Visual Studio
*_i.c
*_p.c
*_i.h
*.ilk
*.meta
*.obj
*.iobj
*.pch
*.pdb
*.ipdb
*.pgc
*.pgd
*.rsp
*.sbr
*.tlb
*.tli
*.tlh
*.tmp
*.tmp_proj
*.log
*.vspscc
*.vssscc
.builds
*.pidb
*.svclog
*.scc
# Chutzpah Test files
_Chutzpah*
# Visual C++ cache files
ipch/
*.aps
*.ncb
*.opendb
*.opensdf
*.sdf
*.cachefile
*.VC.db
*.VC.VC.opendb
# Visual Studio profiler
*.psess
*.vsp
*.vspx
*.sap
# Visual Studio Trace Files
*.e2e
# TFS 2012 Local Workspace
$tf/
# Guidance Automation Toolkit
*.gpState
# ReSharper is a .NET coding add-in
_ReSharper*/
*.[Rr]e[Ss]harper
*.DotSettings.user
# JustCode is a .NET coding add-in
.JustCode
# TeamCity is a build add-in
_TeamCity*
# DotCover is a Code Coverage Tool
*.dotCover
# AxoCover is a Code Coverage Tool
.axoCover/*
!.axoCover/settings.json
# Visual Studio code coverage results
*.coverage
*.coveragexml
# NCrunch
_NCrunch_*
.*crunch*.local.xml
nCrunchTemp_*
# MightyMoose
*.mm.*
AutoTest.Net/
# Web workbench (sass)
.sass-cache/
# Installshield output folder
[Ee]xpress/
# DocProject is a documentation generator add-in
DocProject/buildhelp/
DocProject/Help/*.HxT
DocProject/Help/*.HxC
DocProject/Help/*.hhc
DocProject/Help/*.hhk
DocProject/Help/*.hhp
DocProject/Help/Html2
DocProject/Help/html
# Click-Once directory
publish/
# Publish Web Output
*.[Pp]ublish.xml
*.azurePubxml
# Note: Comment the next line if you want to checkin your web deploy settings,
# but database connection strings (with potential passwords) will be unencrypted
*.pubxml
*.publishproj
# Microsoft Azure Web App publish settings. Comment the next line if you want to
# checkin your Azure Web App publish settings, but sensitive information contained
# in these scripts will be unencrypted
PublishScripts/
# NuGet Packages
*.nupkg
# The packages folder can be ignored because of Package Restore
**/[Pp]ackages/*
# except build/, which is used as an MSBuild target.
!**/[Pp]ackages/build/
# Uncomment if necessary however generally it will be regenerated when needed
#!**/[Pp]ackages/repositories.config
# NuGet v3's project.json files produces more ignorable files
*.nuget.props
*.nuget.targets
# Microsoft Azure Build Output
csx/
*.build.csdef
# Microsoft Azure Emulator
ecf/
rcf/
# Windows Store app package directories and files
AppPackages/
BundleArtifacts/
Package.StoreAssociation.xml
_pkginfo.txt
*.appx
# Visual Studio cache files
# files ending in .cache can be ignored
*.[Cc]ache
# but keep track of directories ending in .cache
!*.[Cc]ache/
# Others
ClientBin/
~$*
*~
*.dbmdl
*.dbproj.schemaview
*.jfm
*.pfx
*.publishsettings
orleans.codegen.cs
# Including strong name files can present a security risk
# (https://github.com/github/gitignore/pull/2483#issue-259490424)
#*.snk
# Since there are multiple workflows, uncomment next line to ignore bower_components
# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622)
#bower_components/
# RIA/Silverlight projects
Generated_Code/
# Backup & report files from converting an old project file
# to a newer Visual Studio version. Backup files are not needed,
# because we have git ;-)
_UpgradeReport_Files/
Backup*/
UpgradeLog*.XML
UpgradeLog*.htm
ServiceFabricBackup/
*.rptproj.bak
# SQL Server files
*.mdf
*.ldf
*.ndf
# Business Intelligence projects
*.rdl.data
*.bim.layout
*.bim_*.settings
*.rptproj.rsuser
# Microsoft Fakes
FakesAssemblies/
# GhostDoc plugin setting file
*.GhostDoc.xml
# Node.js Tools for Visual Studio
.ntvs_analysis.dat
node_modules/
# Visual Studio 6 build log
*.plg
# Visual Studio 6 workspace options file
*.opt
# Visual Studio 6 auto-generated workspace file (contains which files were open etc.)
*.vbw
# Visual Studio LightSwitch build output
**/*.HTMLClient/GeneratedArtifacts
**/*.DesktopClient/GeneratedArtifacts
**/*.DesktopClient/ModelManifest.xml
**/*.Server/GeneratedArtifacts
**/*.Server/ModelManifest.xml
_Pvt_Extensions
# Paket dependency manager
.paket/paket.exe
paket-files/
# FAKE - F# Make
.fake/
# JetBrains Rider
.idea/
*.sln.iml
# CodeRush
.cr/
# Python Tools for Visual Studio (PTVS)
__pycache__/
*.pyc
# Cake - Uncomment if you are using it
# tools/**
# !tools/packages.config
# Tabs Studio
*.tss
# Telerik's JustMock configuration file
*.jmconfig
# BizTalk build output
*.btp.cs
*.btm.cs
*.odx.cs
*.xsd.cs
# OpenCover UI analysis results
OpenCover/
# Azure Stream Analytics local run output
ASALocalRun/
# MSBuild Binary and Structured Log
*.binlog
# NVidia Nsight GPU debugger configuration file
*.nvuser
# MFractors (Xamarin productivity tool) working folder
.mfractor/

28
.vscode/launch.json vendored Normal file
View File

@@ -0,0 +1,28 @@
{
// Use IntelliSense to find out which attributes exist for C# debugging
// Use hover for the description of the existing attributes
// For further information visit https://github.com/OmniSharp/omnisharp-vscode/blob/master/debugger-launchjson.md
"version": "0.2.0",
"configurations": [
{
"name": ".NET Core Launch (Test)",
"type": "coreclr",
"request": "launch",
"preLaunchTask": "build",
// If you have changed target frameworks, make sure to update the program path.
"program": "${workspaceFolder}/Test/bin/Debug/net8.0/Test.dll",
"args": [],
"cwd": "${workspaceFolder}",
// For more information about the 'console' field, see https://aka.ms/VSCode-CS-LaunchJson-Console
"console": "internalConsole",
"stopAtEntry": false,
"justMyCode": false
},
{
"name": ".NET Core Attach",
"type": "coreclr",
"request": "attach",
"processId": "${command:pickProcess}"
}
]
}

24
.vscode/tasks.json vendored Normal file
View File

@@ -0,0 +1,24 @@
{
// See https://go.microsoft.com/fwlink/?LinkId=733558
// for the documentation about the tasks.json format
"version": "2.0.0",
"tasks": [
{
"label": "build",
"command": "dotnet",
"type": "shell",
"args": [
"build",
// Ask dotnet build to generate full paths for file names.
"/property:GenerateFullPaths=true",
// Do not generate summary otherwise it leads to duplicate errors in Problems panel
"/consoleloggerparameters:NoSummary"
],
"group": "build",
"presentation": {
"reveal": "silent"
},
"problemMatcher": "$msCompile"
}
]
}
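In effect, the build task invokes dotnet build /property:GenerateFullPaths=true /consoleloggerparameters:NoSummary from the workspace folder, and the launch configuration above runs it automatically as its preLaunchTask before starting Test.dll under the debugger.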

View File

@@ -1,4 +1,4 @@
<Project Sdk="Microsoft.NET.Sdk">
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFrameworks>net6.0;net8.0</TargetFrameworks>
@@ -26,10 +26,10 @@
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PrivateAssets>all</PrivateAssets>
</PackageReference>
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.9.0" />
<PackageReference Include="SabreTools.Models" Version="1.4.5" />
<PackageReference Include="xunit" Version="2.7.1" />
<PackageReference Include="xunit.runner.visualstudio" Version="2.5.8">
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.11.1" />
<PackageReference Include="SabreTools.Models" Version="1.4.11" />
<PackageReference Include="xunit" Version="2.9.2" />
<PackageReference Include="xunit.runner.visualstudio" Version="2.8.2">
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PrivateAssets>all</PrivateAssets>
</PackageReference>

View File

@@ -17,6 +17,11 @@ namespace SabreTools.Serialization.Deserializers
IFileDeserializer<TModel>,
IStreamDeserializer<TModel>
{
/// <summary>
/// Indicates if compressed files should be decompressed before processing
/// </summary>
protected virtual bool SkipCompression => false;
#region IByteDeserializer
/// <inheritdoc/>
@@ -42,7 +47,7 @@ namespace SabreTools.Serialization.Deserializers
/// <inheritdoc/>
public virtual TModel? Deserialize(string? path)
{
using var stream = PathProcessor.OpenStream(path);
using var stream = PathProcessor.OpenStream(path, SkipCompression);
return DeserializeStream(stream);
}
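For context, a minimal hedged sketch of a subclass using the new SkipCompression hook; the type names here are hypothetical and only illustrate the pattern (the PKZIP deserializer further down overrides it the same way so its raw archive bytes are not pre-decompressed):

using System.IO;
using SabreTools.Serialization.Deserializers;

// Hypothetical model type used only for illustration
public class RawContainer { }

public class RawContainerDeserializer : BaseBinaryDeserializer<RawContainer>
{
    // Opt out of decompression so PathProcessor.OpenStream returns the raw bytes
    protected override bool SkipCompression => true;

    /// <inheritdoc/>
    public override RawContainer? Deserialize(Stream? data)
    {
        // Same basic guards the shipped deserializers use
        if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
            return null;

        // Format-specific parsing of the undecompressed stream would go here
        return new RawContainer();
    }
}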

View File

@@ -0,0 +1,253 @@
using System;
using System.IO;
using System.Text;
using SabreTools.IO.Extensions;
using SabreTools.Models.CHD;
namespace SabreTools.Serialization.Deserializers
{
// TODO: Expand this to full CHD files eventually
public class CHD : BaseBinaryDeserializer<Header>
{
/// <inheritdoc/>
public override Header? Deserialize(Stream? data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
return null;
// If the offset is out of bounds
if (data.Position < 0 || data.Position >= data.Length)
return null;
// Cache the current offset
int initialOffset = (int)data.Position;
// Determine the header version
uint version = GetVersion(data);
// Read and return the current CHD
return version switch
{
1 => ParseHeaderV1(data),
2 => ParseHeaderV2(data),
3 => ParseHeaderV3(data),
4 => ParseHeaderV4(data),
5 => ParseHeaderV5(data),
_ => null,
};
}
/// <summary>
/// Get the matching CHD version, if possible
/// </summary>
/// <returns>Matching version, 0 if none</returns>
private static uint GetVersion(Stream data)
{
// Read the header values
byte[] tagBytes = data.ReadBytes(8);
string tag = Encoding.ASCII.GetString(tagBytes);
uint length = data.ReadUInt32BigEndian();
uint version = data.ReadUInt32BigEndian();
// Seek back to start
data.SeekIfPossible();
// Check the signature
if (!string.Equals(tag, Constants.SignatureString, StringComparison.Ordinal))
return 0;
// Match the version to header length
#if NET472_OR_GREATER || NETCOREAPP
return (version, length) switch
{
(1, Constants.HeaderV1Size) => version,
(2, Constants.HeaderV2Size) => version,
(3, Constants.HeaderV3Size) => version,
(4, Constants.HeaderV4Size) => version,
(5, Constants.HeaderV5Size) => version,
_ => 0,
};
#else
return version switch
{
1 => length == Constants.HeaderV1Size ? version : 0,
2 => length == Constants.HeaderV2Size ? version : 0,
3 => length == Constants.HeaderV3Size ? version : 0,
4 => length == Constants.HeaderV4Size ? version : 0,
5 => length == Constants.HeaderV5Size ? version : 0,
_ => 0,
};
#endif
}
/// <summary>
/// Parse a Stream into a V1 header
/// </summary>
private static HeaderV1? ParseHeaderV1(Stream data)
{
var header = new HeaderV1();
byte[] tagBytes = data.ReadBytes(8);
header.Tag = Encoding.ASCII.GetString(tagBytes);
if (header.Tag != Constants.SignatureString)
return null;
header.Length = data.ReadUInt32BigEndian();
if (header.Length != Constants.HeaderV1Size)
return null;
header.Version = data.ReadUInt32BigEndian();
header.Flags = (Flags)data.ReadUInt32BigEndian();
header.Compression = (CompressionType)data.ReadUInt32BigEndian();
if (header.Compression > CompressionType.CHDCOMPRESSION_ZLIB)
return null;
header.HunkSize = data.ReadUInt32BigEndian();
header.TotalHunks = data.ReadUInt32BigEndian();
header.Cylinders = data.ReadUInt32BigEndian();
header.Heads = data.ReadUInt32BigEndian();
header.Sectors = data.ReadUInt32BigEndian();
header.MD5 = data.ReadBytes(16);
header.ParentMD5 = data.ReadBytes(16);
return header;
}
/// <summary>
/// Parse a Stream into a V2 header
/// </summary>
private static HeaderV2? ParseHeaderV2(Stream data)
{
var header = new HeaderV2();
byte[] tagBytes = data.ReadBytes(8);
header.Tag = Encoding.ASCII.GetString(tagBytes);
if (header.Tag != Constants.SignatureString)
return null;
header.Length = data.ReadUInt32BigEndian();
if (header.Length != Constants.HeaderV2Size)
return null;
header.Version = data.ReadUInt32BigEndian();
header.Flags = (Flags)data.ReadUInt32BigEndian();
header.Compression = (CompressionType)data.ReadUInt32BigEndian();
if (header.Compression > CompressionType.CHDCOMPRESSION_ZLIB)
return null;
header.HunkSize = data.ReadUInt32BigEndian();
header.TotalHunks = data.ReadUInt32BigEndian();
header.Cylinders = data.ReadUInt32BigEndian();
header.Heads = data.ReadUInt32BigEndian();
header.Sectors = data.ReadUInt32BigEndian();
header.MD5 = data.ReadBytes(16);
header.ParentMD5 = data.ReadBytes(16);
header.BytesPerSector = data.ReadUInt32BigEndian();
return header;
}
/// <summary>
/// Parse a Stream into a V3 header
/// </summary>
private static HeaderV3? ParseHeaderV3(Stream data)
{
var header = new HeaderV3();
byte[] tagBytes = data.ReadBytes(8);
header.Tag = Encoding.ASCII.GetString(tagBytes);
if (header.Tag != Constants.SignatureString)
return null;
header.Length = data.ReadUInt32BigEndian();
if (header.Length != Constants.HeaderV3Size)
return null;
header.Version = data.ReadUInt32BigEndian();
header.Flags = (Flags)data.ReadUInt32BigEndian();
header.Compression = (CompressionType)data.ReadUInt32BigEndian();
if (header.Compression > CompressionType.CHDCOMPRESSION_ZLIB_PLUS)
return null;
header.TotalHunks = data.ReadUInt32BigEndian();
header.LogicalBytes = data.ReadUInt64BigEndian();
header.MetaOffset = data.ReadUInt64BigEndian();
header.MD5 = data.ReadBytes(16);
header.ParentMD5 = data.ReadBytes(16);
header.HunkBytes = data.ReadUInt32BigEndian();
header.SHA1 = data.ReadBytes(20);
header.ParentSHA1 = data.ReadBytes(20);
return header;
}
/// <summary>
/// Parse a Stream into a V4 header
/// </summary>
private static HeaderV4? ParseHeaderV4(Stream data)
{
var header = new HeaderV4();
byte[] tagBytes = data.ReadBytes(8);
header.Tag = Encoding.ASCII.GetString(tagBytes);
if (header.Tag != Constants.SignatureString)
return null;
header.Length = data.ReadUInt32BigEndian();
if (header.Length != Constants.HeaderV4Size)
return null;
header.Version = data.ReadUInt32BigEndian();
header.Flags = (Flags)data.ReadUInt32BigEndian();
header.Compression = (CompressionType)data.ReadUInt32BigEndian();
if (header.Compression > CompressionType.CHDCOMPRESSION_AV)
return null;
header.TotalHunks = data.ReadUInt32BigEndian();
header.LogicalBytes = data.ReadUInt64BigEndian();
header.MetaOffset = data.ReadUInt64BigEndian();
header.HunkBytes = data.ReadUInt32BigEndian();
header.SHA1 = data.ReadBytes(20);
header.ParentSHA1 = data.ReadBytes(20);
header.RawSHA1 = data.ReadBytes(20);
return header;
}
/// <summary>
/// Parse a Stream into a V5 header
/// </summary>
private static HeaderV5? ParseHeaderV5(Stream data)
{
var header = new HeaderV5();
byte[] tagBytes = data.ReadBytes(8);
header.Tag = Encoding.ASCII.GetString(tagBytes);
if (header.Tag != Constants.SignatureString)
return null;
header.Length = data.ReadUInt32BigEndian();
if (header.Length != Constants.HeaderV5Size)
return null;
header.Version = data.ReadUInt32BigEndian();
header.Compressors = new uint[4];
for (int i = 0; i < header.Compressors.Length; i++)
{
header.Compressors[i] = data.ReadUInt32BigEndian();
}
header.LogicalBytes = data.ReadUInt64BigEndian();
header.MapOffset = data.ReadUInt64BigEndian();
header.MetaOffset = data.ReadUInt64BigEndian();
header.HunkBytes = data.ReadUInt32BigEndian();
header.UnitBytes = data.ReadUInt32BigEndian();
header.RawSHA1 = data.ReadBytes(20);
header.SHA1 = data.ReadBytes(20);
header.ParentSHA1 = data.ReadBytes(20);
return header;
}
}
}
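For orientation, a hedged usage sketch of the new deserializer; the file path is a placeholder, and the concrete header type returned (HeaderV1 through HeaderV5) depends on the detected version:

using System;
using SabreTools.Models.CHD;

// "image.chd" is a placeholder path; Deserialize(string) is inherited from
// BaseBinaryDeserializer and dispatches on the detected header version.
var deserializer = new SabreTools.Serialization.Deserializers.CHD();
Header? header = deserializer.Deserialize("image.chd");

if (header == null)
    Console.WriteLine("Not a recognized CHD header");
else if (header is HeaderV5 v5)
    Console.WriteLine($"CHD v5: {v5.LogicalBytes} logical bytes, {v5.HunkBytes}-byte hunks");
else if (header is HeaderV1 v1)
    Console.WriteLine($"CHD v1: {v1.TotalHunks} hunks of {v1.HunkSize} bytes");
else
    Console.WriteLine($"CHD header parsed: {header.GetType().Name}");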

View File

@@ -1,5 +1,6 @@
using System.Collections.Generic;
using System.IO;
using System.Text;
using SabreTools.IO.Extensions;
using SabreTools.Models.InstallShieldArchiveV3;
@@ -114,7 +115,17 @@ namespace SabreTools.Serialization.Deserializers
/// <returns>Filled directory on success, null on error</returns>
public static Models.InstallShieldArchiveV3.Directory? ParseDirectory(Stream data)
{
return data.ReadType<Models.InstallShieldArchiveV3.Directory>();
var directory = new Models.InstallShieldArchiveV3.Directory();
directory.FileCount = data.ReadUInt16();
directory.ChunkSize = data.ReadUInt16();
// TODO: Is there any equivalent automatic type for UInt16-prefixed ANSI?
ushort nameLength = data.ReadUInt16();
byte[] nameBytes = data.ReadBytes(nameLength);
directory.Name = Encoding.ASCII.GetString(nameBytes);
return directory;
}
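On the TODO above, a hedged sketch of what a reusable UInt16-prefixed ANSI reader could look like; this is a hypothetical extension, not something added in this diff, and it reuses the ReadUInt16/ReadBytes extensions already imported here:

using System.IO;
using System.Text;
using SabreTools.IO.Extensions;

internal static class PrefixedStringReader
{
    // Hypothetical helper: read a UInt16 length prefix, then that many ANSI bytes
    public static string ReadUInt16PrefixedAnsiString(this Stream data)
    {
        ushort length = data.ReadUInt16();
        byte[] bytes = data.ReadBytes(length);
        return Encoding.ASCII.GetString(bytes);
    }
}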
/// <summary>

View File

@@ -1,3 +1,4 @@
using System;
using System.Collections.Generic;
using System.IO;
using SabreTools.IO.Extensions;
@@ -447,14 +448,9 @@ namespace SabreTools.Serialization.Deserializers
var fileGroup = new FileGroup();
fileGroup.NameOffset = data.ReadUInt32();
fileGroup.ExpandedSize = data.ReadUInt32();
fileGroup.Reserved0 = data.ReadBytes(4);
fileGroup.CompressedSize = data.ReadUInt32();
fileGroup.Reserved1 = data.ReadBytes(4);
fileGroup.Reserved2 = data.ReadBytes(2);
fileGroup.Attribute1 = data.ReadUInt16();
fileGroup.Attribute2 = data.ReadUInt16();
fileGroup.Attributes = (FileGroupAttributes)data.ReadUInt16();
// TODO: Figure out what data lives in this area for V5 and below
if (majorVersion <= 5)
@@ -462,19 +458,19 @@ namespace SabreTools.Serialization.Deserializers
fileGroup.FirstFile = data.ReadUInt32();
fileGroup.LastFile = data.ReadUInt32();
fileGroup.UnknownOffset = data.ReadUInt32();
fileGroup.Var4Offset = data.ReadUInt32();
fileGroup.Var1Offset = data.ReadUInt32();
fileGroup.UnknownStringOffset = data.ReadUInt32();
fileGroup.OperatingSystemOffset = data.ReadUInt32();
fileGroup.LanguageOffset = data.ReadUInt32();
fileGroup.HTTPLocationOffset = data.ReadUInt32();
fileGroup.FTPLocationOffset = data.ReadUInt32();
fileGroup.MiscOffset = data.ReadUInt32();
fileGroup.Var2Offset = data.ReadUInt32();
fileGroup.TargetDirectoryOffset = data.ReadUInt32();
fileGroup.Reserved3 = data.ReadBytes(2);
fileGroup.Reserved4 = data.ReadBytes(2);
fileGroup.Reserved5 = data.ReadBytes(2);
fileGroup.Reserved6 = data.ReadBytes(2);
fileGroup.Reserved7 = data.ReadBytes(2);
fileGroup.OverwriteFlags = (FileGroupFlags)data.ReadUInt32();
fileGroup.Reserved = new uint[4];
for (int i = 0; i < fileGroup.Reserved.Length; i++)
{
fileGroup.Reserved[i] = data.ReadUInt32();
}
// Cache the current position
long currentPosition = data.Position;
@@ -489,7 +485,7 @@ namespace SabreTools.Serialization.Deserializers
if (majorVersion >= 17)
fileGroup.Name = data.ReadNullTerminatedUnicodeString();
else
fileGroup.Name = data.ReadNullTerminatedUnicodeString();
fileGroup.Name = data.ReadNullTerminatedAnsiString();
}
// Seek back to the correct offset
@@ -512,15 +508,19 @@ namespace SabreTools.Serialization.Deserializers
component.IdentifierOffset = data.ReadUInt32();
component.DescriptorOffset = data.ReadUInt32();
component.DisplayNameOffset = data.ReadUInt32();
component.Reserved0 = data.ReadUInt16();
component.ReservedOffset0 = data.ReadUInt32();
component.ReservedOffset1 = data.ReadUInt32();
component.Status = (ComponentStatus)data.ReadUInt16();
component.PasswordOffset = data.ReadUInt32();
component.MiscOffset = data.ReadUInt32();
component.ComponentIndex = data.ReadUInt16();
component.NameOffset = data.ReadUInt32();
component.ReservedOffset2 = data.ReadUInt32();
component.ReservedOffset3 = data.ReadUInt32();
component.ReservedOffset4 = data.ReadUInt32();
component.Reserved1 = data.ReadBytes(32);
component.CDRomFolderOffset = data.ReadUInt32();
component.HTTPLocationOffset = data.ReadUInt32();
component.FTPLocationOffset = data.ReadUInt32();
component.Guid = new Guid[2];
for (int i = 0; i < component.Guid.Length; i++)
{
component.Guid[i] = data.ReadGuid();
}
component.CLSIDOffset = data.ReadUInt32();
component.Reserved2 = data.ReadBytes(28);
component.Reserved3 = data.ReadBytes(majorVersion <= 5 ? 2 : 1);
@@ -533,10 +533,10 @@ namespace SabreTools.Serialization.Deserializers
component.SubComponentsCount = data.ReadUInt16();
component.SubComponentsOffset = data.ReadUInt32();
component.NextComponentOffset = data.ReadUInt32();
component.ReservedOffset5 = data.ReadUInt32();
component.ReservedOffset6 = data.ReadUInt32();
component.ReservedOffset7 = data.ReadUInt32();
component.ReservedOffset8 = data.ReadUInt32();
component.OnInstallingOffset = data.ReadUInt32();
component.OnInstalledOffset = data.ReadUInt32();
component.OnUninstallingOffset = data.ReadUInt32();
component.OnUninstalledOffset = data.ReadUInt32();
// Cache the current position
long currentPosition = data.Position;

View File

@@ -476,7 +476,7 @@ namespace SabreTools.Serialization.Deserializers
var entry = new ResidentNamesTableEntry();
entry.Length = data.ReadByteValue();
if (entry.Length > 0)
if (entry.Length > 0 && data.Position + entry.Length <= data.Length)
{
byte[]? name = data.ReadBytes(entry.Length);
if (name != null)
@@ -810,7 +810,7 @@ namespace SabreTools.Serialization.Deserializers
var entry = new ImportModuleNameTableEntry();
entry.Length = data.ReadByteValue();
if (entry.Length > 0)
if (entry.Length > 0 && data.Position + entry.Length <= data.Length)
{
byte[]? name = data.ReadBytes(entry.Length);
if (name != null)
@@ -831,7 +831,7 @@ namespace SabreTools.Serialization.Deserializers
var entry = new ImportModuleProcedureNameTableEntry();
entry.Length = data.ReadByteValue();
if (entry.Length > 0)
if (entry.Length > 0 && data.Position + entry.Length <= data.Length)
{
byte[]? name = data.ReadBytes(entry.Length);
if (name != null)
@@ -862,7 +862,7 @@ namespace SabreTools.Serialization.Deserializers
var entry = new NonResidentNamesTableEntry();
entry.Length = data.ReadByteValue();
if (entry.Length > 0)
if (entry.Length > 0 && data.Position + entry.Length <= data.Length)
{
byte[]? name = data.ReadBytes(entry.Length);
if (name != null)

View File

@@ -259,7 +259,18 @@ namespace SabreTools.Serialization.Deserializers
/// <returns>Filled card info header on success, null on error</returns>
public static CardInfoHeader? ParseCardInfoHeader(Stream data)
{
return data.ReadType<CardInfoHeader>();
// TODO: Use marshalling here instead of building
var header = new CardInfoHeader();
header.WritableAddressMediaUnits = data.ReadUInt32();
header.CardInfoBitmask = data.ReadUInt32();
header.Reserved3 = data.ReadBytes(0x108);
header.TitleVersion = data.ReadUInt16();
header.CardRevision = data.ReadUInt16();
header.Reserved4 = data.ReadBytes(0xCD6);
header.InitialData = ParseInitialData(data);
return header;
}
/// <summary>
@@ -272,6 +283,26 @@ namespace SabreTools.Serialization.Deserializers
return data.ReadType<DevelopmentCardInfoHeader>();
}
/// <summary>
/// Parse a Stream into initial data
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled initial data on success, null on error</returns>
public static InitialData? ParseInitialData(Stream data)
{
// TODO: Use marshalling here instead of building
var id = new InitialData();
id.CardSeedKeyY = data.ReadBytes(0x10);
id.EncryptedCardSeed = data.ReadBytes(0x10);
id.CardSeedAESMAC = data.ReadBytes(0x10);
id.CardSeedNonce = data.ReadBytes(0x0C);
id.Reserved = data.ReadBytes(0xC4);
id.BackupHeader = ParseNCCHHeader(data, skipSignature: true);
return id;
}
/// <summary>
/// Parse a Stream into an NCCH header
/// </summary>

View File

@@ -76,7 +76,7 @@ namespace SabreTools.Serialization.Deserializers
// If the offset for the segment table doesn't exist
tableAddress = initialOffset
+ (int)stub.Header.NewExeHeaderAddr
+ executableHeader.SegmentTableOffset;
+ executableHeader.ResourceTableOffset;
if (tableAddress >= data.Length)
return executable;
@@ -262,7 +262,7 @@ namespace SabreTools.Serialization.Deserializers
/// <param name="data">Stream to parse</param>
/// <param name="count">Number of resource table entries to read</param>
/// <returns>Filled resource table on success, null on error</returns>
public static ResourceTable? ParseResourceTable(Stream data, int count)
public static ResourceTable? ParseResourceTable(Stream data, ushort count)
{
long initialOffset = data.Position;
@@ -270,13 +270,23 @@ namespace SabreTools.Serialization.Deserializers
var resourceTable = new ResourceTable();
resourceTable.AlignmentShiftCount = data.ReadUInt16();
resourceTable.ResourceTypes = new ResourceTypeInformationEntry[count];
for (int i = 0; i < resourceTable.ResourceTypes.Length; i++)
var resourceTypes = new List<ResourceTypeInformationEntry>();
for (int i = 0; i < count; i++)
{
var entry = new ResourceTypeInformationEntry();
entry.TypeID = data.ReadUInt16();
entry.ResourceCount = data.ReadUInt16();
entry.Reserved = data.ReadUInt32();
// A zero type ID marks the end of the resource type information blocks.
if (entry.TypeID == 0)
{
resourceTypes.Add(entry);
break;
}
entry.Resources = new ResourceTypeResourceEntry[entry.ResourceCount];
for (int j = 0; j < entry.ResourceCount; j++)
{
@@ -287,20 +297,23 @@ namespace SabreTools.Serialization.Deserializers
entry.Resources[j] = resource;
}
resourceTable.ResourceTypes[i] = entry;
resourceTypes.Add(entry);
}
resourceTable.ResourceTypes = [.. resourceTypes];
// Get the full list of unique string offsets
var stringOffsets = resourceTable.ResourceTypes
.Where(rt => rt != null)
.Where(rt => rt!.IsIntegerType() == false)
.Where(rt => !rt!.IsIntegerType() && rt!.TypeID != 0)
.Select(rt => rt!.TypeID)
.Union(resourceTable.ResourceTypes
.Where(rt => rt != null)
.Where(rt => rt != null && rt!.TypeID != 0)
.SelectMany(rt => rt!.Resources ?? [])
.Where(r => r!.IsIntegerType() == false)
.Where(r => !r!.IsIntegerType())
.Select(r => r!.ResourceID))
.Distinct()
.Where(o => o != 0)
.OrderBy(o => o)
.ToList();
@@ -359,7 +372,7 @@ namespace SabreTools.Serialization.Deserializers
// TODO: Use marshalling here instead of building
var residentNameTable = new List<ResidentNameTableEntry>();
while (data.Position < endOffset)
while (data.Position < endOffset && data.Position < data.Length)
{
var entry = ParseResidentNameTableEntry(data);
if (entry == null)
@@ -432,7 +445,7 @@ namespace SabreTools.Serialization.Deserializers
// TODO: Use marshalling here instead of building
var importedNameTable = new Dictionary<ushort, ImportedNameTableEntry>();
while (data.Position < endOffset)
while (data.Position < endOffset && data.Position < data.Length)
{
ushort currentOffset = (ushort)data.Position;
var entry = ParseImportedNameTableEntry(data);
@@ -472,7 +485,7 @@ namespace SabreTools.Serialization.Deserializers
// TODO: Use marshalling here instead of building
var entryTable = new List<EntryTableBundle>();
while (data.Position < endOffset)
while (data.Position < endOffset && data.Position < data.Length)
{
var entry = new EntryTableBundle();
entry.EntryCount = data.ReadByteValue();
@@ -511,7 +524,7 @@ namespace SabreTools.Serialization.Deserializers
// TODO: Use marshalling here instead of building
var residentNameTable = new List<NonResidentNameTableEntry>();
while (data.Position < endOffset)
while (data.Position < endOffset && data.Position < data.Length)
{
var entry = ParseNonResidentNameTableEntry(data);
if (entry == null)

View File

@@ -0,0 +1,697 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
using SabreTools.IO.Extensions;
using SabreTools.Models.PKZIP;
using static SabreTools.Models.PKZIP.Constants;
namespace SabreTools.Serialization.Deserializers
{
public class PKZIP : BaseBinaryDeserializer<Archive>
{
/// <inheritdoc/>
protected override bool SkipCompression => true;
/// <inheritdoc/>
public override Archive? Deserialize(Stream? data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
return null;
// If the offset is out of bounds
if (data.Position < 0 || data.Position >= data.Length)
return null;
var archive = new Archive();
#region End of Central Directory Record
// Find the end of central directory record
long eocdrOffset = SearchForEndOfCentralDirectoryRecord(data);
if (eocdrOffset < 0 || eocdrOffset >= data.Length)
return null;
// Seek to the end of central directory record
data.Seek(eocdrOffset, SeekOrigin.Begin);
// Read the end of central directory record
var eocdr = ParseEndOfCentralDirectoryRecord(data);
if (eocdr == null)
return null;
// Assign the end of central directory record
archive.EndOfCentralDirectoryRecord = eocdr;
#endregion
#region ZIP64 End of Central Directory Locator and Record
// Set a flag for ZIP64 not found by default
bool zip64 = false;
// Process ZIP64 if any fields are set to max value
if (eocdr.DiskNumber == 0xFFFF
|| eocdr.StartDiskNumber == 0xFFFF
|| eocdr.TotalEntriesOnDisk == 0xFFFF
|| eocdr.TotalEntries == 0xFFFF
|| eocdr.CentralDirectorySize == 0xFFFFFFFF
|| eocdr.CentralDirectoryOffset == 0xFFFFFFFF)
{
// Set the ZIP64 flag
zip64 = true;
// Find the ZIP64 end of central directory locator
long eocdlOffset = SearchForZIP64EndOfCentralDirectoryLocator(data);
if (eocdlOffset < 0 || eocdlOffset >= data.Length)
return null;
// Seek to the ZIP64 end of central directory locator
data.Seek(eocdlOffset, SeekOrigin.Begin);
// Read the ZIP64 end of central directory locator
var eocdl64 = ParseEndOfCentralDirectoryLocator64(data);
if (eocdl64 == null)
return null;
// Assign the ZIP64 end of central directory record
archive.ZIP64EndOfCentralDirectoryLocator = eocdl64;
// Try to get the ZIP64 end of central directory record offset
if ((long)eocdl64.CentralDirectoryOffset < 0 || (long)eocdl64.CentralDirectoryOffset >= data.Length)
return null;
// Seek to the ZIP64 end of central directory record
data.Seek((long)eocdl64.CentralDirectoryOffset, SeekOrigin.Begin);
// Read the ZIP64 end of central directory record
var eocdr64 = ParseEndOfCentralDirectoryRecord64(data);
if (eocdr64 == null)
return null;
// Assign the ZIP64 end of central directory record
archive.ZIP64EndOfCentralDirectoryRecord = eocdr64;
}
#endregion
#region Central Directory Records
// Try to get the central directory record offset
long cdrOffset, cdrSize;
if (zip64 && archive.ZIP64EndOfCentralDirectoryRecord != null)
{
cdrOffset = (long)archive.ZIP64EndOfCentralDirectoryRecord.CentralDirectoryOffset;
cdrSize = (long)archive.ZIP64EndOfCentralDirectoryRecord.CentralDirectorySize;
}
else if (archive.EndOfCentralDirectoryRecord != null)
{
cdrOffset = archive.EndOfCentralDirectoryRecord.CentralDirectoryOffset;
cdrSize = archive.EndOfCentralDirectoryRecord.CentralDirectorySize;
}
else
{
return null;
}
// Try to get the central directory record offset
if (cdrOffset < 0 || cdrOffset >= data.Length)
return null;
// Seek to the first central directory record
data.Seek(cdrOffset, SeekOrigin.Begin);
// Cache the current offset
long currentOffset = data.Position;
// Read the central directory records
var cdrs = new List<CentralDirectoryFileHeader>();
while (data.Position < currentOffset + cdrSize)
{
// Read the central directory record
var cdr = ParseCentralDirectoryFileHeader(data);
if (cdr == null)
return null;
// Add the central directory record
cdrs.Add(cdr);
}
// Assign the central directory records
archive.CentralDirectoryHeaders = [.. cdrs];
#endregion
// TODO: Handle digital signature -- immediately following central directory records
#region Archive Extra Data Record
// Find the archive extra data record
long aedrOffset = SearchForArchiveExtraDataRecord(data, cdrOffset);
if (aedrOffset >= 0 && aedrOffset < data.Length)
{
// Seek to the archive extra data record
data.Seek(aedrOffset, SeekOrigin.Begin);
// Read the archive extra data record
var aedr = ParseArchiveExtraDataRecord(data);
if (aedr == null)
return null;
// Assign the archive extra data record
archive.ArchiveExtraDataRecord = aedr;
}
#endregion
#region Local File
// Setup all of the collections
var localFileHeaders = new List<LocalFileHeader?>();
var encryptionHeaders = new List<byte[]?>();
var fileData = new List<byte[]>(); // TODO: Should this data be read here?
var dataDescriptors = new List<DataDescriptor?>();
var zip64DataDescriptors = new List<DataDescriptor64?>();
// Read the local file headers
for (int i = 0; i < archive.CentralDirectoryHeaders.Length; i++)
{
var header = archive.CentralDirectoryHeaders[i];
// Get the local file header offset
long headerOffset = header.RelativeOffsetOfLocalHeader;
if (headerOffset == 0xFFFFFFFF && header.ExtraField != null)
{
// TODO: Parse into a proper structure instead of this
byte[] extraData = header.ExtraField;
if (BitConverter.ToUInt16(extraData, 0) == 0x0001)
headerOffset = BitConverter.ToInt64(extraData, 4);
}
if (headerOffset < 0 || headerOffset >= data.Length)
return null;
// Seek to the local file header
data.Seek(headerOffset, SeekOrigin.Begin);
// Try to parse the local header
var localFileHeader = ParseLocalFileHeader(data);
if (localFileHeader == null)
{
// Add a placeholder null item
localFileHeaders.Add(null);
encryptionHeaders.Add(null);
fileData.Add([]);
dataDescriptors.Add(null);
zip64DataDescriptors.Add(null);
continue;
}
// Add the local file header
localFileHeaders.Add(localFileHeader);
// Only read the encryption header if necessary
#if NET20 || NET35
if ((header.Flags & GeneralPurposeBitFlags.FileEncrypted) != 0)
#else
if (header.Flags.HasFlag(GeneralPurposeBitFlags.FileEncrypted))
#endif
{
// Try to read the encryption header data -- TODO: Verify amount to read
byte[] encryptionHeader = data.ReadBytes(12);
if (encryptionHeader.Length != 12)
return null;
// Add the encryption header
encryptionHeaders.Add(encryptionHeader);
}
else
{
// Add the null encryption header
encryptionHeaders.Add(null);
}
// Try to read the file data
byte[] fileDatum = data.ReadBytes((int)header.CompressedSize);
if (fileDatum.Length < header.CompressedSize)
return null;
// Add the file data
fileData.Add(fileDatum);
// Only read the data descriptor if necessary
#if NET20 || NET35
if ((header.Flags & GeneralPurposeBitFlags.NoCRC) != 0)
#else
if (header.Flags.HasFlag(GeneralPurposeBitFlags.NoCRC))
#endif
{
// Select the data descriptor that is being used
if (zip64)
{
// Try to parse the data descriptor
var dataDescriptor64 = ParseDataDescriptor64(data);
if (dataDescriptor64 == null)
return null;
// Add the data descriptor
dataDescriptors.Add(null);
zip64DataDescriptors.Add(dataDescriptor64);
}
else
{
// Try to parse the data descriptor
var dataDescriptor = ParseDataDescriptor(data);
if (dataDescriptor == null)
return null;
// Add the data descriptor
dataDescriptors.Add(dataDescriptor);
zip64DataDescriptors.Add(null);
}
}
else
{
// Add the null data descriptor
dataDescriptors.Add(null);
zip64DataDescriptors.Add(null);
}
}
// Assign the local file headers
archive.LocalFileHeaders = [.. localFileHeaders];
// Assign the encryption headers
archive.EncryptionHeaders = [.. encryptionHeaders];
// Assign the file data
archive.FileData = [.. fileData];
// Assign the data descriptors
archive.DataDescriptors = [.. dataDescriptors];
archive.ZIP64DataDescriptors = [.. zip64DataDescriptors];
#endregion
// TODO: Handle archive decryption header
return archive;
}
/// <summary>
/// Search for the end of central directory record
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Position of the end of central directory record, -1 on error</returns>
public static long SearchForEndOfCentralDirectoryRecord(Stream data)
{
// Cache the current offset
long current = data.Position;
// Seek to the minimum size of the record from the end
data.Seek(-22, SeekOrigin.End);
// Attempt to find the end of central directory signature
while (data.Position > 0)
{
// Read the potential signature
uint possibleSignature = data.ReadUInt32();
if (possibleSignature == EndOfCentralDirectoryRecordSignature)
{
long signaturePosition = data.Position - 4;
data.Seek(current, SeekOrigin.Begin);
return signaturePosition;
}
// If we find any other signature
switch (possibleSignature)
{
case ArchiveExtraDataRecordSignature:
case CentralDirectoryFileHeaderSignature:
case DataDescriptorSignature:
case DigitalSignatureSignature:
case EndOfCentralDirectoryLocator64Signature:
case EndOfCentralDirectoryRecord64Signature:
case LocalFileHeaderSignature:
data.Seek(current, SeekOrigin.Begin);
return -1;
}
// Seek backward 5 bytes, if possible
data.Seek(-5, SeekOrigin.Current);
}
// No signature was found
data.Seek(current, SeekOrigin.Begin);
return -1;
}
/// <summary>
/// Parse a Stream into an end of central directory record
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled end of central directory record on success, null on error</returns>
public static EndOfCentralDirectoryRecord? ParseEndOfCentralDirectoryRecord(Stream data)
{
// TODO: Use marshalling here instead of building
var record = new EndOfCentralDirectoryRecord();
record.Signature = data.ReadUInt32();
if (record.Signature != EndOfCentralDirectoryRecordSignature)
return null;
record.DiskNumber = data.ReadUInt16();
record.StartDiskNumber = data.ReadUInt16();
record.TotalEntriesOnDisk = data.ReadUInt16();
record.TotalEntries = data.ReadUInt16();
record.CentralDirectorySize = data.ReadUInt32();
record.CentralDirectoryOffset = data.ReadUInt32();
record.FileCommentLength = data.ReadUInt16();
if (record.FileCommentLength > 0 && data.Position + record.FileCommentLength <= data.Length)
{
byte[] commentBytes = data.ReadBytes(record.FileCommentLength);
if (commentBytes.Length != record.FileCommentLength)
return null;
record.FileComment = Encoding.ASCII.GetString(commentBytes);
}
return record;
}
/// <summary>
/// Search for the ZIP64 end of central directory locator
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Position of the ZIP64 end of central directory locator, -1 on error</returns>
public static long SearchForZIP64EndOfCentralDirectoryLocator(Stream data)
{
// Cache the current offset
long current = data.Position;
// Seek to the minimum size of the record from the minimum start
// of the end of central directory record
data.Seek(-22 + -20, SeekOrigin.Current);
// Attempt to find the ZIP64 end of central directory locator signature
while (data.Position > 0)
{
// Read the potential signature
uint possibleSignature = data.ReadUInt32();
if (possibleSignature == EndOfCentralDirectoryLocator64Signature)
{
long signaturePosition = data.Position - 4;
data.Seek(current, SeekOrigin.Begin);
return signaturePosition;
}
// If we find any other signature
switch (possibleSignature)
{
case ArchiveExtraDataRecordSignature:
case CentralDirectoryFileHeaderSignature:
case DataDescriptorSignature:
case DigitalSignatureSignature:
case EndOfCentralDirectoryRecordSignature:
case EndOfCentralDirectoryRecord64Signature:
case LocalFileHeaderSignature:
data.Seek(current, SeekOrigin.Begin);
return -1;
}
// Seek backward 5 bytes, if possible
data.Seek(-5, SeekOrigin.Current);
}
// No signature was found
data.Seek(current, SeekOrigin.Begin);
return -1;
}
/// <summary>
/// Parse a Stream into a ZIP64 end of central directory locator
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled ZIP64 end of central directory locator on success, null on error</returns>
public static EndOfCentralDirectoryLocator64? ParseEndOfCentralDirectoryLocator64(Stream data)
{
return data.ReadType<EndOfCentralDirectoryLocator64>();
}
/// <summary>
/// Parse a Stream into a ZIP64 end of central directory record
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled ZIP64 end of central directory record on success, null on error</returns>
public static EndOfCentralDirectoryRecord64? ParseEndOfCentralDirectoryRecord64(Stream data)
{
// TODO: Use marshalling here instead of building
var record = new EndOfCentralDirectoryRecord64();
record.Signature = data.ReadUInt32();
if (record.Signature != EndOfCentralDirectoryRecord64Signature)
return null;
record.DirectoryRecordSize = data.ReadUInt64();
record.HostSystem = (HostSystem)data.ReadByteValue();
record.VersionMadeBy = data.ReadByteValue();
record.VersionNeededToExtract = data.ReadUInt16();
record.DiskNumber = data.ReadUInt32();
record.StartDiskNumber = data.ReadUInt32();
record.TotalEntriesOnDisk = data.ReadUInt64();
record.TotalEntries = data.ReadUInt64();
record.CentralDirectorySize = data.ReadUInt64();
record.CentralDirectoryOffset = data.ReadUInt64();
// TODO: Handle the ExtensibleDataSector -- How to detect if exists?
return record;
}
/// <summary>
/// Parse a Stream into a central directory file header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled central directory file header on success, null on error</returns>
public static CentralDirectoryFileHeader? ParseCentralDirectoryFileHeader(Stream data)
{
// TODO: Use marshalling here instead of building
var header = new CentralDirectoryFileHeader();
header.Signature = data.ReadUInt32();
if (header.Signature != CentralDirectoryFileHeaderSignature)
return null;
header.HostSystem = (HostSystem)data.ReadByteValue();
header.VersionMadeBy = data.ReadByteValue();
header.VersionNeededToExtract = data.ReadUInt16();
header.Flags = (GeneralPurposeBitFlags)data.ReadUInt16();
header.CompressionMethod = (CompressionMethod)data.ReadUInt16();
header.LastModifedFileTime = data.ReadUInt16();
header.LastModifiedFileDate = data.ReadUInt16();
header.CRC32 = data.ReadUInt32();
header.CompressedSize = data.ReadUInt32();
header.UncompressedSize = data.ReadUInt32();
header.FileNameLength = data.ReadUInt16();
header.ExtraFieldLength = data.ReadUInt16();
header.FileCommentLength = data.ReadUInt16();
header.DiskNumberStart = data.ReadUInt16();
header.InternalFileAttributes = (InternalFileAttributes)data.ReadUInt16();
header.ExternalFileAttributes = data.ReadUInt32();
header.RelativeOffsetOfLocalHeader = data.ReadUInt32();
if (header.FileNameLength > 0 && data.Position + header.FileNameLength <= data.Length)
{
byte[] filenameBytes = data.ReadBytes(header.FileNameLength);
if (filenameBytes.Length != header.FileNameLength)
return null;
header.FileName = Encoding.ASCII.GetString(filenameBytes);
}
if (header.ExtraFieldLength > 0 && data.Position + header.ExtraFieldLength <= data.Length)
{
byte[] extraBytes = data.ReadBytes(header.ExtraFieldLength);
if (extraBytes.Length != header.ExtraFieldLength)
return null;
header.ExtraField = extraBytes;
}
if (header.FileCommentLength > 0 && data.Position + header.FileCommentLength <= data.Length)
{
byte[] commentBytes = data.ReadBytes(header.FileCommentLength);
if (commentBytes.Length != header.FileCommentLength)
return null;
header.FileComment = Encoding.ASCII.GetString(commentBytes);
}
return header;
}
/// <summary>
/// Search for the archive extra data record
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="centralDirectoryoffset">Offset to the first central directory record</param>
/// <returns>Position of the archive extra data record, -1 on error</returns>
public static long SearchForArchiveExtraDataRecord(Stream data, long centralDirectoryoffset)
{
// Cache the current offset
long current = data.Position;
// Seek to the minimum size of the record from the central directory
data.Seek(centralDirectoryoffset - 8, SeekOrigin.Begin);
// Attempt to find the end of central directory signature
while (data.Position > 0)
{
// Read the potential signature
uint possibleSignature = data.ReadUInt32();
if (possibleSignature == ArchiveExtraDataRecordSignature)
{
long signaturePosition = data.Position - 4;
data.Seek(current, SeekOrigin.Begin);
return signaturePosition;
}
// If we find any other signature
switch (possibleSignature)
{
case CentralDirectoryFileHeaderSignature:
case DataDescriptorSignature:
case DigitalSignatureSignature:
case EndOfCentralDirectoryLocator64Signature:
case EndOfCentralDirectoryRecordSignature:
case EndOfCentralDirectoryRecord64Signature:
case LocalFileHeaderSignature:
data.Seek(current, SeekOrigin.Begin);
return -1;
}
// Seek backward 5 bytes, if possible
data.Seek(-5, SeekOrigin.Current);
}
// No signature was found
data.Seek(current, SeekOrigin.Begin);
return -1;
}
/// <summary>
/// Parse a Stream into an archive extra data record
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled archive extra data record on success, null on error</returns>
public static ArchiveExtraDataRecord? ParseArchiveExtraDataRecord(Stream data)
{
// TODO: Use marshalling here instead of building
var record = new ArchiveExtraDataRecord();
record.Signature = data.ReadUInt32();
if (record.Signature != ArchiveExtraDataRecordSignature)
return null;
record.ExtraFieldLength = data.ReadUInt32();
if (record.ExtraFieldLength > 0 && data.Position + record.ExtraFieldLength <= data.Length)
{
byte[] extraBytes = data.ReadBytes((int)record.ExtraFieldLength);
if (extraBytes.Length != record.ExtraFieldLength)
return null;
record.ExtraFieldData = extraBytes;
}
return record;
}
/// <summary>
/// Parse a Stream into a local file header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled local file header on success, null on error</returns>
public static LocalFileHeader? ParseLocalFileHeader(Stream data)
{
// TODO: Use marshalling here instead of building
var header = new LocalFileHeader();
header.Signature = data.ReadUInt32();
if (header.Signature != LocalFileHeaderSignature)
return null;
header.Version = data.ReadUInt16();
header.Flags = (GeneralPurposeBitFlags)data.ReadUInt16();
header.CompressionMethod = (CompressionMethod)data.ReadUInt16();
header.LastModifedFileTime = data.ReadUInt16();
header.LastModifiedFileDate = data.ReadUInt16();
header.CRC32 = data.ReadUInt32();
header.CompressedSize = data.ReadUInt32();
header.UncompressedSize = data.ReadUInt32();
header.FileNameLength = data.ReadUInt16();
header.ExtraFieldLength = data.ReadUInt16();
if (header.FileNameLength > 0 && data.Position + header.FileNameLength <= data.Length)
{
byte[] filenameBytes = data.ReadBytes(header.FileNameLength);
if (filenameBytes.Length != header.FileNameLength)
return null;
header.FileName = Encoding.ASCII.GetString(filenameBytes);
}
if (header.ExtraFieldLength > 0 && data.Position + header.ExtraFieldLength <= data.Length)
{
byte[] extraBytes = data.ReadBytes(header.ExtraFieldLength);
if (extraBytes.Length != header.ExtraFieldLength)
return null;
header.ExtraField = extraBytes;
}
return header;
}
/// <summary>
/// Parse a Stream into a data descriptor
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled data descriptor on success, null on error</returns>
public static DataDescriptor? ParseDataDescriptor(Stream data)
{
// TODO: Use marshalling here instead of building
var dataDescriptor = new DataDescriptor();
// Signatures are expected but not required
dataDescriptor.Signature = data.ReadUInt32();
if (dataDescriptor.Signature != DataDescriptorSignature)
data.Seek(-4, SeekOrigin.Current);
dataDescriptor.CRC32 = data.ReadUInt32();
dataDescriptor.CompressedSize = data.ReadUInt32();
dataDescriptor.UncompressedSize = data.ReadUInt32();
return dataDescriptor;
}
/// <summary>
/// Parse a Stream into a ZIP64 data descriptor
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled ZIP64 data descriptor on success, null on error</returns>
public static DataDescriptor64? ParseDataDescriptor64(Stream data)
{
// TODO: Use marshalling here instead of building
var zip64DataDescriptor = new DataDescriptor64();
// Signatures are expected but not required
zip64DataDescriptor.Signature = data.ReadUInt32();
if (zip64DataDescriptor.Signature != DataDescriptorSignature)
data.Seek(-4, SeekOrigin.Current);
zip64DataDescriptor.CRC32 = data.ReadUInt32();
zip64DataDescriptor.CompressedSize = data.ReadUInt64();
zip64DataDescriptor.UncompressedSize = data.ReadUInt64();
return zip64DataDescriptor;
}
}
}
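And a hedged usage sketch for the PKZIP deserializer; the archive path is a placeholder, and only fields populated by the code above are shown:

using System;
using SabreTools.Models.PKZIP;

// "example.zip" is a placeholder path; the SkipCompression override above
// ensures the archive is opened without any pre-decompression.
var deserializer = new SabreTools.Serialization.Deserializers.PKZIP();
Archive? archive = deserializer.Deserialize("example.zip");

if (archive?.CentralDirectoryHeaders == null)
{
    Console.WriteLine("No valid PKZIP structure found");
}
else
{
    foreach (var cdh in archive.CentralDirectoryHeaders)
        Console.WriteLine($"{cdh.FileName}: {cdh.CompressedSize} -> {cdh.UncompressedSize} bytes");
}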

View File

@@ -495,7 +495,7 @@ namespace SabreTools.Serialization.Deserializers
entry.Value = data.ReadUInt32();
entry.SectionNumber = data.ReadUInt16();
entry.SymbolType = (SymbolType)data.ReadUInt16();
entry.StorageClass = (StorageClass)data.ReadByte();
entry.StorageClass = (StorageClass)data.ReadByteValue();
entry.NumberOfAuxSymbols = data.ReadByteValue();
coffSymbolTable[i] = entry;
@@ -668,7 +668,7 @@ namespace SabreTools.Serialization.Deserializers
entry.CertificateType = (WindowsCertificateType)data.ReadUInt16();
int certificateDataLength = (int)(entry.Length - 8);
if (certificateDataLength > 0)
if (certificateDataLength > 0 && data.Position + certificateDataLength <= data.Length)
entry.Certificate = data.ReadBytes(certificateDataLength);
attributeCertificateTable.Add(entry);
@@ -703,7 +703,7 @@ namespace SabreTools.Serialization.Deserializers
// TODO: Use marshalling here instead of building
var baseRelocationTable = new List<BaseRelocationBlock>();
while (data.Position < endOffset)
while (data.Position < endOffset && data.Position < data.Length)
{
var baseRelocationBlock = new BaseRelocationBlock();
@@ -724,12 +724,12 @@ namespace SabreTools.Serialization.Deserializers
totalSize += 2;
}
baseRelocationBlock.TypeOffsetFieldEntries = typeOffsetFieldEntries.ToArray();
baseRelocationBlock.TypeOffsetFieldEntries = [.. typeOffsetFieldEntries];
baseRelocationTable.Add(baseRelocationBlock);
}
return baseRelocationTable.ToArray();
return [.. baseRelocationTable];
}
/// <summary>
@@ -745,7 +745,7 @@ namespace SabreTools.Serialization.Deserializers
var debugDirectoryTable = new List<DebugDirectoryEntry>();
while (data.Position < endOffset)
while (data.Position < endOffset && data.Position < data.Length)
{
var debugDirectoryEntry = data.ReadType<DebugDirectoryEntry>();
if (debugDirectoryEntry == null)
@@ -1164,7 +1164,7 @@ namespace SabreTools.Serialization.Deserializers
var resourceDirectoryString = new ResourceDirectoryString();
resourceDirectoryString.Length = data.ReadUInt16();
if (resourceDirectoryString.Length > 0)
if (resourceDirectoryString.Length > 0 && data.Position + resourceDirectoryString.Length <= data.Length)
resourceDirectoryString.UnicodeString = data.ReadBytes(resourceDirectoryString.Length * 2);
entry.Name = resourceDirectoryString;
@@ -1194,7 +1194,7 @@ namespace SabreTools.Serialization.Deserializers
// Read the data from the offset
offset = resourceDataEntry.DataRVA.ConvertVirtualAddress(sections);
if (offset > 0 && resourceDataEntry.Size > 0)
if (offset > 0 && resourceDataEntry.Size > 0 && offset + (int)resourceDataEntry.Size < data.Length)
{
data.Seek(offset, SeekOrigin.Begin);
resourceDataEntry.Data = data.ReadBytes((int)resourceDataEntry.Size);
@@ -1246,7 +1246,7 @@ namespace SabreTools.Serialization.Deserializers
resourceDirectoryTable.Entries[totalEntryCount] = new ResourceDirectoryEntry
{
Name = new ResourceDirectoryString { UnicodeString = Encoding.ASCII.GetBytes("HIDDEN RESOURCE") },
Name = new ResourceDirectoryString { UnicodeString = Encoding.Unicode.GetBytes("HIDDEN RESOURCE") },
IntegerID = uint.MaxValue,
DataEntryOffset = (uint)data.Position,
DataEntry = new ResourceDataEntry

View File

@@ -67,7 +67,10 @@ namespace SabreTools.Serialization.Deserializers
#region Archive Hashes
if (header?.Version == 2 && file.ExtendedHeader != null && file.ExtendedHeader.ArchiveHashLength > 0)
if (header?.Version == 2
&& file.ExtendedHeader != null
&& file.ExtendedHeader.ArchiveHashLength > 0
&& data.Position + file.ExtendedHeader.ArchiveHashLength <= data.Length)
{
// Create the archive hashes list
var archiveHashes = new List<ArchiveHash>();
@@ -218,7 +221,9 @@ namespace SabreTools.Serialization.Deserializers
// Get the preload data pointer
long preloadDataPointer = -1; int preloadDataLength = -1;
if (directoryEntry.ArchiveIndex == HL_VPK_NO_ARCHIVE && directoryEntry.EntryLength > 0)
if (directoryEntry.ArchiveIndex == HL_VPK_NO_ARCHIVE
&& directoryEntry.EntryLength > 0
&& data.Position + directoryEntry.EntryLength <= data.Length)
{
preloadDataPointer = directoryEntry.EntryOffset;
preloadDataLength = (int)directoryEntry.EntryLength;
@@ -231,7 +236,9 @@ namespace SabreTools.Serialization.Deserializers
// If we had a valid preload data pointer
byte[]? preloadData = null;
if (preloadDataPointer >= 0 && preloadDataLength > 0)
if (preloadDataPointer >= 0
&& preloadDataLength > 0
&& data.Position + preloadDataLength <= data.Length)
{
// Cache the current offset
long initialOffset = data.Position;

View File

@@ -9,13 +9,8 @@ namespace SabreTools.Serialization
/// </summary>
/// <param name="entry">Resource type information entry to check</param>
/// <returns>True if the entry is an integer type, false if an offset, null on error</returns>
public static bool? IsIntegerType(this ResourceTypeInformationEntry entry)
public static bool IsIntegerType(this ResourceTypeInformationEntry entry)
{
// We can't do anything with an invalid entry
if (entry == null)
return null;
// If the highest order bit is set, it's an integer type
return (entry.TypeID & 0x8000) != 0;
}
@@ -24,13 +19,8 @@ namespace SabreTools.Serialization
/// </summary>
/// <param name="entry">Resource type resource entry to check</param>
/// <returns>True if the entry is an integer type, false if an offset, null on error</returns>
public static bool? IsIntegerType(this ResourceTypeResourceEntry entry)
public static bool IsIntegerType(this ResourceTypeResourceEntry entry)
{
// We can't do anything with an invalid entry
if (entry == null)
return null;
// If the highest order bit is set, it's an integer type
return (entry.ResourceID & 0x8000) != 0;
}
@@ -41,10 +31,6 @@ namespace SabreTools.Serialization
/// <returns>SegmentEntryType corresponding to the type</returns>
public static SegmentEntryType GetEntryType(this EntryTableBundle entry)
{
// We can't do anything with an invalid entry
if (entry == null)
return SegmentEntryType.Unused;
// Determine the entry type based on segment indicator
if (entry.SegmentIndicator == 0x00)
return SegmentEntryType.Unused;
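
The integer-type checks above reduce to a single high-bit test on the 16-bit identifier. The same check in isolation, over plain ushort values for illustration:

// Illustrative only: mirrors the (id & 0x8000) != 0 test used by the extensions above
static bool IsIntegerType(ushort id) => (id & 0x8000) != 0;

// IsIntegerType(0x8003) == true  -> integer resource type
// IsIntegerType(0x0042) == false -> value is an offset to a name string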

View File

@@ -127,6 +127,15 @@ namespace SabreTools.Serialization
if (rsdsProgramDatabase.Signature != 0x53445352)
return null;
#if NET20 || NET35 || NET40 || NET452 || NET462
// Convert ASCII string to UTF-8
if (rsdsProgramDatabase.PathAndFileName != null)
{
byte[] bytes = Encoding.ASCII.GetBytes(rsdsProgramDatabase.PathAndFileName);
rsdsProgramDatabase.PathAndFileName = Encoding.UTF8.GetString(bytes);
}
#endif
return rsdsProgramDatabase;
}
@@ -595,7 +604,11 @@ namespace SabreTools.Serialization
#region Class resource
currentOffset = offset;
ushort classResourceIdentifier = entry.Data.ReadUInt16(ref offset);
ushort classResourceIdentifier;
if (offset >= entry.Data.Length)
classResourceIdentifier = 0x0000;
else
classResourceIdentifier = entry.Data.ReadUInt16(ref offset);
offset = currentOffset;
// 0x0000 means no elements
@@ -631,7 +644,11 @@ namespace SabreTools.Serialization
#region Title resource
currentOffset = offset;
ushort titleResourceIdentifier = entry.Data.ReadUInt16(ref offset);
ushort titleResourceIdentifier;
if (offset >= entry.Data.Length)
titleResourceIdentifier = 0x0000;
else
titleResourceIdentifier = entry.Data.ReadUInt16(ref offset);
offset = currentOffset;
// 0x0000 means no elements
@@ -891,7 +908,7 @@ namespace SabreTools.Serialization
if (menuHeaderExtended == null)
return null;
menuResource.ExtendedMenuHeader = menuHeaderExtended;
menuResource.MenuHeader = menuHeaderExtended;
#endregion
@@ -920,7 +937,7 @@ namespace SabreTools.Serialization
}
}
menuResource.ExtendedMenuItems = [.. extendedMenuItems];
menuResource.MenuItems = [.. extendedMenuItems];
#endregion
}
@@ -928,7 +945,7 @@ namespace SabreTools.Serialization
{
#region Menu header
var menuHeader = entry.Data.ReadType<MenuHeader>(ref offset);
var menuHeader = entry.Data.ReadType<NormalMenuHeader>(ref offset);
if (menuHeader == null)
return null;
@@ -1067,21 +1084,9 @@ namespace SabreTools.Serialization
// Loop through and add
while (offset < entry.Data.Length)
{
ushort stringLength = entry.Data.ReadUInt16(ref offset);
if (stringLength == 0)
string? stringValue = entry.Data.ReadPrefixedUnicodeString(ref offset);
if (stringValue != null)
{
stringTable[stringIndex++] = "[EMPTY]";
}
else
{
if (stringLength * 2 > entry.Data.Length - offset)
{
Console.WriteLine($"{stringLength * 2} requested but {entry.Data.Length - offset} remains");
stringLength = (ushort)((entry.Data.Length - offset) / 2);
}
string stringValue = Encoding.Unicode.GetString(entry.Data, offset, stringLength * 2);
offset += stringLength * 2;
stringValue = stringValue.Replace("\n", "\\n").Replace("\r", newValue: "\\r");
stringTable[stringIndex++] = stringValue;
}
@@ -1118,7 +1123,7 @@ namespace SabreTools.Serialization
versionInfo.Padding1 = entry.Data.ReadUInt16(ref offset);
// Read fixed file info
if (versionInfo.ValueLength > 0)
if (versionInfo.ValueLength > 0 && offset + versionInfo.ValueLength <= entry.Data.Length)
{
var fixedFileInfo = entry.Data.ReadType<FixedFileInfo>(ref offset);
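
The string-table rewrite above swaps the hand-rolled read for a prefixed-Unicode helper. A rough stand-alone equivalent of that read, written against a plain byte array; this is a sketch of the idea, not the SabreTools.IO signature:

using System.Text;

static string? ReadPrefixedUnicode(byte[] data, ref int offset)
{
    // Two-byte character-count prefix, followed by UTF-16LE characters
    if (offset + 2 > data.Length)
        return null;

    ushort charCount = (ushort)(data[offset] | (data[offset + 1] << 8));
    offset += 2;

    int byteCount = charCount * 2;
    if (byteCount > data.Length - offset)
        byteCount = data.Length - offset;

    string value = Encoding.Unicode.GetString(data, offset, byteCount);
    offset += byteCount;
    return value;
}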

View File

@@ -11,7 +11,7 @@ namespace SabreTools.Serialization
/// </summary>
/// <param name="path">Path to open as a stream</param>
/// <returns>Stream representing the file, null on error</returns>
public static Stream? OpenStream(string? path)
public static Stream? OpenStream(string? path, bool skipCompression = false)
{
try
{
@@ -26,11 +26,11 @@ namespace SabreTools.Serialization
string ext = Path.GetExtension(path).TrimStart('.');
// Determine what we do based on the extension
if (string.Equals(ext, "gz", StringComparison.OrdinalIgnoreCase))
if (!skipCompression && string.Equals(ext, "gz", StringComparison.OrdinalIgnoreCase))
{
return new GZipStream(stream, CompressionMode.Decompress);
}
else if (string.Equals(ext, "zip", StringComparison.OrdinalIgnoreCase))
else if (!skipCompression && string.Equals(ext, "zip", StringComparison.OrdinalIgnoreCase))
{
// TODO: Support zip-compressed files
return null;
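
A quick usage sketch for the new skipCompression parameter; the Streams class name below is only a stand-in for whatever static type hosts OpenStream:

// Default: a .gz path comes back wrapped in a decompressing GZipStream
using var decompressed = Streams.OpenStream("disc.iso.gz");

// skipCompression: the raw on-disk bytes are returned untouched
using var raw = Streams.OpenStream("disc.iso.gz", skipCompression: true);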

View File

@@ -39,6 +39,7 @@ namespace SabreTools.Serialization
Wrapper.BFPK item => item.PrettyPrint(),
Wrapper.BSP item => item.PrettyPrint(),
Wrapper.CFB item => item.PrettyPrint(),
Wrapper.CHD item => item.PrettyPrint(),
Wrapper.CIA item => item.PrettyPrint(),
Wrapper.GCF item => item.PrettyPrint(),
Wrapper.InstallShieldCabinet item => item.PrettyPrint(),
@@ -54,6 +55,7 @@ namespace SabreTools.Serialization
Wrapper.PAK item => item.PrettyPrint(),
Wrapper.PFF item => item.PrettyPrint(),
Wrapper.PIC item => item.PrettyPrint(),
Wrapper.PKZIP item => item.PrettyPrint(),
Wrapper.PlayJAudioFile item => item.PrettyPrint(),
Wrapper.PlayJPlaylist item => item.PrettyPrint(),
Wrapper.PortableExecutable item => item.PrettyPrint(),
@@ -82,6 +84,7 @@ namespace SabreTools.Serialization
Wrapper.BFPK item => item.ExportJSON(),
Wrapper.BSP item => item.ExportJSON(),
Wrapper.CFB item => item.ExportJSON(),
Wrapper.CHD item => item.ExportJSON(),
Wrapper.CIA item => item.ExportJSON(),
Wrapper.GCF item => item.ExportJSON(),
Wrapper.InstallShieldCabinet item => item.ExportJSON(),
@@ -97,6 +100,7 @@ namespace SabreTools.Serialization
Wrapper.PAK item => item.ExportJSON(),
Wrapper.PFF item => item.ExportJSON(),
Wrapper.PIC item => item.ExportJSON(),
Wrapper.PKZIP item => item.ExportJSON(),
Wrapper.PlayJAudioFile item => item.ExportJSON(),
Wrapper.PlayJPlaylist item => item.ExportJSON(),
Wrapper.PortableExecutable item => item.ExportJSON(),
@@ -165,6 +169,16 @@ namespace SabreTools.Serialization
return builder;
}
/// <summary>
/// Export the item information as pretty-printed text
/// </summary>
private static StringBuilder PrettyPrint(this Wrapper.CHD item)
{
var builder = new StringBuilder();
CHD.Print(builder, item.Model);
return builder;
}
/// <summary>
/// Export the item information as pretty-printed text
/// </summary>
@@ -314,6 +328,16 @@ namespace SabreTools.Serialization
PIC.Print(builder, item.Model);
return builder;
}
/// <summary>
/// Export the item information as pretty-printed text
/// </summary>
private static StringBuilder PrettyPrint(this Wrapper.PKZIP item)
{
var builder = new StringBuilder();
PKZIP.Print(builder, item.Model);
return builder;
}
/// <summary>
/// Export the item information as pretty-printed text

View File

@@ -0,0 +1,161 @@
using System;
using System.Collections.Generic;
using System.Text;
using SabreTools.Models.CHD;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Printers
{
public class CHD : IPrinter<Header>
{
/// <inheritdoc/>
public void PrintInformation(StringBuilder builder, Header model)
=> Print(builder, model);
public static void Print(StringBuilder builder, Header header)
{
builder.AppendLine("CHD Header Information:");
builder.AppendLine("-------------------------");
if (header == null)
{
builder.AppendLine("No header");
builder.AppendLine();
return;
}
switch (header)
{
case HeaderV1 v1:
Print(builder, v1);
break;
case HeaderV2 v2:
Print(builder, v2);
break;
case HeaderV3 v3:
Print(builder, v3);
break;
case HeaderV4 v4:
Print(builder, v4);
break;
case HeaderV5 v5:
Print(builder, v5);
break;
default:
builder.AppendLine("Unrecognized header type");
builder.AppendLine();
break;
}
}
private static void Print(StringBuilder builder, HeaderV1 header)
{
builder.AppendLine(header.Tag, $"Tag");
builder.AppendLine(header.Length, $"Length");
builder.AppendLine(header.Version, $"Version");
builder.AppendLine($" Flags: {header.Flags} (0x{header.Flags:X})");
builder.AppendLine($" Compression: {header.Compression} (0x{header.Compression:X})");
builder.AppendLine(header.HunkSize, $"Hunk size");
builder.AppendLine(header.TotalHunks, $"Total hunks");
builder.AppendLine(header.Cylinders, $"Cylinders");
builder.AppendLine(header.Heads, $"Heads");
builder.AppendLine(header.Sectors, $"Sectors");
builder.AppendLine(header.MD5, $"MD5");
builder.AppendLine(header.ParentMD5, $"Parent MD5");
builder.AppendLine();
}
private static void Print(StringBuilder builder, HeaderV2 header)
{
builder.AppendLine(header.Tag, $"Tag");
builder.AppendLine(header.Length, $"Length");
builder.AppendLine(header.Version, $"Version");
builder.AppendLine($" Flags: {header.Flags} (0x{header.Flags:X})");
builder.AppendLine($" Compression: {header.Compression} (0x{header.Compression:X})");
builder.AppendLine(header.HunkSize, $"Hunk size");
builder.AppendLine(header.TotalHunks, $"Total hunks");
builder.AppendLine(header.Cylinders, $"Cylinders");
builder.AppendLine(header.Heads, $"Heads");
builder.AppendLine(header.Sectors, $"Sectors");
builder.AppendLine(header.MD5, $"MD5");
builder.AppendLine(header.ParentMD5, $"Parent MD5");
builder.AppendLine(header.BytesPerSector, $"Bytes per sector");
builder.AppendLine();
}
private static void Print(StringBuilder builder, HeaderV3 header)
{
builder.AppendLine(header.Tag, $"Tag");
builder.AppendLine(header.Length, $"Length");
builder.AppendLine(header.Version, $"Version");
builder.AppendLine($" Flags: {header.Flags} (0x{header.Flags:X})");
builder.AppendLine($" Compression: {header.Compression} (0x{header.Compression:X})");
builder.AppendLine(header.TotalHunks, $"Total hunks");
builder.AppendLine(header.LogicalBytes, $"Logical bytes");
builder.AppendLine(header.MetaOffset, $"Meta offset");
builder.AppendLine(header.MD5, $"MD5");
builder.AppendLine(header.ParentMD5, $"Parent MD5");
builder.AppendLine(header.HunkBytes, $"Hunk bytes");
builder.AppendLine(header.SHA1, $"SHA-1");
builder.AppendLine(header.ParentSHA1, $"Parent SHA-1");
builder.AppendLine();
}
private static void Print(StringBuilder builder, HeaderV4 header)
{
builder.AppendLine(header.Tag, $"Tag");
builder.AppendLine(header.Length, $"Length");
builder.AppendLine(header.Version, $"Version");
builder.AppendLine($" Flags: {header.Flags} (0x{header.Flags:X})");
builder.AppendLine($" Compression: {header.Compression} (0x{header.Compression:X})");
builder.AppendLine(header.TotalHunks, $"Total hunks");
builder.AppendLine(header.LogicalBytes, $"Logical bytes");
builder.AppendLine(header.MetaOffset, $"Meta offset");
builder.AppendLine(header.HunkBytes, $"Hunk bytes");
builder.AppendLine(header.SHA1, $"SHA-1");
builder.AppendLine(header.ParentSHA1, $"Parent SHA-1");
builder.AppendLine(header.RawSHA1, $"Raw SHA-1");
builder.AppendLine();
}
private static void Print(StringBuilder builder, HeaderV5 header)
{
builder.AppendLine(header.Tag, $"Tag");
builder.AppendLine(header.Length, $"Length");
builder.AppendLine(header.Version, $"Version");
// TODO: Remove this hack when actual compressor names are supported
// builder.AppendLine(header.Compressors, $"Compressors");
string compressorsLine = "Compressors: ";
if (header.Compressors == null)
{
compressorsLine += "[NULL]";
}
else
{
var compressors = new List<string>();
for (int i = 0; i < header.Compressors.Length; i++)
{
uint compressor = header.Compressors[i];
byte[] compressorBytes = BitConverter.GetBytes(compressor);
Array.Reverse(compressorBytes);
string compressorString = Encoding.ASCII.GetString(compressorBytes);
compressors.Add(compressorString);
}
compressorsLine += string.Join(", ", [.. compressors]);
}
builder.AppendLine(compressorsLine);
builder.AppendLine(header.LogicalBytes, $"Logical bytes");
builder.AppendLine(header.MapOffset, $"Map offset");
builder.AppendLine(header.MetaOffset, $"Meta offset");
builder.AppendLine(header.HunkBytes, $"Hunk bytes");
builder.AppendLine(header.UnitBytes, $"Unit bytes");
builder.AppendLine(header.RawSHA1, $"Raw SHA-1");
builder.AppendLine(header.SHA1, $"SHA-1");
builder.AppendLine(header.ParentSHA1, $"Parent SHA-1");
builder.AppendLine();
}
}
}
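
The "fake readable compressor names" printed for V5 headers are big-endian FourCC tags decoded to ASCII. The same decoding as a stand-alone helper:

using System;
using System.Text;

static string DecodeCompressorTag(uint compressor)
{
    // CHD stores compressor tags big-endian; reverse on little-endian hosts
    byte[] bytes = BitConverter.GetBytes(compressor);
    if (BitConverter.IsLittleEndian)
        Array.Reverse(bytes);
    return Encoding.ASCII.GetString(bytes);
}

// DecodeCompressorTag(0x7A6C6962) == "zlib"
// DecodeCompressorTag(0x6C7A6D61) == "lzma"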

View File

@@ -315,27 +315,19 @@ namespace SabreTools.Serialization.Printers
builder.AppendLine(fileGroup.NameOffset, " Name offset");
builder.AppendLine(fileGroup.Name, " Name");
builder.AppendLine(fileGroup.ExpandedSize, " Expanded size");
builder.AppendLine(fileGroup.Reserved0, " Reserved 0");
builder.AppendLine(fileGroup.CompressedSize, " Compressed size");
builder.AppendLine(fileGroup.Reserved1, " Reserved 1");
builder.AppendLine(fileGroup.Reserved2, " Reserved 2");
builder.AppendLine(fileGroup.Attribute1, " Attribute 1");
builder.AppendLine(fileGroup.Attribute2, " Attribute 2");
builder.AppendLine($" Attributes: {fileGroup.Attributes} (0x{fileGroup.Attributes:X})");
builder.AppendLine(fileGroup.FirstFile, " First file");
builder.AppendLine(fileGroup.LastFile, " Last file");
builder.AppendLine(fileGroup.UnknownOffset, " Unknown offset");
builder.AppendLine(fileGroup.Var4Offset, " Var 4 offset");
builder.AppendLine(fileGroup.Var1Offset, " Var 1 offset");
builder.AppendLine(fileGroup.UnknownStringOffset, " Unknown string offset");
builder.AppendLine(fileGroup.OperatingSystemOffset, " Operating system offset");
builder.AppendLine(fileGroup.LanguageOffset, " Language offset");
builder.AppendLine(fileGroup.HTTPLocationOffset, " HTTP location offset");
builder.AppendLine(fileGroup.FTPLocationOffset, " FTP location offset");
builder.AppendLine(fileGroup.MiscOffset, " Misc. offset");
builder.AppendLine(fileGroup.Var2Offset, " Var 2 offset");
builder.AppendLine(fileGroup.TargetDirectoryOffset, " Target directory offset");
builder.AppendLine(fileGroup.Reserved3, " Reserved 3");
builder.AppendLine(fileGroup.Reserved4, " Reserved 4");
builder.AppendLine(fileGroup.Reserved5, " Reserved 5");
builder.AppendLine(fileGroup.Reserved6, " Reserved 6");
builder.AppendLine(fileGroup.Reserved7, " Reserved 7");
builder.AppendLine($" Overwrite flags: {fileGroup.OverwriteFlags} (0x{fileGroup.OverwriteFlags:X})");
builder.AppendLine(fileGroup.Reserved, " Reserved");
}
builder.AppendLine();
}
@@ -366,16 +358,16 @@ namespace SabreTools.Serialization.Printers
builder.AppendLine(component.DescriptorOffset, " Descriptor offset");
builder.AppendLine(component.DisplayNameOffset, " Display name offset");
builder.AppendLine(component.DisplayName, " Display name");
builder.AppendLine(component.Reserved0, " Reserved 0");
builder.AppendLine(component.ReservedOffset0, " Reserved offset 0");
builder.AppendLine(component.ReservedOffset1, " Reserved offset 1");
builder.AppendLine($" Status: {component.Status} (0x{component.Status:X})");
builder.AppendLine(component.PasswordOffset, " Password offset");
builder.AppendLine(component.MiscOffset, " Misc. offset");
builder.AppendLine(component.ComponentIndex, " Component index");
builder.AppendLine(component.NameOffset, " Name offset");
builder.AppendLine(component.Name, " Name");
builder.AppendLine(component.ReservedOffset2, " Reserved offset 2");
builder.AppendLine(component.ReservedOffset3, " Reserved offset 3");
builder.AppendLine(component.ReservedOffset4, " Reserved offset 4");
builder.AppendLine(component.Reserved1, " Reserved 1");
builder.AppendLine(component.CDRomFolderOffset, " CD-ROM folder offset");
builder.AppendLine(component.HTTPLocationOffset, " HTTP location offset");
builder.AppendLine(component.FTPLocationOffset, " FTP location offset");
builder.AppendLine(component.Guid, " GUIDs");
builder.AppendLine(component.CLSIDOffset, " CLSID offset");
builder.AppendLine(component.CLSID, " CLSID");
builder.AppendLine(component.Reserved2, " Reserved 2");
@@ -406,10 +398,10 @@ namespace SabreTools.Serialization.Printers
builder.AppendLine(component.SubComponentsCount, " Sub-components count");
builder.AppendLine(component.SubComponentsOffset, " Sub-components offset");
builder.AppendLine(component.NextComponentOffset, " Next component offset");
builder.AppendLine(component.ReservedOffset5, " Reserved offset 5");
builder.AppendLine(component.ReservedOffset6, " Reserved offset 6");
builder.AppendLine(component.ReservedOffset7, " Reserved offset 7");
builder.AppendLine(component.ReservedOffset8, " Reserved offset 8");
builder.AppendLine(component.OnInstallingOffset, " On installing offset");
builder.AppendLine(component.OnInstalledOffset, " On installed offset");
builder.AppendLine(component.OnUninstallingOffset, " On uninstalling offset");
builder.AppendLine(component.OnUninstalledOffset, " On uninstalled offset");
}
builder.AppendLine();
}

View File

@@ -131,6 +131,8 @@ namespace SabreTools.Serialization.Printers
builder.AppendLine(header.CVerVersionNumber, " Version number of CVer in included update partition");
builder.AppendLine(header.Reserved4, " Reserved 4");
builder.AppendLine();
Print(builder, header.InitialData);
}
private static void Print(StringBuilder builder, DevelopmentCardInfoHeader? header)
@@ -227,6 +229,96 @@ namespace SabreTools.Serialization.Printers
builder.AppendLine();
}
private static void Print(StringBuilder builder, InitialData? id)
{
builder.AppendLine(" Initial Data Information:");
builder.AppendLine(" -------------------------");
if (id == null)
{
builder.AppendLine(" No initial data");
builder.AppendLine();
return;
}
builder.AppendLine(id.CardSeedKeyY, " Card seed KeyY");
builder.AppendLine(id.EncryptedCardSeed, " Encrypted card seed");
builder.AppendLine(id.CardSeedAESMAC, " Card seed AES-MAC");
builder.AppendLine(id.CardSeedNonce, " Card seed nonce");
builder.AppendLine(id.Reserved, " Reserved");
builder.AppendLine();
PrintBackup(builder, id.BackupHeader);
}
private static void PrintBackup(StringBuilder builder, NCCHHeader? header)
{
builder.AppendLine(" Backup NCCH Header Information:");
builder.AppendLine(" -------------------------");
if (header == null)
{
builder.AppendLine(" No backup NCCH header");
builder.AppendLine();
return;
}
if (header.MagicID == string.Empty)
{
builder.AppendLine(" Empty backup header, no data can be parsed");
}
else if (header.MagicID != Constants.NCCHMagicNumber)
{
builder.AppendLine(" Unrecognized backup header, no data can be parsed");
}
else
{
// Backup header omits RSA signature
builder.AppendLine(header.MagicID, " Magic ID");
builder.AppendLine(header.ContentSizeInMediaUnits, " Content size in media units");
builder.AppendLine(header.PartitionId, " Partition ID");
builder.AppendLine(header.MakerCode, " Maker code");
builder.AppendLine(header.Version, " Version");
builder.AppendLine(header.VerificationHash, " Verification hash");
builder.AppendLine(header.ProgramId, " Program ID");
builder.AppendLine(header.Reserved1, " Reserved 1");
builder.AppendLine(header.LogoRegionHash, " Logo region SHA-256 hash");
builder.AppendLine(header.ProductCode, " Product code");
builder.AppendLine(header.ExtendedHeaderHash, " Extended header SHA-256 hash");
builder.AppendLine(header.ExtendedHeaderSizeInBytes, " Extended header size in bytes");
builder.AppendLine(header.Reserved2, " Reserved 2");
builder.AppendLine(" Flags:");
if (header.Flags == null)
{
builder.AppendLine(" [NULL]");
}
else
{
builder.AppendLine(header.Flags.Reserved0, " Reserved 0");
builder.AppendLine(header.Flags.Reserved1, " Reserved 1");
builder.AppendLine(header.Flags.Reserved2, " Reserved 2");
builder.AppendLine($" Crypto method: {header.Flags.CryptoMethod} (0x{header.Flags.CryptoMethod:X})");
builder.AppendLine($" Content platform: {header.Flags.ContentPlatform} (0x{header.Flags.ContentPlatform:X})");
builder.AppendLine($" Content type: {header.Flags.MediaPlatformIndex} (0x{header.Flags.MediaPlatformIndex:X})");
builder.AppendLine(header.Flags.ContentUnitSize, " Content unit size");
builder.AppendLine($" Bitmasks: {header.Flags.BitMasks} (0x{header.Flags.BitMasks:X})");
}
builder.AppendLine(header.PlainRegionOffsetInMediaUnits, " Plain region offset, in media units");
builder.AppendLine(header.PlainRegionSizeInMediaUnits, " Plain region size, in media units");
builder.AppendLine(header.LogoRegionOffsetInMediaUnits, " Logo region offset, in media units");
builder.AppendLine(header.LogoRegionSizeInMediaUnits, " Logo region size, in media units");
builder.AppendLine(header.ExeFSOffsetInMediaUnits, " ExeFS offset, in media units");
builder.AppendLine(header.ExeFSSizeInMediaUnits, " ExeFS size, in media units");
builder.AppendLine(header.ExeFSHashRegionSizeInMediaUnits, " ExeFS hash region size, in media units");
builder.AppendLine(header.Reserved3, " Reserved 3");
builder.AppendLine(header.RomFSOffsetInMediaUnits, " RomFS offset, in media units");
builder.AppendLine(header.RomFSSizeInMediaUnits, " RomFS size, in media units");
builder.AppendLine(header.RomFSHashRegionSizeInMediaUnits, " RomFS hash region size, in media units");
builder.AppendLine(header.Reserved4, " Reserved 4");
builder.AppendLine(header.ExeFSSuperblockHash, " ExeFS superblock SHA-256 hash");
builder.AppendLine(header.RomFSSuperblockHash, " RomFS superblock SHA-256 hash");
}
builder.AppendLine();
}
private static void Print(StringBuilder builder, NCCHHeader?[]? entries)
{
builder.AppendLine(" NCCH Partition Header Information:");
@@ -455,7 +547,7 @@ namespace SabreTools.Serialization.Printers
}
else
{
builder.AppendLine(entry.ACI.ARM9AccessControl.Descriptors, " Descriptors");
builder.AppendLine($" Descriptors: {entry.ACI.ARM9AccessControl.Descriptors} (0x{entry.ACI.ARM9AccessControl.Descriptors:X})");
builder.AppendLine(entry.ACI.ARM9AccessControl.DescriptorVersion, " Descriptor version");
}
@@ -523,7 +615,7 @@ namespace SabreTools.Serialization.Printers
}
else
{
builder.AppendLine(entry.ACIForLimitations.ARM9AccessControl.Descriptors, " Descriptors");
builder.AppendLine($" Descriptors: {entry.ACIForLimitations.ARM9AccessControl.Descriptors} (0x{entry.ACIForLimitations.ARM9AccessControl.Descriptors:X})");
builder.AppendLine(entry.ACIForLimitations.ARM9AccessControl.DescriptorVersion, " Descriptor version");
}
}

View File

@@ -188,7 +188,7 @@ namespace SabreTools.Serialization.Printers
{
// TODO: If not integer type, print out name
var resource = entry.Resources[j];
builder.AppendLine($" Resource Entry {i}");
builder.AppendLine($" Resource Entry {j}");
if (resource == null)
{
builder.AppendLine(" [NULL]");

View File

@@ -0,0 +1,279 @@
using System.Text;
using SabreTools.Models.PKZIP;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Printers
{
public class PKZIP : IPrinter<Archive>
{
/// <inheritdoc/>
public void PrintInformation(StringBuilder builder, Archive model)
=> Print(builder, model);
public static void Print(StringBuilder builder, Archive archive)
{
builder.AppendLine("PKZIP Archive (or Derived Format) Information:");
builder.AppendLine("-------------------------");
builder.AppendLine();
Print(builder, archive.EndOfCentralDirectoryRecord);
Print(builder, archive.ZIP64EndOfCentralDirectoryLocator);
Print(builder, archive.ZIP64EndOfCentralDirectoryRecord);
Print(builder, archive.CentralDirectoryHeaders);
Print(builder, archive.ArchiveExtraDataRecord);
Print(builder,
archive.LocalFileHeaders,
archive.EncryptionHeaders,
archive.FileData,
archive.DataDescriptors,
archive.ZIP64DataDescriptors);
}
private static void Print(StringBuilder builder, EndOfCentralDirectoryRecord? record)
{
builder.AppendLine(" End of Central Directory Record Information:");
builder.AppendLine(" -------------------------");
if (record == null)
{
builder.AppendLine(" No end of central directory record");
builder.AppendLine();
return;
}
builder.AppendLine(record.Signature, " Signature");
builder.AppendLine(record.DiskNumber, " Disk number");
builder.AppendLine(record.StartDiskNumber, " Start disk number");
builder.AppendLine(record.TotalEntriesOnDisk, " Total entries on disk");
builder.AppendLine(record.TotalEntries, " Total entries");
builder.AppendLine(record.CentralDirectorySize, " Central directory size");
builder.AppendLine(record.CentralDirectoryOffset, " Central directory offset");
builder.AppendLine(record.FileCommentLength, " File comment length");
builder.AppendLine(record.FileComment, " File comment");
builder.AppendLine();
}
private static void Print(StringBuilder builder, EndOfCentralDirectoryLocator64? locator)
{
builder.AppendLine(" ZIP64 End of Central Directory Locator Information:");
builder.AppendLine(" -------------------------");
if (locator == null)
{
builder.AppendLine(" No ZIP64 end of central directory locator");
builder.AppendLine();
return;
}
builder.AppendLine(locator.Signature, " Signature");
builder.AppendLine(locator.StartDiskNumber, " Start disk number");
builder.AppendLine(locator.CentralDirectoryOffset, " Central directory offset");
builder.AppendLine(locator.TotalDisks, " Total disks");
builder.AppendLine();
}
private static void Print(StringBuilder builder, EndOfCentralDirectoryRecord64? record)
{
builder.AppendLine(" ZIP64 End of Central Directory Record Information:");
builder.AppendLine(" -------------------------");
if (record == null)
{
builder.AppendLine(" No ZIP64 end of central directory record");
builder.AppendLine();
return;
}
builder.AppendLine(record.Signature, " Signature");
builder.AppendLine(record.DirectoryRecordSize, " Directory record size");
builder.AppendLine($" Host system: {record.HostSystem} (0x{record.HostSystem:X})");
builder.AppendLine(record.VersionMadeBy, " Version made by");
builder.AppendLine(record.VersionNeededToExtract, " Version needed to extract");
builder.AppendLine(record.DiskNumber, " Disk number");
builder.AppendLine(record.StartDiskNumber, " Start disk number");
builder.AppendLine(record.TotalEntriesOnDisk, " Total entries on disk");
builder.AppendLine(record.TotalEntries, " Total entries");
builder.AppendLine(record.CentralDirectorySize, " Central directory size");
builder.AppendLine(record.CentralDirectoryOffset, " Central directory offset");
//builder.AppendLine(record.ExtensibleDataSector, " Extensible data sector");
builder.AppendLine();
}
private static void Print(StringBuilder builder, CentralDirectoryFileHeader?[]? headers)
{
builder.AppendLine(" Central Directory File Headers Information:");
builder.AppendLine(" -------------------------");
if (headers == null || headers.Length == 0)
{
builder.AppendLine(" No central directory file headers");
builder.AppendLine();
return;
}
for (int i = 0; i < headers.Length; i++)
{
var record = headers[i];
Print(builder, record, i);
}
builder.AppendLine();
}
private static void Print(StringBuilder builder, CentralDirectoryFileHeader? header, int index)
{
builder.AppendLine($" Central Directory File Header Entry {index}");
if (header == null)
{
builder.AppendLine(" [NULL]");
return;
}
builder.AppendLine(header.Signature, " Signature");
builder.AppendLine($" Host system: {header.HostSystem} (0x{header.HostSystem:X})");
builder.AppendLine(header.VersionMadeBy, " Version made by");
builder.AppendLine(header.VersionNeededToExtract, " Version needed to extract");
builder.AppendLine($" Flags: {header.Flags} (0x{header.Flags:X})");
builder.AppendLine($" Compression method: {header.CompressionMethod} (0x{header.CompressionMethod:X})");
builder.AppendLine(header.LastModifedFileTime, " Last modified file time"); // TODO: Parse from MS-DOS
builder.AppendLine(header.LastModifiedFileDate, " Last modified file date"); // TODO: Parse from MS-DOS
builder.AppendLine(header.CRC32, " CRC-32");
builder.AppendLine(header.CompressedSize, " Compressed size");
builder.AppendLine(header.UncompressedSize, " Uncompressed size");
builder.AppendLine(header.FileNameLength, " File name length");
builder.AppendLine(header.ExtraFieldLength, " Extra field length");
builder.AppendLine(header.FileCommentLength, " File comment length");
builder.AppendLine(header.DiskNumberStart, " Disk number start");
builder.AppendLine($" Internal file attributes: {header.InternalFileAttributes} (0x{header.InternalFileAttributes:X})");
builder.AppendLine(header.ExternalFileAttributes, " External file attributes");
builder.AppendLine(header.RelativeOffsetOfLocalHeader, " Relative offset of local header");
builder.AppendLine(header.FileName, " File name");
builder.AppendLine(header.ExtraField, " Extra field");
builder.AppendLine(header.FileComment, " File comment");
}
private static void Print(StringBuilder builder, ArchiveExtraDataRecord? record)
{
builder.AppendLine(" Archive Extra Data Record Information:");
builder.AppendLine(" -------------------------");
if (record == null)
{
builder.AppendLine(" No archive extra data record");
builder.AppendLine();
return;
}
builder.AppendLine(record.Signature, " Signature");
builder.AppendLine(record.ExtraFieldLength, " Extra field length");
builder.AppendLine(record.ExtraFieldData, " Extra field data");
builder.AppendLine();
}
private static void Print(StringBuilder builder,
LocalFileHeader[]? localFileHeaders,
byte[]?[]? encryptionHeaders,
byte[][]? fileData,
DataDescriptor?[]? dataDescriptors,
DataDescriptor64?[]? zip64DataDescriptors)
{
builder.AppendLine(" Local File Information:");
builder.AppendLine(" -------------------------");
if (localFileHeaders == null || localFileHeaders.Length == 0)
{
builder.AppendLine(" No local files");
builder.AppendLine();
return;
}
if (encryptionHeaders == null || localFileHeaders.Length > encryptionHeaders.Length
|| fileData == null || localFileHeaders.Length > fileData.Length
|| dataDescriptors == null || localFileHeaders.Length > dataDescriptors.Length
|| zip64DataDescriptors == null || localFileHeaders.Length > zip64DataDescriptors.Length)
{
builder.AppendLine(" Mismatch in local file array values");
builder.AppendLine();
}
for (int i = 0; i < localFileHeaders.Length; i++)
{
var localFileHeader = localFileHeaders[i];
var encryptionHeader = encryptionHeaders != null && i < encryptionHeaders.Length ? encryptionHeaders[i] : null;
var fileDatum = fileData != null && i < fileData.Length ? fileData[i] : null;
var dataDescriptor = dataDescriptors != null && i < dataDescriptors.Length ? dataDescriptors[i] : null;
var zip64DataDescriptor = zip64DataDescriptors != null && i < zip64DataDescriptors.Length ? zip64DataDescriptors[i] : null;
Print(builder, localFileHeader, encryptionHeader, fileDatum, dataDescriptor, zip64DataDescriptor, i);
}
builder.AppendLine();
}
private static void Print(StringBuilder builder,
LocalFileHeader localFileHeader,
byte[]? encryptionHeader,
byte[]? fileData,
DataDescriptor? dataDescriptor,
DataDescriptor64? zip64DataDescriptor,
int index)
{
builder.AppendLine($" Local File Entry {index}");
if (localFileHeader == null)
{
builder.AppendLine(" [NULL]");
return;
}
builder.AppendLine(localFileHeader.Signature, " [Local File Header] Signature");
builder.AppendLine(localFileHeader.Version, " [Local File Header] Version");
builder.AppendLine($" [Local File Header] Flags: {localFileHeader.Flags} (0x{localFileHeader.Flags:X})");
builder.AppendLine($" [Local File Header] Compression method: {localFileHeader.CompressionMethod} (0x{localFileHeader.CompressionMethod:X})");
builder.AppendLine(localFileHeader.LastModifedFileTime, " [Local File Header] Last modified file time"); // TODO: Parse from MS-DOS
builder.AppendLine(localFileHeader.LastModifiedFileDate, " [Local File Header] Last modified file date"); // TODO: Parse from MS-DOS
builder.AppendLine(localFileHeader.CRC32, " [Local File Header] CRC-32");
builder.AppendLine(localFileHeader.CompressedSize, " [Local File Header] Compressed size");
builder.AppendLine(localFileHeader.UncompressedSize, " [Local File Header] Uncompressed size");
builder.AppendLine(localFileHeader.FileNameLength, " [Local File Header] File name length");
builder.AppendLine(localFileHeader.ExtraFieldLength, " [Local File Header] Extra field length");
builder.AppendLine(localFileHeader.FileName, " [Local File Header] File name");
builder.AppendLine(localFileHeader.ExtraField, " [Local File Header] Extra field");
if (encryptionHeader == null)
{
builder.AppendLine(" [Encryption Header]: [NULL]");
}
else
{
builder.AppendLine(encryptionHeader.Length, " [Encryption Header] Length");
builder.AppendLine(encryptionHeader, " [Encryption Header] Data");
}
if (fileData == null)
{
builder.AppendLine(" [File Data]: [NULL]");
}
else
{
builder.AppendLine(fileData.Length, " [File Data] Length");
//builder.AppendLine(fileData, " [File Data] Data");
}
if (dataDescriptor == null)
{
builder.AppendLine(" [Data Descriptor]: [NULL]");
}
else
{
builder.AppendLine(dataDescriptor.Signature, " [Data Descriptor] Signature");
builder.AppendLine(dataDescriptor.CRC32, " [Data Descriptor] CRC-32");
builder.AppendLine(dataDescriptor.CompressedSize, " [Data Descriptor] Compressed size");
builder.AppendLine(dataDescriptor.UncompressedSize, " [Data Descriptor] Uncompressed size");
}
if (zip64DataDescriptor == null)
{
builder.AppendLine(" [ZIP64 Data Descriptor]: [NULL]");
}
else
{
builder.AppendLine(zip64DataDescriptor.Signature, " [ZIP64 Data Descriptor] Signature");
builder.AppendLine(zip64DataDescriptor.CRC32, " [ZIP64 Data Descriptor] CRC-32");
builder.AppendLine(zip64DataDescriptor.CompressedSize, " [ZIP64 Data Descriptor] Compressed size");
builder.AppendLine(zip64DataDescriptor.UncompressedSize, " [ZIP64 Data Descriptor] Uncompressed size");
}
}
}
}
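
A minimal usage sketch for the new printer, assuming the PKZIP model types expose plain settable auto-properties; only the end of central directory record is populated here, so every other section prints its "missing" message:

using System;
using System.Text;
using SabreTools.Models.PKZIP;

var archive = new Archive
{
    EndOfCentralDirectoryRecord = new EndOfCentralDirectoryRecord
    {
        Signature = 0x06054B50,
        TotalEntries = 0,
        FileComment = "example",
    },
};

var builder = new StringBuilder();
SabreTools.Serialization.Printers.PKZIP.Print(builder, archive);
Console.WriteLine(builder.ToString());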

View File

@@ -254,7 +254,7 @@ namespace SabreTools.Serialization.Printers
}
if (header.DelayImportDescriptor != null)
{
builder.AppendLine(" Delay Import Descriptior (14)");
builder.AppendLine(" Delay Import Descriptor (14)");
builder.AppendLine(header.DelayImportDescriptor.VirtualAddress, " Virtual address");
builder.AppendLine(header.DelayImportDescriptor.VirtualAddress.ConvertVirtualAddress(table), " Physical address");
builder.AppendLine(header.DelayImportDescriptor.Size, " Size");
@@ -964,7 +964,7 @@ namespace SabreTools.Serialization.Printers
var newTypes = new List<object>(types ?? []);
if (entry.Name?.UnicodeString != null)
newTypes.Add(Encoding.UTF8.GetString(entry.Name.UnicodeString));
newTypes.Add(Encoding.Unicode.GetString(entry.Name.UnicodeString));
else
newTypes.Add(entry.IntegerID);
@@ -1120,79 +1120,67 @@ namespace SabreTools.Serialization.Printers
if (menu.MenuHeader != null)
{
builder.AppendLine(menu.MenuHeader.Version, $"{padding}Version");
builder.AppendLine(menu.MenuHeader.HeaderSize, $"{padding}Header size");
if (menu.MenuHeader is NormalMenuHeader normalMenuHeader)
{
builder.AppendLine(normalMenuHeader.Version, $"{padding}Version");
builder.AppendLine(normalMenuHeader.HeaderSize, $"{padding}Header size");
}
else if (menu.MenuHeader is MenuHeaderExtended menuHeaderExtended)
{
builder.AppendLine(menuHeaderExtended.Version, $"{padding}Version");
builder.AppendLine(menuHeaderExtended.Offset, $"{padding}Offset");
builder.AppendLine(menuHeaderExtended.HelpID, $"{padding}Help ID");
}
else
{
builder.AppendLine($"{padding}Menu header found, but malformed");
}
builder.AppendLine();
builder.AppendLine($"{padding}Menu items");
builder.AppendLine($"{padding}-------------------------");
if (menu.MenuItems == null || menu.MenuItems.Length == 0)
{
builder.AppendLine($"{padding}No menu items");
return;
}
for (int i = 0; i < menu.MenuItems.Length; i++)
{
var menuItem = menu.MenuItems[i];
builder.AppendLine($"{padding}Menu item {i}");
if (menuItem == null)
{
builder.AppendLine($"{padding} [NULL]");
continue;
}
if (menuItem is NormalMenuItem normalMenuItem)
{
builder.AppendLine($"{padding} Resource info: {normalMenuItem.NormalResInfo} (0x{normalMenuItem.NormalResInfo:X})");
builder.AppendLine(normalMenuItem.NormalMenuText, $"{padding} Menu text");
}
else if (menuItem is PopupMenuItem popupMenuItem)
{
builder.AppendLine($"{padding} Item type: {popupMenuItem.PopupItemType} (0x{popupMenuItem.PopupItemType:X})");
builder.AppendLine($"{padding} State: {popupMenuItem.PopupState} (0x{popupMenuItem.PopupState:X})");
builder.AppendLine(popupMenuItem.PopupID, $"{padding} ID");
builder.AppendLine($"{padding} Resource info: {popupMenuItem.PopupResInfo} (0x{popupMenuItem.PopupResInfo:X})");
builder.AppendLine(popupMenuItem.PopupMenuText, $"{padding} Menu text");
}
}
}
else if (menu.ExtendedMenuHeader != null)
builder.AppendLine($"{padding}Menu items");
builder.AppendLine($"{padding}-------------------------");
if (menu.MenuItems == null || menu.MenuItems.Length == 0)
{
builder.AppendLine(menu.ExtendedMenuHeader.Version, $"{padding}Version");
builder.AppendLine(menu.ExtendedMenuHeader.Offset, $"{padding}Offset");
builder.AppendLine(menu.ExtendedMenuHeader.HelpID, $"{padding}Help ID");
builder.AppendLine();
builder.AppendLine($"{padding}Menu items");
builder.AppendLine($"{padding}-------------------------");
if (menu.ExtendedMenuHeader.Offset == 0
|| menu.ExtendedMenuItems == null
|| menu.ExtendedMenuItems.Length == 0)
{
builder.AppendLine($"{padding}No menu items");
return;
}
for (int i = 0; i < menu.ExtendedMenuItems.Length; i++)
{
var menuItem = menu.ExtendedMenuItems[i];
builder.AppendLine($"{padding}Menu item {i}");
if (menuItem == null)
{
builder.AppendLine($"{padding} [NULL]");
continue;
}
builder.AppendLine($"{padding} Item type: {menuItem.ItemType} (0x{menuItem.ItemType:X})");
builder.AppendLine($"{padding} State: {menuItem.State} (0x{menuItem.State:X})");
builder.AppendLine(menuItem.ID, $"{padding} ID");
builder.AppendLine($"{padding} Flags: {menuItem.Flags} (0x{menuItem.Flags:X})");
builder.AppendLine(menuItem.MenuText, $"{padding} Menu text");
}
builder.AppendLine($"{padding}No menu items");
return;
}
else
for (int i = 0; i < menu.MenuItems.Length; i++)
{
builder.AppendLine($"{padding}Menu resource found, but malformed");
var menuItem = menu.MenuItems[i];
builder.AppendLine($"{padding}Menu item {i}");
if (menuItem == null)
{
builder.AppendLine($"{padding} [NULL]");
continue;
}
if (menuItem is NormalMenuItem normalMenuItem)
{
builder.AppendLine($"{padding} Resource info: {normalMenuItem.NormalResInfo} (0x{normalMenuItem.NormalResInfo:X})");
builder.AppendLine(normalMenuItem.NormalMenuText, $"{padding} Menu text");
}
else if (menuItem is PopupMenuItem popupMenuItem)
{
builder.AppendLine($"{padding} Item type: {popupMenuItem.PopupItemType} (0x{popupMenuItem.PopupItemType:X})");
builder.AppendLine($"{padding} State: {popupMenuItem.PopupState} (0x{popupMenuItem.PopupState:X})");
builder.AppendLine(popupMenuItem.PopupID, $"{padding} ID");
builder.AppendLine($"{padding} Resource info: {popupMenuItem.PopupResInfo} (0x{popupMenuItem.PopupResInfo:X})");
builder.AppendLine(popupMenuItem.PopupMenuText, $"{padding} Menu text");
}
else if (menuItem is MenuItemExtended menuItemExtended)
{
builder.AppendLine($"{padding} Item type: {menuItemExtended.ItemType} (0x{menuItemExtended.ItemType:X})");
builder.AppendLine($"{padding} State: {menuItemExtended.State} (0x{menuItemExtended.State:X})");
builder.AppendLine(menuItemExtended.ID, $"{padding} ID");
builder.AppendLine($"{padding} Flags: {menuItemExtended.Flags} (0x{menuItemExtended.Flags:X})");
builder.AppendLine(menuItemExtended.MenuText, $"{padding} Menu text");
}
else
{
builder.AppendLine($"{padding} Menu item found, but malformed");
}
}
}

View File

@@ -1,14 +1,16 @@
<Project Sdk="Microsoft.NET.Sdk">
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<!-- Assembly Properties -->
<TargetFrameworks>net20;net35;net40;net452;net462;net472;net48;netcoreapp3.1;net5.0;net6.0;net7.0;net8.0</TargetFrameworks>
<RuntimeIdentifiers>win-x86;win-x64;win-arm64;linux-x64;linux-arm64;osx-x64;osx-arm64</RuntimeIdentifiers>
<AllowUnsafeBlocks>true</AllowUnsafeBlocks>
<CheckEolTargetFramework>false</CheckEolTargetFramework>
<IncludeSourceRevisionInInformationalVersion>false</IncludeSourceRevisionInInformationalVersion>
<LangVersion>latest</LangVersion>
<Nullable>enable</Nullable>
<SuppressTfmSupportBuildWarnings>true</SuppressTfmSupportBuildWarnings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<Version>1.6.0</Version>
<Version>1.6.9</Version>
<!-- Package Properties -->
<Authors>Matt Nadareski</Authors>
@@ -22,16 +24,30 @@
<PackageLicenseExpression>MIT</PackageLicenseExpression>
</PropertyGroup>
<!-- Support All Frameworks -->
<PropertyGroup Condition="$(TargetFramework.StartsWith(`net2`)) OR $(TargetFramework.StartsWith(`net3`)) OR $(TargetFramework.StartsWith(`net4`))">
<RuntimeIdentifiers>win-x86;win-x64</RuntimeIdentifiers>
</PropertyGroup>
<PropertyGroup Condition="$(TargetFramework.StartsWith(`netcoreapp`)) OR $(TargetFramework.StartsWith(`net5`))">
<RuntimeIdentifiers>win-x86;win-x64;win-arm64;linux-x64;linux-arm64;osx-x64</RuntimeIdentifiers>
</PropertyGroup>
<PropertyGroup Condition="$(TargetFramework.StartsWith(`net6`)) OR $(TargetFramework.StartsWith(`net7`)) OR $(TargetFramework.StartsWith(`net8`))">
<RuntimeIdentifiers>win-x86;win-x64;win-arm64;linux-x64;linux-arm64;osx-x64;osx-arm64</RuntimeIdentifiers>
</PropertyGroup>
<PropertyGroup Condition="$(RuntimeIdentifier.StartsWith(`osx-arm`))">
<TargetFrameworks>net6.0;net7.0;net8.0</TargetFrameworks>
</PropertyGroup>
<ItemGroup>
<None Include="../README.md" Pack="true" PackagePath="" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Newtonsoft.Json" Version="13.0.3" />
<PackageReference Include="SabreTools.ASN1" Version="1.3.2" />
<PackageReference Include="SabreTools.Hashing" Version="1.2.0" />
<PackageReference Include="SabreTools.IO" Version="1.4.5" />
<PackageReference Include="SabreTools.Models" Version="1.4.5" />
<PackageReference Include="SabreTools.ASN1" Version="1.3.3" />
<PackageReference Include="SabreTools.Hashing" Version="1.2.2" />
<PackageReference Include="SabreTools.IO" Version="1.4.13" />
<PackageReference Include="SabreTools.Models" Version="1.4.11" />
</ItemGroup>
</Project>
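
The per-framework PropertyGroup conditions above narrow RuntimeIdentifiers progressively: classic .NET Framework targets build only for Windows, early .NET Core and .NET 5 add Linux and Intel macOS, and .NET 6 through 8 add Apple Silicon, with osx-arm64 builds in turn restricted to .NET 6 and later.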

View File

@@ -31,7 +31,7 @@ namespace SabreTools.Serialization
public static StringBuilder AppendLine(this StringBuilder sb, sbyte? value, string prefixString)
{
value ??= 0;
string valueString = $"{value} (0x{value:X})";
string valueString = $"{value} (0x{value:X2})";
return sb.AppendLine($"{prefixString}: {valueString}");
}
@@ -41,7 +41,7 @@ namespace SabreTools.Serialization
public static StringBuilder AppendLine(this StringBuilder sb, byte? value, string prefixString)
{
value ??= 0;
string valueString = $"{value} (0x{value:X})";
string valueString = $"{value} (0x{value:X2})";
return sb.AppendLine($"{prefixString}: {valueString}");
}
@@ -51,7 +51,7 @@ namespace SabreTools.Serialization
public static StringBuilder AppendLine(this StringBuilder sb, short? value, string prefixString)
{
value ??= 0;
string valueString = $"{value} (0x{value:X})";
string valueString = $"{value} (0x{value:X4})";
return sb.AppendLine($"{prefixString}: {valueString}");
}
@@ -61,7 +61,7 @@ namespace SabreTools.Serialization
public static StringBuilder AppendLine(this StringBuilder sb, ushort? value, string prefixString)
{
value ??= 0;
string valueString = $"{value} (0x{value:X})";
string valueString = $"{value} (0x{value:X4})";
return sb.AppendLine($"{prefixString}: {valueString}");
}
@@ -71,7 +71,7 @@ namespace SabreTools.Serialization
public static StringBuilder AppendLine(this StringBuilder sb, int? value, string prefixString)
{
value ??= 0;
string valueString = $"{value} (0x{value:X})";
string valueString = $"{value} (0x{value:X8})";
return sb.AppendLine($"{prefixString}: {valueString}");
}
@@ -81,7 +81,7 @@ namespace SabreTools.Serialization
public static StringBuilder AppendLine(this StringBuilder sb, uint? value, string prefixString)
{
value ??= 0;
string valueString = $"{value} (0x{value:X})";
string valueString = $"{value} (0x{value:X8})";
return sb.AppendLine($"{prefixString}: {valueString}");
}
@@ -91,7 +91,7 @@ namespace SabreTools.Serialization
public static StringBuilder AppendLine(this StringBuilder sb, long? value, string prefixString)
{
value ??= 0;
string valueString = $"{value} (0x{value:X})";
string valueString = $"{value} (0x{value:X16})";
return sb.AppendLine($"{prefixString}: {valueString}");
}
@@ -101,7 +101,7 @@ namespace SabreTools.Serialization
public static StringBuilder AppendLine(this StringBuilder sb, ulong? value, string prefixString)
{
value ??= 0;
string valueString = $"{value} (0x{value:X})";
string valueString = $"{value} (0x{value:X16})";
return sb.AppendLine($"{prefixString}: {valueString}");
}
@@ -110,7 +110,6 @@ namespace SabreTools.Serialization
/// </summary>
public static StringBuilder AppendLine(this StringBuilder sb, string? value, string prefixString)
{
value ??= string.Empty;
string valueString = value ?? "[NULL]";
return sb.AppendLine($"{prefixString}: {valueString}");
}
@@ -205,5 +204,14 @@ namespace SabreTools.Serialization
string valueString = (value == null ? "[NULL]" : string.Join(", ", value.Select(u => u.ToString()).ToArray()));
return sb.AppendLine($"{prefixString}: {valueString}");
}
/// <summary>
/// Append a line containing a Guid[] value to a StringBuilder
/// </summary>
public static StringBuilder AppendLine(this StringBuilder sb, Guid[]? value, string prefixString)
{
string valueString = (value == null ? "[NULL]" : string.Join(", ", value.Select(g => g.ToString()).ToArray()));
return sb.AppendLine($"{prefixString}: {valueString}");
}
}
}
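
The formatting change above pads the hexadecimal portion to the natural width of each integer type instead of printing the minimal number of digits. A quick illustration of the difference:

using System;

byte b = 0x5;
ushort s = 0x5;
uint u = 0x5;
ulong l = 0x5;

// Previously every type printed "5 (0x5)"; with the width specifiers:
Console.WriteLine($"{b} (0x{b:X2})");   // 5 (0x05)
Console.WriteLine($"{s} (0x{s:X4})");   // 5 (0x0005)
Console.WriteLine($"{u} (0x{u:X8})");   // 5 (0x00000005)
Console.WriteLine($"{l} (0x{l:X16})");  // 5 (0x0000000000000005)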

View File

@@ -18,12 +18,12 @@ namespace SabreTools.Serialization.Wrappers
/// <summary>
/// Normal sector size in bytes
/// </summary>
public long SectorSize => (long)Math.Pow(2, this.Model.Header?.SectorShift ?? 0);
public long SectorSize => (long)Math.Pow(2, Model.Header?.SectorShift ?? 0);
/// <summary>
/// Mini sector size in bytes
/// </summary>
public long MiniSectorSize => (long)Math.Pow(2, this.Model.Header?.MiniSectorShift ?? 0);
public long MiniSectorSize => (long)Math.Pow(2, Model.Header?.MiniSectorShift ?? 0);
#endregion
@@ -101,7 +101,7 @@ namespace SabreTools.Serialization.Wrappers
public List<Models.CFB.SectorNumber?>? GetFATSectorChain(Models.CFB.SectorNumber? startingSector)
{
// If we have an invalid sector
if (startingSector == null || startingSector < 0 || this.Model.FATSectorNumbers == null || (long)startingSector >= this.Model.FATSectorNumbers.Length)
if (startingSector == null || startingSector < 0 || Model.FATSectorNumbers == null || (long)startingSector >= Model.FATSectorNumbers.Length)
return null;
// Setup the returned list
@@ -114,10 +114,10 @@ namespace SabreTools.Serialization.Wrappers
break;
// Get the next sector from the lookup table
var nextSector = this.Model.FATSectorNumbers[(uint)lastSector!.Value];
var nextSector = Model.FATSectorNumbers[(uint)lastSector!.Value];
// If we have an end of chain or free sector
if (nextSector == SabreTools.Models.CFB.SectorNumber.ENDOFCHAIN || nextSector == SabreTools.Models.CFB.SectorNumber.FREESECT)
if (nextSector == Models.CFB.SectorNumber.ENDOFCHAIN || nextSector == Models.CFB.SectorNumber.FREESECT)
break;
// Add the next sector to the list and replace the last sector
@@ -188,7 +188,7 @@ namespace SabreTools.Serialization.Wrappers
public List<Models.CFB.SectorNumber?>? GetMiniFATSectorChain(Models.CFB.SectorNumber? startingSector)
{
// If we have an invalid sector
if (startingSector == null || startingSector < 0 || this.Model.MiniFATSectorNumbers == null || (long)startingSector >= this.Model.MiniFATSectorNumbers.Length)
if (startingSector == null || startingSector < 0 || Model.MiniFATSectorNumbers == null || (long)startingSector >= Model.MiniFATSectorNumbers.Length)
return null;
// Setup the returned list
@@ -201,10 +201,10 @@ namespace SabreTools.Serialization.Wrappers
break;
// Get the next sector from the lookup table
var nextSector = this.Model.MiniFATSectorNumbers[(uint)lastSector!.Value];
var nextSector = Model.MiniFATSectorNumbers[(uint)lastSector!.Value];
// If we have an end of chain or free sector
if (nextSector == SabreTools.Models.CFB.SectorNumber.ENDOFCHAIN || nextSector == SabreTools.Models.CFB.SectorNumber.FREESECT)
if (nextSector == Models.CFB.SectorNumber.ENDOFCHAIN || nextSector == Models.CFB.SectorNumber.FREESECT)
break;
// Add the next sector to the list and replace the last sector

View File

@@ -0,0 +1,121 @@
using System.IO;
using SabreTools.Models.CHD;
namespace SabreTools.Serialization.Wrappers
{
public class CHD : WrapperBase<Header>
{
#region Descriptive Properties
/// <inheritdoc/>
public override string DescriptionString => "MAME Compressed Hunks of Data";
#endregion
#region Extension Properties
/// <summary>
/// Internal MD5 hash, if available
/// </summary>
public byte[]? MD5
{
get
{
return Model switch
{
HeaderV1 v1 => v1.MD5,
HeaderV2 v2 => v2.MD5,
HeaderV3 v3 => v3.MD5,
HeaderV4 v4 => null,
HeaderV5 v5 => null,
_ => null,
};
}
}
/// <summary>
/// Internal SHA1 hash, if available
/// </summary>
public byte[]? SHA1
{
get
{
return Model switch
{
HeaderV1 v1 => null,
HeaderV2 v2 => null,
HeaderV3 v3 => v3.SHA1,
HeaderV4 v4 => v4.SHA1,
HeaderV5 v5 => v5.SHA1,
_ => null,
};
}
}
#endregion
#region Constructors
/// <inheritdoc/>
public CHD(Header? model, byte[]? data, int offset)
: base(model, data, offset)
{
// All logic is handled by the base class
}
/// <inheritdoc/>
public CHD(Header? model, Stream? data)
: base(model, data)
{
// All logic is handled by the base class
}
/// <summary>
/// Create a CHD header from a byte array and offset
/// </summary>
/// <param name="data">Byte array representing the archive</param>
/// <param name="offset">Offset within the array to parse</param>
/// <returns>A CHD header wrapper on success, null on failure</returns>
public static CHD? Create(byte[]? data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and use that
var dataStream = new MemoryStream(data, offset, data.Length - offset);
return Create(dataStream);
}
/// <summary>
/// Create a CHD header from a Stream
/// </summary>
/// <param name="data">Stream representing the archive</param>
/// <returns>A CHD header on success, null on failure</returns>
public static CHD? Create(Stream? data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
return null;
var header = Deserializers.CHD.DeserializeStream(data);
if (header == null)
return null;
try
{
return new CHD(header, data);
}
catch
{
return null;
}
}
#endregion
}
}
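
A small usage sketch for the new wrapper; the file path is hypothetical:

using System;
using System.IO;
using SabreTools.Serialization.Wrappers;

using Stream stream = File.OpenRead("image.chd"); // hypothetical input
var chd = CHD.Create(stream);
if (chd == null)
{
    Console.WriteLine("Not a recognizable CHD file");
}
else
{
    Console.WriteLine(chd.DescriptionString);
    if (chd.SHA1 != null)
        Console.WriteLine($"SHA-1: {BitConverter.ToString(chd.SHA1).Replace("-", string.Empty)}");
}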

View File

@@ -1,4 +1,5 @@
using System.IO;
using SabreTools.Models.N3DS;
namespace SabreTools.Serialization.Wrappers
{
@@ -74,5 +75,36 @@ namespace SabreTools.Serialization.Wrappers
}
#endregion
// TODO: Hook these up for external use
#region Currently Unused Extensions
#region Ticket
/// <summary>
/// Denotes whether the ticket represents a demo or not
/// </summary>
public static bool IsDemo(Ticket? ticket)
{
if (ticket?.Limits == null || ticket.Limits.Length == 0)
return false;
return ticket.Limits[0] == 0x0004;
}
/// <summary>
/// Gets the max playcount for a demo
/// </summary>
public static uint PlayCount(Ticket ticket)
{
if (ticket?.Limits == null || ticket.Limits.Length == 0)
return 0;
return ticket.Limits[1];
}
#endregion
#endregion
}
}

View File

@@ -26,16 +26,16 @@ namespace SabreTools.Serialization.Wrappers
return _files;
// If we don't have a required property
if (this.Model.DirectoryEntries == null || this.Model.DirectoryMapEntries == null || this.Model.BlockEntries == null)
if (Model.DirectoryEntries == null || Model.DirectoryMapEntries == null || Model.BlockEntries == null)
return null;
// Otherwise, scan and build the files
var files = new List<FileInfo>();
for (int i = 0; i < this.Model.DirectoryEntries.Length; i++)
for (int i = 0; i < Model.DirectoryEntries.Length; i++)
{
// Get the directory entry
var directoryEntry = this.Model.DirectoryEntries[i];
var directoryMapEntry = this.Model.DirectoryMapEntries[i];
var directoryEntry = Model.DirectoryEntries[i];
var directoryMapEntry = Model.DirectoryMapEntries[i];
if (directoryEntry == null || directoryMapEntry == null)
continue;
@@ -57,26 +57,26 @@ namespace SabreTools.Serialization.Wrappers
Encrypted = directoryEntry.DirectoryFlags.HasFlag(Models.GCF.HL_GCF_FLAG.HL_GCF_FLAG_ENCRYPTED),
#endif
};
var pathParts = new List<string> { this.Model.DirectoryNames![directoryEntry.NameOffset] ?? string.Empty };
var pathParts = new List<string> { Model.DirectoryNames![directoryEntry.NameOffset] ?? string.Empty };
var blockEntries = new List<Models.GCF.BlockEntry?>();
// Traverse the parent tree
uint index = directoryEntry.ParentIndex;
while (index != 0xFFFFFFFF)
{
var parentDirectoryEntry = this.Model.DirectoryEntries[index];
var parentDirectoryEntry = Model.DirectoryEntries[index];
if (parentDirectoryEntry == null)
break;
pathParts.Add(this.Model.DirectoryNames![parentDirectoryEntry.NameOffset] ?? string.Empty);
pathParts.Add(Model.DirectoryNames![parentDirectoryEntry.NameOffset] ?? string.Empty);
index = parentDirectoryEntry.ParentIndex;
}
// Traverse the block entries
index = directoryMapEntry.FirstBlockIndex;
while (index != this.Model.DataBlockHeader?.BlockCount)
while (index != Model.DataBlockHeader?.BlockCount)
{
var nextBlock = this.Model.BlockEntries[index];
var nextBlock = Model.BlockEntries[index];
if (nextBlock == null)
break;
@@ -134,14 +134,14 @@ namespace SabreTools.Serialization.Wrappers
return _dataBlockOffsets;
// If we don't have a block count, offset, or size
if (this.Model.DataBlockHeader?.BlockCount == null || this.Model.DataBlockHeader?.FirstBlockOffset == null || this.Model.DataBlockHeader?.BlockSize == null)
if (Model.DataBlockHeader?.BlockCount == null || Model.DataBlockHeader?.FirstBlockOffset == null || Model.DataBlockHeader?.BlockSize == null)
return null;
// Otherwise, build the data block set
_dataBlockOffsets = new long[this.Model.DataBlockHeader.BlockCount];
for (int i = 0; i < this.Model.DataBlockHeader.BlockCount; i++)
_dataBlockOffsets = new long[Model.DataBlockHeader.BlockCount];
for (int i = 0; i < Model.DataBlockHeader.BlockCount; i++)
{
long dataBlockOffset = this.Model.DataBlockHeader.FirstBlockOffset + (i * this.Model.DataBlockHeader.BlockSize);
long dataBlockOffset = Model.DataBlockHeader.FirstBlockOffset + (i * Model.DataBlockHeader.BlockSize);
_dataBlockOffsets[i] = dataBlockOffset;
}

View File

@@ -53,7 +53,7 @@ namespace SabreTools.Serialization.Wrappers
{
get
{
uint majorVersion = this.Model.CommonHeader?.Version ?? 0;
uint majorVersion = Model.CommonHeader?.Version ?? 0;
if (majorVersion >> 24 == 1)
{
majorVersion = (majorVersion >> 12) & 0x0F;
@@ -177,7 +177,7 @@ namespace SabreTools.Serialization.Wrappers
/// Get the directory index for the given file index
/// </summary>
/// <returns>Directory index if found, UInt32.MaxValue on error</returns>
public uint GetFileDirectoryIndex(int index)
public uint GetDirectoryIndexFromFile(int index)
{
FileDescriptor? descriptor = GetFileDescriptor(index);
if (descriptor != null)
@@ -293,23 +293,43 @@ namespace SabreTools.Serialization.Wrappers
}
/// <summary>
/// Get the file group name at a given index, if possible
/// Get the file group for the given file index, if possible
/// </summary>
public string? GetFileGroupName(int index)
public FileGroup? GetFileGroupFromFile(int index)
{
if (Model.FileGroups == null)
return null;
if (index < 0 || index >= Model.FileGroups.Length)
if (index < 0 || index >= FileCount)
return null;
var fileGroup = Model.FileGroups[index];
if (fileGroup == null)
return null;
for (int i = 0; i < FileGroupCount; i++)
{
var fileGroup = GetFileGroup(i);
if (fileGroup == null)
continue;
return fileGroup.Name;
if (fileGroup.FirstFile > index || fileGroup.LastFile < index)
continue;
return fileGroup;
}
return null;
}
/// <summary>
/// Get the file group name at a given index, if possible
/// </summary>
public string? GetFileGroupName(int index)
=> GetFileGroup(index)?.Name;
/// <summary>
/// Get the file group name at a given file index, if possible
/// </summary>
public string? GetFileGroupNameFromFile(int index)
=> GetFileGroupFromFile(index)?.Name;
#endregion
}
}
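A short usage sketch of the new file-to-group lookups. The wrapper class name (InstallShieldCabinet) and the variable names are assumptions for illustration; the members called (FileCount, GetDirectoryIndexFromFile, GetFileGroupNameFromFile) are the ones defined above.

using System;
using SabreTools.Serialization.Wrappers;

// Illustration only: map every file index to its directory index and file group name
static void PrintFileGroups(InstallShieldCabinet cab)
{
    for (int i = 0; i < cab.FileCount; i++)
    {
        uint directoryIndex = cab.GetDirectoryIndexFromFile(i);
        string? groupName = cab.GetFileGroupNameFromFile(i);
        Console.WriteLine($"File {i}: directory={directoryIndex}, group={groupName ?? "<none>"}");
    }
}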

View File

@@ -124,11 +124,11 @@ namespace SabreTools.Serialization.Wrappers
public DateTime? GetDateTime(int fileIndex)
{
// If we have an invalid file index
if (fileIndex < 0 || this.Model.Files == null || fileIndex >= this.Model.Files.Length)
if (fileIndex < 0 || Model.Files == null || fileIndex >= Model.Files.Length)
return null;
// Get the file header
var file = this.Model.Files[fileIndex];
var file = Model.Files[fileIndex];
if (file == null)
return null;

View File

@@ -1,4 +1,6 @@
using System;
using System.IO;
using SabreTools.Models.N3DS;
namespace SabreTools.Serialization.Wrappers
{
@@ -74,5 +76,178 @@ namespace SabreTools.Serialization.Wrappers
}
#endregion
// TODO: Hook these up for external use
#region Currently Unused Extensions
#region ExeFSFileHeader
/// <summary>
/// Determines if a file header represents a CODE block
/// </summary>
public static bool IsCodeBinary(ExeFSFileHeader? header)
{
if (header == null)
return false;
return header.FileName == ".code\0\0\0";
}
#endregion
#region NCCHHeaderFlags
/// <summary>
/// Get if the NoCrypto bit is set
/// </summary>
public static bool PossiblyDecrypted(NCCHHeaderFlags flags)
{
if (flags == null)
return false;
#if NET20 || NET35
return (flags.BitMasks & BitMasks.NoCrypto) != 0;
#else
return flags.BitMasks.HasFlag(BitMasks.NoCrypto);
#endif
}
#endregion
#region NCSDHeader
/// <summary>
/// Partition table entry for Executable Content (CXI)
/// </summary>
public static PartitionTableEntry? ExecutableContent(NCSDHeader? header)
{
if (header?.PartitionsTable == null)
return null;
return header.PartitionsTable[0];
}
/// <summary>
/// Partition table entry for E-Manual (CFA)
/// </summary>
public static PartitionTableEntry? EManual(NCSDHeader? header)
{
if (header?.PartitionsTable == null)
return null;
return header.PartitionsTable[1];
}
/// <summary>
/// Partition table entry for Download Play Child container (CFA)
/// </summary>
public static PartitionTableEntry? DownloadPlayChildContainer(NCSDHeader? header)
{
if (header?.PartitionsTable == null)
return null;
return header.PartitionsTable[2];
}
/// <summary>
/// Partition table entry for New3DS Update Data (CFA)
/// </summary>
public static PartitionTableEntry? New3DSUpdateData(NCSDHeader? header)
{
if (header?.PartitionsTable == null)
return null;
return header.PartitionsTable[6];
}
/// <summary>
/// Partition table entry for Update Data (CFA)
/// </summary>
public static PartitionTableEntry? UpdateData(NCSDHeader? header)
{
if (header?.PartitionsTable == null)
return null;
return header.PartitionsTable[7];
}
/// <summary>
/// Backup Write Wait Time (The time to wait to write save to backup after the card is recognized (0-255
/// seconds)). NATIVE_FIRM loads this flag from the gamecard NCSD header starting with 6.0.0-11.
/// </summary>
public static byte BackupWriteWaitTime(NCSDHeader? header)
{
if (header?.PartitionFlags == null)
return default;
return header.PartitionFlags[(int)NCSDFlags.BackupWriteWaitTime];
}
/// <summary>
/// Media Card Device (1 = NOR Flash, 2 = None, 3 = BT) (SDK 3.X+)
/// </summary>
public static MediaCardDeviceType MediaCardDevice3X(NCSDHeader? header)
{
if (header?.PartitionFlags == null)
return default;
return (MediaCardDeviceType)header.PartitionFlags[(int)NCSDFlags.MediaCardDevice3X];
}
/// <summary>
/// Media Platform Index (1 = CTR)
/// </summary>
public static MediaPlatformIndex MediaPlatformIndex(NCSDHeader? header)
{
if (header?.PartitionFlags == null)
return default;
return (MediaPlatformIndex)header.PartitionFlags[(int)NCSDFlags.MediaPlatformIndex];
}
/// <summary>
/// Media Type Index (0 = Inner Device, 1 = Card1, 2 = Card2, 3 = Extended Device)
/// </summary>
public static MediaTypeIndex MediaTypeIndex(NCSDHeader? header)
{
if (header?.PartitionFlags == null)
return default;
return (MediaTypeIndex)header.PartitionFlags[(int)NCSDFlags.MediaTypeIndex];
}
/// <summary>
/// Media Unit Size i.e. u32 MediaUnitSize = 0x200*2^flags[6];
/// </summary>
public static uint MediaUnitSize(Cart cart)
{
return MediaUnitSize(cart.Header);
}
/// <summary>
/// Media Unit Size i.e. u32 MediaUnitSize = 0x200*2^flags[6];
/// </summary>
public static uint MediaUnitSize(NCSDHeader? header)
{
if (header?.PartitionFlags == null)
return default;
return (uint)(0x200 * Math.Pow(2, header.PartitionFlags[(int)NCSDFlags.MediaUnitSize]));
}
/// <summary>
/// Media Card Device (1 = NOR Flash, 2 = None, 3 = BT) (Only SDK 2.X)
/// </summary>
public static MediaCardDeviceType MediaCardDevice2X(NCSDHeader? header)
{
if (header?.PartitionFlags == null)
return default;
return (MediaCardDeviceType)header.PartitionFlags[(int)NCSDFlags.MediaCardDevice2X];
}
#endregion
#endregion
}
}
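A worked example of the media unit size formula above (0x200 * 2^flags[6]); the exponent value is hypothetical, and the arithmetic mirrors the static MediaUnitSize helper defined in this file.

using System;

byte exponent = 2; // hypothetical flags[(int)NCSDFlags.MediaUnitSize] value
uint unitSize = (uint)(0x200 * Math.Pow(2, exponent)); // 0x200 * 4 = 0x800 (2048 bytes)
Console.WriteLine($"Media unit size: 0x{unitSize:X} ({unitSize} bytes)");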

View File

@@ -0,0 +1,79 @@
using System.IO;
using SabreTools.Models.PKZIP;
namespace SabreTools.Serialization.Wrappers
{
public class PKZIP : WrapperBase<Archive>
{
#region Descriptive Properties
/// <inheritdoc/>
public override string DescriptionString => "PKZIP Archive (or Derived Format)";
#endregion
#region Constructors
/// <inheritdoc/>
public PKZIP(Archive? model, byte[]? data, int offset)
: base(model, data, offset)
{
// All logic is handled by the base class
}
/// <inheritdoc/>
public PKZIP(Archive? model, Stream? data)
: base(model, data)
{
// All logic is handled by the base class
}
/// <summary>
/// Create a PKZIP archive (or derived format) from a byte array and offset
/// </summary>
/// <param name="data">Byte array representing the archive</param>
/// <param name="offset">Offset within the array to parse</param>
/// <returns>A PKZIP wrapper on success, null on failure</returns>
public static PKZIP? Create(byte[]? data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and use that
var dataStream = new MemoryStream(data, offset, data.Length - offset);
return Create(dataStream);
}
/// <summary>
/// Create a PKZIP archive (or derived format) from a Stream
/// </summary>
/// <param name="data">Stream representing the archive</param>
/// <returns>A PKZIP wrapper on success, null on failure</returns>
public static PKZIP? Create(Stream? data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
return null;
var archive = Deserializers.PKZIP.DeserializeStream(data);
if (archive == null)
return null;
try
{
return new PKZIP(archive, data);
}
catch
{
return null;
}
}
#endregion
}
}
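A minimal usage sketch for the new wrapper; the file path is a placeholder and error handling is omitted.

using System;
using System.IO;
using SabreTools.Serialization.Wrappers;

using var stream = File.OpenRead("example.zip"); // placeholder path
var pkzip = PKZIP.Create(stream);
if (pkzip != null)
    Console.WriteLine(pkzip.DescriptionString); // "PKZIP Archive (or Derived Format)"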

View File

@@ -33,19 +33,26 @@ namespace SabreTools.Serialization.Wrappers
// TODO: Don't scan the known header data as well
// If the section table is missing
if (this.Model.SectionTable == null)
return null;
// If any required pieces are missing
if (Model.Stub?.Header == null)
return [];
if (Model.SectionTable == null)
return [];
// Populate the raw header padding data based on the source
uint headerStartAddress = this.Model.Stub?.Header?.NewExeHeaderAddr ?? 0;
uint firstSectionAddress = this.Model.SectionTable
uint headerStartAddress = Model.Stub.Header.NewExeHeaderAddr;
uint firstSectionAddress = Model.SectionTable
.Select(s => s?.PointerToRawData ?? 0)
.Where(s => s != 0)
.Where(s => s != 0 && s >= headerStartAddress)
.OrderBy(s => s)
.First();
.FirstOrDefault();
// Check if the header length is more than 0 before reading data
int headerLength = (int)(firstSectionAddress - headerStartAddress);
_headerPaddingData = ReadFromDataSource((int)headerStartAddress, headerLength);
if (headerLength <= 0)
_headerPaddingData = [];
else
_headerPaddingData = ReadFromDataSource((int)headerStartAddress, headerLength);
// Cache and return the header padding data, even if null
return _headerPaddingData;
@@ -68,19 +75,26 @@ namespace SabreTools.Serialization.Wrappers
// TODO: Don't scan the known header data as well
// If the section table is missing
if (this.Model.SectionTable == null)
return null;
// If any required pieces are missing
if (Model.Stub?.Header == null)
return [];
if (Model.SectionTable == null)
return [];
// Populate the raw header padding data based on the source
uint headerStartAddress = this.Model.Stub?.Header?.NewExeHeaderAddr ?? 0;
uint firstSectionAddress = this.Model.SectionTable
// Populate the header padding strings based on the source
uint headerStartAddress = Model.Stub.Header.NewExeHeaderAddr;
uint firstSectionAddress = Model.SectionTable
.Select(s => s?.PointerToRawData ?? 0)
.Where(s => s != 0)
.Where(s => s != 0 && s >= headerStartAddress)
.OrderBy(s => s)
.First();
.FirstOrDefault();
// Check if the header length is more than 0 before reading strings
int headerLength = (int)(firstSectionAddress - headerStartAddress);
_headerPaddingStrings = ReadStringsFromDataSource((int)headerStartAddress, headerLength, charLimit: 3);
if (headerLength <= 0)
_headerPaddingStrings = [];
else
_headerPaddingStrings = ReadStringsFromDataSource((int)headerStartAddress, headerLength, charLimit: 3);
// Cache and return the header padding data, even if null
return _headerPaddingStrings;
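A worked example of the span the rewritten code computes, with hypothetical addresses: the region starts at NewExeHeaderAddr and ends at the smallest nonzero section raw-data pointer at or past it, and a read only happens when the resulting length is positive.

using System;
using System.Linq;

uint headerStart = 0x100;                       // hypothetical NewExeHeaderAddr
uint[] rawPointers = { 0x0, 0x400, 0x600 };     // hypothetical PointerToRawData values
uint firstSection = rawPointers
    .Where(p => p != 0 && p >= headerStart)
    .OrderBy(p => p)
    .FirstOrDefault();                          // 0x400
int length = (int)(firstSection - headerStart); // 0x300 bytes to read (skipped if <= 0)
Console.WriteLine($"read {length} bytes at 0x{headerStart:X}");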
@@ -98,21 +112,21 @@ namespace SabreTools.Serialization.Wrappers
lock (_sourceDataLock)
{
// If the section table is missing
if (this.Model.SectionTable == null)
if (Model.SectionTable == null)
return null;
// If the address is missing
if (this.Model.OptionalHeader?.AddressOfEntryPoint == null)
if (Model.OptionalHeader?.AddressOfEntryPoint == null)
return null;
// If we have no entry point
int entryPointAddress = (int)this.Model.OptionalHeader.AddressOfEntryPoint.ConvertVirtualAddress(this.Model.SectionTable);
int entryPointAddress = (int)Model.OptionalHeader.AddressOfEntryPoint.ConvertVirtualAddress(Model.SectionTable);
if (entryPointAddress == 0)
return null;
// If the entry point matches with the start of a section, use that
int entryPointSection = FindEntryPointSectionIndex();
if (entryPointSection >= 0 && this.Model.OptionalHeader.AddressOfEntryPoint == this.Model.SectionTable[entryPointSection]?.VirtualAddress)
if (entryPointSection >= 0 && Model.OptionalHeader.AddressOfEntryPoint == Model.SectionTable[entryPointSection]?.VirtualAddress)
return GetSectionData(entryPointSection);
// If we already have cached data, just use that immediately
@@ -148,27 +162,27 @@ namespace SabreTools.Serialization.Wrappers
return -1;
// If the section table is missing
if (this.Model.SectionTable == null)
if (Model.SectionTable == null)
return -1;
// If we have certificate data, use that as the end
if (this.Model.OptionalHeader?.CertificateTable != null)
if (Model.OptionalHeader?.CertificateTable != null)
{
int certificateTableAddress = (int)this.Model.OptionalHeader.CertificateTable.VirtualAddress.ConvertVirtualAddress(this.Model.SectionTable);
int certificateTableAddress = (int)Model.OptionalHeader.CertificateTable.VirtualAddress.ConvertVirtualAddress(Model.SectionTable);
if (certificateTableAddress != 0 && certificateTableAddress < endOfFile)
endOfFile = certificateTableAddress;
}
// Search through all sections and find the furthest a section goes
int endOfSectionData = -1;
foreach (var section in this.Model.SectionTable)
foreach (var section in Model.SectionTable)
{
// If we have an invalid section
if (section == null)
continue;
// If we have an invalid section address
int sectionAddress = (int)section.VirtualAddress.ConvertVirtualAddress(this.Model.SectionTable);
int sectionAddress = (int)section.VirtualAddress.ConvertVirtualAddress(Model.SectionTable);
if (sectionAddress == 0)
continue;
@@ -219,27 +233,27 @@ namespace SabreTools.Serialization.Wrappers
return null;
// If the section table is missing
if (this.Model.SectionTable == null)
if (Model.SectionTable == null)
return null;
// If we have certificate data, use that as the end
if (this.Model.OptionalHeader?.CertificateTable != null)
if (Model.OptionalHeader?.CertificateTable != null)
{
int certificateTableAddress = (int)this.Model.OptionalHeader.CertificateTable.VirtualAddress.ConvertVirtualAddress(this.Model.SectionTable);
int certificateTableAddress = (int)Model.OptionalHeader.CertificateTable.VirtualAddress.ConvertVirtualAddress(Model.SectionTable);
if (certificateTableAddress != 0 && certificateTableAddress < endOfFile)
endOfFile = certificateTableAddress;
}
// Search through all sections and find the furthest a section goes
int endOfSectionData = -1;
foreach (var section in this.Model.SectionTable)
foreach (var section in Model.SectionTable)
{
// If we have an invalid section
if (section == null)
continue;
// If we have an invalid section address
int sectionAddress = (int)section.VirtualAddress.ConvertVirtualAddress(this.Model.SectionTable);
int sectionAddress = (int)section.VirtualAddress.ConvertVirtualAddress(Model.SectionTable);
if (sectionAddress == 0)
continue;
@@ -297,27 +311,27 @@ namespace SabreTools.Serialization.Wrappers
return null;
// If the section table is missing
if (this.Model.SectionTable == null)
if (Model.SectionTable == null)
return null;
// If we have certificate data, use that as the end
if (this.Model.OptionalHeader?.CertificateTable != null)
if (Model.OptionalHeader?.CertificateTable != null)
{
int certificateTableAddress = (int)this.Model.OptionalHeader.CertificateTable.VirtualAddress.ConvertVirtualAddress(this.Model.SectionTable);
int certificateTableAddress = (int)Model.OptionalHeader.CertificateTable.VirtualAddress.ConvertVirtualAddress(Model.SectionTable);
if (certificateTableAddress != 0 && certificateTableAddress < endOfFile)
endOfFile = certificateTableAddress;
}
// Search through all sections and find the furthest a section goes
int endOfSectionData = -1;
foreach (var section in this.Model.SectionTable)
foreach (var section in Model.SectionTable)
{
// If we have an invalid section
if (section == null)
continue;
// If we have an invalid section address
int sectionAddress = (int)section.VirtualAddress.ConvertVirtualAddress(this.Model.SectionTable);
int sectionAddress = (int)section.VirtualAddress.ConvertVirtualAddress(Model.SectionTable);
if (sectionAddress == 0)
continue;
@@ -370,14 +384,14 @@ namespace SabreTools.Serialization.Wrappers
return _sectionNames;
// If there are no sections
if (this.Model.SectionTable == null)
if (Model.SectionTable == null)
return null;
// Otherwise, build and return the cached array
_sectionNames = new string[this.Model.SectionTable.Length];
_sectionNames = new string[Model.SectionTable.Length];
for (int i = 0; i < _sectionNames.Length; i++)
{
var section = this.Model.SectionTable[i];
var section = Model.SectionTable[i];
if (section == null)
continue;
@@ -408,12 +422,12 @@ namespace SabreTools.Serialization.Wrappers
if (_stubExecutableData != null)
return _stubExecutableData;
if (this.Model.Stub?.Header?.NewExeHeaderAddr == null)
if (Model.Stub?.Header?.NewExeHeaderAddr == null)
return null;
// Populate the raw stub executable data based on the source
int endOfStubHeader = 0x40;
int lengthOfStubExecutableData = (int)this.Model.Stub.Header.NewExeHeaderAddr - endOfStubHeader;
int lengthOfStubExecutableData = (int)Model.Stub.Header.NewExeHeaderAddr - endOfStubHeader;
_stubExecutableData = ReadFromDataSource(endOfStubHeader, lengthOfStubExecutableData);
// Cache and return the stub executable data, even if null
@@ -436,8 +450,8 @@ namespace SabreTools.Serialization.Wrappers
return _debugData;
// If we have no debug table, just return
if (this.Model.DebugTable?.DebugDirectoryTable == null
|| this.Model.DebugTable.DebugDirectoryTable.Length == 0)
if (Model.DebugTable?.DebugDirectoryTable == null
|| Model.DebugTable.DebugDirectoryTable.Length == 0)
return null;
// Otherwise, build and return the cached dictionary
@@ -461,13 +475,13 @@ namespace SabreTools.Serialization.Wrappers
return _resourceData;
// If we have no resource table, just return
if (this.Model.OptionalHeader?.ResourceTable == null
|| this.Model.OptionalHeader.ResourceTable.VirtualAddress == 0
|| this.Model.ResourceDirectoryTable == null)
if (Model.OptionalHeader?.ResourceTable == null
|| Model.OptionalHeader.ResourceTable.VirtualAddress == 0
|| Model.ResourceDirectoryTable == null)
return null;
// Otherwise, build and return the cached dictionary
ParseResourceDirectoryTable(this.Model.ResourceDirectoryTable, types: []);
ParseResourceDirectoryTable(Model.ResourceDirectoryTable, types: []);
return _resourceData;
}
}
@@ -585,15 +599,15 @@ namespace SabreTools.Serialization.Wrappers
/// <remarks>The internal version is either the file version, product version, or assembly version, in that order</remarks>
public string? GetInternalVersion()
{
string? version = this.FileVersion;
string? version = FileVersion;
if (!string.IsNullOrEmpty(version))
return version!.Replace(", ", ".");
version = this.ProductVersion;
version = ProductVersion;
if (!string.IsNullOrEmpty(version))
return version!.Replace(", ", ".");
version = this.AssemblyVersion;
version = AssemblyVersion;
if (!string.IsNullOrEmpty(version))
return version;
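The fallback order is file version, then product version, then assembly version; the first two are normalized from a comma-separated form. A one-line worked example (the version string is hypothetical):

string fileVersion = "1, 2, 3, 4";                  // hypothetical raw FileVersion
string normalized = fileVersion.Replace(", ", "."); // "1.2.3.4"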
@@ -864,7 +878,7 @@ namespace SabreTools.Serialization.Wrappers
{
// Ensure that we have the debug data cached
if (DebugData == null)
return Enumerable.Empty<object?>();
return [];
var nb10Found = DebugData.Select(r => r.Value)
.Select(r => r as SabreTools.Models.PortableExecutable.NB10ProgramDatabase)
@@ -890,7 +904,7 @@ namespace SabreTools.Serialization.Wrappers
{
// Ensure that we have the resource data cached
if (DebugData == null)
return Enumerable.Empty<byte[]?>();
return [];
return DebugData.Select(r => r.Value)
.Select(b => b as byte[])
@@ -935,25 +949,34 @@ namespace SabreTools.Serialization.Wrappers
private void ParseDebugTable()
{
// If there is no debug table
if (this.Model.DebugTable?.DebugDirectoryTable == null)
if (Model.DebugTable?.DebugDirectoryTable == null)
return;
// Loop through all debug table entries
for (int i = 0; i < this.Model.DebugTable.DebugDirectoryTable.Length; i++)
for (int i = 0; i < Model.DebugTable.DebugDirectoryTable.Length; i++)
{
var entry = this.Model.DebugTable.DebugDirectoryTable[i];
var entry = Model.DebugTable.DebugDirectoryTable[i];
if (entry == null)
continue;
uint address = entry.PointerToRawData;
uint size = entry.SizeOfData;
byte[]? entryData = ReadFromDataSource((int)address, (int)size);
if (entryData == null)
continue;
// Read the entry data, guarding against reads past the end of the stream
byte[]? entryData;
try
{
entryData = ReadFromDataSource((int)address, (int)size);
if (entryData == null || entryData.Length < 4)
continue;
}
catch (EndOfStreamException)
{
return;
}
// If we have CodeView debug data, try to parse it
if (entry.DebugType == SabreTools.Models.PortableExecutable.DebugType.IMAGE_DEBUG_TYPE_CODEVIEW)
if (entry.DebugType == Models.PortableExecutable.DebugType.IMAGE_DEBUG_TYPE_CODEVIEW)
{
// Read the signature
int offset = 0;
@@ -1004,7 +1027,7 @@ namespace SabreTools.Serialization.Wrappers
{
// Ensure that we have the resource data cached
if (ResourceData == null)
return Enumerable.Empty<Models.PortableExecutable.DialogBoxResource?>();
return [];
return ResourceData.Select(r => r.Value)
.Select(r => r as SabreTools.Models.PortableExecutable.DialogBoxResource)
@@ -1025,7 +1048,7 @@ namespace SabreTools.Serialization.Wrappers
{
// Ensure that we have the resource data cached
if (ResourceData == null)
return Enumerable.Empty<Models.PortableExecutable.DialogBoxResource?>();
return [];
return ResourceData.Select(r => r.Value)
.Select(r => r as SabreTools.Models.PortableExecutable.DialogBoxResource)
@@ -1058,7 +1081,7 @@ namespace SabreTools.Serialization.Wrappers
{
// Ensure that we have the resource data cached
if (ResourceData == null)
return Enumerable.Empty<Dictionary<int, string?>?>();
return [];
return ResourceData.Select(r => r.Value)
.Select(r => r as Dictionary<int, string?>)
@@ -1076,7 +1099,7 @@ namespace SabreTools.Serialization.Wrappers
{
// Ensure that we have the resource data cached
if (ResourceData == null)
return Enumerable.Empty<byte[]?>();
return [];
return ResourceData.Where(kvp => kvp.Key.Contains(typeName))
.Select(kvp => kvp.Value as byte[])
@@ -1092,7 +1115,7 @@ namespace SabreTools.Serialization.Wrappers
{
// Ensure that we have the resource data cached
if (ResourceData == null)
return Enumerable.Empty<byte[]?>();
return [];
return ResourceData.Select(r => r.Value)
.Select(r => r as byte[])
@@ -1148,7 +1171,7 @@ namespace SabreTools.Serialization.Wrappers
var newTypes = new List<object>(types ?? []);
if (entry.Name?.UnicodeString != null)
newTypes.Add(Encoding.UTF8.GetString(entry.Name.UnicodeString));
newTypes.Add(Encoding.Unicode.GetString(entry.Name.UnicodeString));
else
newTypes.Add(entry.IntegerID);
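The switch from Encoding.UTF8 to Encoding.Unicode matters because resource directory name strings are stored as UTF-16; decoding the same bytes as UTF-8 interleaves NUL characters. A small sketch (the name "ICON" is hypothetical):

using System;
using System.Text;

byte[] nameBytes = Encoding.Unicode.GetBytes("ICON");     // UTF-16LE bytes, as in a resource entry
string asUnicode = Encoding.Unicode.GetString(nameBytes); // "ICON"
string asUtf8 = Encoding.UTF8.GetString(nameBytes);       // "I\0C\0O\0N\0" (garbled)
Console.WriteLine(asUnicode == "ICON");                   // True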
@@ -1188,68 +1211,68 @@ namespace SabreTools.Serialization.Wrappers
{
switch ((Models.PortableExecutable.ResourceType)resourceType)
{
case SabreTools.Models.PortableExecutable.ResourceType.RT_CURSOR:
case Models.PortableExecutable.ResourceType.RT_CURSOR:
value = entry.Data;
break;
case SabreTools.Models.PortableExecutable.ResourceType.RT_BITMAP:
case Models.PortableExecutable.ResourceType.RT_BITMAP:
value = entry.Data;
break;
case SabreTools.Models.PortableExecutable.ResourceType.RT_ICON:
case Models.PortableExecutable.ResourceType.RT_ICON:
value = entry.Data;
break;
case SabreTools.Models.PortableExecutable.ResourceType.RT_MENU:
case Models.PortableExecutable.ResourceType.RT_MENU:
value = entry.AsMenu();
break;
case SabreTools.Models.PortableExecutable.ResourceType.RT_DIALOG:
case Models.PortableExecutable.ResourceType.RT_DIALOG:
value = entry.AsDialogBox();
break;
case SabreTools.Models.PortableExecutable.ResourceType.RT_STRING:
case Models.PortableExecutable.ResourceType.RT_STRING:
value = entry.AsStringTable();
break;
case SabreTools.Models.PortableExecutable.ResourceType.RT_FONTDIR:
case Models.PortableExecutable.ResourceType.RT_FONTDIR:
value = entry.Data;
break;
case SabreTools.Models.PortableExecutable.ResourceType.RT_FONT:
case Models.PortableExecutable.ResourceType.RT_FONT:
value = entry.Data;
break;
case SabreTools.Models.PortableExecutable.ResourceType.RT_ACCELERATOR:
case Models.PortableExecutable.ResourceType.RT_ACCELERATOR:
value = entry.AsAcceleratorTableResource();
break;
case SabreTools.Models.PortableExecutable.ResourceType.RT_RCDATA:
case Models.PortableExecutable.ResourceType.RT_RCDATA:
value = entry.Data;
break;
case SabreTools.Models.PortableExecutable.ResourceType.RT_MESSAGETABLE:
case Models.PortableExecutable.ResourceType.RT_MESSAGETABLE:
value = entry.AsMessageResourceData();
break;
case SabreTools.Models.PortableExecutable.ResourceType.RT_GROUP_CURSOR:
case Models.PortableExecutable.ResourceType.RT_GROUP_CURSOR:
value = entry.Data;
break;
case SabreTools.Models.PortableExecutable.ResourceType.RT_GROUP_ICON:
case Models.PortableExecutable.ResourceType.RT_GROUP_ICON:
value = entry.Data;
break;
case SabreTools.Models.PortableExecutable.ResourceType.RT_VERSION:
case Models.PortableExecutable.ResourceType.RT_VERSION:
_versionInfo = entry.AsVersionInfo();
value = _versionInfo;
break;
case SabreTools.Models.PortableExecutable.ResourceType.RT_DLGINCLUDE:
case Models.PortableExecutable.ResourceType.RT_DLGINCLUDE:
value = entry.Data;
break;
case SabreTools.Models.PortableExecutable.ResourceType.RT_PLUGPLAY:
case Models.PortableExecutable.ResourceType.RT_PLUGPLAY:
value = entry.Data;
break;
case SabreTools.Models.PortableExecutable.ResourceType.RT_VXD:
case Models.PortableExecutable.ResourceType.RT_VXD:
value = entry.Data;
break;
case SabreTools.Models.PortableExecutable.ResourceType.RT_ANICURSOR:
case Models.PortableExecutable.ResourceType.RT_ANICURSOR:
value = entry.Data;
break;
case SabreTools.Models.PortableExecutable.ResourceType.RT_ANIICON:
case Models.PortableExecutable.ResourceType.RT_ANIICON:
value = entry.Data;
break;
case SabreTools.Models.PortableExecutable.ResourceType.RT_HTML:
case Models.PortableExecutable.ResourceType.RT_HTML:
value = entry.Data;
break;
case SabreTools.Models.PortableExecutable.ResourceType.RT_MANIFEST:
case Models.PortableExecutable.ResourceType.RT_MANIFEST:
_assemblyManifest = entry.AsAssemblyManifest();
value = _assemblyManifest;
break;
@@ -1311,19 +1334,19 @@ namespace SabreTools.Serialization.Wrappers
public int FindEntryPointSectionIndex()
{
// If the section table is missing
if (this.Model.SectionTable == null)
if (Model.SectionTable == null)
return -1;
// If the address is missing
if (this.Model.OptionalHeader?.AddressOfEntryPoint == null)
if (Model.OptionalHeader?.AddressOfEntryPoint == null)
return -1;
// If we don't have an entry point
if (this.Model.OptionalHeader.AddressOfEntryPoint.ConvertVirtualAddress(this.Model.SectionTable) == 0)
if (Model.OptionalHeader.AddressOfEntryPoint.ConvertVirtualAddress(Model.SectionTable) == 0)
return -1;
// Otherwise, find the section it exists within
return this.Model.OptionalHeader.AddressOfEntryPoint.ContainingSectionIndex(this.Model.SectionTable
return Model.OptionalHeader.AddressOfEntryPoint.ContainingSectionIndex(Model.SectionTable
.Where(sh => sh != null)
.Cast<Models.PortableExecutable.SectionHeader>()
.ToArray());
@@ -1335,10 +1358,10 @@ namespace SabreTools.Serialization.Wrappers
/// <param name="name">Name of the section to check for</param>
/// <param name="exact">True to enable exact matching of names, false for starts-with</param>
/// <returns>Section data on success, null on error</returns>
public SabreTools.Models.PortableExecutable.SectionHeader? GetFirstSection(string? name, bool exact = false)
public Models.PortableExecutable.SectionHeader? GetFirstSection(string? name, bool exact = false)
{
// If we have no sections
if (SectionNames == null || !SectionNames.Any() || this.Model.SectionTable == null || !this.Model.SectionTable.Any())
if (SectionNames == null || !SectionNames.Any() || Model.SectionTable == null || !Model.SectionTable.Any())
return null;
// If the section doesn't exist
@@ -1351,7 +1374,7 @@ namespace SabreTools.Serialization.Wrappers
return null;
// Return the section
return this.Model.SectionTable[index];
return Model.SectionTable[index];
}
/// <summary>
@@ -1360,10 +1383,10 @@ namespace SabreTools.Serialization.Wrappers
/// <param name="name">Name of the section to check for</param>
/// <param name="exact">True to enable exact matching of names, false for starts-with</param>
/// <returns>Section data on success, null on error</returns>
public SabreTools.Models.PortableExecutable.SectionHeader? GetLastSection(string? name, bool exact = false)
public Models.PortableExecutable.SectionHeader? GetLastSection(string? name, bool exact = false)
{
// If we have no sections
if (SectionNames == null || !SectionNames.Any() || this.Model.SectionTable == null || !this.Model.SectionTable.Any())
if (SectionNames == null || !SectionNames.Any() || Model.SectionTable == null || !Model.SectionTable.Any())
return null;
// If the section doesn't exist
@@ -1376,7 +1399,7 @@ namespace SabreTools.Serialization.Wrappers
return null;
// Return the section
return this.Model.SectionTable[index];
return Model.SectionTable[index];
}
/// <summary>
@@ -1384,18 +1407,18 @@ namespace SabreTools.Serialization.Wrappers
/// </summary>
/// <param name="index">Index of the section to check for</param>
/// <returns>Section data on success, null on error</returns>
public SabreTools.Models.PortableExecutable.SectionHeader? GetSection(int index)
public Models.PortableExecutable.SectionHeader? GetSection(int index)
{
// If we have no sections
if (this.Model.SectionTable == null || !this.Model.SectionTable.Any())
if (Model.SectionTable == null || !Model.SectionTable.Any())
return null;
// If the section doesn't exist
if (index < 0 || index >= this.Model.SectionTable.Length)
if (index < 0 || index >= Model.SectionTable.Length)
return null;
// Return the section
return this.Model.SectionTable[index];
return Model.SectionTable[index];
}
/// <summary>
@@ -1407,7 +1430,7 @@ namespace SabreTools.Serialization.Wrappers
public byte[]? GetFirstSectionData(string? name, bool exact = false)
{
// If we have no sections
if (SectionNames == null || !SectionNames.Any() || this.Model.SectionTable == null || !this.Model.SectionTable.Any())
if (SectionNames == null || !SectionNames.Any() || Model.SectionTable == null || !Model.SectionTable.Any())
return null;
// If the section doesn't exist
@@ -1428,7 +1451,7 @@ namespace SabreTools.Serialization.Wrappers
public byte[]? GetLastSectionData(string? name, bool exact = false)
{
// If we have no sections
if (SectionNames == null || !SectionNames.Any() || this.Model.SectionTable == null || !this.Model.SectionTable.Any())
if (SectionNames == null || !SectionNames.Any() || Model.SectionTable == null || !Model.SectionTable.Any())
return null;
// If the section doesn't exist
@@ -1448,19 +1471,19 @@ namespace SabreTools.Serialization.Wrappers
public byte[]? GetSectionData(int index)
{
// If we have no sections
if (SectionNames == null || !SectionNames.Any() || this.Model.SectionTable == null || !this.Model.SectionTable.Any())
if (SectionNames == null || !SectionNames.Any() || Model.SectionTable == null || !Model.SectionTable.Any())
return null;
// If the section doesn't exist
if (index < 0 || index >= this.Model.SectionTable.Length)
if (index < 0 || index >= Model.SectionTable.Length)
return null;
// Get the section data from the table
var section = this.Model.SectionTable[index];
var section = Model.SectionTable[index];
if (section == null)
return null;
uint address = section.VirtualAddress.ConvertVirtualAddress(this.Model.SectionTable);
uint address = section.VirtualAddress.ConvertVirtualAddress(Model.SectionTable);
if (address == 0)
return null;
@@ -1493,7 +1516,7 @@ namespace SabreTools.Serialization.Wrappers
public List<string>? GetFirstSectionStrings(string? name, bool exact = false)
{
// If we have no sections
if (SectionNames == null || !SectionNames.Any() || this.Model.SectionTable == null || !this.Model.SectionTable.Any())
if (SectionNames == null || !SectionNames.Any() || Model.SectionTable == null || !Model.SectionTable.Any())
return null;
// If the section doesn't exist
@@ -1514,7 +1537,7 @@ namespace SabreTools.Serialization.Wrappers
public List<string>? GetLastSectionStrings(string? name, bool exact = false)
{
// If we have no sections
if (SectionNames == null || !SectionNames.Any() || this.Model.SectionTable == null || !this.Model.SectionTable.Any())
if (SectionNames == null || !SectionNames.Any() || Model.SectionTable == null || !Model.SectionTable.Any())
return null;
// If the section doesn't exist
@@ -1534,19 +1557,19 @@ namespace SabreTools.Serialization.Wrappers
public List<string>? GetSectionStrings(int index)
{
// If we have no sections
if (SectionNames == null || !SectionNames.Any() || this.Model.SectionTable == null || !this.Model.SectionTable.Any())
if (SectionNames == null || !SectionNames.Any() || Model.SectionTable == null || !Model.SectionTable.Any())
return null;
// If the section doesn't exist
if (index < 0 || index >= this.Model.SectionTable.Length)
if (index < 0 || index >= Model.SectionTable.Length)
return null;
// Get the section data from the table
var section = this.Model.SectionTable[index];
var section = Model.SectionTable[index];
if (section == null)
return null;
uint address = section.VirtualAddress.ConvertVirtualAddress(this.Model.SectionTable);
uint address = section.VirtualAddress.ConvertVirtualAddress(Model.SectionTable);
if (address == 0)
return null;
@@ -1582,7 +1605,7 @@ namespace SabreTools.Serialization.Wrappers
public byte[]? GetTableData(int index)
{
// If the table doesn't exist
if (this.Model.OptionalHeader == null || index < 0 || index > 16)
if (Model.OptionalHeader == null || index < 0 || index > 16)
return null;
// Get the virtual address and size from the entries
@@ -1590,64 +1613,64 @@ namespace SabreTools.Serialization.Wrappers
switch (index)
{
case 1:
virtualAddress = this.Model.OptionalHeader.ExportTable?.VirtualAddress ?? 0;
size = this.Model.OptionalHeader.ExportTable?.Size ?? 0;
virtualAddress = Model.OptionalHeader.ExportTable?.VirtualAddress ?? 0;
size = Model.OptionalHeader.ExportTable?.Size ?? 0;
break;
case 2:
virtualAddress = this.Model.OptionalHeader.ImportTable?.VirtualAddress ?? 0;
size = this.Model.OptionalHeader.ImportTable?.Size ?? 0;
virtualAddress = Model.OptionalHeader.ImportTable?.VirtualAddress ?? 0;
size = Model.OptionalHeader.ImportTable?.Size ?? 0;
break;
case 3:
virtualAddress = this.Model.OptionalHeader.ResourceTable?.VirtualAddress ?? 0;
size = this.Model.OptionalHeader.ResourceTable?.Size ?? 0;
virtualAddress = Model.OptionalHeader.ResourceTable?.VirtualAddress ?? 0;
size = Model.OptionalHeader.ResourceTable?.Size ?? 0;
break;
case 4:
virtualAddress = this.Model.OptionalHeader.ExceptionTable?.VirtualAddress ?? 0;
size = this.Model.OptionalHeader.ExceptionTable?.Size ?? 0;
virtualAddress = Model.OptionalHeader.ExceptionTable?.VirtualAddress ?? 0;
size = Model.OptionalHeader.ExceptionTable?.Size ?? 0;
break;
case 5:
virtualAddress = this.Model.OptionalHeader.CertificateTable?.VirtualAddress ?? 0;
size = this.Model.OptionalHeader.CertificateTable?.Size ?? 0;
virtualAddress = Model.OptionalHeader.CertificateTable?.VirtualAddress ?? 0;
size = Model.OptionalHeader.CertificateTable?.Size ?? 0;
break;
case 6:
virtualAddress = this.Model.OptionalHeader.BaseRelocationTable?.VirtualAddress ?? 0;
size = this.Model.OptionalHeader.BaseRelocationTable?.Size ?? 0;
virtualAddress = Model.OptionalHeader.BaseRelocationTable?.VirtualAddress ?? 0;
size = Model.OptionalHeader.BaseRelocationTable?.Size ?? 0;
break;
case 7:
virtualAddress = this.Model.OptionalHeader.Debug?.VirtualAddress ?? 0;
size = this.Model.OptionalHeader.Debug?.Size ?? 0;
virtualAddress = Model.OptionalHeader.Debug?.VirtualAddress ?? 0;
size = Model.OptionalHeader.Debug?.Size ?? 0;
break;
case 8: // Architecture Table
virtualAddress = 0;
size = 0;
break;
case 9:
virtualAddress = this.Model.OptionalHeader.GlobalPtr?.VirtualAddress ?? 0;
size = this.Model.OptionalHeader.GlobalPtr?.Size ?? 0;
virtualAddress = Model.OptionalHeader.GlobalPtr?.VirtualAddress ?? 0;
size = Model.OptionalHeader.GlobalPtr?.Size ?? 0;
break;
case 10:
virtualAddress = this.Model.OptionalHeader.ThreadLocalStorageTable?.VirtualAddress ?? 0;
size = this.Model.OptionalHeader.ThreadLocalStorageTable?.Size ?? 0;
virtualAddress = Model.OptionalHeader.ThreadLocalStorageTable?.VirtualAddress ?? 0;
size = Model.OptionalHeader.ThreadLocalStorageTable?.Size ?? 0;
break;
case 11:
virtualAddress = this.Model.OptionalHeader.LoadConfigTable?.VirtualAddress ?? 0;
size = this.Model.OptionalHeader.LoadConfigTable?.Size ?? 0;
virtualAddress = Model.OptionalHeader.LoadConfigTable?.VirtualAddress ?? 0;
size = Model.OptionalHeader.LoadConfigTable?.Size ?? 0;
break;
case 12:
virtualAddress = this.Model.OptionalHeader.BoundImport?.VirtualAddress ?? 0;
size = this.Model.OptionalHeader.BoundImport?.Size ?? 0;
virtualAddress = Model.OptionalHeader.BoundImport?.VirtualAddress ?? 0;
size = Model.OptionalHeader.BoundImport?.Size ?? 0;
break;
case 13:
virtualAddress = this.Model.OptionalHeader.ImportAddressTable?.VirtualAddress ?? 0;
size = this.Model.OptionalHeader.ImportAddressTable?.Size ?? 0;
virtualAddress = Model.OptionalHeader.ImportAddressTable?.VirtualAddress ?? 0;
size = Model.OptionalHeader.ImportAddressTable?.Size ?? 0;
break;
case 14:
virtualAddress = this.Model.OptionalHeader.DelayImportDescriptor?.VirtualAddress ?? 0;
size = this.Model.OptionalHeader.DelayImportDescriptor?.Size ?? 0;
virtualAddress = Model.OptionalHeader.DelayImportDescriptor?.VirtualAddress ?? 0;
size = Model.OptionalHeader.DelayImportDescriptor?.Size ?? 0;
break;
case 15:
virtualAddress = this.Model.OptionalHeader.CLRRuntimeHeader?.VirtualAddress ?? 0;
size = this.Model.OptionalHeader.CLRRuntimeHeader?.Size ?? 0;
virtualAddress = Model.OptionalHeader.CLRRuntimeHeader?.VirtualAddress ?? 0;
size = Model.OptionalHeader.CLRRuntimeHeader?.Size ?? 0;
break;
case 16: // Reserved
virtualAddress = 0;
@@ -1656,11 +1679,11 @@ namespace SabreTools.Serialization.Wrappers
}
// If there is no section table
if (this.Model.SectionTable == null)
if (Model.SectionTable == null)
return null;
// Get the physical address from the virtual one
uint address = virtualAddress.ConvertVirtualAddress(this.Model.SectionTable);
uint address = virtualAddress.ConvertVirtualAddress(Model.SectionTable);
if (address == 0 || size == 0)
return null;
@@ -1690,7 +1713,7 @@ namespace SabreTools.Serialization.Wrappers
public List<string>? GetTableStrings(int index)
{
// If the table doesn't exist
if (this.Model.OptionalHeader == null || index < 0 || index > 16)
if (Model.OptionalHeader == null || index < 0 || index > 16)
return null;
// Get the virtual address and size from the entries
@@ -1698,64 +1721,64 @@ namespace SabreTools.Serialization.Wrappers
switch (index)
{
case 1:
virtualAddress = this.Model.OptionalHeader.ExportTable?.VirtualAddress ?? 0;
size = this.Model.OptionalHeader.ExportTable?.Size ?? 0;
virtualAddress = Model.OptionalHeader.ExportTable?.VirtualAddress ?? 0;
size = Model.OptionalHeader.ExportTable?.Size ?? 0;
break;
case 2:
virtualAddress = this.Model.OptionalHeader.ImportTable?.VirtualAddress ?? 0;
size = this.Model.OptionalHeader.ImportTable?.Size ?? 0;
virtualAddress = Model.OptionalHeader.ImportTable?.VirtualAddress ?? 0;
size = Model.OptionalHeader.ImportTable?.Size ?? 0;
break;
case 3:
virtualAddress = this.Model.OptionalHeader.ResourceTable?.VirtualAddress ?? 0;
size = this.Model.OptionalHeader.ResourceTable?.Size ?? 0;
virtualAddress = Model.OptionalHeader.ResourceTable?.VirtualAddress ?? 0;
size = Model.OptionalHeader.ResourceTable?.Size ?? 0;
break;
case 4:
virtualAddress = this.Model.OptionalHeader.ExceptionTable?.VirtualAddress ?? 0;
size = this.Model.OptionalHeader.ExceptionTable?.Size ?? 0;
virtualAddress = Model.OptionalHeader.ExceptionTable?.VirtualAddress ?? 0;
size = Model.OptionalHeader.ExceptionTable?.Size ?? 0;
break;
case 5:
virtualAddress = this.Model.OptionalHeader.CertificateTable?.VirtualAddress ?? 0;
size = this.Model.OptionalHeader.CertificateTable?.Size ?? 0;
virtualAddress = Model.OptionalHeader.CertificateTable?.VirtualAddress ?? 0;
size = Model.OptionalHeader.CertificateTable?.Size ?? 0;
break;
case 6:
virtualAddress = this.Model.OptionalHeader.BaseRelocationTable?.VirtualAddress ?? 0;
size = this.Model.OptionalHeader.BaseRelocationTable?.Size ?? 0;
virtualAddress = Model.OptionalHeader.BaseRelocationTable?.VirtualAddress ?? 0;
size = Model.OptionalHeader.BaseRelocationTable?.Size ?? 0;
break;
case 7:
virtualAddress = this.Model.OptionalHeader.Debug?.VirtualAddress ?? 0;
size = this.Model.OptionalHeader.Debug?.Size ?? 0;
virtualAddress = Model.OptionalHeader.Debug?.VirtualAddress ?? 0;
size = Model.OptionalHeader.Debug?.Size ?? 0;
break;
case 8: // Architecture Table
virtualAddress = 0;
size = 0;
break;
case 9:
virtualAddress = this.Model.OptionalHeader.GlobalPtr?.VirtualAddress ?? 0;
size = this.Model.OptionalHeader.GlobalPtr?.Size ?? 0;
virtualAddress = Model.OptionalHeader.GlobalPtr?.VirtualAddress ?? 0;
size = Model.OptionalHeader.GlobalPtr?.Size ?? 0;
break;
case 10:
virtualAddress = this.Model.OptionalHeader.ThreadLocalStorageTable?.VirtualAddress ?? 0;
size = this.Model.OptionalHeader.ThreadLocalStorageTable?.Size ?? 0;
virtualAddress = Model.OptionalHeader.ThreadLocalStorageTable?.VirtualAddress ?? 0;
size = Model.OptionalHeader.ThreadLocalStorageTable?.Size ?? 0;
break;
case 11:
virtualAddress = this.Model.OptionalHeader.LoadConfigTable?.VirtualAddress ?? 0;
size = this.Model.OptionalHeader.LoadConfigTable?.Size ?? 0;
virtualAddress = Model.OptionalHeader.LoadConfigTable?.VirtualAddress ?? 0;
size = Model.OptionalHeader.LoadConfigTable?.Size ?? 0;
break;
case 12:
virtualAddress = this.Model.OptionalHeader.BoundImport?.VirtualAddress ?? 0;
size = this.Model.OptionalHeader.BoundImport?.Size ?? 0;
virtualAddress = Model.OptionalHeader.BoundImport?.VirtualAddress ?? 0;
size = Model.OptionalHeader.BoundImport?.Size ?? 0;
break;
case 13:
virtualAddress = this.Model.OptionalHeader.ImportAddressTable?.VirtualAddress ?? 0;
size = this.Model.OptionalHeader.ImportAddressTable?.Size ?? 0;
virtualAddress = Model.OptionalHeader.ImportAddressTable?.VirtualAddress ?? 0;
size = Model.OptionalHeader.ImportAddressTable?.Size ?? 0;
break;
case 14:
virtualAddress = this.Model.OptionalHeader.DelayImportDescriptor?.VirtualAddress ?? 0;
size = this.Model.OptionalHeader.DelayImportDescriptor?.Size ?? 0;
virtualAddress = Model.OptionalHeader.DelayImportDescriptor?.VirtualAddress ?? 0;
size = Model.OptionalHeader.DelayImportDescriptor?.Size ?? 0;
break;
case 15:
virtualAddress = this.Model.OptionalHeader.CLRRuntimeHeader?.VirtualAddress ?? 0;
size = this.Model.OptionalHeader.CLRRuntimeHeader?.Size ?? 0;
virtualAddress = Model.OptionalHeader.CLRRuntimeHeader?.VirtualAddress ?? 0;
size = Model.OptionalHeader.CLRRuntimeHeader?.Size ?? 0;
break;
case 16: // Reserved
virtualAddress = 0;
@@ -1764,11 +1787,11 @@ namespace SabreTools.Serialization.Wrappers
}
// If there is no section table
if (this.Model.SectionTable == null)
if (Model.SectionTable == null)
return null;
// Get the physical address from the virtual one
uint address = virtualAddress.ConvertVirtualAddress(this.Model.SectionTable);
uint address = virtualAddress.ConvertVirtualAddress(Model.SectionTable);
if (address == 0 || size == 0)
return null;
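A usage sketch for the data-directory helpers; `pe` stands in for an instance of this wrapper and the variable names are illustrative. Indices follow the switch above (3 = resource table, 5 = certificate table, 7 = debug).

using System;

byte[]? resources = pe.GetTableData(3);    // resource table bytes, or null
byte[]? certificates = pe.GetTableData(5); // certificate table bytes, or null
var debugStrings = pe.GetTableStrings(7);  // strings found in the debug directory
Console.WriteLine($"resources={resources?.Length ?? 0} bytes, certificates={certificates?.Length ?? 0} bytes");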

View File

@@ -43,9 +43,9 @@ namespace SabreTools.Serialization.Wrappers
return null;
// Get the archive count
int archiveCount = this.Model.DirectoryItems == null
int archiveCount = Model.DirectoryItems == null
? 0
: this.Model.DirectoryItems
: Model.DirectoryItems
.Select(di => di?.DirectoryEntry)
.Select(de => de?.ArchiveIndex ?? 0)
.Where(ai => ai != HL_VPK_NO_ARCHIVE)

View File

@@ -29,6 +29,11 @@ namespace SabreTools.Serialization.Wrappers
/// </summary>
protected DataSource _dataSource = DataSource.UNKNOWN;
/// <summary>
/// Lock object for reading from the source
/// </summary>
private readonly object _streamDataLock = new();
/// <summary>
/// Source byte array data
/// </summary>
@@ -86,7 +91,7 @@ namespace SabreTools.Serialization.Wrappers
if (offset < 0 || offset >= data.Length)
throw new ArgumentOutOfRangeException(nameof(offset));
this.Model = model;
Model = model;
_dataSource = DataSource.ByteArray;
_byteArrayData = data;
_byteArrayOffset = offset;
@@ -104,7 +109,7 @@ namespace SabreTools.Serialization.Wrappers
if (data.Length == 0 || !data.CanSeek || !data.CanRead)
throw new ArgumentOutOfRangeException(nameof(data));
this.Model = model;
Model = model;
_dataSource = DataSource.Stream;
_streamData = data;
}
@@ -193,11 +198,14 @@ namespace SabreTools.Serialization.Wrappers
break;
case DataSource.Stream:
long currentLocation = _streamData!.Position;
_streamData.Seek(position, SeekOrigin.Begin);
sectionData = _streamData.ReadBytes(length);
_streamData.Seek(currentLocation, SeekOrigin.Begin);
break;
lock (_streamDataLock)
{
long currentLocation = _streamData!.Position;
_streamData.Seek(position, SeekOrigin.Begin);
sectionData = _streamData.ReadBytes(length);
_streamData.Seek(currentLocation, SeekOrigin.Begin);
break;
}
}
return sectionData;
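The lock serializes the seek/read/seek-back sequence, so concurrent reads through the same wrapper cannot interleave and leave the stream at an unexpected position. A minimal sketch of the same pattern, independent of this class (names are illustrative):

using System.IO;

object gate = new object();

byte[] ReadAt(Stream source, long position, int length)
{
    lock (gate)
    {
        long saved = source.Position;
        source.Seek(position, SeekOrigin.Begin);
        byte[] buffer = new byte[length];
        int read = source.Read(buffer, 0, length);
        source.Seek(saved, SeekOrigin.Begin);
        return buffer[..read]; // trim to what was actually read
    }
}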

View File

@@ -21,6 +21,7 @@ namespace SabreTools.Serialization.Wrappers
case WrapperType.BSP: return BSP.Create(data);
case WrapperType.BZip2: return null; // TODO: Implement wrapper
case WrapperType.CFB: return CFB.Create(data);
case WrapperType.CHD: return CHD.Create(data);
case WrapperType.CIA: return CIA.Create(data);
case WrapperType.Executable: return CreateExecutableWrapper(data);
case WrapperType.GCF: return GCF.Create(data);
@@ -38,7 +39,7 @@ namespace SabreTools.Serialization.Wrappers
case WrapperType.PAK: return PAK.Create(data);
case WrapperType.PFF: return PFF.Create(data);
case WrapperType.PIC: return PIC.Create(data);
case WrapperType.PKZIP: return null; // TODO: Implement wrapper
case WrapperType.PKZIP: return PKZIP.Create(data);
case WrapperType.PlayJAudioFile: return PlayJAudioFile.Create(data);
case WrapperType.PlayJPlaylist: return PlayJPlaylist.Create(data);
case WrapperType.Quantum: return Quantum.Create(data);
@@ -116,15 +117,18 @@ namespace SabreTools.Serialization.Wrappers
}
/// <summary>
/// Get the supported file type for a magic string
/// Get the supported file type for a magic string and an extension
/// </summary>
/// <remarks>Recommend sending in 16 bytes to check</remarks>
public static WrapperType GetFileType(byte[] magic)
public static WrapperType GetFileType(byte[]? magic, string? extension)
{
// If we have an invalid magic byte array
if (magic == null || magic.Length == 0)
// If we have an invalid magic byte array and extension
if (magic == null || magic.Length == 0 || extension == null)
return WrapperType.UNKNOWN;
// Normalize the extension
extension = extension.TrimStart('.').Trim();
// TODO: For all modelled types, use the constants instead of hardcoded values here
#region AACSMediaKeyBlock
@@ -136,6 +140,15 @@ namespace SabreTools.Serialization.Wrappers
if (magic.StartsWith(new byte?[] { 0x10, 0x00, 0x00, 0x0C }))
return WrapperType.AACSMediaKeyBlock;
// Shares an extension with INF setup information so it can't be used accurately
// Blu-ray
// if (extension.Equals("inf", StringComparison.OrdinalIgnoreCase))
// return WrapperType.AACSMediaKeyBlock;
// HD-DVD
if (extension.Equals("aacs", StringComparison.OrdinalIgnoreCase))
return WrapperType.AACSMediaKeyBlock;
#endregion
#region BDPlusSVM
@@ -143,6 +156,9 @@ namespace SabreTools.Serialization.Wrappers
if (magic.StartsWith(new byte?[] { 0x42, 0x44, 0x53, 0x56, 0x4D, 0x5F, 0x43, 0x43 }))
return WrapperType.BDPlusSVM;
if (extension.Equals("svm", StringComparison.OrdinalIgnoreCase))
return WrapperType.BDPlusSVM;
#endregion
#region BFPK
@@ -154,7 +170,14 @@ namespace SabreTools.Serialization.Wrappers
#region BSP
if (magic.StartsWith(new byte?[] { 0x1e, 0x00, 0x00, 0x00 }))
// Shares a first 4 bytes with some .mc files
// Shares an extension with VBSP
if (magic.StartsWith(new byte?[] { 0x1d, 0x00, 0x00, 0x00 }) && extension.Equals("bsp", StringComparison.OrdinalIgnoreCase))
return WrapperType.BSP;
// Shares a first 4 bytes with some .mc files
// Shares an extension with VBSP
if (magic.StartsWith(new byte?[] { 0x1e, 0x00, 0x00, 0x00 }) && extension.Equals("bsp", StringComparison.OrdinalIgnoreCase))
return WrapperType.BSP;
#endregion
@@ -164,6 +187,9 @@ namespace SabreTools.Serialization.Wrappers
if (magic.StartsWith(new byte?[] { 0x42, 0x52, 0x68 }))
return WrapperType.BZip2;
if (extension.Equals("bz2", StringComparison.OrdinalIgnoreCase))
return WrapperType.BZip2;
#endregion
#region CFB
@@ -171,11 +197,39 @@ namespace SabreTools.Serialization.Wrappers
if (magic.StartsWith(new byte?[] { 0xD0, 0xCF, 0x11, 0xE0, 0xA1, 0xB1, 0x1A, 0xE1 }))
return WrapperType.CFB;
// Installer package
if (extension.Equals("msi", StringComparison.OrdinalIgnoreCase))
return WrapperType.CFB;
// Merge module
else if (extension.Equals("msm", StringComparison.OrdinalIgnoreCase))
return WrapperType.CFB;
// Patch Package
else if (extension.Equals("msp", StringComparison.OrdinalIgnoreCase))
return WrapperType.CFB;
// Transform
else if (extension.Equals("mst", StringComparison.OrdinalIgnoreCase))
return WrapperType.CFB;
// Patch Creation Properties
else if (extension.Equals("pcp", StringComparison.OrdinalIgnoreCase))
return WrapperType.CFB;
#endregion
#region CHD
if (magic.StartsWith(new byte?[] { 0x4D, 0x43, 0x6F, 0x6D, 0x70, 0x72, 0x48, 0x44 }))
return WrapperType.CHD;
#endregion
#region CIA
// No magic checks for CIA
if (extension.Equals("cia", StringComparison.OrdinalIgnoreCase))
return WrapperType.CIA;
#endregion
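With the combined signature, a caller supplies both a small magic buffer and the file extension. A hedged sketch follows (the containing class is assumed to be the wrapper factory that defines GetFileType; the path is a placeholder, and the 16-byte buffer follows the recommendation above):

using System.IO;
using SabreTools.Serialization.Wrappers;

string path = "sample.zip";                 // placeholder path
byte[] magic = new byte[16];
using (var fs = File.OpenRead(path))
    fs.Read(magic, 0, magic.Length);

string extension = Path.GetExtension(path); // ".zip"; the method trims the leading dot
WrapperType type = WrapperFactory.GetFileType(magic, extension); // factory class name assumed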
@@ -213,369 +267,6 @@ namespace SabreTools.Serialization.Wrappers
return FileTypes.Executable;
*/
#endregion
#region GCF
if (magic.StartsWith(new byte?[] { 0x01, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00 }))
return WrapperType.GCF;
#endregion
#region GZIP
if (magic.StartsWith(new byte?[] { 0x1f, 0x8b }))
return WrapperType.GZIP;
#endregion
#region IniFile
// No magic checks for IniFile
#endregion
#region InstallShieldArchiveV3
if (magic.StartsWith(new byte?[] { 0x13, 0x5D, 0x65, 0x8C }))
return WrapperType.InstallShieldArchiveV3;
#endregion
#region InstallShieldCAB
if (magic.StartsWith(new byte?[] { 0x49, 0x53, 0x63 }))
return WrapperType.InstallShieldCAB;
#endregion
#region LDSCRYPT
if (magic.StartsWith(new byte?[] { 0x4C, 0x44, 0x53, 0x43, 0x52, 0x59, 0x50, 0x54 }))
return WrapperType.LDSCRYPT;
#endregion
#region MicrosoftCAB
if (magic.StartsWith(new byte?[] { 0x4d, 0x53, 0x43, 0x46 }))
return WrapperType.MicrosoftCAB;
#endregion
#region MicrosoftLZ
if (magic.StartsWith(new byte?[] { 0x53, 0x5a, 0x44, 0x44, 0x88, 0xf0, 0x27, 0x33 }))
return WrapperType.MicrosoftLZ;
#endregion
#region MPQ
if (magic.StartsWith(new byte?[] { 0x4d, 0x50, 0x51, 0x1a }))
return WrapperType.MoPaQ;
if (magic.StartsWith(new byte?[] { 0x4d, 0x50, 0x51, 0x1b }))
return WrapperType.MoPaQ;
#endregion
#region N3DS
// No magic checks for N3DS
#endregion
#region NCF
if (magic.StartsWith(new byte?[] { 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00 }))
return WrapperType.NCF;
#endregion
#region Nitro
// No magic checks for Nitro
#endregion
#region PAK
if (magic.StartsWith(new byte?[] { 0x50, 0x41, 0x43, 0x4B }))
return WrapperType.PAK;
#endregion
#region PFF
// Version 2
if (magic.StartsWith(new byte?[] { 0x14, 0x00, 0x00, 0x00, 0x50, 0x46, 0x46, 0x32 }))
return WrapperType.PFF;
// Version 3
if (magic.StartsWith(new byte?[] { 0x14, 0x00, 0x00, 0x00, 0x50, 0x46, 0x46, 0x33 }))
return WrapperType.PFF;
// Version 4
if (magic.StartsWith(new byte?[] { 0x14, 0x00, 0x00, 0x00, 0x50, 0x46, 0x46, 0x34 }))
return WrapperType.PFF;
#endregion
#region PKZIP
// PKZIP (Unknown)
if (magic.StartsWith(new byte?[] { 0x50, 0x4b, 0x00, 0x00 }))
return WrapperType.PKZIP;
// PKZIP
if (magic.StartsWith(new byte?[] { 0x50, 0x4b, 0x03, 0x04 }))
return WrapperType.PKZIP;
// PKZIP (Empty Archive)
if (magic.StartsWith(new byte?[] { 0x50, 0x4b, 0x05, 0x06 }))
return WrapperType.PKZIP;
// PKZIP (Spanned Archive)
if (magic.StartsWith(new byte?[] { 0x50, 0x4b, 0x07, 0x08 }))
return WrapperType.PKZIP;
#endregion
#region PLJ
// https://www.iana.org/assignments/media-types/audio/vnd.everad.plj
if (magic.StartsWith(new byte?[] { 0xFF, 0x9D, 0x53, 0x4B }))
return WrapperType.PlayJAudioFile;
#endregion
#region Quantum
if (magic.StartsWith(new byte?[] { 0x44, 0x53 }))
return WrapperType.Quantum;
#endregion
#region RAR
// RAR archive version 1.50 onwards
if (magic.StartsWith(new byte?[] { 0x52, 0x61, 0x72, 0x21, 0x1a, 0x07, 0x00 }))
return WrapperType.RAR;
// RAR archive version 5.0 onwards
if (magic.StartsWith(new byte?[] { 0x52, 0x61, 0x72, 0x21, 0x1a, 0x07, 0x01, 0x00 }))
return WrapperType.RAR;
#endregion
#region RealArcade
// RASGI2.0
// Found in the ".rgs" files in IA item "Nova_RealArcadeCD_USA".
if (magic.StartsWith(new byte?[] { 0x52, 0x41, 0x53, 0x47, 0x49, 0x32, 0x2E, 0x30 }))
return WrapperType.RealArcadeInstaller;
// XZip2.0
// Found in the ".mez" files in IA item "Nova_RealArcadeCD_USA".
if (magic.StartsWith(new byte?[] { 0x58, 0x5A, 0x69, 0x70, 0x32, 0x2E, 0x30 }))
return WrapperType.RealArcadeMezzanine;
#endregion
#region SevenZip
if (magic.StartsWith(new byte?[] { 0x37, 0x7a, 0xbc, 0xaf, 0x27, 0x1c }))
return WrapperType.SevenZip;
#endregion
#region SFFS
// Found in Redump entry 81756, confirmed to be "StarForce Filesystem" by PiD.
if (magic.StartsWith(new byte?[] { 0x53, 0x46, 0x46, 0x53 }))
return WrapperType.SFFS;
#endregion
#region SGA
if (magic.StartsWith(new byte?[] { 0x5F, 0x41, 0x52, 0x43, 0x48, 0x49, 0x56, 0x45 }))
return WrapperType.SGA;
#endregion
#region TapeArchive
if (magic.StartsWith(new byte?[] { 0x75, 0x73, 0x74, 0x61, 0x72, 0x00, 0x30, 0x30 }))
return WrapperType.TapeArchive;
if (magic.StartsWith(new byte?[] { 0x75, 0x73, 0x74, 0x61, 0x72, 0x20, 0x20, 0x00 }))
return WrapperType.TapeArchive;
#endregion
#region Textfile
// Not all textfiles can be determined through magic number
// HTML
if (magic.StartsWith(new byte?[] { 0x3c, 0x68, 0x74, 0x6d, 0x6c }))
return WrapperType.Textfile;
// HTML and XML
if (magic.StartsWith(new byte?[] { 0x3c, 0x21, 0x44, 0x4f, 0x43, 0x54, 0x59, 0x50, 0x45 }))
return WrapperType.Textfile;
// InstallShield Compiled Rules
if (magic.StartsWith(new byte?[] { 0x61, 0x4C, 0x75, 0x5A }))
return WrapperType.Textfile;
// Microsoft Office File (old)
if (magic.StartsWith(new byte?[] { 0xd0, 0xcf, 0x11, 0xe0, 0xa1, 0xb1, 0x1a, 0xe1 }))
return WrapperType.Textfile;
// Rich Text File
if (magic.StartsWith(new byte?[] { 0x7b, 0x5c, 0x72, 0x74, 0x66, 0x31 }))
return WrapperType.Textfile;
// Windows Help File
if (magic.StartsWith(new byte?[] { 0x3F, 0x5F, 0x03, 0x00 }))
return WrapperType.Textfile;
// XML
// "<?xml"
if (magic.StartsWith(new byte?[] { 0x3C, 0x3F, 0x78, 0x6D, 0x6C }))
return WrapperType.Textfile;
#endregion
#region VBSP
if (magic.StartsWith(new byte?[] { 0x56, 0x42, 0x53, 0x50 }))
return WrapperType.VBSP;
#endregion
#region VPK
if (magic.StartsWith(new byte?[] { 0x34, 0x12, 0xaa, 0x55 }))
return WrapperType.VPK;
#endregion
#region WAD
if (magic.StartsWith(new byte?[] { 0x57, 0x41, 0x44, 0x33 }))
return WrapperType.WAD;
#endregion
#region XZ
if (magic.StartsWith(new byte?[] { 0xfd, 0x37, 0x7a, 0x58, 0x5a, 0x00 }))
return WrapperType.XZ;
#endregion
#region XZP
if (magic.StartsWith(new byte?[] { 0x70, 0x69, 0x5A, 0x78 }))
return WrapperType.XZP;
#endregion
// We couldn't find a supported match
return WrapperType.UNKNOWN;
}
/// <summary>
/// Get the supported file type for an extension
/// </summary>
/// <remarks>This is less accurate than a magic string match</remarks>
public static WrapperType GetFileType(string extension)
{
// If we have an invalid extension
if (string.IsNullOrEmpty(extension))
return WrapperType.UNKNOWN;
// Normalize the extension
extension = extension.TrimStart('.').Trim();
#region AACSMediaKeyBlock
// Shares an extension with INF setup information so it can't be used accurately
// Blu-ray
// if (extension.Equals("inf", StringComparison.OrdinalIgnoreCase))
// return WrapperType.AACSMediaKeyBlock;
// HD-DVD
if (extension.Equals("aacs", StringComparison.OrdinalIgnoreCase))
return WrapperType.AACSMediaKeyBlock;
#endregion
#region BDPlusSVM
if (extension.Equals("svm", StringComparison.OrdinalIgnoreCase))
return WrapperType.BDPlusSVM;
#endregion
#region BFPK
// No extensions registered for BFPK
#endregion
#region BSP
// Shares an extension with VBSP so it can't be used accurately
// if (extension.Equals("bsp", StringComparison.OrdinalIgnoreCase))
// return WrapperType.BSP;
#endregion
#region BZip2
if (extension.Equals("bz2", StringComparison.OrdinalIgnoreCase))
return WrapperType.BZip2;
#endregion
#region CFB
// Installer package
if (extension.Equals("msi", StringComparison.OrdinalIgnoreCase))
return WrapperType.CFB;
// Merge module
else if (extension.Equals("msm", StringComparison.OrdinalIgnoreCase))
return WrapperType.CFB;
// Patch Package
else if (extension.Equals("msp", StringComparison.OrdinalIgnoreCase))
return WrapperType.CFB;
// Transform
else if (extension.Equals("mst", StringComparison.OrdinalIgnoreCase))
return WrapperType.CFB;
// Patch Creation Properties
else if (extension.Equals("pcp", StringComparison.OrdinalIgnoreCase))
return WrapperType.CFB;
#endregion
#region CIA
if (extension.Equals("cia", StringComparison.OrdinalIgnoreCase))
return WrapperType.CIA;
#endregion
#region Executable
// DOS MZ executable file format (and descendants)
if (extension.Equals("exe", StringComparison.OrdinalIgnoreCase))
return WrapperType.Executable;
@@ -588,6 +279,9 @@ namespace SabreTools.Serialization.Wrappers
#region GCF
if (magic.StartsWith(new byte?[] { 0x01, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00 }))
return WrapperType.GCF;
if (extension.Equals("gcf", StringComparison.OrdinalIgnoreCase))
return WrapperType.GCF;
@@ -595,6 +289,9 @@ namespace SabreTools.Serialization.Wrappers
#region GZIP
if (magic.StartsWith(new byte?[] { 0x1f, 0x8b }))
return WrapperType.GZIP;
if (extension.Equals("gz", StringComparison.OrdinalIgnoreCase))
return WrapperType.GZIP;
@@ -609,6 +306,9 @@ namespace SabreTools.Serialization.Wrappers
#region InstallShieldArchiveV3
if (magic.StartsWith(new byte?[] { 0x13, 0x5D, 0x65, 0x8C }))
return WrapperType.InstallShieldArchiveV3;
if (extension.Equals("z", StringComparison.OrdinalIgnoreCase))
return WrapperType.InstallShieldArchiveV3;
@@ -616,19 +316,43 @@ namespace SabreTools.Serialization.Wrappers
#region InstallShieldCAB
// No extensions registered for InstallShieldCAB
if (magic.StartsWith(new byte?[] { 0x49, 0x53, 0x63 }))
return WrapperType.InstallShieldCAB;
// Both InstallShieldCAB and MicrosoftCAB share the same extension
#endregion
#region LDSCRYPT
if (magic.StartsWith(new byte?[] { 0x4C, 0x44, 0x53, 0x43, 0x52, 0x59, 0x50, 0x54 }))
return WrapperType.LDSCRYPT;
#endregion
#region MicrosoftCAB
// No extensions registered for MicrosoftCAB
if (magic.StartsWith(new byte?[] { 0x4d, 0x53, 0x43, 0x46 }))
return WrapperType.MicrosoftCAB;
// Both InstallShieldCAB and MicrosoftCAB share the same extension
#endregion
#region MPQ
#region MicrosoftLZ
if (magic.StartsWith(new byte?[] { 0x53, 0x5a, 0x44, 0x44, 0x88, 0xf0, 0x27, 0x33 }))
return WrapperType.MicrosoftLZ;
#endregion
#region MoPaQ
if (magic.StartsWith(new byte?[] { 0x4d, 0x50, 0x51, 0x1a }))
return WrapperType.MoPaQ;
if (magic.StartsWith(new byte?[] { 0x4d, 0x50, 0x51, 0x1b }))
return WrapperType.MoPaQ;
if (extension.Equals("mpq", StringComparison.OrdinalIgnoreCase))
return WrapperType.MoPaQ;
@@ -649,6 +373,9 @@ namespace SabreTools.Serialization.Wrappers
#region NCF
if (magic.StartsWith(new byte?[] { 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00 }))
return WrapperType.NCF;
if (extension.Equals("ncf", StringComparison.OrdinalIgnoreCase))
return WrapperType.NCF;
@@ -676,7 +403,9 @@ namespace SabreTools.Serialization.Wrappers
#region PAK
// No extensions registered for PAK
if (magic.StartsWith(new byte?[] { 0x50, 0x41, 0x43, 0x4B }))
return WrapperType.PAK;
// Both PAK and Quantum share one extension
// if (extension.Equals("pak", StringComparison.OrdinalIgnoreCase))
// return WrapperType.PAK;
@@ -685,6 +414,18 @@ namespace SabreTools.Serialization.Wrappers
#region PFF
// Version 2
if (magic.StartsWith(new byte?[] { 0x14, 0x00, 0x00, 0x00, 0x50, 0x46, 0x46, 0x32 }))
return WrapperType.PFF;
// Version 3
if (magic.StartsWith(new byte?[] { 0x14, 0x00, 0x00, 0x00, 0x50, 0x46, 0x46, 0x33 }))
return WrapperType.PFF;
// Version 4
if (magic.StartsWith(new byte?[] { 0x14, 0x00, 0x00, 0x00, 0x50, 0x46, 0x46, 0x34 }))
return WrapperType.PFF;
if (extension.Equals("pff", StringComparison.OrdinalIgnoreCase))
return WrapperType.PFF;
@@ -692,6 +433,22 @@ namespace SabreTools.Serialization.Wrappers
#region PKZIP
// PKZIP (Unknown)
if (magic.StartsWith(new byte?[] { 0x50, 0x4b, 0x00, 0x00 }))
return WrapperType.PKZIP;
// PKZIP
if (magic.StartsWith(new byte?[] { 0x50, 0x4b, 0x03, 0x04 }))
return WrapperType.PKZIP;
// PKZIP (Empty Archive)
if (magic.StartsWith(new byte?[] { 0x50, 0x4b, 0x05, 0x06 }))
return WrapperType.PKZIP;
// PKZIP (Spanned Archive)
if (magic.StartsWith(new byte?[] { 0x50, 0x4b, 0x07, 0x08 }))
return WrapperType.PKZIP;
// PKZIP
if (extension.Equals("zip", StringComparison.OrdinalIgnoreCase))
return WrapperType.PKZIP;
@@ -780,6 +537,10 @@ namespace SabreTools.Serialization.Wrappers
#region PLJ
// https://www.iana.org/assignments/media-types/audio/vnd.everad.plj
if (magic.StartsWith(new byte?[] { 0xFF, 0x9D, 0x53, 0x4B }))
return WrapperType.PlayJAudioFile;
// https://www.iana.org/assignments/media-types/audio/vnd.everad.plj
if (extension.Equals("plj", StringComparison.OrdinalIgnoreCase))
return WrapperType.PlayJAudioFile;
@@ -788,6 +549,9 @@ namespace SabreTools.Serialization.Wrappers
#region Quantum
if (magic.StartsWith(new byte?[] { 0x44, 0x53 }))
return WrapperType.Quantum;
if (extension.Equals("q", StringComparison.OrdinalIgnoreCase))
return WrapperType.Quantum;
@@ -799,20 +563,56 @@ namespace SabreTools.Serialization.Wrappers
#region RAR
// RAR archive version 1.50 onwards
if (magic.StartsWith(new byte?[] { 0x52, 0x61, 0x72, 0x21, 0x1a, 0x07, 0x00 }))
return WrapperType.RAR;
// RAR archive version 5.0 onwards
if (magic.StartsWith(new byte?[] { 0x52, 0x61, 0x72, 0x21, 0x1a, 0x07, 0x01, 0x00 }))
return WrapperType.RAR;
if (extension.Equals("rar", StringComparison.OrdinalIgnoreCase))
return WrapperType.RAR;
#endregion
#region RealArcade
// RASGI2.0
// Found in the ".rgs" files in IA item "Nova_RealArcadeCD_USA".
if (magic.StartsWith(new byte?[] { 0x52, 0x41, 0x53, 0x47, 0x49, 0x32, 0x2E, 0x30 }))
return WrapperType.RealArcadeInstaller;
// XZip2.0
// Found in the ".mez" files in IA item "Nova_RealArcadeCD_USA".
if (magic.StartsWith(new byte?[] { 0x58, 0x5A, 0x69, 0x70, 0x32, 0x2E, 0x30 }))
return WrapperType.RealArcadeMezzanine;
#endregion
#region SevenZip
if (magic.StartsWith(new byte?[] { 0x37, 0x7a, 0xbc, 0xaf, 0x27, 0x1c }))
return WrapperType.SevenZip;
if (extension.Equals("7z", StringComparison.OrdinalIgnoreCase))
return WrapperType.SevenZip;
#endregion
#region SFFS
// Found in Redump entry 81756, confirmed to be "StarForce Filesystem" by PiD.
if (magic.StartsWith(new byte?[] { 0x53, 0x46, 0x46, 0x53 }))
return WrapperType.SFFS;
#endregion
#region SGA
if (magic.StartsWith(new byte?[] { 0x5F, 0x41, 0x52, 0x43, 0x48, 0x49, 0x56, 0x45 }))
return WrapperType.SGA;
if (extension.Equals("sga", StringComparison.OrdinalIgnoreCase))
return WrapperType.SGA;
@@ -820,6 +620,12 @@ namespace SabreTools.Serialization.Wrappers
#region TapeArchive
if (magic.StartsWith(new byte?[] { 0x75, 0x73, 0x74, 0x61, 0x72, 0x00, 0x30, 0x30 }))
return WrapperType.TapeArchive;
if (magic.StartsWith(new byte?[] { 0x75, 0x73, 0x74, 0x61, 0x72, 0x20, 0x20, 0x00 }))
return WrapperType.TapeArchive;
if (extension.Equals("tar", StringComparison.OrdinalIgnoreCase))
return WrapperType.TapeArchive;
@@ -827,6 +633,37 @@ namespace SabreTools.Serialization.Wrappers
#region Textfile
// Not all text files can be identified by a magic number
// HTML
if (magic.StartsWith(new byte?[] { 0x3c, 0x68, 0x74, 0x6d, 0x6c }))
return WrapperType.Textfile;
// HTML and XML
if (magic.StartsWith(new byte?[] { 0x3c, 0x21, 0x44, 0x4f, 0x43, 0x54, 0x59, 0x50, 0x45 }))
return WrapperType.Textfile;
// InstallShield Compiled Rules
if (magic.StartsWith(new byte?[] { 0x61, 0x4C, 0x75, 0x5A }))
return WrapperType.Textfile;
// Microsoft Office File (old)
if (magic.StartsWith(new byte?[] { 0xd0, 0xcf, 0x11, 0xe0, 0xa1, 0xb1, 0x1a, 0xe1 }))
return WrapperType.Textfile;
// Rich Text File
if (magic.StartsWith(new byte?[] { 0x7b, 0x5c, 0x72, 0x74, 0x66, 0x31 }))
return WrapperType.Textfile;
// Windows Help File
if (magic.StartsWith(new byte?[] { 0x3F, 0x5F, 0x03, 0x00 }))
return WrapperType.Textfile;
// XML
// "<?xml"
if (magic.StartsWith(new byte?[] { 0x3C, 0x3F, 0x78, 0x6D, 0x6C }))
return WrapperType.Textfile;
// "Description in Zip"
if (extension.Equals("diz", StringComparison.OrdinalIgnoreCase))
return WrapperType.Textfile;
@@ -873,20 +710,24 @@ namespace SabreTools.Serialization.Wrappers
if (extension.Equals("xml", StringComparison.OrdinalIgnoreCase))
return WrapperType.Textfile;
#endregion
#region VBSP
// Shares an extension with BSP so it can't be used accurately
// if (extension.Equals("bsp", StringComparison.OrdinalIgnoreCase))
// return WrapperType.VBSP;
if (magic.StartsWith(new byte?[] { 0x56, 0x42, 0x53, 0x50 }))
return WrapperType.VBSP;
// Shares an extension with BSP
if (extension.Equals("bsp", StringComparison.OrdinalIgnoreCase))
return WrapperType.VBSP;
#endregion
#region VPK
if (magic.StartsWith(new byte?[] { 0x34, 0x12, 0xaa, 0x55 }))
return WrapperType.VPK;
// Common extension so this cannot be used accurately
// if (extension.Equals("vpk", StringComparison.OrdinalIgnoreCase))
// return WrapperType.VPK;
@@ -895,6 +736,9 @@ namespace SabreTools.Serialization.Wrappers
#region WAD
if (magic.StartsWith(new byte?[] { 0x57, 0x41, 0x44, 0x33 }))
return WrapperType.WAD;
// Common extension so this cannot be used accurately
// if (extension.Equals("wad", StringComparison.OrdinalIgnoreCase))
// return WrapperType.WAD;
@@ -903,6 +747,9 @@ namespace SabreTools.Serialization.Wrappers
#region XZ
if (magic.StartsWith(new byte?[] { 0xfd, 0x37, 0x7a, 0x58, 0x5a, 0x00 }))
return WrapperType.XZ;
if (extension.Equals("xz", StringComparison.OrdinalIgnoreCase))
return WrapperType.XZ;
@@ -910,6 +757,9 @@ namespace SabreTools.Serialization.Wrappers
#region XZP
if (magic.StartsWith(new byte?[] { 0x70, 0x69, 0x5A, 0x78 }))
return WrapperType.XZP;
if (extension.Equals("xzp", StringComparison.OrdinalIgnoreCase))
return WrapperType.XZP;
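All of the signature checks above rely on a prefix match over nullable bytes, where a null entry acts as a wildcard position. The following is only a minimal sketch of that pattern, not the library's actual StartsWith extension, which may differ in name and behavior:

// Hypothetical helper illustrating the byte?[] prefix matching used above.
// A null entry in the pattern matches any byte at that position.
public static class MagicSketch
{
    public static bool StartsWith(this byte[] data, byte?[] pattern)
    {
        if (data == null || pattern == null || data.Length < pattern.Length)
            return false;

        for (int i = 0; i < pattern.Length; i++)
        {
            // Wildcard position: accept any value
            if (pattern[i] == null)
                continue;

            if (data[i] != pattern[i].Value)
                return false;
        }

        return true;
    }
}

With a helper along these lines, the signature tables above can mix exact bytes and wildcard positions in a single pattern.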

View File

@@ -41,6 +41,11 @@ namespace SabreTools.Serialization.Wrappers
/// </summary>
CFB,
/// <summary>
/// MAME Compressed Hunks of Data
/// </summary>
CHD,
/// <summary>
/// CTR Importable Archive
/// </summary>
@@ -136,7 +141,6 @@ namespace SabreTools.Serialization.Wrappers
/// <summary>
/// PKWARE ZIP archive and derivatives
/// </summary>
/// <remarks>Currently has no IWrapper implementation</remarks>
PKZIP,
/// <summary>

View File

@@ -22,7 +22,7 @@ namespace SabreTools.Serialization.Wrappers
{
get
{
var publisherIdentifier = this.Model.PublisherIdentifier;
var publisherIdentifier = Model.PublisherIdentifier;
if (string.IsNullOrEmpty(publisherIdentifier))
return "Unknown";
@@ -40,7 +40,7 @@ namespace SabreTools.Serialization.Wrappers
{
get
{
var regionIdentifier = this.Model.RegionIdentifier;
var regionIdentifier = Model.RegionIdentifier;
if (Regions.ContainsKey(regionIdentifier))
return Regions[regionIdentifier];
@@ -51,12 +51,12 @@ namespace SabreTools.Serialization.Wrappers
/// <summary>
/// Get the human-readable serial string
/// </summary>
public string Serial => $"{this.Model.PublisherIdentifier}-{this.Model.GameID}";
public string Serial => $"{Model.PublisherIdentifier}-{Model.GameID}";
/// <summary>
/// Get the human-readable version string
/// </summary>
public string Version => $"1.{this.Model.VersionNumber}";
public string Version => $"1.{Model.VersionNumber}";
#endregion

View File

@@ -22,7 +22,7 @@ namespace SabreTools.Serialization.Wrappers
{
get
{
char mediaSubtype = this.Model.MediaSubtypeIdentifier;
char mediaSubtype = Model.MediaSubtypeIdentifier;
if (MediaSubtypes.ContainsKey(mediaSubtype))
return MediaSubtypes[mediaSubtype];
@@ -37,7 +37,7 @@ namespace SabreTools.Serialization.Wrappers
{
get
{
var publisherIdentifier = this.Model.PublisherIdentifier;
var publisherIdentifier = Model.PublisherIdentifier;
if (string.IsNullOrEmpty(publisherIdentifier))
return "Unknown";
@@ -55,7 +55,7 @@ namespace SabreTools.Serialization.Wrappers
{
get
{
var regionIdentifier = this.Model.RegionIdentifier;
var regionIdentifier = Model.RegionIdentifier;
if (Regions.ContainsKey(regionIdentifier))
return Regions[regionIdentifier];
@@ -66,12 +66,12 @@ namespace SabreTools.Serialization.Wrappers
/// <summary>
/// Get the human-readable serial string
/// </summary>
public string Serial => $"{this.Model.PublisherIdentifier}-{this.Model.PlatformIdentifier}{this.Model.GameID}";
public string Serial => $"{Model.PublisherIdentifier}-{Model.PlatformIdentifier}{Model.GameID}";
/// <summary>
/// Get the human-readable version string
/// </summary>
public string Version => $"1.{this.Model.SKU}";
public string Version => $"1.{Model.SKU}";
#endregion
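The wrappers above resolve region and publisher names with a ContainsKey check followed by an indexer read. A sketch of the same lookup using TryGetValue, with a hypothetical Regions map standing in for the wrapper's own dictionary, performs a single lookup instead of two:

// Sketch only: the dictionary contents here are placeholders, not the
// wrapper's real region table.
using System.Collections.Generic;

public class RegionLookupSketch
{
    private static readonly Dictionary<char, string> Regions = new Dictionary<char, string>
    {
        { 'E', "USA" },
        { 'J', "Japan" },
        { 'P', "Europe" },
    };

    public static string GetRegionName(char regionIdentifier)
    {
        // Single lookup instead of ContainsKey followed by an indexer
        return Regions.TryGetValue(regionIdentifier, out var name) ? name : "Unknown";
    }
}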

View File

@@ -17,13 +17,11 @@ namespace Test
if (options == null)
{
Options.DisplayHelp();
Console.WriteLine("Press enter to close the program...");
Console.ReadLine();
return;
}
// Loop through the input paths
foreach (string inputPath in args)
foreach (string inputPath in options.InputPaths)
{
#if NETFRAMEWORK
PrintPathInfo(inputPath, false, options.Debug);
@@ -81,12 +79,8 @@ namespace Test
stream.Seek(0, SeekOrigin.Begin);
// Get the file type
WrapperType ft = WrapperFactory.GetFileType(magic ?? []);
if (ft == WrapperType.UNKNOWN)
{
string extension = Path.GetExtension(file).TrimStart('.');
ft = WrapperFactory.GetFileType(extension);
}
string extension = Path.GetExtension(file).TrimStart('.');
WrapperType ft = WrapperFactory.GetFileType(magic ?? [], extension);
// Print out the file format
Console.WriteLine($"File format found: {ft}");

View File

@@ -2,7 +2,6 @@
<PropertyGroup>
<TargetFrameworks>net20;net35;net40;net452;net462;net472;net48;netcoreapp3.1;net5.0;net6.0;net7.0;net8.0</TargetFrameworks>
<RuntimeIdentifiers>win-x86;win-x64;win-arm64;linux-x64;linux-arm64;osx-x64</RuntimeIdentifiers>
<OutputType>Exe</OutputType>
<CheckEolTargetFramework>false</CheckEolTargetFramework>
<IncludeSourceRevisionInInformationalVersion>false</IncludeSourceRevisionInInformationalVersion>
@@ -12,6 +11,20 @@
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<!-- Support All Frameworks -->
<PropertyGroup Condition="$(TargetFramework.StartsWith(`net2`)) OR $(TargetFramework.StartsWith(`net3`)) OR $(TargetFramework.StartsWith(`net4`))">
<RuntimeIdentifiers>win-x86;win-x64</RuntimeIdentifiers>
</PropertyGroup>
<PropertyGroup Condition="$(TargetFramework.StartsWith(`netcoreapp`)) OR $(TargetFramework.StartsWith(`net5`))">
<RuntimeIdentifiers>win-x86;win-x64;win-arm64;linux-x64;linux-arm64;osx-x64</RuntimeIdentifiers>
</PropertyGroup>
<PropertyGroup Condition="$(TargetFramework.StartsWith(`net6`)) OR $(TargetFramework.StartsWith(`net7`)) OR $(TargetFramework.StartsWith(`net8`))">
<RuntimeIdentifiers>win-x86;win-x64;win-arm64;linux-x64;linux-arm64;osx-x64;osx-arm64</RuntimeIdentifiers>
</PropertyGroup>
<PropertyGroup Condition="$(RuntimeIdentifier.StartsWith(`osx-arm`))">
<TargetFrameworks>net6.0;net7.0;net8.0</TargetFrameworks>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\SabreTools.Serialization\SabreTools.Serialization.csproj" />
</ItemGroup>

View File

@@ -1,4 +1,4 @@
#! /bin/bash
#!/bin/bash
# This script assumes the following:
# - .NET 8.0 (or newer) SDK is installed and in PATH
@@ -12,9 +12,8 @@
USE_ALL=false
NO_BUILD=false
NO_ARCHIVE=false
while getopts "uba" OPTION
do
case $OPTION in
while getopts "uba" OPTION; do
case $OPTION in
u)
USE_ALL=true
;;
@@ -35,27 +34,32 @@ done
BUILD_FOLDER=$PWD
# Set the current commit hash
COMMIT=`git log --pretty=%H -1`
COMMIT=$(git log --pretty=%H -1)
# Output the selected options
echo "Selected Options:"
echo " Use all frameworks (-u) $USE_ALL"
echo " No build (-b) $NO_BUILD"
echo " No archive (-a) $NO_ARCHIVE"
echo " "
# Create the build matrix arrays
FRAMEWORKS=("net8.0")
RUNTIMES=("win-x86" "win-x64" "linux-x64" "osx-x64")
RUNTIMES=("win-x86" "win-x64" "win-arm64" "linux-x64" "linux-arm64" "osx-x64" "osx-arm64")
# Use expanded lists, if requested
if [ $USE_ALL = true ]
then
if [ $USE_ALL = true ]; then
FRAMEWORKS=("net20" "net35" "net40" "net452" "net462" "net472" "net48" "netcoreapp3.1" "net5.0" "net6.0" "net7.0" "net8.0")
RUNTIMES=("win-x86" "win-x64" "win-arm64" "linux-x64" "linux-arm64" "osx-x64")
fi
# Create the filter arrays
SINGLE_FILE_CAPABLE=("net5.0" "net6.0" "net7.0" "net8.0")
VALID_APPLE_FRAMEWORKS=("net6.0" "net7.0" "net8.0")
VALID_CROSS_PLATFORM_FRAMEWORKS=("netcoreapp3.1" "net5.0" "net6.0" "net7.0" "net8.0")
VALID_CROSS_PLATFORM_RUNTIMES=("win-arm64" "linux-x64" "linux-arm64" "osx-x64")
VALID_CROSS_PLATFORM_RUNTIMES=("win-arm64" "linux-x64" "linux-arm64" "osx-x64" "osx-arm64")
# Only build if requested
if [ $NO_BUILD = false ]
then
if [ $NO_BUILD = false ]; then
# Restore Nuget packages for all builds
echo "Restoring Nuget packages"
dotnet restore
@@ -63,24 +67,40 @@ then
# Create Nuget Package
dotnet pack SabreTools.Serialization/SabreTools.Serialization.csproj --output $BUILD_FOLDER
# Build Test
for FRAMEWORK in "${FRAMEWORKS[@]}"
do
for RUNTIME in "${RUNTIMES[@]}"
do
# Build Program
for FRAMEWORK in "${FRAMEWORKS[@]}"; do
for RUNTIME in "${RUNTIMES[@]}"; do
# Output the current build
echo "===== Build Program - $FRAMEWORK, $RUNTIME ====="
# If we have an invalid combination of framework and runtime
if [ ! $(echo ${VALID_CROSS_PLATFORM_FRAMEWORKS[@]} | fgrep -w $FRAMEWORK) ] && [ $(echo ${VALID_CROSS_PLATFORM_RUNTIMES[@]} | fgrep -w $RUNTIME) ]
then
continue
if [[ ! $(echo ${VALID_CROSS_PLATFORM_FRAMEWORKS[@]} | fgrep -w $FRAMEWORK) ]]; then
if [[ $(echo ${VALID_CROSS_PLATFORM_RUNTIMES[@]} | fgrep -w $RUNTIME) ]]; then
echo "Skipped due to invalid combination"
continue
fi
fi
# If we have Apple silicon but an unsupported framework
if [[ ! $(echo ${VALID_APPLE_FRAMEWORKS[@]} | fgrep -w $FRAMEWORK) ]]; then
if [ $RUNTIME = "osx-arm64" ]; then
echo "Skipped due to no Apple Silicon support"
continue
fi
fi
# Only .NET 5 and above can publish to a single file
if [[ $(echo ${SINGLE_FILE_CAPABLE[@]} | fgrep -w $FRAMEWORK) ]]
then
dotnet publish Test/Test.csproj -f $FRAMEWORK -r $RUNTIME -c Debug --self-contained true --version-suffix $COMMIT -p:PublishSingleFile=true
if [[ $(echo ${SINGLE_FILE_CAPABLE[@]} | fgrep -w $FRAMEWORK) ]]; then
# Only include Debug if building all
if [ $USE_ALL = true ]; then
dotnet publish Test/Test.csproj -f $FRAMEWORK -r $RUNTIME -c Debug --self-contained true --version-suffix $COMMIT -p:PublishSingleFile=true
fi
dotnet publish Test/Test.csproj -f $FRAMEWORK -r $RUNTIME -c Release --self-contained true --version-suffix $COMMIT -p:PublishSingleFile=true -p:DebugType=None -p:DebugSymbols=false
else
dotnet publish Test/Test.csproj -f $FRAMEWORK -r $RUNTIME -c Debug --self-contained true --version-suffix $COMMIT
# Only include Debug if building all
if [ $USE_ALL = true ]; then
dotnet publish Test/Test.csproj -f $FRAMEWORK -r $RUNTIME -c Debug --self-contained true --version-suffix $COMMIT
fi
dotnet publish Test/Test.csproj -f $FRAMEWORK -r $RUNTIME -c Release --self-contained true --version-suffix $COMMIT -p:DebugType=None -p:DebugSymbols=false
fi
done
@@ -88,21 +108,34 @@ then
fi
# Only create archives if requested
if [ $NO_ARCHIVE = false ]
then
if [ $NO_ARCHIVE = false ]; then
# Create Test archives
for FRAMEWORK in "${FRAMEWORKS[@]}"
do
for RUNTIME in "${RUNTIMES[@]}"
do
for FRAMEWORK in "${FRAMEWORKS[@]}"; do
for RUNTIME in "${RUNTIMES[@]}"; do
# Output the current build
echo "===== Archive Program - $FRAMEWORK, $RUNTIME ====="
# If we have an invalid combination of framework and runtime
if [ ! $(echo ${VALID_CROSS_PLATFORM_FRAMEWORKS[@]} | fgrep -w $FRAMEWORK) ] && [ $(echo ${VALID_CROSS_PLATFORM_RUNTIMES[@]} | fgrep -w $RUNTIME) ]
then
continue
if [[ ! $(echo ${VALID_CROSS_PLATFORM_FRAMEWORKS[@]} | fgrep -w $FRAMEWORK) ]]; then
if [[ $(echo ${VALID_CROSS_PLATFORM_RUNTIMES[@]} | fgrep -w $RUNTIME) ]]; then
echo "Skipped due to invalid combination"
continue
fi
fi
cd $BUILD_FOLDER/Test/bin/Debug/${FRAMEWORK}/${RUNTIME}/publish/
zip -r $BUILD_FOLDER/SabreTools.Serialization_${FRAMEWORK}_${RUNTIME}_debug.zip .
# If we have Apple silicon but an unsupported framework
if [[ ! $(echo ${VALID_APPLE_FRAMEWORKS[@]} | fgrep -w $FRAMEWORK) ]]; then
if [ $RUNTIME = "osx-arm64" ]; then
echo "Skipped due to no Apple Silicon support"
continue
fi
fi
# Only include Debug if building all
if [ $USE_ALL = true ]; then
cd $BUILD_FOLDER/Test/bin/Debug/${FRAMEWORK}/${RUNTIME}/publish/
zip -r $BUILD_FOLDER/SabreTools.Serialization_${FRAMEWORK}_${RUNTIME}_debug.zip .
fi
cd $BUILD_FOLDER/Test/bin/Release/${FRAMEWORK}/${RUNTIME}/publish/
zip -r $BUILD_FOLDER/SabreTools.Serialization_${FRAMEWORK}_${RUNTIME}_release.zip .
done
@@ -110,4 +143,4 @@ then
# Reset the directory
cd $BUILD_FOLDER
fi
fi

View File

@@ -27,25 +27,30 @@ $BUILD_FOLDER = $PSScriptRoot
# Set the current commit hash
$COMMIT = git log --pretty=format:"%H" -1
# Output the selected options
Write-Host "Selected Options:"
Write-Host " Use all frameworks (-UseAll) $USE_ALL"
Write-Host " No build (-NoBuild) $NO_BUILD"
Write-Host " No archive (-NoArchive) $NO_ARCHIVE"
Write-Host " "
# Create the build matrix arrays
$FRAMEWORKS = @('net8.0')
$RUNTIMES = @('win-x86', 'win-x64', 'linux-x64', 'osx-x64')
$RUNTIMES = @('win-x86', 'win-x64', 'win-arm64', 'linux-x64', 'linux-arm64', 'osx-x64', 'osx-arm64')
# Use expanded lists, if requested
if ($USE_ALL.IsPresent)
{
if ($USE_ALL.IsPresent) {
$FRAMEWORKS = @('net20', 'net35', 'net40', 'net452', 'net462', 'net472', 'net48', 'netcoreapp3.1', 'net5.0', 'net6.0', 'net7.0', 'net8.0')
$RUNTIMES = @('win-x86', 'win-x64', 'win-arm64', 'linux-x64', 'linux-arm64', 'osx-x64')
}
# Create the filter arrays
$SINGLE_FILE_CAPABLE = @('net5.0', 'net6.0', 'net7.0', 'net8.0')
$VALID_APPLE_FRAMEWORKS = @('net6.0', 'net7.0', 'net8.0')
$VALID_CROSS_PLATFORM_FRAMEWORKS = @('netcoreapp3.1', 'net5.0', 'net6.0', 'net7.0', 'net8.0')
$VALID_CROSS_PLATFORM_RUNTIMES = @('win-arm64', 'linux-x64', 'linux-arm64', 'osx-x64')
$VALID_CROSS_PLATFORM_RUNTIMES = @('win-arm64', 'linux-x64', 'linux-arm64', 'osx-x64', 'osx-arm64')
# Only build if requested
if (!$NO_BUILD.IsPresent)
{
if (!$NO_BUILD.IsPresent) {
# Restore Nuget packages for all builds
Write-Host "Restoring Nuget packages"
dotnet restore
@@ -54,25 +59,36 @@ if (!$NO_BUILD.IsPresent)
dotnet pack SabreTools.Serialization\SabreTools.Serialization.csproj --output $BUILD_FOLDER
# Build Test
foreach ($FRAMEWORK in $FRAMEWORKS)
{
foreach ($RUNTIME in $RUNTIMES)
{
foreach ($FRAMEWORK in $FRAMEWORKS) {
foreach ($RUNTIME in $RUNTIMES) {
# Output the current build
Write-Host "===== Build Program - $FRAMEWORK, $RUNTIME ====="
# If we have an invalid combination of framework and runtime
if ($VALID_CROSS_PLATFORM_FRAMEWORKS -notcontains $FRAMEWORK -and $VALID_CROSS_PLATFORM_RUNTIMES -contains $RUNTIME)
{
if ($VALID_CROSS_PLATFORM_FRAMEWORKS -notcontains $FRAMEWORK -and $VALID_CROSS_PLATFORM_RUNTIMES -contains $RUNTIME) {
Write-Host "Skipped due to invalid combination"
continue
}
# If we have Apple silicon but an unsupported framework
if ($VALID_APPLE_FRAMEWORKS -notcontains $FRAMEWORK -and $RUNTIME -eq 'osx-arm64') {
Write-Host "Skipped due to no Apple Silicon support"
continue
}
# Only .NET 5 and above can publish to a single file
if ($SINGLE_FILE_CAPABLE -contains $FRAMEWORK)
{
dotnet publish Test\Test.csproj -f $FRAMEWORK -r $RUNTIME -c Debug --self-contained true --version-suffix $COMMIT -p:PublishSingleFile=true
if ($SINGLE_FILE_CAPABLE -contains $FRAMEWORK) {
# Only include Debug if building all
if ($USE_ALL.IsPresent) {
dotnet publish Test\Test.csproj -f $FRAMEWORK -r $RUNTIME -c Debug --self-contained true --version-suffix $COMMIT -p:PublishSingleFile=true
}
dotnet publish Test\Test.csproj -f $FRAMEWORK -r $RUNTIME -c Release --self-contained true --version-suffix $COMMIT -p:PublishSingleFile=true -p:DebugType=None -p:DebugSymbols=false
}
else
{
dotnet publish Test\Test.csproj -f $FRAMEWORK -r $RUNTIME -c Debug --self-contained true --version-suffix $COMMIT
else {
# Only include Debug if building all
if ($USE_ALL.IsPresent) {
dotnet publish Test\Test.csproj -f $FRAMEWORK -r $RUNTIME -c Debug --self-contained true --version-suffix $COMMIT
}
dotnet publish Test\Test.csproj -f $FRAMEWORK -r $RUNTIME -c Release --self-contained true --version-suffix $COMMIT -p:DebugType=None -p:DebugSymbols=false
}
}
@@ -80,21 +96,31 @@ if (!$NO_BUILD.IsPresent)
}
# Only create archives if requested
if (!$NO_ARCHIVE.IsPresent)
{
if (!$NO_ARCHIVE.IsPresent) {
# Create Test archives
foreach ($FRAMEWORK in $FRAMEWORKS)
{
foreach ($RUNTIME in $RUNTIMES)
{
foreach ($FRAMEWORK in $FRAMEWORKS) {
foreach ($RUNTIME in $RUNTIMES) {
# Output the current build
Write-Host "===== Archive Program - $FRAMEWORK, $RUNTIME ====="
# If we have an invalid combination of framework and runtime
if ($VALID_CROSS_PLATFORM_FRAMEWORKS -notcontains $FRAMEWORK -and $VALID_CROSS_PLATFORM_RUNTIMES -contains $RUNTIME)
{
if ($VALID_CROSS_PLATFORM_FRAMEWORKS -notcontains $FRAMEWORK -and $VALID_CROSS_PLATFORM_RUNTIMES -contains $RUNTIME) {
Write-Host "Skipped due to invalid combination"
continue
}
Set-Location -Path $BUILD_FOLDER\Test\bin\Debug\${FRAMEWORK}\${RUNTIME}\publish\
7z a -tzip $BUILD_FOLDER\SabreTools.Serialization_${FRAMEWORK}_${RUNTIME}_debug.zip *
# If we have Apple silicon but an unsupported framework
if ($VALID_APPLE_FRAMEWORKS -notcontains $FRAMEWORK -and $RUNTIME -eq 'osx-arm64') {
Write-Host "Skipped due to no Apple Silicon support"
continue
}
# Only include Debug if building all
if ($USE_ALL.IsPresent) {
Set-Location -Path $BUILD_FOLDER\Test\bin\Debug\${FRAMEWORK}\${RUNTIME}\publish\
7z a -tzip $BUILD_FOLDER\SabreTools.Serialization_${FRAMEWORK}_${RUNTIME}_debug.zip *
}
Set-Location -Path $BUILD_FOLDER\Test\bin\Release\${FRAMEWORK}\${RUNTIME}\publish\
7z a -tzip $BUILD_FOLDER\SabreTools.Serialization_${FRAMEWORK}_${RUNTIME}_release.zip *
}