Compare commits


33 Commits
1.6.5 ... 1.6.9

Author SHA1 Message Date
Matt Nadareski  fe95b894d7  Bump version  2024-10-31 15:23:59 -04:00
Matt Nadareski  38a2712a8f  Fake readable compressor names  2024-10-31 13:51:29 -04:00
Matt Nadareski  d1ea091574  Remove "press enter" on failure  2024-10-31 13:49:08 -04:00
Matt Nadareski  6bc812fc2f  Fix formatting for CHD printing  2024-10-31 13:38:42 -04:00
Matt Nadareski  61b89fbd72  Fix typo in N3DS  2024-10-31 12:10:53 -04:00
Matt Nadareski  a2c065bdf2  Add CHD to factory  2024-10-31 12:09:36 -04:00
Matt Nadareski  88479f674b  Add CHD printer  2024-10-31 12:06:25 -04:00
Matt Nadareski  5edbacde74  Add CHD printer  2024-10-31 12:03:34 -04:00
Matt Nadareski  67fc51224b  Fix lack of ValueTuple in switch  2024-10-31 11:51:14 -04:00
Matt Nadareski  101f3294b4  Add CHD wrapper  2024-10-31 11:47:58 -04:00
Matt Nadareski  6c5622f732  Add CHD header deserialization  2024-10-31 11:40:50 -04:00
Matt Nadareski  f2a6fe1445  Update Models to 1.4.11  2024-10-31 11:34:45 -04:00
Matt Nadareski  b0b593443f  Update packages  2024-10-24 17:27:55 -04:00
Matt Nadareski  9b05185add  Fix old .NET compatibility  2024-10-14 00:20:02 -04:00
Matt Nadareski  17316da536  Port numerous extensions from NDecrypt  2024-10-14 00:15:14 -04:00
Matt Nadareski  f3ca4dd989  Port logic from UnshieldSharp  2024-10-03 11:14:41 -04:00
Matt Nadareski  e2b7bdac8c  Temporary fix for IS-CAB file group parsing  2024-10-03 02:51:37 -04:00
Matt Nadareski  f86f6dc438  Bump version  2024-10-01 14:08:18 -04:00
Matt Nadareski  2bac0ed505  Update packages  2024-10-01 14:06:53 -04:00
Matt Nadareski  ae4078bb7f  Fix inconsistencies in build and publish  2024-08-08 20:17:42 -04:00
Matt Nadareski  afaffbd9a2  Fix 3DS serialization and printing  2024-08-08 19:46:05 -04:00
TheRogueArchivist  b878e59e2e  Fix typo in PortableExecutable Printer (#11)  2024-07-12 11:08:50 -04:00
Matt Nadareski  4bb3f625dd  Make PE debug table parsing safer  2024-06-20 11:23:28 -04:00
Matt Nadareski  b7978cafa5  Bump version  2024-06-13 11:12:40 -04:00
Matt Nadareski  17f376c76f  Remove all instances of this.  2024-06-05 22:49:27 -04:00
Matt Nadareski  2774fdf158  Clean up enumerables and namespace use  2024-06-05 22:48:42 -04:00
Matt Nadareski  11081efcb0  Make PE header reading even saferer  2024-06-05 22:22:22 -04:00
TheRogueArchivist  1b412c3027  Add header length safeguards to PortableExecutable wrapper (#9)  2024-06-05 22:19:35 -04:00
Matt Nadareski  73ec66e627  Fix ISv3 deserialization  2024-06-03 11:55:12 -04:00
Matt Nadareski  4ae4cd80b1  Bump version  2024-05-30 21:07:04 -04:00
Matt Nadareski  6eb27c66fc  Merge pull request #8 from TheRogueArchivist/streamdatalock: Add lock for reading data from stream  2024-05-30 12:30:33 -04:00
TheRogueArchivist  f96fd17fd3  Add lock for reading data from stream  2024-05-27 15:36:04 -06:00
Matt Nadareski  c255a2494d  Fix IS-CAB file group name parsing  2024-05-18 21:27:09 -04:00
33 changed files with 1737 additions and 354 deletions

328
.gitignore vendored
View File

@@ -1,15 +1,7 @@
*.swp
*.*~
project.lock.json
.DS_Store
*.pyc
nupkg/
# Visual Studio Code
.vscode
# Rider
.idea
## Ignore Visual Studio temporary files, build results, and
## files generated by popular Visual Studio add-ons.
##
## Get latest from https://github.com/github/gitignore/blob/master/VisualStudio.gitignore
# User-specific files
*.suo
@@ -17,6 +9,9 @@ nupkg/
*.userosscache
*.sln.docstates
# User-specific files (MonoDevelop/Xamarin Studio)
*.userprefs
# Build results
[Dd]ebug/
[Dd]ebugPublic/
@@ -24,15 +19,312 @@ nupkg/
[Rr]eleases/
x64/
x86/
build/
bld/
[Bb]in/
[Oo]bj/
[Oo]ut/
msbuild.log
msbuild.err
msbuild.wrn
[Ll]og/
# Visual Studio 2015
# Visual Studio 2015/2017 cache/options directory
.vs/
# Uncomment if you have tasks that create the project's static files in wwwroot
#wwwroot/
# Visual Studio 2017 auto generated files
Generated\ Files/
# MSTest test Results
[Tt]est[Rr]esult*/
[Bb]uild[Ll]og.*
# NUNIT
*.VisualState.xml
TestResult.xml
# Build Results of an ATL Project
[Dd]ebugPS/
[Rr]eleasePS/
dlldata.c
# Benchmark Results
BenchmarkDotNet.Artifacts/
# .NET Core
project.lock.json
project.fragment.lock.json
artifacts/
**/Properties/launchSettings.json
# StyleCop
StyleCopReport.xml
# Files built by Visual Studio
*_i.c
*_p.c
*_i.h
*.ilk
*.meta
*.obj
*.iobj
*.pch
*.pdb
*.ipdb
*.pgc
*.pgd
*.rsp
*.sbr
*.tlb
*.tli
*.tlh
*.tmp
*.tmp_proj
*.log
*.vspscc
*.vssscc
.builds
*.pidb
*.svclog
*.scc
# Chutzpah Test files
_Chutzpah*
# Visual C++ cache files
ipch/
*.aps
*.ncb
*.opendb
*.opensdf
*.sdf
*.cachefile
*.VC.db
*.VC.VC.opendb
# Visual Studio profiler
*.psess
*.vsp
*.vspx
*.sap
# Visual Studio Trace Files
*.e2e
# TFS 2012 Local Workspace
$tf/
# Guidance Automation Toolkit
*.gpState
# ReSharper is a .NET coding add-in
_ReSharper*/
*.[Rr]e[Ss]harper
*.DotSettings.user
# JustCode is a .NET coding add-in
.JustCode
# TeamCity is a build add-in
_TeamCity*
# DotCover is a Code Coverage Tool
*.dotCover
# AxoCover is a Code Coverage Tool
.axoCover/*
!.axoCover/settings.json
# Visual Studio code coverage results
*.coverage
*.coveragexml
# NCrunch
_NCrunch_*
.*crunch*.local.xml
nCrunchTemp_*
# MightyMoose
*.mm.*
AutoTest.Net/
# Web workbench (sass)
.sass-cache/
# Installshield output folder
[Ee]xpress/
# DocProject is a documentation generator add-in
DocProject/buildhelp/
DocProject/Help/*.HxT
DocProject/Help/*.HxC
DocProject/Help/*.hhc
DocProject/Help/*.hhk
DocProject/Help/*.hhp
DocProject/Help/Html2
DocProject/Help/html
# Click-Once directory
publish/
# Publish Web Output
*.[Pp]ublish.xml
*.azurePubxml
# Note: Comment the next line if you want to checkin your web deploy settings,
# but database connection strings (with potential passwords) will be unencrypted
*.pubxml
*.publishproj
# Microsoft Azure Web App publish settings. Comment the next line if you want to
# checkin your Azure Web App publish settings, but sensitive information contained
# in these scripts will be unencrypted
PublishScripts/
# NuGet Packages
*.nupkg
# The packages folder can be ignored because of Package Restore
**/[Pp]ackages/*
# except build/, which is used as an MSBuild target.
!**/[Pp]ackages/build/
# Uncomment if necessary however generally it will be regenerated when needed
#!**/[Pp]ackages/repositories.config
# NuGet v3's project.json files produces more ignorable files
*.nuget.props
*.nuget.targets
# Microsoft Azure Build Output
csx/
*.build.csdef
# Microsoft Azure Emulator
ecf/
rcf/
# Windows Store app package directories and files
AppPackages/
BundleArtifacts/
Package.StoreAssociation.xml
_pkginfo.txt
*.appx
# Visual Studio cache files
# files ending in .cache can be ignored
*.[Cc]ache
# but keep track of directories ending in .cache
!*.[Cc]ache/
# Others
ClientBin/
~$*
*~
*.dbmdl
*.dbproj.schemaview
*.jfm
*.pfx
*.publishsettings
orleans.codegen.cs
# Including strong name files can present a security risk
# (https://github.com/github/gitignore/pull/2483#issue-259490424)
#*.snk
# Since there are multiple workflows, uncomment next line to ignore bower_components
# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622)
#bower_components/
# RIA/Silverlight projects
Generated_Code/
# Backup & report files from converting an old project file
# to a newer Visual Studio version. Backup files are not needed,
# because we have git ;-)
_UpgradeReport_Files/
Backup*/
UpgradeLog*.XML
UpgradeLog*.htm
ServiceFabricBackup/
*.rptproj.bak
# SQL Server files
*.mdf
*.ldf
*.ndf
# Business Intelligence projects
*.rdl.data
*.bim.layout
*.bim_*.settings
*.rptproj.rsuser
# Microsoft Fakes
FakesAssemblies/
# GhostDoc plugin setting file
*.GhostDoc.xml
# Node.js Tools for Visual Studio
.ntvs_analysis.dat
node_modules/
# Visual Studio 6 build log
*.plg
# Visual Studio 6 workspace options file
*.opt
# Visual Studio 6 auto-generated workspace file (contains which files were open etc.)
*.vbw
# Visual Studio LightSwitch build output
**/*.HTMLClient/GeneratedArtifacts
**/*.DesktopClient/GeneratedArtifacts
**/*.DesktopClient/ModelManifest.xml
**/*.Server/GeneratedArtifacts
**/*.Server/ModelManifest.xml
_Pvt_Extensions
# Paket dependency manager
.paket/paket.exe
paket-files/
# FAKE - F# Make
.fake/
# JetBrains Rider
.idea/
*.sln.iml
# CodeRush
.cr/
# Python Tools for Visual Studio (PTVS)
__pycache__/
*.pyc
# Cake - Uncomment if you are using it
# tools/**
# !tools/packages.config
# Tabs Studio
*.tss
# Telerik's JustMock configuration file
*.jmconfig
# BizTalk build output
*.btp.cs
*.btm.cs
*.odx.cs
*.xsd.cs
# OpenCover UI analysis results
OpenCover/
# Azure Stream Analytics local run output
ASALocalRun/
# MSBuild Binary and Structured Log
*.binlog
# NVidia Nsight GPU debugger configuration file
*.nvuser
# MFractors (Xamarin productivity tool) working folder
.mfractor/

28
.vscode/launch.json vendored Normal file
View File

@@ -0,0 +1,28 @@
{
// Use IntelliSense to find out which attributes exist for C# debugging
// Use hover for the description of the existing attributes
// For further information visit https://github.com/OmniSharp/omnisharp-vscode/blob/master/debugger-launchjson.md
"version": "0.2.0",
"configurations": [
{
"name": ".NET Core Launch (Test)",
"type": "coreclr",
"request": "launch",
"preLaunchTask": "build",
// If you have changed target frameworks, make sure to update the program path.
"program": "${workspaceFolder}/Test/bin/Debug/net8.0/Test.dll",
"args": [],
"cwd": "${workspaceFolder}",
// For more information about the 'console' field, see https://aka.ms/VSCode-CS-LaunchJson-Console
"console": "internalConsole",
"stopAtEntry": false,
"justMyCode": false
},
{
"name": ".NET Core Attach",
"type": "coreclr",
"request": "attach",
"processId": "${command:pickProcess}"
}
]
}

24
.vscode/tasks.json vendored Normal file
View File

@@ -0,0 +1,24 @@
{
// See https://go.microsoft.com/fwlink/?LinkId=733558
// for the documentation about the tasks.json format
"version": "2.0.0",
"tasks": [
{
"label": "build",
"command": "dotnet",
"type": "shell",
"args": [
"build",
// Ask dotnet build to generate full paths for file names.
"/property:GenerateFullPaths=true",
// Do not generate summary otherwise it leads to duplicate errors in Problems panel
"/consoleloggerparameters:NoSummary"
],
"group": "build",
"presentation": {
"reveal": "silent"
},
"problemMatcher": "$msCompile"
}
]
}

View File

@@ -1,4 +1,4 @@
<Project Sdk="Microsoft.NET.Sdk">
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFrameworks>net6.0;net8.0</TargetFrameworks>
@@ -26,10 +26,10 @@
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PrivateAssets>all</PrivateAssets>
</PackageReference>
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.9.0" />
<PackageReference Include="SabreTools.Models" Version="1.4.8" />
<PackageReference Include="xunit" Version="2.8.0" />
<PackageReference Include="xunit.runner.visualstudio" Version="2.8.0">
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.11.1" />
<PackageReference Include="SabreTools.Models" Version="1.4.11" />
<PackageReference Include="xunit" Version="2.9.2" />
<PackageReference Include="xunit.runner.visualstudio" Version="2.8.2">
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PrivateAssets>all</PrivateAssets>
</PackageReference>

View File

@@ -0,0 +1,253 @@
using System;
using System.IO;
using System.Text;
using SabreTools.IO.Extensions;
using SabreTools.Models.CHD;
namespace SabreTools.Serialization.Deserializers
{
// TODO: Expand this to full CHD files eventually
public class CHD : BaseBinaryDeserializer<Header>
{
/// <inheritdoc/>
public override Header? Deserialize(Stream? data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
return null;
// If the offset is out of bounds
if (data.Position < 0 || data.Position >= data.Length)
return null;
// Cache the current offset
int initialOffset = (int)data.Position;
// Determine the header version
uint version = GetVersion(data);
// Read and return the current CHD
return version switch
{
1 => ParseHeaderV1(data),
2 => ParseHeaderV2(data),
3 => ParseHeaderV3(data),
4 => ParseHeaderV4(data),
5 => ParseHeaderV5(data),
_ => null,
};
}
/// <summary>
/// Get the matching CHD version, if possible
/// </summary>
/// <returns>Matching version, 0 if none</returns>
private static uint GetVersion(Stream data)
{
// Read the header values
byte[] tagBytes = data.ReadBytes(8);
string tag = Encoding.ASCII.GetString(tagBytes);
uint length = data.ReadUInt32BigEndian();
uint version = data.ReadUInt32BigEndian();
// Seek back to start
data.SeekIfPossible();
// Check the signature
if (!string.Equals(tag, Constants.SignatureString, StringComparison.Ordinal))
return 0;
// Match the version to header length
#if NET472_OR_GREATER || NETCOREAPP
return (version, length) switch
{
(1, Constants.HeaderV1Size) => version,
(2, Constants.HeaderV2Size) => version,
(3, Constants.HeaderV3Size) => version,
(4, Constants.HeaderV4Size) => version,
(5, Constants.HeaderV5Size) => version,
_ => 0,
};
#else
return version switch
{
1 => length == Constants.HeaderV1Size ? version : 0,
2 => length == Constants.HeaderV2Size ? version : 0,
3 => length == Constants.HeaderV3Size ? version : 0,
4 => length == Constants.HeaderV4Size ? version : 0,
5 => length == Constants.HeaderV5Size ? version : 0,
_ => 0,
};
#endif
}
/// <summary>
/// Parse a Stream into a V1 header
/// </summary>
private static HeaderV1? ParseHeaderV1(Stream data)
{
var header = new HeaderV1();
byte[] tagBytes = data.ReadBytes(8);
header.Tag = Encoding.ASCII.GetString(tagBytes);
if (header.Tag != Constants.SignatureString)
return null;
header.Length = data.ReadUInt32BigEndian();
if (header.Length != Constants.HeaderV1Size)
return null;
header.Version = data.ReadUInt32BigEndian();
header.Flags = (Flags)data.ReadUInt32BigEndian();
header.Compression = (CompressionType)data.ReadUInt32BigEndian();
if (header.Compression > CompressionType.CHDCOMPRESSION_ZLIB)
return null;
header.HunkSize = data.ReadUInt32BigEndian();
header.TotalHunks = data.ReadUInt32BigEndian();
header.Cylinders = data.ReadUInt32BigEndian();
header.Heads = data.ReadUInt32BigEndian();
header.Sectors = data.ReadUInt32BigEndian();
header.MD5 = data.ReadBytes(16);
header.ParentMD5 = data.ReadBytes(16);
return header;
}
/// <summary>
/// Parse a Stream into a V2 header
/// </summary>
private static HeaderV2? ParseHeaderV2(Stream data)
{
var header = new HeaderV2();
byte[] tagBytes = data.ReadBytes(8);
header.Tag = Encoding.ASCII.GetString(tagBytes);
if (header.Tag != Constants.SignatureString)
return null;
header.Length = data.ReadUInt32BigEndian();
if (header.Length != Constants.HeaderV2Size)
return null;
header.Version = data.ReadUInt32BigEndian();
header.Flags = (Flags)data.ReadUInt32BigEndian();
header.Compression = (CompressionType)data.ReadUInt32BigEndian();
if (header.Compression > CompressionType.CHDCOMPRESSION_ZLIB)
return null;
header.HunkSize = data.ReadUInt32BigEndian();
header.TotalHunks = data.ReadUInt32BigEndian();
header.Cylinders = data.ReadUInt32BigEndian();
header.Heads = data.ReadUInt32BigEndian();
header.Sectors = data.ReadUInt32BigEndian();
header.MD5 = data.ReadBytes(16);
header.ParentMD5 = data.ReadBytes(16);
header.BytesPerSector = data.ReadUInt32BigEndian();
return header;
}
/// <summary>
/// Parse a Stream into a V3 header
/// </summary>
private static HeaderV3? ParseHeaderV3(Stream data)
{
var header = new HeaderV3();
byte[] tagBytes = data.ReadBytes(8);
header.Tag = Encoding.ASCII.GetString(tagBytes);
if (header.Tag != Constants.SignatureString)
return null;
header.Length = data.ReadUInt32BigEndian();
if (header.Length != Constants.HeaderV3Size)
return null;
header.Version = data.ReadUInt32BigEndian();
header.Flags = (Flags)data.ReadUInt32BigEndian();
header.Compression = (CompressionType)data.ReadUInt32BigEndian();
if (header.Compression > CompressionType.CHDCOMPRESSION_ZLIB_PLUS)
return null;
header.TotalHunks = data.ReadUInt32BigEndian();
header.LogicalBytes = data.ReadUInt64BigEndian();
header.MetaOffset = data.ReadUInt64BigEndian();
header.MD5 = data.ReadBytes(16);
header.ParentMD5 = data.ReadBytes(16);
header.HunkBytes = data.ReadUInt32BigEndian();
header.SHA1 = data.ReadBytes(20);
header.ParentSHA1 = data.ReadBytes(20);
return header;
}
/// <summary>
/// Parse a Stream into a V4 header
/// </summary>
private static HeaderV4? ParseHeaderV4(Stream data)
{
var header = new HeaderV4();
byte[] tagBytes = data.ReadBytes(8);
header.Tag = Encoding.ASCII.GetString(tagBytes);
if (header.Tag != Constants.SignatureString)
return null;
header.Length = data.ReadUInt32BigEndian();
if (header.Length != Constants.HeaderV4Size)
return null;
header.Version = data.ReadUInt32BigEndian();
header.Flags = (Flags)data.ReadUInt32BigEndian();
header.Compression = (CompressionType)data.ReadUInt32BigEndian();
if (header.Compression > CompressionType.CHDCOMPRESSION_AV)
return null;
header.TotalHunks = data.ReadUInt32BigEndian();
header.LogicalBytes = data.ReadUInt64BigEndian();
header.MetaOffset = data.ReadUInt64BigEndian();
header.HunkBytes = data.ReadUInt32BigEndian();
header.SHA1 = data.ReadBytes(20);
header.ParentSHA1 = data.ReadBytes(20);
header.RawSHA1 = data.ReadBytes(20);
return header;
}
/// <summary>
/// Parse a Stream into a V5 header
/// </summary>
private static HeaderV5? ParseHeaderV5(Stream data)
{
var header = new HeaderV5();
byte[] tagBytes = data.ReadBytes(8);
header.Tag = Encoding.ASCII.GetString(tagBytes);
if (header.Tag != Constants.SignatureString)
return null;
header.Length = data.ReadUInt32BigEndian();
if (header.Length != Constants.HeaderV5Size)
return null;
header.Version = data.ReadUInt32BigEndian();
header.Compressors = new uint[4];
for (int i = 0; i < header.Compressors.Length; i++)
{
header.Compressors[i] = data.ReadUInt32BigEndian();
}
header.LogicalBytes = data.ReadUInt64BigEndian();
header.MapOffset = data.ReadUInt64BigEndian();
header.MetaOffset = data.ReadUInt64BigEndian();
header.HunkBytes = data.ReadUInt32BigEndian();
header.UnitBytes = data.ReadUInt32BigEndian();
header.RawSHA1 = data.ReadBytes(20);
header.SHA1 = data.ReadBytes(20);
header.ParentSHA1 = data.ReadBytes(20);
return header;
}
}
}
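A minimal usage sketch for the deserializer above. It assumes the static DeserializeStream helper inherited from the base deserializer (the same entry point the CHD wrapper further down uses); the file path and console output are purely illustrative:

using System;
using System.IO;
using SabreTools.Models.CHD;

// Illustrative only: read just the CHD header from a file on disk
using var stream = File.OpenRead("game.chd");
Header? header = SabreTools.Serialization.Deserializers.CHD.DeserializeStream(stream);

if (header == null)
    Console.WriteLine("Not a recognized CHD header");
else if (header is HeaderV5 v5)
    Console.WriteLine($"CHD V5 header, {v5.LogicalBytes} logical bytes");
else
    Console.WriteLine($"Parsed {header.GetType().Name}");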

View File

@@ -1,5 +1,6 @@
using System.Collections.Generic;
using System.IO;
using System.Text;
using SabreTools.IO.Extensions;
using SabreTools.Models.InstallShieldArchiveV3;
@@ -114,7 +115,17 @@ namespace SabreTools.Serialization.Deserializers
/// <returns>Filled directory on success, null on error</returns>
public static Models.InstallShieldArchiveV3.Directory? ParseDirectory(Stream data)
{
return data.ReadType<Models.InstallShieldArchiveV3.Directory>();
var directory = new Models.InstallShieldArchiveV3.Directory();
directory.FileCount = data.ReadUInt16();
directory.ChunkSize = data.ReadUInt16();
// TODO: Is there any equivalent automatic type for UInt16-prefixed ANSI?
ushort nameLength = data.ReadUInt16();
byte[] nameBytes = data.ReadBytes(nameLength);
directory.Name = Encoding.ASCII.GetString(nameBytes);
return directory;
}
/// <summary>

View File

@@ -1,3 +1,4 @@
using System;
using System.Collections.Generic;
using System.IO;
using SabreTools.IO.Extensions;
@@ -447,14 +448,9 @@ namespace SabreTools.Serialization.Deserializers
var fileGroup = new FileGroup();
fileGroup.NameOffset = data.ReadUInt32();
fileGroup.ExpandedSize = data.ReadUInt32();
fileGroup.Reserved0 = data.ReadBytes(4);
fileGroup.CompressedSize = data.ReadUInt32();
fileGroup.Reserved1 = data.ReadBytes(4);
fileGroup.Reserved2 = data.ReadBytes(2);
fileGroup.Attribute1 = data.ReadUInt16();
fileGroup.Attribute2 = data.ReadUInt16();
fileGroup.Attributes = (FileGroupAttributes)data.ReadUInt16();
// TODO: Figure out what data lives in this area for V5 and below
if (majorVersion <= 5)
@@ -462,19 +458,19 @@ namespace SabreTools.Serialization.Deserializers
fileGroup.FirstFile = data.ReadUInt32();
fileGroup.LastFile = data.ReadUInt32();
fileGroup.UnknownOffset = data.ReadUInt32();
fileGroup.Var4Offset = data.ReadUInt32();
fileGroup.Var1Offset = data.ReadUInt32();
fileGroup.UnknownStringOffset = data.ReadUInt32();
fileGroup.OperatingSystemOffset = data.ReadUInt32();
fileGroup.LanguageOffset = data.ReadUInt32();
fileGroup.HTTPLocationOffset = data.ReadUInt32();
fileGroup.FTPLocationOffset = data.ReadUInt32();
fileGroup.MiscOffset = data.ReadUInt32();
fileGroup.Var2Offset = data.ReadUInt32();
fileGroup.TargetDirectoryOffset = data.ReadUInt32();
fileGroup.Reserved3 = data.ReadBytes(2);
fileGroup.Reserved4 = data.ReadBytes(2);
fileGroup.Reserved5 = data.ReadBytes(2);
fileGroup.Reserved6 = data.ReadBytes(2);
fileGroup.Reserved7 = data.ReadBytes(2);
fileGroup.OverwriteFlags = (FileGroupFlags)data.ReadUInt32();
fileGroup.Reserved = new uint[4];
for (int i = 0; i < fileGroup.Reserved.Length; i++)
{
fileGroup.Reserved[i] = data.ReadUInt32();
}
// Cache the current position
long currentPosition = data.Position;
@@ -489,7 +485,7 @@ namespace SabreTools.Serialization.Deserializers
if (majorVersion >= 17)
fileGroup.Name = data.ReadNullTerminatedUnicodeString();
else
fileGroup.Name = data.ReadNullTerminatedUnicodeString();
fileGroup.Name = data.ReadNullTerminatedAnsiString();
}
// Seek back to the correct offset
@@ -512,15 +508,19 @@ namespace SabreTools.Serialization.Deserializers
component.IdentifierOffset = data.ReadUInt32();
component.DescriptorOffset = data.ReadUInt32();
component.DisplayNameOffset = data.ReadUInt32();
component.Reserved0 = data.ReadUInt16();
component.ReservedOffset0 = data.ReadUInt32();
component.ReservedOffset1 = data.ReadUInt32();
component.Status = (ComponentStatus)data.ReadUInt16();
component.PasswordOffset = data.ReadUInt32();
component.MiscOffset = data.ReadUInt32();
component.ComponentIndex = data.ReadUInt16();
component.NameOffset = data.ReadUInt32();
component.ReservedOffset2 = data.ReadUInt32();
component.ReservedOffset3 = data.ReadUInt32();
component.ReservedOffset4 = data.ReadUInt32();
component.Reserved1 = data.ReadBytes(32);
component.CDRomFolderOffset = data.ReadUInt32();
component.HTTPLocationOffset = data.ReadUInt32();
component.FTPLocationOffset = data.ReadUInt32();
component.Guid = new Guid[2];
for (int i = 0; i < component.Guid.Length; i++)
{
component.Guid[i] = data.ReadGuid();
}
component.CLSIDOffset = data.ReadUInt32();
component.Reserved2 = data.ReadBytes(28);
component.Reserved3 = data.ReadBytes(majorVersion <= 5 ? 2 : 1);
@@ -533,10 +533,10 @@ namespace SabreTools.Serialization.Deserializers
component.SubComponentsCount = data.ReadUInt16();
component.SubComponentsOffset = data.ReadUInt32();
component.NextComponentOffset = data.ReadUInt32();
component.ReservedOffset5 = data.ReadUInt32();
component.ReservedOffset6 = data.ReadUInt32();
component.ReservedOffset7 = data.ReadUInt32();
component.ReservedOffset8 = data.ReadUInt32();
component.OnInstallingOffset = data.ReadUInt32();
component.OnInstalledOffset = data.ReadUInt32();
component.OnUninstallingOffset = data.ReadUInt32();
component.OnUninstalledOffset = data.ReadUInt32();
// Cache the current position
long currentPosition = data.Position;

View File

@@ -259,7 +259,18 @@ namespace SabreTools.Serialization.Deserializers
/// <returns>Filled card info header on success, null on error</returns>
public static CardInfoHeader? ParseCardInfoHeader(Stream data)
{
return data.ReadType<CardInfoHeader>();
// TODO: Use marshalling here instead of building
var header = new CardInfoHeader();
header.WritableAddressMediaUnits = data.ReadUInt32();
header.CardInfoBitmask = data.ReadUInt32();
header.Reserved3 = data.ReadBytes(0x108);
header.TitleVersion = data.ReadUInt16();
header.CardRevision = data.ReadUInt16();
header.Reserved4 = data.ReadBytes(0xCD6);
header.InitialData = ParseInitialData(data);
return header;
}
/// <summary>
@@ -272,6 +283,26 @@ namespace SabreTools.Serialization.Deserializers
return data.ReadType<DevelopmentCardInfoHeader>();
}
/// <summary>
/// Parse a Stream into initial data
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled initial data on success, null on error</returns>
public static InitialData? ParseInitialData(Stream data)
{
// TODO: Use marshalling here instead of building
var id = new InitialData();
id.CardSeedKeyY = data.ReadBytes(0x10);
id.EncryptedCardSeed = data.ReadBytes(0x10);
id.CardSeedAESMAC = data.ReadBytes(0x10);
id.CardSeedNonce = data.ReadBytes(0x0C);
id.Reserved = data.ReadBytes(0xC4);
id.BackupHeader = ParseNCCHHeader(data, skipSignature: true);
return id;
}
/// <summary>
/// Parse a Stream into an NCCH header
/// </summary>

View File

@@ -39,6 +39,7 @@ namespace SabreTools.Serialization
Wrapper.BFPK item => item.PrettyPrint(),
Wrapper.BSP item => item.PrettyPrint(),
Wrapper.CFB item => item.PrettyPrint(),
Wrapper.CHD item => item.PrettyPrint(),
Wrapper.CIA item => item.PrettyPrint(),
Wrapper.GCF item => item.PrettyPrint(),
Wrapper.InstallShieldCabinet item => item.PrettyPrint(),
@@ -83,6 +84,7 @@ namespace SabreTools.Serialization
Wrapper.BFPK item => item.ExportJSON(),
Wrapper.BSP item => item.ExportJSON(),
Wrapper.CFB item => item.ExportJSON(),
Wrapper.CHD item => item.ExportJSON(),
Wrapper.CIA item => item.ExportJSON(),
Wrapper.GCF item => item.ExportJSON(),
Wrapper.InstallShieldCabinet item => item.ExportJSON(),
@@ -167,6 +169,16 @@ namespace SabreTools.Serialization
return builder;
}
/// <summary>
/// Export the item information as pretty-printed text
/// </summary>
private static StringBuilder PrettyPrint(this Wrapper.CHD item)
{
var builder = new StringBuilder();
CHD.Print(builder, item.Model);
return builder;
}
/// <summary>
/// Export the item information as pretty-printed text
/// </summary>

View File

@@ -0,0 +1,161 @@
using System;
using System.Collections.Generic;
using System.Text;
using SabreTools.Models.CHD;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Printers
{
public class CHD : IPrinter<Header>
{
/// <inheritdoc/>
public void PrintInformation(StringBuilder builder, Header model)
=> Print(builder, model);
public static void Print(StringBuilder builder, Header header)
{
builder.AppendLine("CHD Header Information:");
builder.AppendLine("-------------------------");
if (header == null)
{
builder.AppendLine("No header");
builder.AppendLine();
return;
}
switch (header)
{
case HeaderV1 v1:
Print(builder, v1);
break;
case HeaderV2 v2:
Print(builder, v2);
break;
case HeaderV3 v3:
Print(builder, v3);
break;
case HeaderV4 v4:
Print(builder, v4);
break;
case HeaderV5 v5:
Print(builder, v5);
break;
default:
builder.AppendLine("Unrecognized header type");
builder.AppendLine();
break;
}
}
private static void Print(StringBuilder builder, HeaderV1 header)
{
builder.AppendLine(header.Tag, $"Tag");
builder.AppendLine(header.Length, $"Length");
builder.AppendLine(header.Version, $"Version");
builder.AppendLine($" Flags: {header.Flags} (0x{header.Flags:X})");
builder.AppendLine($" Compression: {header.Compression} (0x{header.Compression:X})");
builder.AppendLine(header.HunkSize, $"Hunk size");
builder.AppendLine(header.TotalHunks, $"Total hunks");
builder.AppendLine(header.Cylinders, $"Cylinders");
builder.AppendLine(header.Heads, $"Heads");
builder.AppendLine(header.Sectors, $"Sectors");
builder.AppendLine(header.MD5, $"MD5");
builder.AppendLine(header.ParentMD5, $"Parent MD5");
builder.AppendLine();
}
private static void Print(StringBuilder builder, HeaderV2 header)
{
builder.AppendLine(header.Tag, $"Tag");
builder.AppendLine(header.Length, $"Length");
builder.AppendLine(header.Version, $"Version");
builder.AppendLine($" Flags: {header.Flags} (0x{header.Flags:X})");
builder.AppendLine($" Compression: {header.Compression} (0x{header.Compression:X})");
builder.AppendLine(header.HunkSize, $"Hunk size");
builder.AppendLine(header.TotalHunks, $"Total hunks");
builder.AppendLine(header.Cylinders, $"Cylinders");
builder.AppendLine(header.Heads, $"Heads");
builder.AppendLine(header.Sectors, $"Sectors");
builder.AppendLine(header.MD5, $"MD5");
builder.AppendLine(header.ParentMD5, $"Parent MD5");
builder.AppendLine(header.BytesPerSector, $"Bytes per sector");
builder.AppendLine();
}
private static void Print(StringBuilder builder, HeaderV3 header)
{
builder.AppendLine(header.Tag, $"Tag");
builder.AppendLine(header.Length, $"Length");
builder.AppendLine(header.Version, $"Version");
builder.AppendLine($" Flags: {header.Flags} (0x{header.Flags:X})");
builder.AppendLine($" Compression: {header.Compression} (0x{header.Compression:X})");
builder.AppendLine(header.TotalHunks, $"Total hunks");
builder.AppendLine(header.LogicalBytes, $"Logical bytes");
builder.AppendLine(header.MetaOffset, $"Meta offset");
builder.AppendLine(header.MD5, $"MD5");
builder.AppendLine(header.ParentMD5, $"Parent MD5");
builder.AppendLine(header.HunkBytes, $"Hunk bytes");
builder.AppendLine(header.SHA1, $"SHA-1");
builder.AppendLine(header.ParentSHA1, $"Parent SHA-1");
builder.AppendLine();
}
private static void Print(StringBuilder builder, HeaderV4 header)
{
builder.AppendLine(header.Tag, $"Tag");
builder.AppendLine(header.Length, $"Length");
builder.AppendLine(header.Version, $"Version");
builder.AppendLine($" Flags: {header.Flags} (0x{header.Flags:X})");
builder.AppendLine($" Compression: {header.Compression} (0x{header.Compression:X})");
builder.AppendLine(header.TotalHunks, $"Total hunks");
builder.AppendLine(header.LogicalBytes, $"Logical bytes");
builder.AppendLine(header.MetaOffset, $"Meta offset");
builder.AppendLine(header.HunkBytes, $"Hunk bytes");
builder.AppendLine(header.SHA1, $"SHA-1");
builder.AppendLine(header.ParentSHA1, $"Parent SHA-1");
builder.AppendLine(header.RawSHA1, $"Raw SHA-1");
builder.AppendLine();
}
private static void Print(StringBuilder builder, HeaderV5 header)
{
builder.AppendLine(header.Tag, $"Tag");
builder.AppendLine(header.Length, $"Length");
builder.AppendLine(header.Version, $"Version");
// TODO: Remove this hack when actual compressor names are supported
// builder.AppendLine(header.Compressors, $"Compressors");
string compressorsLine = "Compressors: ";
if (header.Compressors == null)
{
compressorsLine += "[NULL]";
}
else
{
var compressors = new List<string>();
for (int i = 0; i < header.Compressors.Length; i++)
{
uint compressor = header.Compressors[i];
byte[] compressorBytes = BitConverter.GetBytes(compressor);
Array.Reverse(compressorBytes);
string compressorString = Encoding.ASCII.GetString(compressorBytes);
compressors.Add(compressorString);
}
compressorsLine += string.Join(", ", [.. compressors]);
}
builder.AppendLine(compressorsLine);
builder.AppendLine(header.LogicalBytes, $"Logical bytes");
builder.AppendLine(header.MapOffset, $"Map offset");
builder.AppendLine(header.MetaOffset, $"Meta offset");
builder.AppendLine(header.HunkBytes, $"Hunk bytes");
builder.AppendLine(header.UnitBytes, $"Unit bytes");
builder.AppendLine(header.RawSHA1, $"Raw SHA-1");
builder.AppendLine(header.SHA1, $"SHA-1");
builder.AppendLine(header.ParentSHA1, $"Parent SHA-1");
builder.AppendLine();
}
}
}
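For reference, the "fake readable compressor names" hack above treats each V5 compressor entry as a big-endian FourCC tag. A standalone sketch of the same decoding follows; the sample tag values come from MAME's CHD_MAKE_TAG packing and are given only as illustration, and unlike the printer (which reverses the bytes unconditionally) this version only reverses on little-endian hosts:

using System;
using System.Text;

// Turn a CHD V5 compressor tag (stored big-endian) back into readable ASCII
static string DecodeCompressor(uint tag)
{
    byte[] bytes = BitConverter.GetBytes(tag);
    if (BitConverter.IsLittleEndian)
        Array.Reverse(bytes); // restore big-endian byte order
    return Encoding.ASCII.GetString(bytes);
}

Console.WriteLine(DecodeCompressor(0x7A6C6962)); // "zlib"
Console.WriteLine(DecodeCompressor(0x6C7A6D61)); // "lzma"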

View File

@@ -315,27 +315,19 @@ namespace SabreTools.Serialization.Printers
builder.AppendLine(fileGroup.NameOffset, " Name offset");
builder.AppendLine(fileGroup.Name, " Name");
builder.AppendLine(fileGroup.ExpandedSize, " Expanded size");
builder.AppendLine(fileGroup.Reserved0, " Reserved 0");
builder.AppendLine(fileGroup.CompressedSize, " Compressed size");
builder.AppendLine(fileGroup.Reserved1, " Reserved 1");
builder.AppendLine(fileGroup.Reserved2, " Reserved 2");
builder.AppendLine(fileGroup.Attribute1, " Attribute 1");
builder.AppendLine(fileGroup.Attribute2, " Attribute 2");
builder.AppendLine($" Attributes: {fileGroup.Attributes} (0x{fileGroup.Attributes:X})");
builder.AppendLine(fileGroup.FirstFile, " First file");
builder.AppendLine(fileGroup.LastFile, " Last file");
builder.AppendLine(fileGroup.UnknownOffset, " Unknown offset");
builder.AppendLine(fileGroup.Var4Offset, " Var 4 offset");
builder.AppendLine(fileGroup.Var1Offset, " Var 1 offset");
builder.AppendLine(fileGroup.UnknownStringOffset, " Unknown string offset");
builder.AppendLine(fileGroup.OperatingSystemOffset, " Operating system offset");
builder.AppendLine(fileGroup.LanguageOffset, " Language offset");
builder.AppendLine(fileGroup.HTTPLocationOffset, " HTTP location offset");
builder.AppendLine(fileGroup.FTPLocationOffset, " FTP location offset");
builder.AppendLine(fileGroup.MiscOffset, " Misc. offset");
builder.AppendLine(fileGroup.Var2Offset, " Var 2 offset");
builder.AppendLine(fileGroup.TargetDirectoryOffset, " Target directory offset");
builder.AppendLine(fileGroup.Reserved3, " Reserved 3");
builder.AppendLine(fileGroup.Reserved4, " Reserved 4");
builder.AppendLine(fileGroup.Reserved5, " Reserved 5");
builder.AppendLine(fileGroup.Reserved6, " Reserved 6");
builder.AppendLine(fileGroup.Reserved7, " Reserved 7");
builder.AppendLine($" Overwrite flags: {fileGroup.OverwriteFlags} (0x{fileGroup.OverwriteFlags:X})");
builder.AppendLine(fileGroup.Reserved, " Reserved");
}
builder.AppendLine();
}
@@ -366,16 +358,16 @@ namespace SabreTools.Serialization.Printers
builder.AppendLine(component.DescriptorOffset, " Descriptor offset");
builder.AppendLine(component.DisplayNameOffset, " Display name offset");
builder.AppendLine(component.DisplayName, " Display name");
builder.AppendLine(component.Reserved0, " Reserved 0");
builder.AppendLine(component.ReservedOffset0, " Reserved offset 0");
builder.AppendLine(component.ReservedOffset1, " Reserved offset 1");
builder.AppendLine($" Status: {component.Status} (0x{component.Status:X})");
builder.AppendLine(component.PasswordOffset, " Password offset");
builder.AppendLine(component.MiscOffset, " Misc. offset");
builder.AppendLine(component.ComponentIndex, " Component index");
builder.AppendLine(component.NameOffset, " Name offset");
builder.AppendLine(component.Name, " Name");
builder.AppendLine(component.ReservedOffset2, " Reserved offset 2");
builder.AppendLine(component.ReservedOffset3, " Reserved offset 3");
builder.AppendLine(component.ReservedOffset4, " Reserved offset 4");
builder.AppendLine(component.Reserved1, " Reserved 1");
builder.AppendLine(component.CDRomFolderOffset, " CD-ROM folder offset");
builder.AppendLine(component.HTTPLocationOffset, " HTTP location offset");
builder.AppendLine(component.FTPLocationOffset, " FTP location offset");
builder.AppendLine(component.Guid, " GUIDs");
builder.AppendLine(component.CLSIDOffset, " CLSID offset");
builder.AppendLine(component.CLSID, " CLSID");
builder.AppendLine(component.Reserved2, " Reserved 2");
@@ -406,10 +398,10 @@ namespace SabreTools.Serialization.Printers
builder.AppendLine(component.SubComponentsCount, " Sub-components count");
builder.AppendLine(component.SubComponentsOffset, " Sub-components offset");
builder.AppendLine(component.NextComponentOffset, " Next component offset");
builder.AppendLine(component.ReservedOffset5, " Reserved offset 5");
builder.AppendLine(component.ReservedOffset6, " Reserved offset 6");
builder.AppendLine(component.ReservedOffset7, " Reserved offset 7");
builder.AppendLine(component.ReservedOffset8, " Reserved offset 8");
builder.AppendLine(component.OnInstallingOffset, " On installing offset");
builder.AppendLine(component.OnInstalledOffset, " On installed offset");
builder.AppendLine(component.OnUninstallingOffset, " On uninstalling offset");
builder.AppendLine(component.OnUninstalledOffset, " On uninstalled offset");
}
builder.AppendLine();
}

View File

@@ -131,6 +131,8 @@ namespace SabreTools.Serialization.Printers
builder.AppendLine(header.CVerVersionNumber, " Version number of CVer in included update partition");
builder.AppendLine(header.Reserved4, " Reserved 4");
builder.AppendLine();
Print(builder, header.InitialData);
}
private static void Print(StringBuilder builder, DevelopmentCardInfoHeader? header)
@@ -227,6 +229,96 @@ namespace SabreTools.Serialization.Printers
builder.AppendLine();
}
private static void Print(StringBuilder builder, InitialData? id)
{
builder.AppendLine(" Initial Data Information:");
builder.AppendLine(" -------------------------");
if (id == null)
{
builder.AppendLine(" No initial data");
builder.AppendLine();
return;
}
builder.AppendLine(id.CardSeedKeyY, " Card seed KeyY");
builder.AppendLine(id.EncryptedCardSeed, " Encrypted card seed");
builder.AppendLine(id.CardSeedAESMAC, " Card seed AES-MAC");
builder.AppendLine(id.CardSeedNonce, " Card seed nonce");
builder.AppendLine(id.Reserved, " Reserved");
builder.AppendLine();
PrintBackup(builder, id.BackupHeader);
}
private static void PrintBackup(StringBuilder builder, NCCHHeader? header)
{
builder.AppendLine(" Backup NCCH Header Information:");
builder.AppendLine(" -------------------------");
if (header == null)
{
builder.AppendLine(" No backup NCCH header");
builder.AppendLine();
return;
}
if (header.MagicID == string.Empty)
{
builder.AppendLine(" Empty backup header, no data can be parsed");
}
else if (header.MagicID != Constants.NCCHMagicNumber)
{
builder.AppendLine(" Unrecognized backup header, no data can be parsed");
}
else
{
// Backup header omits RSA signature
builder.AppendLine(header.MagicID, " Magic ID");
builder.AppendLine(header.ContentSizeInMediaUnits, " Content size in media units");
builder.AppendLine(header.PartitionId, " Partition ID");
builder.AppendLine(header.MakerCode, " Maker code");
builder.AppendLine(header.Version, " Version");
builder.AppendLine(header.VerificationHash, " Verification hash");
builder.AppendLine(header.ProgramId, " Program ID");
builder.AppendLine(header.Reserved1, " Reserved 1");
builder.AppendLine(header.LogoRegionHash, " Logo region SHA-256 hash");
builder.AppendLine(header.ProductCode, " Product code");
builder.AppendLine(header.ExtendedHeaderHash, " Extended header SHA-256 hash");
builder.AppendLine(header.ExtendedHeaderSizeInBytes, " Extended header size in bytes");
builder.AppendLine(header.Reserved2, " Reserved 2");
builder.AppendLine(" Flags:");
if (header.Flags == null)
{
builder.AppendLine(" [NULL]");
}
else
{
builder.AppendLine(header.Flags.Reserved0, " Reserved 0");
builder.AppendLine(header.Flags.Reserved1, " Reserved 1");
builder.AppendLine(header.Flags.Reserved2, " Reserved 2");
builder.AppendLine($" Crypto method: {header.Flags.CryptoMethod} (0x{header.Flags.CryptoMethod:X})");
builder.AppendLine($" Content platform: {header.Flags.ContentPlatform} (0x{header.Flags.ContentPlatform:X})");
builder.AppendLine($" Content type: {header.Flags.MediaPlatformIndex} (0x{header.Flags.MediaPlatformIndex:X})");
builder.AppendLine(header.Flags.ContentUnitSize, " Content unit size");
builder.AppendLine($" Bitmasks: {header.Flags.BitMasks} (0x{header.Flags.BitMasks:X})");
}
builder.AppendLine(header.PlainRegionOffsetInMediaUnits, " Plain region offset, in media units");
builder.AppendLine(header.PlainRegionSizeInMediaUnits, " Plain region size, in media units");
builder.AppendLine(header.LogoRegionOffsetInMediaUnits, " Logo region offset, in media units");
builder.AppendLine(header.LogoRegionSizeInMediaUnits, " Logo region size, in media units");
builder.AppendLine(header.ExeFSOffsetInMediaUnits, " ExeFS offset, in media units");
builder.AppendLine(header.ExeFSSizeInMediaUnits, " ExeFS size, in media units");
builder.AppendLine(header.ExeFSHashRegionSizeInMediaUnits, " ExeFS hash region size, in media units");
builder.AppendLine(header.Reserved3, " Reserved 3");
builder.AppendLine(header.RomFSOffsetInMediaUnits, " RomFS offset, in media units");
builder.AppendLine(header.RomFSSizeInMediaUnits, " RomFS size, in media units");
builder.AppendLine(header.RomFSHashRegionSizeInMediaUnits, " RomFS hash region size, in media units");
builder.AppendLine(header.Reserved4, " Reserved 4");
builder.AppendLine(header.ExeFSSuperblockHash, " ExeFS superblock SHA-256 hash");
builder.AppendLine(header.RomFSSuperblockHash, " RomFS superblock SHA-256 hash");
}
builder.AppendLine();
}
private static void Print(StringBuilder builder, NCCHHeader?[]? entries)
{
builder.AppendLine(" NCCH Partition Header Information:");
@@ -455,7 +547,7 @@ namespace SabreTools.Serialization.Printers
}
else
{
builder.AppendLine(entry.ACI.ARM9AccessControl.Descriptors, " Descriptors");
builder.AppendLine($" Descriptors: {entry.ACI.ARM9AccessControl.Descriptors} (0x{entry.ACI.ARM9AccessControl.Descriptors:X})");
builder.AppendLine(entry.ACI.ARM9AccessControl.DescriptorVersion, " Descriptor version");
}
@@ -523,7 +615,7 @@ namespace SabreTools.Serialization.Printers
}
else
{
builder.AppendLine(entry.ACIForLimitations.ARM9AccessControl.Descriptors, " Descriptors");
builder.AppendLine($" Descriptors: {entry.ACIForLimitations.ARM9AccessControl.Descriptors} (0x{entry.ACIForLimitations.ARM9AccessControl.Descriptors:X})");
builder.AppendLine(entry.ACIForLimitations.ARM9AccessControl.DescriptorVersion, " Descriptor version");
}
}

View File

@@ -254,7 +254,7 @@ namespace SabreTools.Serialization.Printers
}
if (header.DelayImportDescriptor != null)
{
builder.AppendLine(" Delay Import Descriptior (14)");
builder.AppendLine(" Delay Import Descriptor (14)");
builder.AppendLine(header.DelayImportDescriptor.VirtualAddress, " Virtual address");
builder.AppendLine(header.DelayImportDescriptor.VirtualAddress.ConvertVirtualAddress(table), " Physical address");
builder.AppendLine(header.DelayImportDescriptor.Size, " Size");

View File

@@ -1,14 +1,16 @@
<Project Sdk="Microsoft.NET.Sdk">
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<!-- Assembly Properties -->
<TargetFrameworks>net20;net35;net40;net452;net462;net472;net48;netcoreapp3.1;net5.0;net6.0;net7.0;net8.0</TargetFrameworks>
<RuntimeIdentifiers>win-x86;win-x64;win-arm64;linux-x64;linux-arm64;osx-x64;osx-arm64</RuntimeIdentifiers>
<AllowUnsafeBlocks>true</AllowUnsafeBlocks>
<CheckEolTargetFramework>false</CheckEolTargetFramework>
<IncludeSourceRevisionInInformationalVersion>false</IncludeSourceRevisionInInformationalVersion>
<LangVersion>latest</LangVersion>
<Nullable>enable</Nullable>
<SuppressTfmSupportBuildWarnings>true</SuppressTfmSupportBuildWarnings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<Version>1.6.5</Version>
<Version>1.6.9</Version>
<!-- Package Properties -->
<Authors>Matt Nadareski</Authors>
@@ -22,16 +24,30 @@
<PackageLicenseExpression>MIT</PackageLicenseExpression>
</PropertyGroup>
<!-- Support All Frameworks -->
<PropertyGroup Condition="$(TargetFramework.StartsWith(`net2`)) OR $(TargetFramework.StartsWith(`net3`)) OR $(TargetFramework.StartsWith(`net4`))">
<RuntimeIdentifiers>win-x86;win-x64</RuntimeIdentifiers>
</PropertyGroup>
<PropertyGroup Condition="$(TargetFramework.StartsWith(`netcoreapp`)) OR $(TargetFramework.StartsWith(`net5`))">
<RuntimeIdentifiers>win-x86;win-x64;win-arm64;linux-x64;linux-arm64;osx-x64</RuntimeIdentifiers>
</PropertyGroup>
<PropertyGroup Condition="$(TargetFramework.StartsWith(`net6`)) OR $(TargetFramework.StartsWith(`net7`)) OR $(TargetFramework.StartsWith(`net8`))">
<RuntimeIdentifiers>win-x86;win-x64;win-arm64;linux-x64;linux-arm64;osx-x64;osx-arm64</RuntimeIdentifiers>
</PropertyGroup>
<PropertyGroup Condition="$(RuntimeIdentifier.StartsWith(`osx-arm`))">
<TargetFrameworks>net6.0;net7.0;net8.0</TargetFrameworks>
</PropertyGroup>
<ItemGroup>
<None Include="../README.md" Pack="true" PackagePath="" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Newtonsoft.Json" Version="13.0.3" />
<PackageReference Include="SabreTools.ASN1" Version="1.3.2" />
<PackageReference Include="SabreTools.Hashing" Version="1.2.0" />
<PackageReference Include="SabreTools.IO" Version="1.4.11" />
<PackageReference Include="SabreTools.Models" Version="1.4.8" />
<PackageReference Include="SabreTools.ASN1" Version="1.3.3" />
<PackageReference Include="SabreTools.Hashing" Version="1.2.2" />
<PackageReference Include="SabreTools.IO" Version="1.4.13" />
<PackageReference Include="SabreTools.Models" Version="1.4.11" />
</ItemGroup>
</Project>

View File

@@ -204,5 +204,14 @@ namespace SabreTools.Serialization
string valueString = (value == null ? "[NULL]" : string.Join(", ", value.Select(u => u.ToString()).ToArray()));
return sb.AppendLine($"{prefixString}: {valueString}");
}
/// <summary>
/// Append a line containing a Guid[] value to a StringBuilder
/// </summary>
public static StringBuilder AppendLine(this StringBuilder sb, Guid[]? value, string prefixString)
{
string valueString = (value == null ? "[NULL]" : string.Join(", ", value.Select(g => g.ToString()).ToArray()));
return sb.AppendLine($"{prefixString}: {valueString}");
}
}
}
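A quick illustrative call of the new overload (the values are placeholders; the extension lives in the SabreTools.Serialization namespace alongside the other AppendLine helpers):

using System;
using System.Text;
using SabreTools.Serialization;

var sb = new StringBuilder();
Guid[] guids = { Guid.Empty, Guid.NewGuid() };
sb.AppendLine(guids, "  GUIDs");
// Appends: "  GUIDs: 00000000-0000-0000-0000-000000000000, <second GUID>"
Console.Write(sb);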

View File

@@ -18,12 +18,12 @@ namespace SabreTools.Serialization.Wrappers
/// <summary>
/// Normal sector size in bytes
/// </summary>
public long SectorSize => (long)Math.Pow(2, this.Model.Header?.SectorShift ?? 0);
public long SectorSize => (long)Math.Pow(2, Model.Header?.SectorShift ?? 0);
/// <summary>
/// Mini sector size in bytes
/// </summary>
public long MiniSectorSize => (long)Math.Pow(2, this.Model.Header?.MiniSectorShift ?? 0);
public long MiniSectorSize => (long)Math.Pow(2, Model.Header?.MiniSectorShift ?? 0);
#endregion
@@ -101,7 +101,7 @@ namespace SabreTools.Serialization.Wrappers
public List<Models.CFB.SectorNumber?>? GetFATSectorChain(Models.CFB.SectorNumber? startingSector)
{
// If we have an invalid sector
if (startingSector == null || startingSector < 0 || this.Model.FATSectorNumbers == null || (long)startingSector >= this.Model.FATSectorNumbers.Length)
if (startingSector == null || startingSector < 0 || Model.FATSectorNumbers == null || (long)startingSector >= Model.FATSectorNumbers.Length)
return null;
// Setup the returned list
@@ -114,10 +114,10 @@ namespace SabreTools.Serialization.Wrappers
break;
// Get the next sector from the lookup table
var nextSector = this.Model.FATSectorNumbers[(uint)lastSector!.Value];
var nextSector = Model.FATSectorNumbers[(uint)lastSector!.Value];
// If we have an end of chain or free sector
if (nextSector == SabreTools.Models.CFB.SectorNumber.ENDOFCHAIN || nextSector == SabreTools.Models.CFB.SectorNumber.FREESECT)
if (nextSector == Models.CFB.SectorNumber.ENDOFCHAIN || nextSector == Models.CFB.SectorNumber.FREESECT)
break;
// Add the next sector to the list and replace the last sector
@@ -188,7 +188,7 @@ namespace SabreTools.Serialization.Wrappers
public List<Models.CFB.SectorNumber?>? GetMiniFATSectorChain(Models.CFB.SectorNumber? startingSector)
{
// If we have an invalid sector
if (startingSector == null || startingSector < 0 || this.Model.MiniFATSectorNumbers == null || (long)startingSector >= this.Model.MiniFATSectorNumbers.Length)
if (startingSector == null || startingSector < 0 || Model.MiniFATSectorNumbers == null || (long)startingSector >= Model.MiniFATSectorNumbers.Length)
return null;
// Setup the returned list
@@ -201,10 +201,10 @@ namespace SabreTools.Serialization.Wrappers
break;
// Get the next sector from the lookup table
var nextSector = this.Model.MiniFATSectorNumbers[(uint)lastSector!.Value];
var nextSector = Model.MiniFATSectorNumbers[(uint)lastSector!.Value];
// If we have an end of chain or free sector
if (nextSector == SabreTools.Models.CFB.SectorNumber.ENDOFCHAIN || nextSector == SabreTools.Models.CFB.SectorNumber.FREESECT)
if (nextSector == Models.CFB.SectorNumber.ENDOFCHAIN || nextSector == Models.CFB.SectorNumber.FREESECT)
break;
// Add the next sector to the list and replace the last sector

View File

@@ -0,0 +1,121 @@
using System.IO;
using SabreTools.Models.CHD;
namespace SabreTools.Serialization.Wrappers
{
public class CHD : WrapperBase<Header>
{
#region Descriptive Properties
/// <inheritdoc/>
public override string DescriptionString => "MAME Compressed Hunks of Data";
#endregion
#region Extension Properties
/// <summary>
/// Internal MD5 hash, if available
/// </summary>
public byte[]? MD5
{
get
{
return Model switch
{
HeaderV1 v1 => v1.MD5,
HeaderV2 v2 => v2.MD5,
HeaderV3 v3 => v3.MD5,
HeaderV4 v4 => null,
HeaderV5 v5 => null,
_ => null,
};
}
}
/// <summary>
/// Internal SHA1 hash, if available
/// </summary>
public byte[]? SHA1
{
get
{
return Model switch
{
HeaderV1 v1 => null,
HeaderV2 v2 => null,
HeaderV3 v3 => v3.SHA1,
HeaderV4 v4 => v4.SHA1,
HeaderV5 v5 => v5.SHA1,
_ => null,
};
}
}
#endregion
#region Constructors
/// <inheritdoc/>
public CHD(Header? model, byte[]? data, int offset)
: base(model, data, offset)
{
// All logic is handled by the base class
}
/// <inheritdoc/>
public CHD(Header? model, Stream? data)
: base(model, data)
{
// All logic is handled by the base class
}
/// <summary>
/// Create a CHD header from a byte array and offset
/// </summary>
/// <param name="data">Byte array representing the archive</param>
/// <param name="offset">Offset within the array to parse</param>
/// <returns>A CHD header wrapper on success, null on failure</returns>
public static CHD? Create(byte[]? data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and use that
var dataStream = new MemoryStream(data, offset, data.Length - offset);
return Create(dataStream);
}
/// <summary>
/// Create a CHD header from a Stream
/// </summary>
/// <param name="data">Stream representing the archive</param>
/// <returns>A CHD header on success, null on failure</returns>
public static CHD? Create(Stream? data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
return null;
var header = Deserializers.CHD.DeserializeStream(data);
if (header == null)
return null;
try
{
return new CHD(header, data);
}
catch
{
return null;
}
}
#endregion
}
}
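A usage sketch for the wrapper, assuming a seekable, readable stream; the path is illustrative, and note that MD5 is only populated for V1-V3 headers and SHA1 for V3-V5, so either property may be null:

using System;
using System.IO;
using SabreTools.Serialization.Wrappers;

using var stream = File.OpenRead("disc.chd"); // hypothetical path
CHD? chd = CHD.Create(stream);
if (chd == null)
{
    Console.WriteLine("Could not parse a CHD header");
}
else
{
    Console.WriteLine(chd.DescriptionString);
    Console.WriteLine($"MD5:  {(chd.MD5 == null ? "[not present]" : BitConverter.ToString(chd.MD5))}");
    Console.WriteLine($"SHA1: {(chd.SHA1 == null ? "[not present]" : BitConverter.ToString(chd.SHA1))}");
}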

View File

@@ -1,4 +1,5 @@
using System.IO;
using SabreTools.Models.N3DS;
namespace SabreTools.Serialization.Wrappers
{
@@ -74,5 +75,36 @@ namespace SabreTools.Serialization.Wrappers
}
#endregion
// TODO: Hook these up for external use
#region Currently Unused Extensions
#region Ticket
/// <summary>
/// Denotes whether the ticket is for a demo
/// </summary>
public static bool IsDemo(Ticket? ticket)
{
if (ticket?.Limits == null || ticket.Limits.Length == 0)
return false;
return ticket.Limits[0] == 0x0004;
}
/// <summary>
/// Denotes the max play count for a demo
/// </summary>
public static uint PlayCount(Ticket ticket)
{
if (ticket?.Limits == null || ticket.Limits.Length == 0)
return 0;
return ticket.Limits[1];
}
#endregion
#endregion
}
}

View File

@@ -26,16 +26,16 @@ namespace SabreTools.Serialization.Wrappers
return _files;
// If we don't have a required property
if (this.Model.DirectoryEntries == null || this.Model.DirectoryMapEntries == null || this.Model.BlockEntries == null)
if (Model.DirectoryEntries == null || Model.DirectoryMapEntries == null || Model.BlockEntries == null)
return null;
// Otherwise, scan and build the files
var files = new List<FileInfo>();
for (int i = 0; i < this.Model.DirectoryEntries.Length; i++)
for (int i = 0; i < Model.DirectoryEntries.Length; i++)
{
// Get the directory entry
var directoryEntry = this.Model.DirectoryEntries[i];
var directoryMapEntry = this.Model.DirectoryMapEntries[i];
var directoryEntry = Model.DirectoryEntries[i];
var directoryMapEntry = Model.DirectoryMapEntries[i];
if (directoryEntry == null || directoryMapEntry == null)
continue;
@@ -57,26 +57,26 @@ namespace SabreTools.Serialization.Wrappers
Encrypted = directoryEntry.DirectoryFlags.HasFlag(Models.GCF.HL_GCF_FLAG.HL_GCF_FLAG_ENCRYPTED),
#endif
};
var pathParts = new List<string> { this.Model.DirectoryNames![directoryEntry.NameOffset] ?? string.Empty };
var pathParts = new List<string> { Model.DirectoryNames![directoryEntry.NameOffset] ?? string.Empty };
var blockEntries = new List<Models.GCF.BlockEntry?>();
// Traverse the parent tree
uint index = directoryEntry.ParentIndex;
while (index != 0xFFFFFFFF)
{
var parentDirectoryEntry = this.Model.DirectoryEntries[index];
var parentDirectoryEntry = Model.DirectoryEntries[index];
if (parentDirectoryEntry == null)
break;
pathParts.Add(this.Model.DirectoryNames![parentDirectoryEntry.NameOffset] ?? string.Empty);
pathParts.Add(Model.DirectoryNames![parentDirectoryEntry.NameOffset] ?? string.Empty);
index = parentDirectoryEntry.ParentIndex;
}
// Traverse the block entries
index = directoryMapEntry.FirstBlockIndex;
while (index != this.Model.DataBlockHeader?.BlockCount)
while (index != Model.DataBlockHeader?.BlockCount)
{
var nextBlock = this.Model.BlockEntries[index];
var nextBlock = Model.BlockEntries[index];
if (nextBlock == null)
break;
@@ -134,14 +134,14 @@ namespace SabreTools.Serialization.Wrappers
return _dataBlockOffsets;
// If we don't have a block count, offset, or size
if (this.Model.DataBlockHeader?.BlockCount == null || this.Model.DataBlockHeader?.FirstBlockOffset == null || this.Model.DataBlockHeader?.BlockSize == null)
if (Model.DataBlockHeader?.BlockCount == null || Model.DataBlockHeader?.FirstBlockOffset == null || Model.DataBlockHeader?.BlockSize == null)
return null;
// Otherwise, build the data block set
_dataBlockOffsets = new long[this.Model.DataBlockHeader.BlockCount];
for (int i = 0; i < this.Model.DataBlockHeader.BlockCount; i++)
_dataBlockOffsets = new long[Model.DataBlockHeader.BlockCount];
for (int i = 0; i < Model.DataBlockHeader.BlockCount; i++)
{
long dataBlockOffset = this.Model.DataBlockHeader.FirstBlockOffset + (i * this.Model.DataBlockHeader.BlockSize);
long dataBlockOffset = Model.DataBlockHeader.FirstBlockOffset + (i * Model.DataBlockHeader.BlockSize);
_dataBlockOffsets[i] = dataBlockOffset;
}

View File

@@ -53,7 +53,7 @@ namespace SabreTools.Serialization.Wrappers
{
get
{
uint majorVersion = this.Model.CommonHeader?.Version ?? 0;
uint majorVersion = Model.CommonHeader?.Version ?? 0;
if (majorVersion >> 24 == 1)
{
majorVersion = (majorVersion >> 12) & 0x0F;
@@ -177,7 +177,7 @@ namespace SabreTools.Serialization.Wrappers
/// Get the directory index for the given file index
/// </summary>
/// <returns>Directory index if found, UInt32.MaxValue on error</returns>
public uint GetFileDirectoryIndex(int index)
public uint GetDirectoryIndexFromFile(int index)
{
FileDescriptor? descriptor = GetFileDescriptor(index);
if (descriptor != null)
@@ -293,23 +293,43 @@ namespace SabreTools.Serialization.Wrappers
}
/// <summary>
/// Get the file group name at a given index, if possible
/// Get the file group for the given file index, if possible
/// </summary>
public string? GetFileGroupName(int index)
public FileGroup? GetFileGroupFromFile(int index)
{
if (Model.FileGroups == null)
return null;
if (index < 0 || index >= Model.FileGroups.Length)
if (index < 0 || index >= FileCount)
return null;
var fileGroup = Model.FileGroups[index];
if (fileGroup == null)
return null;
for (int i = 0; i < FileGroupCount; i++)
{
var fileGroup = GetFileGroup(i);
if (fileGroup == null)
continue;
return fileGroup.Name;
if (fileGroup.FirstFile > index || fileGroup.LastFile < index)
continue;
return fileGroup;
}
return null;
}
/// <summary>
/// Get the file group name at a given index, if possible
/// </summary>
public string? GetFileGroupName(int index)
=> GetFileGroup(index)?.Name;
/// <summary>
/// Get the file group name at a given file index, if possible
/// </summary>
public string? GetFileGroupNameFromFile(int index)
=> GetFileGroupFromFile(index)?.Name;
#endregion
}
}
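A sketch of how the renamed and added lookups above might be used; it assumes cab is an already-created InstallShieldCabinet wrapper and relies only on members referenced by the code above (FileCount, GetDirectoryIndexFromFile, GetFileGroupNameFromFile):

using System;
using SabreTools.Serialization.Wrappers;

// Sketch: list each file's directory index and owning file group
static void PrintFileGroups(InstallShieldCabinet cab)
{
    for (int i = 0; i < cab.FileCount; i++)
    {
        uint dirIndex = cab.GetDirectoryIndexFromFile(i);  // renamed from GetFileDirectoryIndex
        string? group = cab.GetFileGroupNameFromFile(i);   // new: scans FirstFile/LastFile ranges
        Console.WriteLine($"File {i}: directory {dirIndex}, file group {group ?? "[none]"}");
    }
}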

View File

@@ -124,11 +124,11 @@ namespace SabreTools.Serialization.Wrappers
public DateTime? GetDateTime(int fileIndex)
{
// If we have an invalid file index
if (fileIndex < 0 || this.Model.Files == null || fileIndex >= this.Model.Files.Length)
if (fileIndex < 0 || Model.Files == null || fileIndex >= Model.Files.Length)
return null;
// Get the file header
var file = this.Model.Files[fileIndex];
var file = Model.Files[fileIndex];
if (file == null)
return null;

View File

@@ -1,4 +1,6 @@
using System;
using System.IO;
using SabreTools.Models.N3DS;
namespace SabreTools.Serialization.Wrappers
{
@@ -74,5 +76,178 @@ namespace SabreTools.Serialization.Wrappers
}
#endregion
// TODO: Hook these up for external use
#region Currently Unused Extensions
#region ExeFSFileHeader
/// <summary>
/// Determines if a file header represents a CODE block
/// </summary>
public static bool IsCodeBinary(ExeFSFileHeader? header)
{
if (header == null)
return false;
return header.FileName == ".code\0\0\0";
}
#endregion
#region NCCHHeaderFlags
/// <summary>
/// Get if the NoCrypto bit is set
/// </summary>
public static bool PossiblyDecrypted(NCCHHeaderFlags flags)
{
if (flags == null)
return false;
#if NET20 || NET35
return (flags.BitMasks & BitMasks.NoCrypto) != 0;
#else
return flags.BitMasks.HasFlag(BitMasks.NoCrypto);
#endif
}
#endregion
#region NCSDHeader
/// <summary>
/// Partition table entry for Executable Content (CXI)
/// </summary>
public static PartitionTableEntry? ExecutableContent(NCSDHeader? header)
{
if (header?.PartitionsTable == null)
return null;
return header.PartitionsTable[0];
}
/// <summary>
/// Partition table entry for E-Manual (CFA)
/// </summary>
public static PartitionTableEntry? EManual(NCSDHeader? header)
{
if (header?.PartitionsTable == null)
return null;
return header.PartitionsTable[1];
}
/// <summary>
/// Partition table entry for Download Play Child container (CFA)
/// </summary>
public static PartitionTableEntry? DownloadPlayChildContainer(NCSDHeader? header)
{
if (header?.PartitionsTable == null)
return null;
return header.PartitionsTable[2];
}
/// <summary>
/// Partition table entry for New3DS Update Data (CFA)
/// </summary>
public static PartitionTableEntry? New3DSUpdateData(NCSDHeader? header)
{
if (header?.PartitionsTable == null)
return null;
return header.PartitionsTable[6];
}
/// <summary>
/// Partition table entry for Update Data (CFA)
/// </summary>
public static PartitionTableEntry? UpdateData(NCSDHeader? header)
{
if (header?.PartitionsTable == null)
return null;
return header.PartitionsTable[7];
}
/// <summary>
/// Backup Write Wait Time (The time to wait to write save to backup after the card is recognized (0-255
/// seconds)). NATIVE_FIRM loads this flag from the gamecard NCSD header starting with 6.0.0-11.
/// </summary>
public static byte BackupWriteWaitTime(NCSDHeader? header)
{
if (header?.PartitionFlags == null)
return default;
return header.PartitionFlags[(int)NCSDFlags.BackupWriteWaitTime];
}
/// <summary>
/// Media Card Device (1 = NOR Flash, 2 = None, 3 = BT) (SDK 3.X+)
/// </summary>
public static MediaCardDeviceType MediaCardDevice3X(NCSDHeader? header)
{
if (header?.PartitionFlags == null)
return default;
return (MediaCardDeviceType)header.PartitionFlags[(int)NCSDFlags.MediaCardDevice3X];
}
/// <summary>
/// Media Platform Index (1 = CTR)
/// </summary>
public static MediaPlatformIndex MediaPlatformIndex(NCSDHeader? header)
{
if (header?.PartitionFlags == null)
return default;
return (MediaPlatformIndex)header.PartitionFlags[(int)NCSDFlags.MediaPlatformIndex];
}
/// <summary>
/// Media Type Index (0 = Inner Device, 1 = Card1, 2 = Card2, 3 = Extended Device)
/// </summary>
public static MediaTypeIndex MediaTypeIndex(NCSDHeader? header)
{
if (header?.PartitionFlags == null)
return default;
return (MediaTypeIndex)header.PartitionFlags[(int)NCSDFlags.MediaTypeIndex];
}
/// <summary>
/// Media Unit Size i.e. u32 MediaUnitSize = 0x200*2^flags[6];
/// </summary>
public static uint MediaUnitSize(Cart cart)
{
return MediaUnitSize(cart.Header);
}
/// <summary>
/// Media Unit Size i.e. u32 MediaUnitSize = 0x200*2^flags[6];
/// </summary>
public static uint MediaUnitSize(NCSDHeader? header)
{
if (header?.PartitionFlags == null)
return default;
return (uint)(0x200 * Math.Pow(2, header.PartitionFlags[(int)NCSDFlags.MediaUnitSize]));
}
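
As a quick sanity check on the MediaUnitSize formula above (0x200 * 2^flags[6]): a flag value of 0 gives 0x200 (512) bytes and each increment doubles the unit. A left shift expresses the same computation without floating point; the snippet below is only an illustration, not the wrapper's code.

using System;

class MediaUnitSizeDemo
{
    static uint MediaUnitSize(byte flag) => (uint)(0x200 * Math.Pow(2, flag));

    // Same result using an integer shift instead of Math.Pow
    static uint MediaUnitSizeShift(byte flag) => 0x200u << flag;

    static void Main()
    {
        Console.WriteLine(MediaUnitSize(0));      // 512
        Console.WriteLine(MediaUnitSize(1));      // 1024
        Console.WriteLine(MediaUnitSizeShift(2)); // 2048
    }
}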
/// <summary>
/// Media Card Device (1 = NOR Flash, 2 = None, 3 = BT) (Only SDK 2.X)
/// </summary>
public static MediaCardDeviceType MediaCardDevice2X(NCSDHeader? header)
{
if (header?.PartitionFlags == null)
return default;
return (MediaCardDeviceType)header.PartitionFlags[(int)NCSDFlags.MediaCardDevice2X];
}
#endregion
#endregion
}
}

View File

@@ -33,19 +33,26 @@ namespace SabreTools.Serialization.Wrappers
// TODO: Don't scan the known header data as well
// If the section table is missing
if (this.Model.SectionTable == null)
return null;
// If any required pieces are missing
if (Model.Stub?.Header == null)
return [];
if (Model.SectionTable == null)
return [];
// Populate the raw header padding data based on the source
uint headerStartAddress = this.Model.Stub?.Header?.NewExeHeaderAddr ?? 0;
uint firstSectionAddress = this.Model.SectionTable
uint headerStartAddress = Model.Stub.Header.NewExeHeaderAddr;
uint firstSectionAddress = Model.SectionTable
.Select(s => s?.PointerToRawData ?? 0)
.Where(s => s != 0)
.Where(s => s != 0 && s >= headerStartAddress)
.OrderBy(s => s)
.First();
.FirstOrDefault();
// Check if the header length is more than 0 before reading data
int headerLength = (int)(firstSectionAddress - headerStartAddress);
_headerPaddingData = ReadFromDataSource((int)headerStartAddress, headerLength);
if (headerLength <= 0)
_headerPaddingData = [];
else
_headerPaddingData = ReadFromDataSource((int)headerStartAddress, headerLength);
// Cache and return the header padding data, even if null
return _headerPaddingData;
@@ -68,19 +75,26 @@ namespace SabreTools.Serialization.Wrappers
// TODO: Don't scan the known header data as well
// If the section table is missing
if (this.Model.SectionTable == null)
return null;
// If any required pieces are missing
if (Model.Stub?.Header == null)
return [];
if (Model.SectionTable == null)
return [];
// Populate the raw header padding data based on the source
uint headerStartAddress = this.Model.Stub?.Header?.NewExeHeaderAddr ?? 0;
uint firstSectionAddress = this.Model.SectionTable
// Populate the header padding strings based on the source
uint headerStartAddress = Model.Stub.Header.NewExeHeaderAddr;
uint firstSectionAddress = Model.SectionTable
.Select(s => s?.PointerToRawData ?? 0)
.Where(s => s != 0)
.Where(s => s != 0 && s >= headerStartAddress)
.OrderBy(s => s)
.First();
.FirstOrDefault();
// Check if the header length is more than 0 before reading strings
int headerLength = (int)(firstSectionAddress - headerStartAddress);
_headerPaddingStrings = ReadStringsFromDataSource((int)headerStartAddress, headerLength, charLimit: 3);
if (headerLength <= 0)
_headerPaddingStrings = [];
else
_headerPaddingStrings = ReadStringsFromDataSource((int)headerStartAddress, headerLength, charLimit: 3);
// Cache and return the header padding data, even if null
return _headerPaddingStrings;
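
Both hunks above apply the same hardening pattern: FirstOrDefault() yields 0 when no section starts at or after the header, so the computed length can be zero or negative and must be guarded before reading. A condensed, self-contained sketch of that pattern (the section offsets here are invented, and the helper is not the wrapper's actual method):

using System;
using System.Linq;

class HeaderPaddingDemo
{
    static int HeaderPaddingLength(uint headerStartAddress, uint?[] sectionPointers)
    {
        uint firstSectionAddress = sectionPointers
            .Select(p => p ?? 0)
            .Where(p => p != 0 && p >= headerStartAddress)
            .OrderBy(p => p)
            .FirstOrDefault(); // 0 when nothing qualifies

        int headerLength = (int)(firstSectionAddress - headerStartAddress);
        return headerLength <= 0 ? 0 : headerLength; // never read a negative length
    }

    static void Main()
    {
        Console.WriteLine(HeaderPaddingLength(0x100, new uint?[] { 0x400, null, 0x200 })); // 256
        Console.WriteLine(HeaderPaddingLength(0x100, new uint?[] { null, 0x80 }));         // 0, nothing after header
    }
}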
@@ -98,21 +112,21 @@ namespace SabreTools.Serialization.Wrappers
lock (_sourceDataLock)
{
// If the section table is missing
if (this.Model.SectionTable == null)
if (Model.SectionTable == null)
return null;
// If the address is missing
if (this.Model.OptionalHeader?.AddressOfEntryPoint == null)
if (Model.OptionalHeader?.AddressOfEntryPoint == null)
return null;
// If we have no entry point
int entryPointAddress = (int)this.Model.OptionalHeader.AddressOfEntryPoint.ConvertVirtualAddress(this.Model.SectionTable);
int entryPointAddress = (int)Model.OptionalHeader.AddressOfEntryPoint.ConvertVirtualAddress(Model.SectionTable);
if (entryPointAddress == 0)
return null;
// If the entry point matches with the start of a section, use that
int entryPointSection = FindEntryPointSectionIndex();
if (entryPointSection >= 0 && this.Model.OptionalHeader.AddressOfEntryPoint == this.Model.SectionTable[entryPointSection]?.VirtualAddress)
if (entryPointSection >= 0 && Model.OptionalHeader.AddressOfEntryPoint == Model.SectionTable[entryPointSection]?.VirtualAddress)
return GetSectionData(entryPointSection);
// If we already have cached data, just use that immediately
@@ -148,27 +162,27 @@ namespace SabreTools.Serialization.Wrappers
return -1;
// If the section table is missing
if (this.Model.SectionTable == null)
if (Model.SectionTable == null)
return -1;
// If we have certificate data, use that as the end
if (this.Model.OptionalHeader?.CertificateTable != null)
if (Model.OptionalHeader?.CertificateTable != null)
{
int certificateTableAddress = (int)this.Model.OptionalHeader.CertificateTable.VirtualAddress.ConvertVirtualAddress(this.Model.SectionTable);
int certificateTableAddress = (int)Model.OptionalHeader.CertificateTable.VirtualAddress.ConvertVirtualAddress(Model.SectionTable);
if (certificateTableAddress != 0 && certificateTableAddress < endOfFile)
endOfFile = certificateTableAddress;
}
// Search through all sections and find the furthest a section goes
int endOfSectionData = -1;
foreach (var section in this.Model.SectionTable)
foreach (var section in Model.SectionTable)
{
// If we have an invalid section
if (section == null)
continue;
// If we have an invalid section address
int sectionAddress = (int)section.VirtualAddress.ConvertVirtualAddress(this.Model.SectionTable);
int sectionAddress = (int)section.VirtualAddress.ConvertVirtualAddress(Model.SectionTable);
if (sectionAddress == 0)
continue;
@@ -219,27 +233,27 @@ namespace SabreTools.Serialization.Wrappers
return null;
// If the section table is missing
if (this.Model.SectionTable == null)
if (Model.SectionTable == null)
return null;
// If we have certificate data, use that as the end
if (this.Model.OptionalHeader?.CertificateTable != null)
if (Model.OptionalHeader?.CertificateTable != null)
{
int certificateTableAddress = (int)this.Model.OptionalHeader.CertificateTable.VirtualAddress.ConvertVirtualAddress(this.Model.SectionTable);
int certificateTableAddress = (int)Model.OptionalHeader.CertificateTable.VirtualAddress.ConvertVirtualAddress(Model.SectionTable);
if (certificateTableAddress != 0 && certificateTableAddress < endOfFile)
endOfFile = certificateTableAddress;
}
// Search through all sections and find the furthest a section goes
int endOfSectionData = -1;
foreach (var section in this.Model.SectionTable)
foreach (var section in Model.SectionTable)
{
// If we have an invalid section
if (section == null)
continue;
// If we have an invalid section address
int sectionAddress = (int)section.VirtualAddress.ConvertVirtualAddress(this.Model.SectionTable);
int sectionAddress = (int)section.VirtualAddress.ConvertVirtualAddress(Model.SectionTable);
if (sectionAddress == 0)
continue;
@@ -297,27 +311,27 @@ namespace SabreTools.Serialization.Wrappers
return null;
// If the section table is missing
if (this.Model.SectionTable == null)
if (Model.SectionTable == null)
return null;
// If we have certificate data, use that as the end
if (this.Model.OptionalHeader?.CertificateTable != null)
if (Model.OptionalHeader?.CertificateTable != null)
{
int certificateTableAddress = (int)this.Model.OptionalHeader.CertificateTable.VirtualAddress.ConvertVirtualAddress(this.Model.SectionTable);
int certificateTableAddress = (int)Model.OptionalHeader.CertificateTable.VirtualAddress.ConvertVirtualAddress(Model.SectionTable);
if (certificateTableAddress != 0 && certificateTableAddress < endOfFile)
endOfFile = certificateTableAddress;
}
// Search through all sections and find the furthest a section goes
int endOfSectionData = -1;
foreach (var section in this.Model.SectionTable)
foreach (var section in Model.SectionTable)
{
// If we have an invalid section
if (section == null)
continue;
// If we have an invalid section address
int sectionAddress = (int)section.VirtualAddress.ConvertVirtualAddress(this.Model.SectionTable);
int sectionAddress = (int)section.VirtualAddress.ConvertVirtualAddress(Model.SectionTable);
if (sectionAddress == 0)
continue;
@@ -370,14 +384,14 @@ namespace SabreTools.Serialization.Wrappers
return _sectionNames;
// If there are no sections
if (this.Model.SectionTable == null)
if (Model.SectionTable == null)
return null;
// Otherwise, build and return the cached array
_sectionNames = new string[this.Model.SectionTable.Length];
_sectionNames = new string[Model.SectionTable.Length];
for (int i = 0; i < _sectionNames.Length; i++)
{
var section = this.Model.SectionTable[i];
var section = Model.SectionTable[i];
if (section == null)
continue;
@@ -408,12 +422,12 @@ namespace SabreTools.Serialization.Wrappers
if (_stubExecutableData != null)
return _stubExecutableData;
if (this.Model.Stub?.Header?.NewExeHeaderAddr == null)
if (Model.Stub?.Header?.NewExeHeaderAddr == null)
return null;
// Populate the raw stub executable data based on the source
int endOfStubHeader = 0x40;
int lengthOfStubExecutableData = (int)this.Model.Stub.Header.NewExeHeaderAddr - endOfStubHeader;
int lengthOfStubExecutableData = (int)Model.Stub.Header.NewExeHeaderAddr - endOfStubHeader;
_stubExecutableData = ReadFromDataSource(endOfStubHeader, lengthOfStubExecutableData);
// Cache and return the stub executable data, even if null
@@ -436,8 +450,8 @@ namespace SabreTools.Serialization.Wrappers
return _debugData;
// If we have no resource table, just return
if (this.Model.DebugTable?.DebugDirectoryTable == null
|| this.Model.DebugTable.DebugDirectoryTable.Length == 0)
if (Model.DebugTable?.DebugDirectoryTable == null
|| Model.DebugTable.DebugDirectoryTable.Length == 0)
return null;
// Otherwise, build and return the cached dictionary
@@ -461,13 +475,13 @@ namespace SabreTools.Serialization.Wrappers
return _resourceData;
// If we have no resource table, just return
if (this.Model.OptionalHeader?.ResourceTable == null
|| this.Model.OptionalHeader.ResourceTable.VirtualAddress == 0
|| this.Model.ResourceDirectoryTable == null)
if (Model.OptionalHeader?.ResourceTable == null
|| Model.OptionalHeader.ResourceTable.VirtualAddress == 0
|| Model.ResourceDirectoryTable == null)
return null;
// Otherwise, build and return the cached dictionary
ParseResourceDirectoryTable(this.Model.ResourceDirectoryTable, types: []);
ParseResourceDirectoryTable(Model.ResourceDirectoryTable, types: []);
return _resourceData;
}
}
@@ -585,15 +599,15 @@ namespace SabreTools.Serialization.Wrappers
/// <remarks>The internal version is either the file version, product version, or assembly version, in that order</remarks>
public string? GetInternalVersion()
{
string? version = this.FileVersion;
string? version = FileVersion;
if (!string.IsNullOrEmpty(version))
return version!.Replace(", ", ".");
version = this.ProductVersion;
version = ProductVersion;
if (!string.IsNullOrEmpty(version))
return version!.Replace(", ", ".");
version = this.AssemblyVersion;
version = AssemblyVersion;
if (!string.IsNullOrEmpty(version))
return version;
@@ -864,7 +878,7 @@ namespace SabreTools.Serialization.Wrappers
{
// Ensure that we have the debug data cached
if (DebugData == null)
return Enumerable.Empty<object?>();
return [];
var nb10Found = DebugData.Select(r => r.Value)
.Select(r => r as SabreTools.Models.PortableExecutable.NB10ProgramDatabase)
@@ -890,7 +904,7 @@ namespace SabreTools.Serialization.Wrappers
{
// Ensure that we have the resource data cached
if (DebugData == null)
return Enumerable.Empty<byte[]?>();
return [];
return DebugData.Select(r => r.Value)
.Select(b => b as byte[])
@@ -935,25 +949,34 @@ namespace SabreTools.Serialization.Wrappers
private void ParseDebugTable()
{
// If there is no debug table
if (this.Model.DebugTable?.DebugDirectoryTable == null)
if (Model.DebugTable?.DebugDirectoryTable == null)
return;
// Loop through all debug table entries
for (int i = 0; i < this.Model.DebugTable.DebugDirectoryTable.Length; i++)
for (int i = 0; i < Model.DebugTable.DebugDirectoryTable.Length; i++)
{
var entry = this.Model.DebugTable.DebugDirectoryTable[i];
var entry = Model.DebugTable.DebugDirectoryTable[i];
if (entry == null)
continue;
uint address = entry.PointerToRawData;
uint size = entry.SizeOfData;
byte[]? entryData = ReadFromDataSource((int)address, (int)size);
if (entryData == null)
continue;
// Read the entry data, bailing out if the end of the stream is reached
byte[]? entryData;
try
{
entryData = ReadFromDataSource((int)address, (int)size);
if (entryData == null || entryData.Length < 4)
continue;
}
catch (EndOfStreamException)
{
return;
}
// If we have CodeView debug data, try to parse it
if (entry.DebugType == SabreTools.Models.PortableExecutable.DebugType.IMAGE_DEBUG_TYPE_CODEVIEW)
if (entry.DebugType == Models.PortableExecutable.DebugType.IMAGE_DEBUG_TYPE_CODEVIEW)
{
// Read the signature
int offset = 0;
@@ -1004,7 +1027,7 @@ namespace SabreTools.Serialization.Wrappers
{
// Ensure that we have the resource data cached
if (ResourceData == null)
return Enumerable.Empty<Models.PortableExecutable.DialogBoxResource?>();
return [];
return ResourceData.Select(r => r.Value)
.Select(r => r as SabreTools.Models.PortableExecutable.DialogBoxResource)
@@ -1025,7 +1048,7 @@ namespace SabreTools.Serialization.Wrappers
{
// Ensure that we have the resource data cached
if (ResourceData == null)
return Enumerable.Empty<Models.PortableExecutable.DialogBoxResource?>();
return [];
return ResourceData.Select(r => r.Value)
.Select(r => r as SabreTools.Models.PortableExecutable.DialogBoxResource)
@@ -1058,7 +1081,7 @@ namespace SabreTools.Serialization.Wrappers
{
// Ensure that we have the resource data cached
if (ResourceData == null)
return Enumerable.Empty<Dictionary<int, string?>?>();
return [];
return ResourceData.Select(r => r.Value)
.Select(r => r as Dictionary<int, string?>)
@@ -1076,7 +1099,7 @@ namespace SabreTools.Serialization.Wrappers
{
// Ensure that we have the resource data cached
if (ResourceData == null)
return Enumerable.Empty<byte[]?>();
return [];
return ResourceData.Where(kvp => kvp.Key.Contains(typeName))
.Select(kvp => kvp.Value as byte[])
@@ -1092,7 +1115,7 @@ namespace SabreTools.Serialization.Wrappers
{
// Ensure that we have the resource data cached
if (ResourceData == null)
return Enumerable.Empty<byte[]?>();
return [];
return ResourceData.Select(r => r.Value)
.Select(r => r as byte[])
@@ -1311,19 +1334,19 @@ namespace SabreTools.Serialization.Wrappers
public int FindEntryPointSectionIndex()
{
// If the section table is missing
if (this.Model.SectionTable == null)
if (Model.SectionTable == null)
return -1;
// If the address is missing
if (this.Model.OptionalHeader?.AddressOfEntryPoint == null)
if (Model.OptionalHeader?.AddressOfEntryPoint == null)
return -1;
// If we don't have an entry point
if (this.Model.OptionalHeader.AddressOfEntryPoint.ConvertVirtualAddress(this.Model.SectionTable) == 0)
if (Model.OptionalHeader.AddressOfEntryPoint.ConvertVirtualAddress(Model.SectionTable) == 0)
return -1;
// Otherwise, find the section it exists within
return this.Model.OptionalHeader.AddressOfEntryPoint.ContainingSectionIndex(this.Model.SectionTable
return Model.OptionalHeader.AddressOfEntryPoint.ContainingSectionIndex(Model.SectionTable
.Where(sh => sh != null)
.Cast<Models.PortableExecutable.SectionHeader>()
.ToArray());
@@ -1338,7 +1361,7 @@ namespace SabreTools.Serialization.Wrappers
public Models.PortableExecutable.SectionHeader? GetFirstSection(string? name, bool exact = false)
{
// If we have no sections
if (SectionNames == null || !SectionNames.Any() || this.Model.SectionTable == null || !this.Model.SectionTable.Any())
if (SectionNames == null || !SectionNames.Any() || Model.SectionTable == null || !Model.SectionTable.Any())
return null;
// If the section doesn't exist
@@ -1351,7 +1374,7 @@ namespace SabreTools.Serialization.Wrappers
return null;
// Return the section
return this.Model.SectionTable[index];
return Model.SectionTable[index];
}
/// <summary>
@@ -1363,7 +1386,7 @@ namespace SabreTools.Serialization.Wrappers
public Models.PortableExecutable.SectionHeader? GetLastSection(string? name, bool exact = false)
{
// If we have no sections
if (SectionNames == null || !SectionNames.Any() || this.Model.SectionTable == null || !this.Model.SectionTable.Any())
if (SectionNames == null || !SectionNames.Any() || Model.SectionTable == null || !Model.SectionTable.Any())
return null;
// If the section doesn't exist
@@ -1376,7 +1399,7 @@ namespace SabreTools.Serialization.Wrappers
return null;
// Return the section
return this.Model.SectionTable[index];
return Model.SectionTable[index];
}
/// <summary>
@@ -1387,15 +1410,15 @@ namespace SabreTools.Serialization.Wrappers
public Models.PortableExecutable.SectionHeader? GetSection(int index)
{
// If we have no sections
if (this.Model.SectionTable == null || !this.Model.SectionTable.Any())
if (Model.SectionTable == null || !Model.SectionTable.Any())
return null;
// If the section doesn't exist
if (index < 0 || index >= this.Model.SectionTable.Length)
if (index < 0 || index >= Model.SectionTable.Length)
return null;
// Return the section
return this.Model.SectionTable[index];
return Model.SectionTable[index];
}
/// <summary>
@@ -1407,7 +1430,7 @@ namespace SabreTools.Serialization.Wrappers
public byte[]? GetFirstSectionData(string? name, bool exact = false)
{
// If we have no sections
if (SectionNames == null || !SectionNames.Any() || this.Model.SectionTable == null || !this.Model.SectionTable.Any())
if (SectionNames == null || !SectionNames.Any() || Model.SectionTable == null || !Model.SectionTable.Any())
return null;
// If the section doesn't exist
@@ -1428,7 +1451,7 @@ namespace SabreTools.Serialization.Wrappers
public byte[]? GetLastSectionData(string? name, bool exact = false)
{
// If we have no sections
if (SectionNames == null || !SectionNames.Any() || this.Model.SectionTable == null || !this.Model.SectionTable.Any())
if (SectionNames == null || !SectionNames.Any() || Model.SectionTable == null || !Model.SectionTable.Any())
return null;
// If the section doesn't exist
@@ -1448,19 +1471,19 @@ namespace SabreTools.Serialization.Wrappers
public byte[]? GetSectionData(int index)
{
// If we have no sections
if (SectionNames == null || !SectionNames.Any() || this.Model.SectionTable == null || !this.Model.SectionTable.Any())
if (SectionNames == null || !SectionNames.Any() || Model.SectionTable == null || !Model.SectionTable.Any())
return null;
// If the section doesn't exist
if (index < 0 || index >= this.Model.SectionTable.Length)
if (index < 0 || index >= Model.SectionTable.Length)
return null;
// Get the section data from the table
var section = this.Model.SectionTable[index];
var section = Model.SectionTable[index];
if (section == null)
return null;
uint address = section.VirtualAddress.ConvertVirtualAddress(this.Model.SectionTable);
uint address = section.VirtualAddress.ConvertVirtualAddress(Model.SectionTable);
if (address == 0)
return null;
@@ -1493,7 +1516,7 @@ namespace SabreTools.Serialization.Wrappers
public List<string>? GetFirstSectionStrings(string? name, bool exact = false)
{
// If we have no sections
if (SectionNames == null || !SectionNames.Any() || this.Model.SectionTable == null || !this.Model.SectionTable.Any())
if (SectionNames == null || !SectionNames.Any() || Model.SectionTable == null || !Model.SectionTable.Any())
return null;
// If the section doesn't exist
@@ -1514,7 +1537,7 @@ namespace SabreTools.Serialization.Wrappers
public List<string>? GetLastSectionStrings(string? name, bool exact = false)
{
// If we have no sections
if (SectionNames == null || !SectionNames.Any() || this.Model.SectionTable == null || !this.Model.SectionTable.Any())
if (SectionNames == null || !SectionNames.Any() || Model.SectionTable == null || !Model.SectionTable.Any())
return null;
// If the section doesn't exist
@@ -1534,19 +1557,19 @@ namespace SabreTools.Serialization.Wrappers
public List<string>? GetSectionStrings(int index)
{
// If we have no sections
if (SectionNames == null || !SectionNames.Any() || this.Model.SectionTable == null || !this.Model.SectionTable.Any())
if (SectionNames == null || !SectionNames.Any() || Model.SectionTable == null || !Model.SectionTable.Any())
return null;
// If the section doesn't exist
if (index < 0 || index >= this.Model.SectionTable.Length)
if (index < 0 || index >= Model.SectionTable.Length)
return null;
// Get the section data from the table
var section = this.Model.SectionTable[index];
var section = Model.SectionTable[index];
if (section == null)
return null;
uint address = section.VirtualAddress.ConvertVirtualAddress(this.Model.SectionTable);
uint address = section.VirtualAddress.ConvertVirtualAddress(Model.SectionTable);
if (address == 0)
return null;
@@ -1582,7 +1605,7 @@ namespace SabreTools.Serialization.Wrappers
public byte[]? GetTableData(int index)
{
// If the table doesn't exist
if (this.Model.OptionalHeader == null || index < 0 || index > 16)
if (Model.OptionalHeader == null || index < 0 || index > 16)
return null;
// Get the virtual address and size from the entries
@@ -1590,64 +1613,64 @@ namespace SabreTools.Serialization.Wrappers
switch (index)
{
case 1:
virtualAddress = this.Model.OptionalHeader.ExportTable?.VirtualAddress ?? 0;
size = this.Model.OptionalHeader.ExportTable?.Size ?? 0;
virtualAddress = Model.OptionalHeader.ExportTable?.VirtualAddress ?? 0;
size = Model.OptionalHeader.ExportTable?.Size ?? 0;
break;
case 2:
virtualAddress = this.Model.OptionalHeader.ImportTable?.VirtualAddress ?? 0;
size = this.Model.OptionalHeader.ImportTable?.Size ?? 0;
virtualAddress = Model.OptionalHeader.ImportTable?.VirtualAddress ?? 0;
size = Model.OptionalHeader.ImportTable?.Size ?? 0;
break;
case 3:
virtualAddress = this.Model.OptionalHeader.ResourceTable?.VirtualAddress ?? 0;
size = this.Model.OptionalHeader.ResourceTable?.Size ?? 0;
virtualAddress = Model.OptionalHeader.ResourceTable?.VirtualAddress ?? 0;
size = Model.OptionalHeader.ResourceTable?.Size ?? 0;
break;
case 4:
virtualAddress = this.Model.OptionalHeader.ExceptionTable?.VirtualAddress ?? 0;
size = this.Model.OptionalHeader.ExceptionTable?.Size ?? 0;
virtualAddress = Model.OptionalHeader.ExceptionTable?.VirtualAddress ?? 0;
size = Model.OptionalHeader.ExceptionTable?.Size ?? 0;
break;
case 5:
virtualAddress = this.Model.OptionalHeader.CertificateTable?.VirtualAddress ?? 0;
size = this.Model.OptionalHeader.CertificateTable?.Size ?? 0;
virtualAddress = Model.OptionalHeader.CertificateTable?.VirtualAddress ?? 0;
size = Model.OptionalHeader.CertificateTable?.Size ?? 0;
break;
case 6:
virtualAddress = this.Model.OptionalHeader.BaseRelocationTable?.VirtualAddress ?? 0;
size = this.Model.OptionalHeader.BaseRelocationTable?.Size ?? 0;
virtualAddress = Model.OptionalHeader.BaseRelocationTable?.VirtualAddress ?? 0;
size = Model.OptionalHeader.BaseRelocationTable?.Size ?? 0;
break;
case 7:
virtualAddress = this.Model.OptionalHeader.Debug?.VirtualAddress ?? 0;
size = this.Model.OptionalHeader.Debug?.Size ?? 0;
virtualAddress = Model.OptionalHeader.Debug?.VirtualAddress ?? 0;
size = Model.OptionalHeader.Debug?.Size ?? 0;
break;
case 8: // Architecture Table
virtualAddress = 0;
size = 0;
break;
case 9:
virtualAddress = this.Model.OptionalHeader.GlobalPtr?.VirtualAddress ?? 0;
size = this.Model.OptionalHeader.GlobalPtr?.Size ?? 0;
virtualAddress = Model.OptionalHeader.GlobalPtr?.VirtualAddress ?? 0;
size = Model.OptionalHeader.GlobalPtr?.Size ?? 0;
break;
case 10:
virtualAddress = this.Model.OptionalHeader.ThreadLocalStorageTable?.VirtualAddress ?? 0;
size = this.Model.OptionalHeader.ThreadLocalStorageTable?.Size ?? 0;
virtualAddress = Model.OptionalHeader.ThreadLocalStorageTable?.VirtualAddress ?? 0;
size = Model.OptionalHeader.ThreadLocalStorageTable?.Size ?? 0;
break;
case 11:
virtualAddress = this.Model.OptionalHeader.LoadConfigTable?.VirtualAddress ?? 0;
size = this.Model.OptionalHeader.LoadConfigTable?.Size ?? 0;
virtualAddress = Model.OptionalHeader.LoadConfigTable?.VirtualAddress ?? 0;
size = Model.OptionalHeader.LoadConfigTable?.Size ?? 0;
break;
case 12:
virtualAddress = this.Model.OptionalHeader.BoundImport?.VirtualAddress ?? 0;
size = this.Model.OptionalHeader.BoundImport?.Size ?? 0;
virtualAddress = Model.OptionalHeader.BoundImport?.VirtualAddress ?? 0;
size = Model.OptionalHeader.BoundImport?.Size ?? 0;
break;
case 13:
virtualAddress = this.Model.OptionalHeader.ImportAddressTable?.VirtualAddress ?? 0;
size = this.Model.OptionalHeader.ImportAddressTable?.Size ?? 0;
virtualAddress = Model.OptionalHeader.ImportAddressTable?.VirtualAddress ?? 0;
size = Model.OptionalHeader.ImportAddressTable?.Size ?? 0;
break;
case 14:
virtualAddress = this.Model.OptionalHeader.DelayImportDescriptor?.VirtualAddress ?? 0;
size = this.Model.OptionalHeader.DelayImportDescriptor?.Size ?? 0;
virtualAddress = Model.OptionalHeader.DelayImportDescriptor?.VirtualAddress ?? 0;
size = Model.OptionalHeader.DelayImportDescriptor?.Size ?? 0;
break;
case 15:
virtualAddress = this.Model.OptionalHeader.CLRRuntimeHeader?.VirtualAddress ?? 0;
size = this.Model.OptionalHeader.CLRRuntimeHeader?.Size ?? 0;
virtualAddress = Model.OptionalHeader.CLRRuntimeHeader?.VirtualAddress ?? 0;
size = Model.OptionalHeader.CLRRuntimeHeader?.Size ?? 0;
break;
case 16: // Reserved
virtualAddress = 0;
@@ -1656,11 +1679,11 @@ namespace SabreTools.Serialization.Wrappers
}
// If there is no section table
if (this.Model.SectionTable == null)
if (Model.SectionTable == null)
return null;
// Get the physical address from the virtual one
uint address = virtualAddress.ConvertVirtualAddress(this.Model.SectionTable);
uint address = virtualAddress.ConvertVirtualAddress(Model.SectionTable);
if (address == 0 || size == 0)
return null;
@@ -1690,7 +1713,7 @@ namespace SabreTools.Serialization.Wrappers
public List<string>? GetTableStrings(int index)
{
// If the table doesn't exist
if (this.Model.OptionalHeader == null || index < 0 || index > 16)
if (Model.OptionalHeader == null || index < 0 || index > 16)
return null;
// Get the virtual address and size from the entries
@@ -1698,64 +1721,64 @@ namespace SabreTools.Serialization.Wrappers
switch (index)
{
case 1:
virtualAddress = this.Model.OptionalHeader.ExportTable?.VirtualAddress ?? 0;
size = this.Model.OptionalHeader.ExportTable?.Size ?? 0;
virtualAddress = Model.OptionalHeader.ExportTable?.VirtualAddress ?? 0;
size = Model.OptionalHeader.ExportTable?.Size ?? 0;
break;
case 2:
virtualAddress = this.Model.OptionalHeader.ImportTable?.VirtualAddress ?? 0;
size = this.Model.OptionalHeader.ImportTable?.Size ?? 0;
virtualAddress = Model.OptionalHeader.ImportTable?.VirtualAddress ?? 0;
size = Model.OptionalHeader.ImportTable?.Size ?? 0;
break;
case 3:
virtualAddress = this.Model.OptionalHeader.ResourceTable?.VirtualAddress ?? 0;
size = this.Model.OptionalHeader.ResourceTable?.Size ?? 0;
virtualAddress = Model.OptionalHeader.ResourceTable?.VirtualAddress ?? 0;
size = Model.OptionalHeader.ResourceTable?.Size ?? 0;
break;
case 4:
virtualAddress = this.Model.OptionalHeader.ExceptionTable?.VirtualAddress ?? 0;
size = this.Model.OptionalHeader.ExceptionTable?.Size ?? 0;
virtualAddress = Model.OptionalHeader.ExceptionTable?.VirtualAddress ?? 0;
size = Model.OptionalHeader.ExceptionTable?.Size ?? 0;
break;
case 5:
virtualAddress = this.Model.OptionalHeader.CertificateTable?.VirtualAddress ?? 0;
size = this.Model.OptionalHeader.CertificateTable?.Size ?? 0;
virtualAddress = Model.OptionalHeader.CertificateTable?.VirtualAddress ?? 0;
size = Model.OptionalHeader.CertificateTable?.Size ?? 0;
break;
case 6:
virtualAddress = this.Model.OptionalHeader.BaseRelocationTable?.VirtualAddress ?? 0;
size = this.Model.OptionalHeader.BaseRelocationTable?.Size ?? 0;
virtualAddress = Model.OptionalHeader.BaseRelocationTable?.VirtualAddress ?? 0;
size = Model.OptionalHeader.BaseRelocationTable?.Size ?? 0;
break;
case 7:
virtualAddress = this.Model.OptionalHeader.Debug?.VirtualAddress ?? 0;
size = this.Model.OptionalHeader.Debug?.Size ?? 0;
virtualAddress = Model.OptionalHeader.Debug?.VirtualAddress ?? 0;
size = Model.OptionalHeader.Debug?.Size ?? 0;
break;
case 8: // Architecture Table
virtualAddress = 0;
size = 0;
break;
case 9:
virtualAddress = this.Model.OptionalHeader.GlobalPtr?.VirtualAddress ?? 0;
size = this.Model.OptionalHeader.GlobalPtr?.Size ?? 0;
virtualAddress = Model.OptionalHeader.GlobalPtr?.VirtualAddress ?? 0;
size = Model.OptionalHeader.GlobalPtr?.Size ?? 0;
break;
case 10:
virtualAddress = this.Model.OptionalHeader.ThreadLocalStorageTable?.VirtualAddress ?? 0;
size = this.Model.OptionalHeader.ThreadLocalStorageTable?.Size ?? 0;
virtualAddress = Model.OptionalHeader.ThreadLocalStorageTable?.VirtualAddress ?? 0;
size = Model.OptionalHeader.ThreadLocalStorageTable?.Size ?? 0;
break;
case 11:
virtualAddress = this.Model.OptionalHeader.LoadConfigTable?.VirtualAddress ?? 0;
size = this.Model.OptionalHeader.LoadConfigTable?.Size ?? 0;
virtualAddress = Model.OptionalHeader.LoadConfigTable?.VirtualAddress ?? 0;
size = Model.OptionalHeader.LoadConfigTable?.Size ?? 0;
break;
case 12:
virtualAddress = this.Model.OptionalHeader.BoundImport?.VirtualAddress ?? 0;
size = this.Model.OptionalHeader.BoundImport?.Size ?? 0;
virtualAddress = Model.OptionalHeader.BoundImport?.VirtualAddress ?? 0;
size = Model.OptionalHeader.BoundImport?.Size ?? 0;
break;
case 13:
virtualAddress = this.Model.OptionalHeader.ImportAddressTable?.VirtualAddress ?? 0;
size = this.Model.OptionalHeader.ImportAddressTable?.Size ?? 0;
virtualAddress = Model.OptionalHeader.ImportAddressTable?.VirtualAddress ?? 0;
size = Model.OptionalHeader.ImportAddressTable?.Size ?? 0;
break;
case 14:
virtualAddress = this.Model.OptionalHeader.DelayImportDescriptor?.VirtualAddress ?? 0;
size = this.Model.OptionalHeader.DelayImportDescriptor?.Size ?? 0;
virtualAddress = Model.OptionalHeader.DelayImportDescriptor?.VirtualAddress ?? 0;
size = Model.OptionalHeader.DelayImportDescriptor?.Size ?? 0;
break;
case 15:
virtualAddress = this.Model.OptionalHeader.CLRRuntimeHeader?.VirtualAddress ?? 0;
size = this.Model.OptionalHeader.CLRRuntimeHeader?.Size ?? 0;
virtualAddress = Model.OptionalHeader.CLRRuntimeHeader?.VirtualAddress ?? 0;
size = Model.OptionalHeader.CLRRuntimeHeader?.Size ?? 0;
break;
case 16: // Reserved
virtualAddress = 0;
@@ -1764,11 +1787,11 @@ namespace SabreTools.Serialization.Wrappers
}
// If there is no section table
if (this.Model.SectionTable == null)
if (Model.SectionTable == null)
return null;
// Get the physical address from the virtual one
uint address = virtualAddress.ConvertVirtualAddress(this.Model.SectionTable);
uint address = virtualAddress.ConvertVirtualAddress(Model.SectionTable);
if (address == 0 || size == 0)
return null;

View File

@@ -43,9 +43,9 @@ namespace SabreTools.Serialization.Wrappers
return null;
// Get the archive count
int archiveCount = this.Model.DirectoryItems == null
int archiveCount = Model.DirectoryItems == null
? 0
: this.Model.DirectoryItems
: Model.DirectoryItems
.Select(di => di?.DirectoryEntry)
.Select(de => de?.ArchiveIndex ?? 0)
.Where(ai => ai != HL_VPK_NO_ARCHIVE)

View File

@@ -29,6 +29,11 @@ namespace SabreTools.Serialization.Wrappers
/// </summary>
protected DataSource _dataSource = DataSource.UNKNOWN;
/// <summary>
/// Lock object for reading from the source
/// </summary>
private readonly object _streamDataLock = new();
/// <summary>
/// Source byte array data
/// </summary>
@@ -86,7 +91,7 @@ namespace SabreTools.Serialization.Wrappers
if (offset < 0 || offset >= data.Length)
throw new ArgumentOutOfRangeException(nameof(offset));
this.Model = model;
Model = model;
_dataSource = DataSource.ByteArray;
_byteArrayData = data;
_byteArrayOffset = offset;
@@ -104,7 +109,7 @@ namespace SabreTools.Serialization.Wrappers
if (data.Length == 0 || !data.CanSeek || !data.CanRead)
throw new ArgumentOutOfRangeException(nameof(data));
this.Model = model;
Model = model;
_dataSource = DataSource.Stream;
_streamData = data;
}
@@ -193,11 +198,14 @@ namespace SabreTools.Serialization.Wrappers
break;
case DataSource.Stream:
long currentLocation = _streamData!.Position;
_streamData.Seek(position, SeekOrigin.Begin);
sectionData = _streamData.ReadBytes(length);
_streamData.Seek(currentLocation, SeekOrigin.Begin);
break;
lock (_streamDataLock)
{
long currentLocation = _streamData!.Position;
_streamData.Seek(position, SeekOrigin.Begin);
sectionData = _streamData.ReadBytes(length);
_streamData.Seek(currentLocation, SeekOrigin.Begin);
break;
}
}
return sectionData;
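
A stripped-down sketch of the seek/read/restore sequence now wrapped in a lock: the stream position is shared state, so concurrent readers must serialize around the seek and read, then put the position back. This version uses only Stream from the BCL; the ReadBytes extension in the diff is the library's own and is not reproduced here.

using System;
using System.IO;

class LockedStreamReadDemo
{
    private static readonly object _streamDataLock = new();

    static byte[] ReadFromStream(Stream stream, long position, int length)
    {
        lock (_streamDataLock)
        {
            // Remember where the caller left the stream, read, then restore
            long currentLocation = stream.Position;
            stream.Seek(position, SeekOrigin.Begin);

            byte[] buffer = new byte[length];
            int read = stream.Read(buffer, 0, length);
            Array.Resize(ref buffer, read);

            stream.Seek(currentLocation, SeekOrigin.Begin);
            return buffer;
        }
    }

    static void Main()
    {
        using var ms = new MemoryStream(new byte[] { 1, 2, 3, 4, 5 });
        byte[] slice = ReadFromStream(ms, 2, 2);
        Console.WriteLine(string.Join(",", slice)); // 3,4
    }
}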

View File

@@ -21,6 +21,7 @@ namespace SabreTools.Serialization.Wrappers
case WrapperType.BSP: return BSP.Create(data);
case WrapperType.BZip2: return null; // TODO: Implement wrapper
case WrapperType.CFB: return CFB.Create(data);
case WrapperType.CHD: return CHD.Create(data);
case WrapperType.CIA: return CIA.Create(data);
case WrapperType.Executable: return CreateExecutableWrapper(data);
case WrapperType.GCF: return GCF.Create(data);
@@ -218,6 +219,13 @@ namespace SabreTools.Serialization.Wrappers
#endregion
#region CHD
if (magic.StartsWith(new byte?[] { 0x4D, 0x43, 0x6F, 0x6D, 0x70, 0x72, 0x48, 0x44 }))
return WrapperType.CHD;
#endregion
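
The eight magic bytes added here are the ASCII string "MComprHD", which opens every CHD (Compressed Hunks of Data) header. A tiny check of that decoding, independent of the wrapper code:

using System;
using System.Text;

class ChdMagicDemo
{
    static void Main()
    {
        byte[] magic = { 0x4D, 0x43, 0x6F, 0x6D, 0x70, 0x72, 0x48, 0x44 };
        Console.WriteLine(Encoding.ASCII.GetString(magic)); // MComprHD
    }
}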
#region CIA
if (extension.Equals("cia", StringComparison.OrdinalIgnoreCase))

View File

@@ -41,6 +41,11 @@ namespace SabreTools.Serialization.Wrappers
/// </summary>
CFB,
/// <summary>
/// MAME Compressed Hunks of Data
/// </summary>
CHD,
/// <summary>
/// CTR Importable Archive
/// </summary>

View File

@@ -22,7 +22,7 @@ namespace SabreTools.Serialization.Wrappers
{
get
{
var publisherIdentifier = this.Model.PublisherIdentifier;
var publisherIdentifier = Model.PublisherIdentifier;
if (string.IsNullOrEmpty(publisherIdentifier))
return "Unknown";
@@ -40,7 +40,7 @@ namespace SabreTools.Serialization.Wrappers
{
get
{
var regionIdentifier = this.Model.RegionIdentifier;
var regionIdentifier = Model.RegionIdentifier;
if (Regions.ContainsKey(regionIdentifier))
return Regions[regionIdentifier];
@@ -51,12 +51,12 @@ namespace SabreTools.Serialization.Wrappers
/// <summary>
/// Get the human-readable serial string
/// </summary>
public string Serial => $"{this.Model.PublisherIdentifier}-{this.Model.GameID}";
public string Serial => $"{Model.PublisherIdentifier}-{Model.GameID}";
/// <summary>
/// Get the human-readable version string
/// </summary>
public string Version => $"1.{this.Model.VersionNumber}";
public string Version => $"1.{Model.VersionNumber}";
#endregion

View File

@@ -22,7 +22,7 @@ namespace SabreTools.Serialization.Wrappers
{
get
{
char mediaSubtype = this.Model.MediaSubtypeIdentifier;
char mediaSubtype = Model.MediaSubtypeIdentifier;
if (MediaSubtypes.ContainsKey(mediaSubtype))
return MediaSubtypes[mediaSubtype];
@@ -37,7 +37,7 @@ namespace SabreTools.Serialization.Wrappers
{
get
{
var publisherIdentifier = this.Model.PublisherIdentifier;
var publisherIdentifier = Model.PublisherIdentifier;
if (string.IsNullOrEmpty(publisherIdentifier))
return "Unknown";
@@ -55,7 +55,7 @@ namespace SabreTools.Serialization.Wrappers
{
get
{
var regionIdentifier = this.Model.RegionIdentifier;
var regionIdentifier = Model.RegionIdentifier;
if (Regions.ContainsKey(regionIdentifier))
return Regions[regionIdentifier];
@@ -66,12 +66,12 @@ namespace SabreTools.Serialization.Wrappers
/// <summary>
/// Get the human-readable serial string
/// </summary>
public string Serial => $"{this.Model.PublisherIdentifier}-{this.Model.PlatformIdentifier}{this.Model.GameID}";
public string Serial => $"{Model.PublisherIdentifier}-{Model.PlatformIdentifier}{Model.GameID}";
/// <summary>
/// Get the human-readable version string
/// </summary>
public string Version => $"1.{this.Model.SKU}";
public string Version => $"1.{Model.SKU}";
#endregion

View File

@@ -17,8 +17,6 @@ namespace Test
if (options == null)
{
Options.DisplayHelp();
Console.WriteLine("Press enter to close the program...");
Console.ReadLine();
return;
}

View File

@@ -2,7 +2,6 @@
<PropertyGroup>
<TargetFrameworks>net20;net35;net40;net452;net462;net472;net48;netcoreapp3.1;net5.0;net6.0;net7.0;net8.0</TargetFrameworks>
<RuntimeIdentifiers>win-x86;win-x64;win-arm64;linux-x64;linux-arm64;osx-x64</RuntimeIdentifiers>
<OutputType>Exe</OutputType>
<CheckEolTargetFramework>false</CheckEolTargetFramework>
<IncludeSourceRevisionInInformationalVersion>false</IncludeSourceRevisionInInformationalVersion>
@@ -12,6 +11,20 @@
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<!-- Support All Frameworks -->
<PropertyGroup Condition="$(TargetFramework.StartsWith(`net2`)) OR $(TargetFramework.StartsWith(`net3`)) OR $(TargetFramework.StartsWith(`net4`))">
<RuntimeIdentifiers>win-x86;win-x64</RuntimeIdentifiers>
</PropertyGroup>
<PropertyGroup Condition="$(TargetFramework.StartsWith(`netcoreapp`)) OR $(TargetFramework.StartsWith(`net5`))">
<RuntimeIdentifiers>win-x86;win-x64;win-arm64;linux-x64;linux-arm64;osx-x64</RuntimeIdentifiers>
</PropertyGroup>
<PropertyGroup Condition="$(TargetFramework.StartsWith(`net6`)) OR $(TargetFramework.StartsWith(`net7`)) OR $(TargetFramework.StartsWith(`net8`))">
<RuntimeIdentifiers>win-x86;win-x64;win-arm64;linux-x64;linux-arm64;osx-x64;osx-arm64</RuntimeIdentifiers>
</PropertyGroup>
<PropertyGroup Condition="$(RuntimeIdentifier.StartsWith(`osx-arm`))">
<TargetFrameworks>net6.0;net7.0;net8.0</TargetFrameworks>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\SabreTools.Serialization\SabreTools.Serialization.csproj" />
</ItemGroup>

View File

@@ -1,4 +1,4 @@
#! /bin/bash
#!/bin/bash
# This script assumes the following:
# - .NET 8.0 (or newer) SDK is installed and in PATH
@@ -12,9 +12,8 @@
USE_ALL=false
NO_BUILD=false
NO_ARCHIVE=false
while getopts "uba" OPTION
do
case $OPTION in
while getopts "uba" OPTION; do
case $OPTION in
u)
USE_ALL=true
;;
@@ -35,27 +34,32 @@ done
BUILD_FOLDER=$PWD
# Set the current commit hash
COMMIT=`git log --pretty=%H -1`
COMMIT=$(git log --pretty=%H -1)
# Output the selected options
echo "Selected Options:"
echo " Use all frameworks (-u) $USE_ALL"
echo " No build (-b) $NO_BUILD"
echo " No archive (-a) $NO_ARCHIVE"
echo " "
# Create the build matrix arrays
FRAMEWORKS=("net8.0")
RUNTIMES=("win-x86" "win-x64" "linux-x64" "osx-x64")
RUNTIMES=("win-x86" "win-x64" "win-arm64" "linux-x64" "linux-arm64" "osx-x64" "osx-arm64")
# Use expanded lists, if requested
if [ $USE_ALL = true ]
then
if [ $USE_ALL = true ]; then
FRAMEWORKS=("net20" "net35" "net40" "net452" "net462" "net472" "net48" "netcoreapp3.1" "net5.0" "net6.0" "net7.0" "net8.0")
RUNTIMES=("win-x86" "win-x64" "win-arm64" "linux-x64" "linux-arm64" "osx-x64")
fi
# Create the filter arrays
SINGLE_FILE_CAPABLE=("net5.0" "net6.0" "net7.0" "net8.0")
VALID_APPLE_FRAMEWORKS=("net6.0" "net7.0" "net8.0")
VALID_CROSS_PLATFORM_FRAMEWORKS=("netcoreapp3.1" "net5.0" "net6.0" "net7.0" "net8.0")
VALID_CROSS_PLATFORM_RUNTIMES=("win-arm64" "linux-x64" "linux-arm64" "osx-x64")
VALID_CROSS_PLATFORM_RUNTIMES=("win-arm64" "linux-x64" "linux-arm64" "osx-x64" "osx-arm64")
# Only build if requested
if [ $NO_BUILD = false ]
then
if [ $NO_BUILD = false ]; then
# Restore Nuget packages for all builds
echo "Restoring Nuget packages"
dotnet restore
@@ -63,24 +67,40 @@ then
# Create Nuget Package
dotnet pack SabreTools.Serialization/SabreTools.Serialization.csproj --output $BUILD_FOLDER
# Build Test
for FRAMEWORK in "${FRAMEWORKS[@]}"
do
for RUNTIME in "${RUNTIMES[@]}"
do
# Build Program
for FRAMEWORK in "${FRAMEWORKS[@]}"; do
for RUNTIME in "${RUNTIMES[@]}"; do
# Output the current build
echo "===== Build Program - $FRAMEWORK, $RUNTIME ====="
# If we have an invalid combination of framework and runtime
if [ ! $(echo ${VALID_CROSS_PLATFORM_FRAMEWORKS[@]} | fgrep -w $FRAMEWORK) ] && [ $(echo ${VALID_CROSS_PLATFORM_RUNTIMES[@]} | fgrep -w $RUNTIME) ]
then
continue
if [[ ! $(echo ${VALID_CROSS_PLATFORM_FRAMEWORKS[@]} | fgrep -w $FRAMEWORK) ]]; then
if [[ $(echo ${VALID_CROSS_PLATFORM_RUNTIMES[@]} | fgrep -w $RUNTIME) ]]; then
echo "Skipped due to invalid combination"
continue
fi
fi
# If we have Apple silicon but an unsupported framework
if [[ ! $(echo ${VALID_APPLE_FRAMEWORKS[@]} | fgrep -w $FRAMEWORK) ]]; then
if [ $RUNTIME = "osx-arm64" ]; then
echo "Skipped due to no Apple Silicon support"
continue
fi
fi
# Only .NET 5 and above can publish to a single file
if [[ $(echo ${SINGLE_FILE_CAPABLE[@]} | fgrep -w $FRAMEWORK) ]]
then
dotnet publish Test/Test.csproj -f $FRAMEWORK -r $RUNTIME -c Debug --self-contained true --version-suffix $COMMIT -p:PublishSingleFile=true
if [[ $(echo ${SINGLE_FILE_CAPABLE[@]} | fgrep -w $FRAMEWORK) ]]; then
# Only include Debug if building all
if [ $USE_ALL = true ]; then
dotnet publish Test/Test.csproj -f $FRAMEWORK -r $RUNTIME -c Debug --self-contained true --version-suffix $COMMIT -p:PublishSingleFile=true
fi
dotnet publish Test/Test.csproj -f $FRAMEWORK -r $RUNTIME -c Release --self-contained true --version-suffix $COMMIT -p:PublishSingleFile=true -p:DebugType=None -p:DebugSymbols=false
else
dotnet publish Test/Test.csproj -f $FRAMEWORK -r $RUNTIME -c Debug --self-contained true --version-suffix $COMMIT
# Only include Debug if building all
if [ $USE_ALL = true ]; then
dotnet publish Test/Test.csproj -f $FRAMEWORK -r $RUNTIME -c Debug --self-contained true --version-suffix $COMMIT
fi
dotnet publish Test/Test.csproj -f $FRAMEWORK -r $RUNTIME -c Release --self-contained true --version-suffix $COMMIT -p:DebugType=None -p:DebugSymbols=false
fi
done
@@ -88,21 +108,34 @@ then
fi
# Only create archives if requested
if [ $NO_ARCHIVE = false ]
then
if [ $NO_ARCHIVE = false ]; then
# Create Test archives
for FRAMEWORK in "${FRAMEWORKS[@]}"
do
for RUNTIME in "${RUNTIMES[@]}"
do
for FRAMEWORK in "${FRAMEWORKS[@]}"; do
for RUNTIME in "${RUNTIMES[@]}"; do
# Output the current build
echo "===== Archive Program - $FRAMEWORK, $RUNTIME ====="
# If we have an invalid combination of framework and runtime
if [ ! $(echo ${VALID_CROSS_PLATFORM_FRAMEWORKS[@]} | fgrep -w $FRAMEWORK) ] && [ $(echo ${VALID_CROSS_PLATFORM_RUNTIMES[@]} | fgrep -w $RUNTIME) ]
then
continue
if [[ ! $(echo ${VALID_CROSS_PLATFORM_FRAMEWORKS[@]} | fgrep -w $FRAMEWORK) ]]; then
if [[ $(echo ${VALID_CROSS_PLATFORM_RUNTIMES[@]} | fgrep -w $RUNTIME) ]]; then
echo "Skipped due to invalid combination"
continue
fi
fi
cd $BUILD_FOLDER/Test/bin/Debug/${FRAMEWORK}/${RUNTIME}/publish/
zip -r $BUILD_FOLDER/SabreTools.Serialization_${FRAMEWORK}_${RUNTIME}_debug.zip .
# If we have Apple silicon but an unsupported framework
if [[ ! $(echo ${VALID_APPLE_FRAMEWORKS[@]} | fgrep -w $FRAMEWORK) ]]; then
if [ $RUNTIME = "osx-arm64" ]; then
echo "Skipped due to no Apple Silicon support"
continue
fi
fi
# Only include Debug if building all
if [ $USE_ALL = true ]; then
cd $BUILD_FOLDER/Test/bin/Debug/${FRAMEWORK}/${RUNTIME}/publish/
zip -r $BUILD_FOLDER/SabreTools.Serialization_${FRAMEWORK}_${RUNTIME}_debug.zip .
fi
cd $BUILD_FOLDER/Test/bin/Release/${FRAMEWORK}/${RUNTIME}/publish/
zip -r $BUILD_FOLDER/SabreTools.Serialization_${FRAMEWORK}_${RUNTIME}_release.zip .
done
@@ -110,4 +143,4 @@ then
# Reset the directory
cd $BUILD_FOLDER
fi
fi

View File

@@ -27,25 +27,30 @@ $BUILD_FOLDER = $PSScriptRoot
# Set the current commit hash
$COMMIT = git log --pretty=format:"%H" -1
# Output the selected options
Write-Host "Selected Options:"
Write-Host " Use all frameworks (-UseAll) $USE_ALL"
Write-Host " No build (-NoBuild) $NO_BUILD"
Write-Host " No archive (-NoArchive) $NO_ARCHIVE"
Write-Host " "
# Create the build matrix arrays
$FRAMEWORKS = @('net8.0')
$RUNTIMES = @('win-x86', 'win-x64', 'linux-x64', 'osx-x64')
$RUNTIMES = @('win-x86', 'win-x64', 'win-arm64', 'linux-x64', 'linux-arm64', 'osx-x64', 'osx-arm64')
# Use expanded lists, if requested
if ($USE_ALL.IsPresent)
{
if ($USE_ALL.IsPresent) {
$FRAMEWORKS = @('net20', 'net35', 'net40', 'net452', 'net462', 'net472', 'net48', 'netcoreapp3.1', 'net5.0', 'net6.0', 'net7.0', 'net8.0')
$RUNTIMES = @('win-x86', 'win-x64', 'win-arm64', 'linux-x64', 'linux-arm64', 'osx-x64')
}
# Create the filter arrays
$SINGLE_FILE_CAPABLE = @('net5.0', 'net6.0', 'net7.0', 'net8.0')
$VALID_APPLE_FRAMEWORKS = @('net6.0', 'net7.0', 'net8.0')
$VALID_CROSS_PLATFORM_FRAMEWORKS = @('netcoreapp3.1', 'net5.0', 'net6.0', 'net7.0', 'net8.0')
$VALID_CROSS_PLATFORM_RUNTIMES = @('win-arm64', 'linux-x64', 'linux-arm64', 'osx-x64')
$VALID_CROSS_PLATFORM_RUNTIMES = @('win-arm64', 'linux-x64', 'linux-arm64', 'osx-x64', 'osx-arm64')
# Only build if requested
if (!$NO_BUILD.IsPresent)
{
if (!$NO_BUILD.IsPresent) {
# Restore Nuget packages for all builds
Write-Host "Restoring Nuget packages"
dotnet restore
@@ -54,25 +59,36 @@ if (!$NO_BUILD.IsPresent)
dotnet pack SabreTools.Serialization\SabreTools.Serialization.csproj --output $BUILD_FOLDER
# Build Test
foreach ($FRAMEWORK in $FRAMEWORKS)
{
foreach ($RUNTIME in $RUNTIMES)
{
foreach ($FRAMEWORK in $FRAMEWORKS) {
foreach ($RUNTIME in $RUNTIMES) {
# Output the current build
Write-Host "===== Build Program - $FRAMEWORK, $RUNTIME ====="
# If we have an invalid combination of framework and runtime
if ($VALID_CROSS_PLATFORM_FRAMEWORKS -notcontains $FRAMEWORK -and $VALID_CROSS_PLATFORM_RUNTIMES -contains $RUNTIME)
{
if ($VALID_CROSS_PLATFORM_FRAMEWORKS -notcontains $FRAMEWORK -and $VALID_CROSS_PLATFORM_RUNTIMES -contains $RUNTIME) {
Write-Host "Skipped due to invalid combination"
continue
}
# If we have Apple silicon but an unsupported framework
if ($VALID_APPLE_FRAMEWORKS -notcontains $FRAMEWORK -and $RUNTIME -eq 'osx-arm64') {
Write-Host "Skipped due to no Apple Silicon support"
continue
}
# Only .NET 5 and above can publish to a single file
if ($SINGLE_FILE_CAPABLE -contains $FRAMEWORK)
{
dotnet publish Test\Test.csproj -f $FRAMEWORK -r $RUNTIME -c Debug --self-contained true --version-suffix $COMMIT -p:PublishSingleFile=true
if ($SINGLE_FILE_CAPABLE -contains $FRAMEWORK) {
# Only include Debug if building all
if ($USE_ALL.IsPresent) {
dotnet publish Test\Test.csproj -f $FRAMEWORK -r $RUNTIME -c Debug --self-contained true --version-suffix $COMMIT -p:PublishSingleFile=true
}
dotnet publish Test\Test.csproj -f $FRAMEWORK -r $RUNTIME -c Release --self-contained true --version-suffix $COMMIT -p:PublishSingleFile=true -p:DebugType=None -p:DebugSymbols=false
}
else
{
dotnet publish Test\Test.csproj -f $FRAMEWORK -r $RUNTIME -c Debug --self-contained true --version-suffix $COMMIT
else {
# Only include Debug if building all
if ($USE_ALL.IsPresent) {
dotnet publish Test\Test.csproj -f $FRAMEWORK -r $RUNTIME -c Debug --self-contained true --version-suffix $COMMIT
}
dotnet publish Test\Test.csproj -f $FRAMEWORK -r $RUNTIME -c Release --self-contained true --version-suffix $COMMIT -p:DebugType=None -p:DebugSymbols=false
}
}
@@ -80,21 +96,31 @@ if (!$NO_BUILD.IsPresent)
}
# Only create archives if requested
if (!$NO_ARCHIVE.IsPresent)
{
if (!$NO_ARCHIVE.IsPresent) {
# Create Test archives
foreach ($FRAMEWORK in $FRAMEWORKS)
{
foreach ($RUNTIME in $RUNTIMES)
{
foreach ($FRAMEWORK in $FRAMEWORKS) {
foreach ($RUNTIME in $RUNTIMES) {
# Output the current build
Write-Host "===== Archive Program - $FRAMEWORK, $RUNTIME ====="
# If we have an invalid combination of framework and runtime
if ($VALID_CROSS_PLATFORM_FRAMEWORKS -notcontains $FRAMEWORK -and $VALID_CROSS_PLATFORM_RUNTIMES -contains $RUNTIME)
{
if ($VALID_CROSS_PLATFORM_FRAMEWORKS -notcontains $FRAMEWORK -and $VALID_CROSS_PLATFORM_RUNTIMES -contains $RUNTIME) {
Write-Host "Skipped due to invalid combination"
continue
}
Set-Location -Path $BUILD_FOLDER\Test\bin\Debug\${FRAMEWORK}\${RUNTIME}\publish\
7z a -tzip $BUILD_FOLDER\SabreTools.Serialization_${FRAMEWORK}_${RUNTIME}_debug.zip *
# If we have Apple silicon but an unsupported framework
if ($VALID_APPLE_FRAMEWORKS -notcontains $FRAMEWORK -and $RUNTIME -eq 'osx-arm64') {
Write-Host "Skipped due to no Apple Silicon support"
continue
}
# Only include Debug if building all
if ($USE_ALL.IsPresent) {
Set-Location -Path $BUILD_FOLDER\Test\bin\Debug\${FRAMEWORK}\${RUNTIME}\publish\
7z a -tzip $BUILD_FOLDER\SabreTools.Serialization_${FRAMEWORK}_${RUNTIME}_debug.zip *
}
Set-Location -Path $BUILD_FOLDER\Test\bin\Release\${FRAMEWORK}\${RUNTIME}\publish\
7z a -tzip $BUILD_FOLDER\SabreTools.Serialization_${FRAMEWORK}_${RUNTIME}_release.zip *
}