mirror of
https://github.com/SabreTools/SabreTools.Serialization.git
synced 2026-02-04 05:36:12 +00:00
Compare commits
111 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
9214a22cc9 | ||
|
|
5ba1156245 | ||
|
|
cb91cdff1d | ||
|
|
4df6a4e79d | ||
|
|
5b82a48267 | ||
|
|
8f70c50a48 | ||
|
|
5fe4d81fa4 | ||
|
|
7d3addbf0a | ||
|
|
b7d5873eb7 | ||
|
|
4e40cc19d5 | ||
|
|
1bc9316bc1 | ||
|
|
c995ec1dca | ||
|
|
4d2fbbae04 | ||
|
|
2776928946 | ||
|
|
8cc87c6540 | ||
|
|
3c212022aa | ||
|
|
511c4d09e5 | ||
|
|
d7eba27dc5 | ||
|
|
09370618ca | ||
|
|
2197167088 | ||
|
|
b527635fe7 | ||
|
|
695309bc32 | ||
|
|
97b2f68ec7 | ||
|
|
593044dbf3 | ||
|
|
1fcf44fb8d | ||
|
|
a2a472baf9 | ||
|
|
b5b4a50d94 | ||
|
|
f1b5464052 | ||
|
|
2c0224db22 | ||
|
|
1e78eecb40 | ||
|
|
3626faea60 | ||
|
|
a0177f1174 | ||
|
|
db5fe4a2cd | ||
|
|
5716143168 | ||
|
|
2a59b23149 | ||
|
|
bdbec4ed02 | ||
|
|
25193f1805 | ||
|
|
4840c816a2 | ||
|
|
d0a8e3770b | ||
|
|
1cf3d50864 | ||
|
|
d1b98f7d6d | ||
|
|
4bc87ff812 | ||
|
|
e1df11b360 | ||
|
|
34606a4f04 | ||
|
|
c4c5fc4bf6 | ||
|
|
cd87ce5373 | ||
|
|
90fc16b888 | ||
|
|
c2d0b71d22 | ||
|
|
e54473682c | ||
|
|
1c8d64d98c | ||
|
|
a19437f42f | ||
|
|
855e2f2c77 | ||
|
|
bd3cf88123 | ||
|
|
e4578ad3fc | ||
|
|
39e56ef864 | ||
|
|
51b77da760 | ||
|
|
4b83219a9b | ||
|
|
3ed07dd299 | ||
|
|
bb7daed7f6 | ||
|
|
0c84c47752 | ||
|
|
c18a185474 | ||
|
|
8ff66b04d8 | ||
|
|
94d6556e04 | ||
|
|
6d960265e4 | ||
|
|
cf4ca76e10 | ||
|
|
c7760e9903 | ||
|
|
d51bedceb6 | ||
|
|
125dc021d5 | ||
|
|
5bce481648 | ||
|
|
20153f62cf | ||
|
|
e302dfccf1 | ||
|
|
594b841490 | ||
|
|
40c354f79f | ||
|
|
b77959f300 | ||
|
|
59d6026a2b | ||
|
|
14226d1270 | ||
|
|
955f4da708 | ||
|
|
700b0359ea | ||
|
|
fe95b894d7 | ||
|
|
38a2712a8f | ||
|
|
d1ea091574 | ||
|
|
6bc812fc2f | ||
|
|
61b89fbd72 | ||
|
|
a2c065bdf2 | ||
|
|
88479f674b | ||
|
|
5edbacde74 | ||
|
|
67fc51224b | ||
|
|
101f3294b4 | ||
|
|
6c5622f732 | ||
|
|
f2a6fe1445 | ||
|
|
b0b593443f | ||
|
|
9b05185add | ||
|
|
17316da536 | ||
|
|
f3ca4dd989 | ||
|
|
e2b7bdac8c | ||
|
|
f86f6dc438 | ||
|
|
2bac0ed505 | ||
|
|
ae4078bb7f | ||
|
|
afaffbd9a2 | ||
|
|
b878e59e2e | ||
|
|
4bb3f625dd | ||
|
|
b7978cafa5 | ||
|
|
17f376c76f | ||
|
|
2774fdf158 | ||
|
|
11081efcb0 | ||
|
|
1b412c3027 | ||
|
|
73ec66e627 | ||
|
|
4ae4cd80b1 | ||
|
|
6eb27c66fc | ||
|
|
f96fd17fd3 | ||
|
|
c255a2494d |
8
.github/workflows/build_nupkg.yml
vendored
8
.github/workflows/build_nupkg.yml
vendored
@@ -16,10 +16,16 @@ jobs:
|
||||
- name: Setup .NET
|
||||
uses: actions/setup-dotnet@v4
|
||||
with:
|
||||
dotnet-version: 8.0.x
|
||||
dotnet-version: 9.0.x
|
||||
|
||||
- name: Restore dependencies
|
||||
run: dotnet restore
|
||||
|
||||
- name: Build library
|
||||
run: dotnet build
|
||||
|
||||
- name: Run tests
|
||||
run: dotnet test
|
||||
|
||||
- name: Pack
|
||||
run: dotnet pack
|
||||
|
||||
18
.github/workflows/build_test.yml
vendored
18
.github/workflows/build_test.yml
vendored
@@ -1,4 +1,4 @@
|
||||
name: Build Test
|
||||
name: Build InfoPrint
|
||||
|
||||
on:
|
||||
push:
|
||||
@@ -10,10 +10,10 @@ jobs:
|
||||
|
||||
strategy:
|
||||
matrix:
|
||||
project: [Test]
|
||||
runtime: [win-x86, win-x64, linux-x64, osx-x64] #[win-x86, win-x64, win-arm64, linux-x64, linux-arm64, osx-x64]
|
||||
framework: [net8.0] #[net20, net35, net40, net452, net472, net48, netcoreapp3.1, net5.0, net6.0, net7.0, net8.0]
|
||||
conf: [Release, Debug]
|
||||
project: [InfoPrint]
|
||||
runtime: [win-x86, win-x64, win-arm64, linux-x64, linux-arm64, osx-x64]
|
||||
framework: [net9.0] #[net20, net35, net40, net452, net472, net48, netcoreapp3.1, net5.0, net6.0, net7.0, net8.0, net9.0]
|
||||
conf: [Debug] #[Release, Debug]
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
@@ -23,16 +23,18 @@ jobs:
|
||||
- name: Setup .NET
|
||||
uses: actions/setup-dotnet@v4
|
||||
with:
|
||||
dotnet-version: 8.0.x
|
||||
dotnet-version: 9.0.x
|
||||
|
||||
- name: Restore dependencies
|
||||
run: dotnet restore
|
||||
|
||||
- name: Build
|
||||
run: dotnet publish ${{ matrix.project }}/${{ matrix.project }}.csproj -f ${{ matrix.framework }} -r ${{ matrix.runtime }} -c ${{ matrix.conf == 'Release' && 'Release -p:DebugType=None -p:DebugSymbols=false' || 'Debug'}} --self-contained true --version-suffix ${{ github.sha }} ${{ (startsWith(matrix.framework, 'net5') || startsWith(matrix.framework, 'net6') || startsWith(matrix.framework, 'net7') || startsWith(matrix.framework, 'net8')) && '-p:PublishSingleFile=true' || ''}}
|
||||
run: dotnet publish ${{ matrix.project }}/${{ matrix.project }}.csproj -f ${{ matrix.framework }} -r ${{ matrix.runtime }} -c ${{ matrix.conf == 'Release' && 'Release -p:DebugType=None -p:DebugSymbols=false' || 'Debug'}} --self-contained true --version-suffix ${{ github.sha }} ${{ (startsWith(matrix.framework, 'net5') || startsWith(matrix.framework, 'net6') || startsWith(matrix.framework, 'net7') || startsWith(matrix.framework, 'net8') || startsWith(matrix.framework, 'net9')) && '-p:PublishSingleFile=true' || ''}}
|
||||
|
||||
- name: Archive build
|
||||
run: zip -r ${{ matrix.project }}_${{ matrix.framework }}_${{ matrix.runtime }}_${{ matrix.conf }}.zip ${{ matrix.project }}/bin/${{ matrix.conf }}/${{ matrix.framework }}/${{ matrix.runtime }}/publish/
|
||||
run: |
|
||||
cd ${{ matrix.project }}/bin/Debug/${{ matrix.framework }}/${{ matrix.runtime }}/publish/
|
||||
zip -r ${{ github.workspace }}/${{ matrix.project }}_${{ matrix.framework }}_${{ matrix.runtime }}_${{ matrix.conf }}.zip ./
|
||||
|
||||
- name: Upload build
|
||||
uses: actions/upload-artifact@v4
|
||||
|
||||
7
.github/workflows/check_pr.yml
vendored
7
.github/workflows/check_pr.yml
vendored
@@ -11,7 +11,10 @@ jobs:
|
||||
- name: Setup .NET
|
||||
uses: actions/setup-dotnet@v4
|
||||
with:
|
||||
dotnet-version: 8.0.x
|
||||
dotnet-version: 9.0.x
|
||||
|
||||
- name: Build
|
||||
run: dotnet build
|
||||
run: dotnet build
|
||||
|
||||
- name: Run tests
|
||||
run: dotnet test
|
||||
328
.gitignore
vendored
328
.gitignore
vendored
@@ -1,15 +1,7 @@
|
||||
*.swp
|
||||
*.*~
|
||||
project.lock.json
|
||||
.DS_Store
|
||||
*.pyc
|
||||
nupkg/
|
||||
|
||||
# Visual Studio Code
|
||||
.vscode
|
||||
|
||||
# Rider
|
||||
.idea
|
||||
## Ignore Visual Studio temporary files, build results, and
|
||||
## files generated by popular Visual Studio add-ons.
|
||||
##
|
||||
## Get latest from https://github.com/github/gitignore/blob/master/VisualStudio.gitignore
|
||||
|
||||
# User-specific files
|
||||
*.suo
|
||||
@@ -17,6 +9,9 @@ nupkg/
|
||||
*.userosscache
|
||||
*.sln.docstates
|
||||
|
||||
# User-specific files (MonoDevelop/Xamarin Studio)
|
||||
*.userprefs
|
||||
|
||||
# Build results
|
||||
[Dd]ebug/
|
||||
[Dd]ebugPublic/
|
||||
@@ -24,15 +19,312 @@ nupkg/
|
||||
[Rr]eleases/
|
||||
x64/
|
||||
x86/
|
||||
build/
|
||||
bld/
|
||||
[Bb]in/
|
||||
[Oo]bj/
|
||||
[Oo]ut/
|
||||
msbuild.log
|
||||
msbuild.err
|
||||
msbuild.wrn
|
||||
[Ll]og/
|
||||
|
||||
# Visual Studio 2015
|
||||
# Visual Studio 2015/2017 cache/options directory
|
||||
.vs/
|
||||
# Uncomment if you have tasks that create the project's static files in wwwroot
|
||||
#wwwroot/
|
||||
|
||||
# Visual Studio 2017 auto generated files
|
||||
Generated\ Files/
|
||||
|
||||
# MSTest test Results
|
||||
[Tt]est[Rr]esult*/
|
||||
[Bb]uild[Ll]og.*
|
||||
|
||||
# NUNIT
|
||||
*.VisualState.xml
|
||||
TestResult.xml
|
||||
|
||||
# Build Results of an ATL Project
|
||||
[Dd]ebugPS/
|
||||
[Rr]eleasePS/
|
||||
dlldata.c
|
||||
|
||||
# Benchmark Results
|
||||
BenchmarkDotNet.Artifacts/
|
||||
|
||||
# .NET Core
|
||||
project.lock.json
|
||||
project.fragment.lock.json
|
||||
artifacts/
|
||||
**/Properties/launchSettings.json
|
||||
|
||||
# StyleCop
|
||||
StyleCopReport.xml
|
||||
|
||||
# Files built by Visual Studio
|
||||
*_i.c
|
||||
*_p.c
|
||||
*_i.h
|
||||
*.ilk
|
||||
*.meta
|
||||
*.obj
|
||||
*.iobj
|
||||
*.pch
|
||||
*.pdb
|
||||
*.ipdb
|
||||
*.pgc
|
||||
*.pgd
|
||||
*.rsp
|
||||
*.sbr
|
||||
*.tlb
|
||||
*.tli
|
||||
*.tlh
|
||||
*.tmp
|
||||
*.tmp_proj
|
||||
*.log
|
||||
*.vspscc
|
||||
*.vssscc
|
||||
.builds
|
||||
*.pidb
|
||||
*.svclog
|
||||
*.scc
|
||||
|
||||
# Chutzpah Test files
|
||||
_Chutzpah*
|
||||
|
||||
# Visual C++ cache files
|
||||
ipch/
|
||||
*.aps
|
||||
*.ncb
|
||||
*.opendb
|
||||
*.opensdf
|
||||
*.sdf
|
||||
*.cachefile
|
||||
*.VC.db
|
||||
*.VC.VC.opendb
|
||||
|
||||
# Visual Studio profiler
|
||||
*.psess
|
||||
*.vsp
|
||||
*.vspx
|
||||
*.sap
|
||||
|
||||
# Visual Studio Trace Files
|
||||
*.e2e
|
||||
|
||||
# TFS 2012 Local Workspace
|
||||
$tf/
|
||||
|
||||
# Guidance Automation Toolkit
|
||||
*.gpState
|
||||
|
||||
# ReSharper is a .NET coding add-in
|
||||
_ReSharper*/
|
||||
*.[Rr]e[Ss]harper
|
||||
*.DotSettings.user
|
||||
|
||||
# JustCode is a .NET coding add-in
|
||||
.JustCode
|
||||
|
||||
# TeamCity is a build add-in
|
||||
_TeamCity*
|
||||
|
||||
# DotCover is a Code Coverage Tool
|
||||
*.dotCover
|
||||
|
||||
# AxoCover is a Code Coverage Tool
|
||||
.axoCover/*
|
||||
!.axoCover/settings.json
|
||||
|
||||
# Visual Studio code coverage results
|
||||
*.coverage
|
||||
*.coveragexml
|
||||
|
||||
# NCrunch
|
||||
_NCrunch_*
|
||||
.*crunch*.local.xml
|
||||
nCrunchTemp_*
|
||||
|
||||
# MightyMoose
|
||||
*.mm.*
|
||||
AutoTest.Net/
|
||||
|
||||
# Web workbench (sass)
|
||||
.sass-cache/
|
||||
|
||||
# Installshield output folder
|
||||
[Ee]xpress/
|
||||
|
||||
# DocProject is a documentation generator add-in
|
||||
DocProject/buildhelp/
|
||||
DocProject/Help/*.HxT
|
||||
DocProject/Help/*.HxC
|
||||
DocProject/Help/*.hhc
|
||||
DocProject/Help/*.hhk
|
||||
DocProject/Help/*.hhp
|
||||
DocProject/Help/Html2
|
||||
DocProject/Help/html
|
||||
|
||||
# Click-Once directory
|
||||
publish/
|
||||
|
||||
# Publish Web Output
|
||||
*.[Pp]ublish.xml
|
||||
*.azurePubxml
|
||||
# Note: Comment the next line if you want to checkin your web deploy settings,
|
||||
# but database connection strings (with potential passwords) will be unencrypted
|
||||
*.pubxml
|
||||
*.publishproj
|
||||
|
||||
# Microsoft Azure Web App publish settings. Comment the next line if you want to
|
||||
# checkin your Azure Web App publish settings, but sensitive information contained
|
||||
# in these scripts will be unencrypted
|
||||
PublishScripts/
|
||||
|
||||
# NuGet Packages
|
||||
*.nupkg
|
||||
# The packages folder can be ignored because of Package Restore
|
||||
**/[Pp]ackages/*
|
||||
# except build/, which is used as an MSBuild target.
|
||||
!**/[Pp]ackages/build/
|
||||
# Uncomment if necessary however generally it will be regenerated when needed
|
||||
#!**/[Pp]ackages/repositories.config
|
||||
# NuGet v3's project.json files produces more ignorable files
|
||||
*.nuget.props
|
||||
*.nuget.targets
|
||||
|
||||
# Microsoft Azure Build Output
|
||||
csx/
|
||||
*.build.csdef
|
||||
|
||||
# Microsoft Azure Emulator
|
||||
ecf/
|
||||
rcf/
|
||||
|
||||
# Windows Store app package directories and files
|
||||
AppPackages/
|
||||
BundleArtifacts/
|
||||
Package.StoreAssociation.xml
|
||||
_pkginfo.txt
|
||||
*.appx
|
||||
|
||||
# Visual Studio cache files
|
||||
# files ending in .cache can be ignored
|
||||
*.[Cc]ache
|
||||
# but keep track of directories ending in .cache
|
||||
!*.[Cc]ache/
|
||||
|
||||
# Others
|
||||
ClientBin/
|
||||
~$*
|
||||
*~
|
||||
*.dbmdl
|
||||
*.dbproj.schemaview
|
||||
*.jfm
|
||||
*.pfx
|
||||
*.publishsettings
|
||||
orleans.codegen.cs
|
||||
|
||||
# Including strong name files can present a security risk
|
||||
# (https://github.com/github/gitignore/pull/2483#issue-259490424)
|
||||
#*.snk
|
||||
|
||||
# Since there are multiple workflows, uncomment next line to ignore bower_components
|
||||
# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622)
|
||||
#bower_components/
|
||||
|
||||
# RIA/Silverlight projects
|
||||
Generated_Code/
|
||||
|
||||
# Backup & report files from converting an old project file
|
||||
# to a newer Visual Studio version. Backup files are not needed,
|
||||
# because we have git ;-)
|
||||
_UpgradeReport_Files/
|
||||
Backup*/
|
||||
UpgradeLog*.XML
|
||||
UpgradeLog*.htm
|
||||
ServiceFabricBackup/
|
||||
*.rptproj.bak
|
||||
|
||||
# SQL Server files
|
||||
*.mdf
|
||||
*.ldf
|
||||
*.ndf
|
||||
|
||||
# Business Intelligence projects
|
||||
*.rdl.data
|
||||
*.bim.layout
|
||||
*.bim_*.settings
|
||||
*.rptproj.rsuser
|
||||
|
||||
# Microsoft Fakes
|
||||
FakesAssemblies/
|
||||
|
||||
# GhostDoc plugin setting file
|
||||
*.GhostDoc.xml
|
||||
|
||||
# Node.js Tools for Visual Studio
|
||||
.ntvs_analysis.dat
|
||||
node_modules/
|
||||
|
||||
# Visual Studio 6 build log
|
||||
*.plg
|
||||
|
||||
# Visual Studio 6 workspace options file
|
||||
*.opt
|
||||
|
||||
# Visual Studio 6 auto-generated workspace file (contains which files were open etc.)
|
||||
*.vbw
|
||||
|
||||
# Visual Studio LightSwitch build output
|
||||
**/*.HTMLClient/GeneratedArtifacts
|
||||
**/*.DesktopClient/GeneratedArtifacts
|
||||
**/*.DesktopClient/ModelManifest.xml
|
||||
**/*.Server/GeneratedArtifacts
|
||||
**/*.Server/ModelManifest.xml
|
||||
_Pvt_Extensions
|
||||
|
||||
# Paket dependency manager
|
||||
.paket/paket.exe
|
||||
paket-files/
|
||||
|
||||
# FAKE - F# Make
|
||||
.fake/
|
||||
|
||||
# JetBrains Rider
|
||||
.idea/
|
||||
*.sln.iml
|
||||
|
||||
# CodeRush
|
||||
.cr/
|
||||
|
||||
# Python Tools for Visual Studio (PTVS)
|
||||
__pycache__/
|
||||
*.pyc
|
||||
|
||||
# Cake - Uncomment if you are using it
|
||||
# tools/**
|
||||
# !tools/packages.config
|
||||
|
||||
# Tabs Studio
|
||||
*.tss
|
||||
|
||||
# Telerik's JustMock configuration file
|
||||
*.jmconfig
|
||||
|
||||
# BizTalk build output
|
||||
*.btp.cs
|
||||
*.btm.cs
|
||||
*.odx.cs
|
||||
*.xsd.cs
|
||||
|
||||
# OpenCover UI analysis results
|
||||
OpenCover/
|
||||
|
||||
# Azure Stream Analytics local run output
|
||||
ASALocalRun/
|
||||
|
||||
# MSBuild Binary and Structured Log
|
||||
*.binlog
|
||||
|
||||
# NVidia Nsight GPU debugger configuration file
|
||||
*.nvuser
|
||||
|
||||
# MFractors (Xamarin productivity tool) working folder
|
||||
.mfractor/
|
||||
|
||||
28
.vscode/launch.json
vendored
Normal file
28
.vscode/launch.json
vendored
Normal file
@@ -0,0 +1,28 @@
|
||||
{
|
||||
// Use IntelliSense to find out which attributes exist for C# debugging
|
||||
// Use hover for the description of the existing attributes
|
||||
// For further information visit https://github.com/OmniSharp/omnisharp-vscode/blob/master/debugger-launchjson.md
|
||||
"version": "0.2.0",
|
||||
"configurations": [
|
||||
{
|
||||
"name": ".NET Core Launch (InfoPrint)",
|
||||
"type": "coreclr",
|
||||
"request": "launch",
|
||||
"preLaunchTask": "build",
|
||||
// If you have changed target frameworks, make sure to update the program path.
|
||||
"program": "${workspaceFolder}/InfoPrint/bin/Debug/net9.0/InfoPrint.dll",
|
||||
"args": [],
|
||||
"cwd": "${workspaceFolder}",
|
||||
// For more information about the 'console' field, see https://aka.ms/VSCode-CS-LaunchJson-Console
|
||||
"console": "internalConsole",
|
||||
"stopAtEntry": false,
|
||||
"justMyCode": false
|
||||
},
|
||||
{
|
||||
"name": ".NET Core Attach",
|
||||
"type": "coreclr",
|
||||
"request": "attach",
|
||||
"processId": "${command:pickProcess}"
|
||||
}
|
||||
]
|
||||
}
|
||||
24
.vscode/tasks.json
vendored
Normal file
24
.vscode/tasks.json
vendored
Normal file
@@ -0,0 +1,24 @@
|
||||
{
|
||||
// See https://go.microsoft.com/fwlink/?LinkId=733558
|
||||
// for the documentation about the tasks.json format
|
||||
"version": "2.0.0",
|
||||
"tasks": [
|
||||
{
|
||||
"label": "build",
|
||||
"command": "dotnet",
|
||||
"type": "shell",
|
||||
"args": [
|
||||
"build",
|
||||
// Ask dotnet build to generate full paths for file names.
|
||||
"/property:GenerateFullPaths=true",
|
||||
// Do not generate summary otherwise it leads to duplicate errors in Problems panel
|
||||
"/consoleloggerparameters:NoSummary"
|
||||
],
|
||||
"group": "build",
|
||||
"presentation": {
|
||||
"reveal": "silent"
|
||||
},
|
||||
"problemMatcher": "$msCompile"
|
||||
}
|
||||
]
|
||||
}
|
||||
37
InfoPrint/InfoPrint.csproj
Normal file
37
InfoPrint/InfoPrint.csproj
Normal file
@@ -0,0 +1,37 @@
|
||||
<Project Sdk="Microsoft.NET.Sdk">
|
||||
|
||||
<PropertyGroup>
|
||||
<TargetFrameworks>net20;net35;net40;net452;net462;net472;net48;netcoreapp3.1;net5.0;net6.0;net7.0;net8.0;net9.0</TargetFrameworks>
|
||||
<OutputType>Exe</OutputType>
|
||||
<CheckEolTargetFramework>false</CheckEolTargetFramework>
|
||||
<IncludeSourceRevisionInInformationalVersion>false</IncludeSourceRevisionInInformationalVersion>
|
||||
<LangVersion>latest</LangVersion>
|
||||
<Nullable>enable</Nullable>
|
||||
<SuppressTfmSupportBuildWarnings>true</SuppressTfmSupportBuildWarnings>
|
||||
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
|
||||
<Version>1.7.6</Version>
|
||||
</PropertyGroup>
|
||||
|
||||
<!-- Support All Frameworks -->
|
||||
<PropertyGroup Condition="$(TargetFramework.StartsWith(`net2`)) OR $(TargetFramework.StartsWith(`net3`)) OR $(TargetFramework.StartsWith(`net4`))">
|
||||
<RuntimeIdentifiers>win-x86;win-x64</RuntimeIdentifiers>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="$(TargetFramework.StartsWith(`netcoreapp`)) OR $(TargetFramework.StartsWith(`net5`))">
|
||||
<RuntimeIdentifiers>win-x86;win-x64;win-arm64;linux-x64;linux-arm64;osx-x64</RuntimeIdentifiers>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="$(TargetFramework.StartsWith(`net6`)) OR $(TargetFramework.StartsWith(`net7`)) OR $(TargetFramework.StartsWith(`net8`)) OR $(TargetFramework.StartsWith(`net9`))">
|
||||
<RuntimeIdentifiers>win-x86;win-x64;win-arm64;linux-x64;linux-arm64;osx-x64;osx-arm64</RuntimeIdentifiers>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="$(RuntimeIdentifier.StartsWith(`osx-arm`))">
|
||||
<TargetFrameworks>net6.0;net7.0;net8.0;net9.0</TargetFrameworks>
|
||||
</PropertyGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<ProjectReference Include="..\SabreTools.Serialization\SabreTools.Serialization.csproj" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<PackageReference Include="SabreTools.IO" Version="1.5.1" />
|
||||
</ItemGroup>
|
||||
|
||||
</Project>
|
||||
@@ -1,7 +1,7 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
|
||||
namespace Test
|
||||
namespace InfoPrint
|
||||
{
|
||||
/// <summary>
|
||||
/// Set of options for the test executable
|
||||
@@ -104,15 +104,15 @@ namespace Test
|
||||
/// </summary>
|
||||
public static void DisplayHelp()
|
||||
{
|
||||
Console.WriteLine("SabreTools.Serialization Test Program");
|
||||
Console.WriteLine("Information Printing Program");
|
||||
Console.WriteLine();
|
||||
Console.WriteLine("test.exe <options> file|directory ...");
|
||||
Console.WriteLine("infoprint.exe <options> file|directory ...");
|
||||
Console.WriteLine();
|
||||
Console.WriteLine("Options:");
|
||||
Console.WriteLine("-?, -h, --help Display this help text and quit");
|
||||
Console.WriteLine("-d, --debug Enable debug mode");
|
||||
#if NET6_0_OR_GREATER
|
||||
Console.WriteLine("-j, --json Print executable info as JSON");
|
||||
Console.WriteLine("-j, --json Print info as JSON");
|
||||
#endif
|
||||
}
|
||||
}
|
||||
@@ -4,7 +4,7 @@ using SabreTools.IO.Extensions;
|
||||
using SabreTools.Serialization;
|
||||
using SabreTools.Serialization.Wrappers;
|
||||
|
||||
namespace Test
|
||||
namespace InfoPrint
|
||||
{
|
||||
public static class Program
|
||||
{
|
||||
@@ -17,8 +17,6 @@ namespace Test
|
||||
if (options == null)
|
||||
{
|
||||
Options.DisplayHelp();
|
||||
Console.WriteLine("Press enter to close the program...");
|
||||
Console.ReadLine();
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -50,11 +48,7 @@ namespace Test
|
||||
}
|
||||
else if (Directory.Exists(path))
|
||||
{
|
||||
#if NET20 || NET35
|
||||
foreach (string file in Directory.GetFiles(path, "*", SearchOption.AllDirectories))
|
||||
#else
|
||||
foreach (string file in Directory.EnumerateFiles(path, "*", SearchOption.AllDirectories))
|
||||
#endif
|
||||
foreach (string file in IOExtensions.SafeEnumerateFiles(path, "*", SearchOption.AllDirectories))
|
||||
{
|
||||
PrintFileInfo(file, json, debug);
|
||||
}
|
||||
@@ -4,6 +4,12 @@ This library comprises of serializers that both read and write from files and st
|
||||
|
||||
Find the link to the Nuget package [here](https://www.nuget.org/packages/SabreTools.Serialization).
|
||||
|
||||
## Releases
|
||||
|
||||
For the most recent stable build, download the latest release here: [Releases Page](https://github.com/SabreTools/SabreTools.Serialization/releases)
|
||||
|
||||
For the latest WIP build here: [Rolling Release](https://github.com/SabreTools/SabreTools.Serialization/releases/tag/rolling)
|
||||
|
||||
## Interfaces
|
||||
|
||||
Below is a table representing the various conversion interfaces that are implemented within this library.
|
||||
|
||||
@@ -31,13 +31,9 @@ namespace SabreTools.Serialization.Test
|
||||
Assert.Equal(count, dat.File.Length);
|
||||
|
||||
// Validate we're not missing any attributes or elements
|
||||
Assert.Null(dat.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(dat.ADDITIONAL_ELEMENTS);
|
||||
foreach (var file in dat.File)
|
||||
{
|
||||
Assert.NotNull(file);
|
||||
Assert.Null(file.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(file.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -59,7 +55,6 @@ namespace SabreTools.Serialization.Test
|
||||
foreach (var file in dat.Row)
|
||||
{
|
||||
Assert.NotNull(file);
|
||||
Assert.Null(file.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -76,103 +71,12 @@ namespace SabreTools.Serialization.Test
|
||||
|
||||
// Validate the values
|
||||
if (expectHeader)
|
||||
{
|
||||
Assert.NotNull(dat?.ClrMamePro);
|
||||
Assert.Null(dat.ClrMamePro.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
else
|
||||
{
|
||||
Assert.Null(dat?.ClrMamePro);
|
||||
}
|
||||
|
||||
Assert.NotNull(dat?.Game);
|
||||
Assert.Equal(count, dat.Game.Length);
|
||||
|
||||
// Validate we're not missing any attributes or elements
|
||||
Assert.NotNull(dat?.ADDITIONAL_ELEMENTS);
|
||||
Assert.Empty(dat.ADDITIONAL_ELEMENTS);
|
||||
foreach (var game in dat.Game)
|
||||
{
|
||||
Assert.NotNull(game?.ADDITIONAL_ELEMENTS);
|
||||
Assert.Empty(game.ADDITIONAL_ELEMENTS);
|
||||
foreach (var release in game.Release ?? Array.Empty<Models.ClrMamePro.Release>())
|
||||
{
|
||||
Assert.NotNull(release?.ADDITIONAL_ELEMENTS);
|
||||
Assert.Empty(release.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
|
||||
foreach (var biosset in game.BiosSet ?? Array.Empty<Models.ClrMamePro.BiosSet>())
|
||||
{
|
||||
Assert.NotNull(biosset?.ADDITIONAL_ELEMENTS);
|
||||
Assert.Empty(biosset.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
|
||||
foreach (var rom in game.Rom ?? Array.Empty<Models.ClrMamePro.Rom>())
|
||||
{
|
||||
Assert.NotNull(rom?.ADDITIONAL_ELEMENTS);
|
||||
Assert.Empty(rom.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
|
||||
foreach (var disk in game.Disk ?? Array.Empty<Models.ClrMamePro.Disk>())
|
||||
{
|
||||
Assert.NotNull(disk?.ADDITIONAL_ELEMENTS);
|
||||
Assert.Empty(disk.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
|
||||
foreach (var media in game.Media ?? Array.Empty<Models.ClrMamePro.Media>())
|
||||
{
|
||||
Assert.NotNull(media?.ADDITIONAL_ELEMENTS);
|
||||
Assert.Empty(media.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
|
||||
foreach (var sample in game.Sample ?? Array.Empty<Models.ClrMamePro.Sample>())
|
||||
{
|
||||
Assert.NotNull(sample?.ADDITIONAL_ELEMENTS);
|
||||
Assert.Empty(sample.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
|
||||
foreach (var archive in game.Archive ?? Array.Empty<Models.ClrMamePro.Archive>())
|
||||
{
|
||||
Assert.NotNull(archive?.ADDITIONAL_ELEMENTS);
|
||||
Assert.Empty(archive.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
|
||||
foreach (var chip in game.Chip ?? Array.Empty<Models.ClrMamePro.Chip>())
|
||||
{
|
||||
Assert.NotNull(chip?.ADDITIONAL_ELEMENTS);
|
||||
Assert.Empty(chip.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
|
||||
foreach (var video in game.Video ?? Array.Empty<Models.ClrMamePro.Video>())
|
||||
{
|
||||
Assert.NotNull(video?.ADDITIONAL_ELEMENTS);
|
||||
Assert.Empty(video.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
|
||||
if (game.Sound != null)
|
||||
{
|
||||
Assert.NotNull(game.Sound?.ADDITIONAL_ELEMENTS);
|
||||
Assert.Empty(game.Sound.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
|
||||
if (game.Input != null)
|
||||
{
|
||||
Assert.NotNull(game.Input?.ADDITIONAL_ELEMENTS);
|
||||
Assert.Empty(game.Input.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
|
||||
foreach (var dipswitch in game.DipSwitch ?? Array.Empty<Models.ClrMamePro.DipSwitch>())
|
||||
{
|
||||
Assert.NotNull(dipswitch?.ADDITIONAL_ELEMENTS);
|
||||
Assert.Empty(dipswitch.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
|
||||
if (game.Driver != null)
|
||||
{
|
||||
Assert.NotNull(game.Driver?.ADDITIONAL_ELEMENTS);
|
||||
Assert.Empty(game.Driver.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
[Theory]
|
||||
@@ -192,23 +96,9 @@ namespace SabreTools.Serialization.Test
|
||||
Assert.NotNull(dat?.Game);
|
||||
Assert.Equal(count, dat.Game.Length);
|
||||
|
||||
// Validate we're not missing any attributes or elements
|
||||
Assert.NotNull(dat?.ADDITIONAL_ELEMENTS);
|
||||
Assert.Empty(dat.ADDITIONAL_ELEMENTS);
|
||||
|
||||
Assert.NotNull(dat.DosCenter?.ADDITIONAL_ELEMENTS);
|
||||
Assert.Empty(dat.DosCenter.ADDITIONAL_ELEMENTS);
|
||||
foreach (var game in dat.Game)
|
||||
{
|
||||
Assert.NotNull(game?.ADDITIONAL_ELEMENTS);
|
||||
Assert.Empty(game.ADDITIONAL_ELEMENTS);
|
||||
|
||||
Assert.NotNull(game.File);
|
||||
foreach (var file in game.File)
|
||||
{
|
||||
Assert.NotNull(file?.ADDITIONAL_ELEMENTS);
|
||||
Assert.Empty(file.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -225,20 +115,10 @@ namespace SabreTools.Serialization.Test
|
||||
// Validate the values
|
||||
Assert.NotNull(dat?.Row);
|
||||
Assert.Equal(count, dat.Row.Length);
|
||||
|
||||
// Validate we're not missing any attributes or elements
|
||||
foreach (var file in dat.Row)
|
||||
{
|
||||
Assert.Null(file.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
}
|
||||
|
||||
[Theory]
|
||||
[InlineData("test-sfv-files.sfv", HashType.CRC32, 100)]
|
||||
[InlineData("test-sfv-files.sfv", HashType.CRC32_ISO, 100)]
|
||||
[InlineData("test-sfv-files.sfv", HashType.CRC32_Naive, 100)]
|
||||
[InlineData("test-sfv-files.sfv", HashType.CRC32_Optimized, 100)]
|
||||
[InlineData("test-sfv-files.sfv", HashType.CRC32_Parallel, 100)]
|
||||
[InlineData("test-md5-files.md5", HashType.MD5, 100)]
|
||||
[InlineData("test-sha1-files.sha1", HashType.SHA1, 100)]
|
||||
[InlineData("test-sha256.sha256", HashType.SHA256, 1)]
|
||||
@@ -259,10 +139,6 @@ namespace SabreTools.Serialization.Test
|
||||
switch (hash)
|
||||
{
|
||||
case HashType.CRC32:
|
||||
case HashType.CRC32_ISO:
|
||||
case HashType.CRC32_Naive:
|
||||
case HashType.CRC32_Optimized:
|
||||
case HashType.CRC32_Parallel:
|
||||
Assert.NotNull(dat.SFV);
|
||||
Assert.Equal(count, dat.SFV.Length);
|
||||
break;
|
||||
@@ -308,10 +184,6 @@ namespace SabreTools.Serialization.Test
|
||||
// Validate the values
|
||||
Assert.NotNull(dat?.Set);
|
||||
Assert.Equal(count, dat.Set.Length);
|
||||
|
||||
// Validate we're not missing any attributes or elements
|
||||
Assert.NotNull(dat.ADDITIONAL_ELEMENTS);
|
||||
Assert.Empty(dat.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
|
||||
[Theory]
|
||||
@@ -328,219 +200,6 @@ namespace SabreTools.Serialization.Test
|
||||
// Validate the values
|
||||
Assert.NotNull(dat?.Game);
|
||||
Assert.Equal(count, dat.Game.Length);
|
||||
|
||||
// Validate we're not missing any attributes or elements
|
||||
Assert.Null(dat.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(dat.ADDITIONAL_ELEMENTS);
|
||||
foreach (var game in dat.Game)
|
||||
{
|
||||
Assert.Null(game.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(game.ADDITIONAL_ELEMENTS);
|
||||
|
||||
foreach (var biosset in game.BiosSet ?? Array.Empty<Models.Listxml.BiosSet>())
|
||||
{
|
||||
Assert.Null(biosset.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(biosset.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
|
||||
foreach (var rom in game.Rom ?? Array.Empty<Models.Listxml.Rom>())
|
||||
{
|
||||
Assert.Null(rom.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(rom.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
|
||||
foreach (var disk in game.Disk ?? Array.Empty<Models.Listxml.Disk>())
|
||||
{
|
||||
Assert.Null(disk.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(disk.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
|
||||
foreach (var deviceRef in game.DeviceRef ?? Array.Empty<Models.Listxml.DeviceRef>())
|
||||
{
|
||||
Assert.Null(deviceRef.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(deviceRef.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
|
||||
foreach (var sample in game.Sample ?? Array.Empty<Models.Listxml.Sample>())
|
||||
{
|
||||
Assert.Null(sample.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(sample.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
|
||||
foreach (var chip in game.Chip ?? Array.Empty<Models.Listxml.Chip>())
|
||||
{
|
||||
Assert.Null(chip.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(chip.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
|
||||
foreach (var display in game.Display ?? Array.Empty<Models.Listxml.Display>())
|
||||
{
|
||||
Assert.Null(display.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(display.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
|
||||
foreach (var video in game.Video ?? Array.Empty<Models.Listxml.Video>())
|
||||
{
|
||||
Assert.Null(video.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(video.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
|
||||
if (game.Sound != null)
|
||||
{
|
||||
Assert.Null(game.Sound.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(game.Sound.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
|
||||
if (game.Input != null)
|
||||
{
|
||||
Assert.Null(game.Input.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(game.Input.ADDITIONAL_ELEMENTS);
|
||||
|
||||
foreach (var control in game.Input.Control ?? Array.Empty<Models.Listxml.Control>())
|
||||
{
|
||||
Assert.Null(control.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(control.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
}
|
||||
|
||||
foreach (var dipswitch in game.DipSwitch ?? Array.Empty<Models.Listxml.DipSwitch>())
|
||||
{
|
||||
Assert.Null(dipswitch.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(dipswitch.ADDITIONAL_ELEMENTS);
|
||||
|
||||
if (dipswitch.Condition != null)
|
||||
{
|
||||
Assert.Null(dipswitch.Condition.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(dipswitch.Condition.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
|
||||
foreach (var diplocation in dipswitch.DipLocation ?? Array.Empty<Models.Listxml.DipLocation>())
|
||||
{
|
||||
Assert.Null(diplocation.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(diplocation.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
|
||||
foreach (var dipvalue in dipswitch.DipValue ?? Array.Empty<Models.Listxml.DipValue>())
|
||||
{
|
||||
Assert.Null(dipvalue.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(dipvalue.ADDITIONAL_ELEMENTS);
|
||||
|
||||
if (dipvalue.Condition != null)
|
||||
{
|
||||
Assert.Null(dipvalue.Condition.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(dipvalue.Condition.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
foreach (var configuration in game.Configuration ?? Array.Empty<Models.Listxml.Configuration>())
|
||||
{
|
||||
Assert.Null(configuration.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(configuration.ADDITIONAL_ELEMENTS);
|
||||
|
||||
if (configuration.Condition != null)
|
||||
{
|
||||
Assert.Null(configuration.Condition.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(configuration.Condition.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
|
||||
foreach (var conflocation in configuration.ConfLocation ?? Array.Empty<Models.Listxml.ConfLocation>())
|
||||
{
|
||||
Assert.Null(conflocation.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(conflocation.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
|
||||
foreach (var confsetting in configuration.ConfSetting ?? Array.Empty<Models.Listxml.ConfSetting>())
|
||||
{
|
||||
Assert.Null(confsetting.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(confsetting.ADDITIONAL_ELEMENTS);
|
||||
|
||||
if (confsetting.Condition != null)
|
||||
{
|
||||
Assert.Null(confsetting.Condition.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(confsetting.Condition.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
foreach (var port in game.Port ?? Array.Empty<Models.Listxml.Port>())
|
||||
{
|
||||
Assert.Null(port.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(port.ADDITIONAL_ELEMENTS);
|
||||
|
||||
foreach (var analog in port.Analog ?? Array.Empty<Models.Listxml.Analog>())
|
||||
{
|
||||
Assert.Null(analog.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(analog.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
}
|
||||
|
||||
foreach (var adjuster in game.Adjuster ?? Array.Empty<Models.Listxml.Adjuster>())
|
||||
{
|
||||
Assert.Null(adjuster.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(adjuster.ADDITIONAL_ELEMENTS);
|
||||
|
||||
if (adjuster.Condition != null)
|
||||
{
|
||||
Assert.Null(adjuster.Condition.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(adjuster.Condition.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
}
|
||||
|
||||
if (game.Driver != null)
|
||||
{
|
||||
Assert.Null(game.Driver.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(game.Driver.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
|
||||
foreach (var feature in game.Feature ?? Array.Empty<Models.Listxml.Feature>())
|
||||
{
|
||||
Assert.Null(feature.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(feature.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
|
||||
foreach (var device in game.Device ?? Array.Empty<Models.Listxml.Device>())
|
||||
{
|
||||
Assert.Null(device.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(device.ADDITIONAL_ELEMENTS);
|
||||
|
||||
if (device.Instance != null)
|
||||
{
|
||||
Assert.Null(device.Instance.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(device.Instance.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
|
||||
foreach (var extension in device.Extension ?? Array.Empty<Models.Listxml.Extension>())
|
||||
{
|
||||
Assert.Null(extension.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(extension.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
}
|
||||
|
||||
foreach (var slot in game.Slot ?? Array.Empty<Models.Listxml.Slot>())
|
||||
{
|
||||
Assert.Null(slot.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(slot.ADDITIONAL_ELEMENTS);
|
||||
|
||||
foreach (var slotoption in slot.SlotOption ?? Array.Empty<Models.Listxml.SlotOption>())
|
||||
{
|
||||
Assert.Null(slotoption.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(slotoption.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
}
|
||||
|
||||
foreach (var softwarelist in game.SoftwareList ?? Array.Empty<Models.Listxml.SoftwareList>())
|
||||
{
|
||||
Assert.Null(softwarelist.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(softwarelist.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
|
||||
foreach (var ramoption in game.RamOption ?? Array.Empty<Models.Listxml.RamOption>())
|
||||
{
|
||||
Assert.Null(ramoption.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(ramoption.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
[Theory]
|
||||
@@ -557,180 +216,6 @@ namespace SabreTools.Serialization.Test
|
||||
// Validate the values
|
||||
Assert.NotNull(dat?.Game);
|
||||
Assert.Equal(count, dat.Game.Length);
|
||||
|
||||
// Validate we're not missing any attributes or elements
|
||||
Assert.Null(dat.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(dat.ADDITIONAL_ELEMENTS);
|
||||
if (dat.Header != null)
|
||||
{
|
||||
var header = dat.Header;
|
||||
Assert.Null(header.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(header.ADDITIONAL_ELEMENTS);
|
||||
|
||||
if (header.ClrMamePro != null)
|
||||
{
|
||||
var cmp = header.ClrMamePro;
|
||||
Assert.Null(cmp.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(cmp.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
|
||||
if (header.RomCenter != null)
|
||||
{
|
||||
var rc = header.RomCenter;
|
||||
Assert.Null(rc.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(rc.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
}
|
||||
|
||||
foreach (var game in dat.Game)
|
||||
{
|
||||
Assert.Null(game.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(game.ADDITIONAL_ELEMENTS);
|
||||
|
||||
foreach (var item in game.Release ?? Array.Empty<Models.Logiqx.Release>())
|
||||
{
|
||||
Assert.Null(item.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(item.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
|
||||
foreach (var item in game.BiosSet ?? Array.Empty<Models.Logiqx.BiosSet>())
|
||||
{
|
||||
Assert.Null(item.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(item.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
|
||||
foreach (var item in game.Rom ?? Array.Empty<Models.Logiqx.Rom>())
|
||||
{
|
||||
Assert.Null(item.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(item.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
|
||||
foreach (var item in game.Disk ?? Array.Empty<Models.Logiqx.Disk>())
|
||||
{
|
||||
Assert.Null(item.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(item.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
|
||||
foreach (var item in game.Media ?? Array.Empty<Models.Logiqx.Media>())
|
||||
{
|
||||
Assert.Null(item.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(item.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
|
||||
foreach (var item in game.DeviceRef ?? Array.Empty<Models.Logiqx.DeviceRef>())
|
||||
{
|
||||
Assert.Null(item.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(item.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
|
||||
foreach (var item in game.Sample ?? Array.Empty<Models.Logiqx.Sample>())
|
||||
{
|
||||
Assert.Null(item.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(item.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
|
||||
foreach (var item in game.Archive ?? Array.Empty<Models.Logiqx.Archive>())
|
||||
{
|
||||
Assert.Null(item.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(item.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
|
||||
if (game.Driver != null)
|
||||
{
|
||||
Assert.Null(game.Driver.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(game.Driver.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
|
||||
foreach (var item in game.SoftwareList ?? Array.Empty<Models.Logiqx.SoftwareList>())
|
||||
{
|
||||
Assert.Null(item.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(item.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
|
||||
if (game.Trurip != null)
|
||||
{
|
||||
var trurip = game.Trurip;
|
||||
Assert.Null(trurip.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(trurip.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
}
|
||||
|
||||
foreach (var dir in dat.Dir ?? Array.Empty<Models.Logiqx.Dir>())
|
||||
{
|
||||
Assert.NotNull(dir.Game);
|
||||
foreach (var game in dir.Game)
|
||||
{
|
||||
Assert.Null(game.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(game.ADDITIONAL_ELEMENTS);
|
||||
|
||||
foreach (var item in game.Release ?? Array.Empty<Models.Logiqx.Release>())
|
||||
{
|
||||
Assert.Null(item.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(item.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
|
||||
foreach (var item in game.BiosSet ?? Array.Empty<Models.Logiqx.BiosSet>())
|
||||
{
|
||||
Assert.Null(item.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(item.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
|
||||
foreach (var item in game.Rom ?? Array.Empty<Models.Logiqx.Rom>())
|
||||
{
|
||||
Assert.Null(item.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(item.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
|
||||
foreach (var item in game.Disk ?? Array.Empty<Models.Logiqx.Disk>())
|
||||
{
|
||||
Assert.Null(item.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(item.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
|
||||
foreach (var item in game.Media ?? Array.Empty<Models.Logiqx.Media>())
|
||||
{
|
||||
Assert.Null(item.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(item.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
|
||||
foreach (var item in game.DeviceRef ?? Array.Empty<Models.Logiqx.DeviceRef>())
|
||||
{
|
||||
Assert.Null(item.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(item.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
|
||||
foreach (var item in game.Sample ?? Array.Empty<Models.Logiqx.Sample>())
|
||||
{
|
||||
Assert.Null(item.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(item.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
|
||||
foreach (var item in game.Archive ?? Array.Empty<Models.Logiqx.Archive>())
|
||||
{
|
||||
Assert.Null(item.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(item.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
|
||||
if (game.Driver != null)
|
||||
{
|
||||
Assert.Null(game.Driver.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(game.Driver.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
|
||||
foreach (var item in game.SoftwareList ?? Array.Empty<Models.Logiqx.SoftwareList>())
|
||||
{
|
||||
Assert.Null(item.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(item.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
|
||||
if (game.Trurip != null)
|
||||
{
|
||||
var trurip = game.Trurip;
|
||||
Assert.Null(trurip.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(trurip.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
[Theory]
|
||||
@@ -746,196 +231,6 @@ namespace SabreTools.Serialization.Test
|
||||
// Validate the values
|
||||
Assert.NotNull(dat?.Games?.Game);
|
||||
Assert.Equal(count, dat.Games.Game.Length);
|
||||
|
||||
// Validate we're not missing any attributes or elements
|
||||
Assert.Null(dat.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(dat.ADDITIONAL_ELEMENTS);
|
||||
if (dat.Configuration != null)
|
||||
{
|
||||
var configuration = dat.Configuration;
|
||||
Assert.Null(configuration.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(configuration.ADDITIONAL_ELEMENTS);
|
||||
|
||||
if (configuration.Infos != null)
|
||||
{
|
||||
var infos = configuration.Infos;
|
||||
Assert.Null(infos.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(infos.ADDITIONAL_ELEMENTS);
|
||||
|
||||
if (infos.Title != null)
|
||||
{
|
||||
var title = infos.Title;
|
||||
Assert.Null(title.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(title.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
|
||||
if (infos.Location != null)
|
||||
{
|
||||
var location = infos.Location;
|
||||
Assert.Null(location.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(location.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
|
||||
if (infos.Publisher != null)
|
||||
{
|
||||
var publisher = infos.Publisher;
|
||||
Assert.Null(publisher.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(publisher.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
|
||||
if (infos.SourceRom != null)
|
||||
{
|
||||
var sourceRom = infos.SourceRom;
|
||||
Assert.Null(sourceRom.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(sourceRom.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
|
||||
if (infos.SaveType != null)
|
||||
{
|
||||
var saveType = infos.SaveType;
|
||||
Assert.Null(saveType.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(saveType.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
|
||||
if (infos.RomSize != null)
|
||||
{
|
||||
var romSize = infos.RomSize;
|
||||
Assert.Null(romSize.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(romSize.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
|
||||
if (infos.ReleaseNumber != null)
|
||||
{
|
||||
var releaseNumber = infos.ReleaseNumber;
|
||||
Assert.Null(releaseNumber.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(releaseNumber.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
|
||||
if (infos.LanguageNumber != null)
|
||||
{
|
||||
var languageNumber = infos.LanguageNumber;
|
||||
Assert.Null(languageNumber.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(languageNumber.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
|
||||
if (infos.Comment != null)
|
||||
{
|
||||
var comment = infos.Comment;
|
||||
Assert.Null(comment.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(comment.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
|
||||
if (infos.RomCRC != null)
|
||||
{
|
||||
var romCRC = infos.RomCRC;
|
||||
Assert.Null(romCRC.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(romCRC.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
|
||||
if (infos.Im1CRC != null)
|
||||
{
|
||||
var im1CRC = infos.Im1CRC;
|
||||
Assert.Null(im1CRC.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(im1CRC.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
|
||||
if (infos.Im2CRC != null)
|
||||
{
|
||||
var im2CRC = infos.Im2CRC;
|
||||
Assert.Null(im2CRC.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(im2CRC.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
|
||||
if (infos.Languages != null)
|
||||
{
|
||||
var languages = infos.Languages;
|
||||
Assert.Null(languages.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(languages.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
}
|
||||
|
||||
if (configuration.CanOpen != null)
|
||||
{
|
||||
var canOpen = configuration.CanOpen;
|
||||
Assert.Null(canOpen.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(canOpen.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
|
||||
if (configuration.NewDat != null)
|
||||
{
|
||||
var newDat = configuration.NewDat;
|
||||
Assert.Null(newDat.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(newDat.ADDITIONAL_ELEMENTS);
|
||||
|
||||
if (newDat.DatUrl != null)
|
||||
{
|
||||
var datURL = newDat.DatUrl;
|
||||
Assert.Null(datURL.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(datURL.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
}
|
||||
|
||||
if (configuration.Search != null)
|
||||
{
|
||||
var search = configuration.Search;
|
||||
Assert.Null(search.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(search.ADDITIONAL_ELEMENTS);
|
||||
|
||||
foreach (var to in search.To ?? Array.Empty<Models.OfflineList.To>())
|
||||
{
|
||||
Assert.Null(to.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(to.ADDITIONAL_ELEMENTS);
|
||||
|
||||
foreach (var find in to.Find ?? Array.Empty<Models.OfflineList.Find>())
|
||||
{
|
||||
Assert.Null(find.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(find.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Assert.Null(dat.Games.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(dat.Games.ADDITIONAL_ELEMENTS);
|
||||
|
||||
foreach (var game in dat.Games.Game)
|
||||
{
|
||||
Assert.Null(game.ADDITIONAL_ATTRIBUTES);
|
||||
//Assert.Null(game.ADDITIONAL_ELEMENTS); // TODO: Re-enable line when Models is fixed again
|
||||
|
||||
if (game.Files != null)
|
||||
{
|
||||
var files = game.Files;
|
||||
Assert.Null(files.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(files.ADDITIONAL_ELEMENTS);
|
||||
|
||||
foreach (var romCRC in files.RomCRC ?? Array.Empty<Models.OfflineList.FileRomCRC>())
|
||||
{
|
||||
Assert.Null(romCRC.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(romCRC.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (dat.GUI != null)
|
||||
{
|
||||
var gui = dat.GUI;
|
||||
Assert.Null(gui.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(gui.ADDITIONAL_ELEMENTS);
|
||||
|
||||
if (gui.Images != null)
|
||||
{
|
||||
var images = gui.Images;
|
||||
Assert.Null(images.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(images.ADDITIONAL_ELEMENTS);
|
||||
|
||||
foreach (var image in images.Image ?? Array.Empty<Models.OfflineList.Image>())
|
||||
{
|
||||
Assert.Null(image.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(image.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
[Theory]
|
||||
@@ -952,33 +247,6 @@ namespace SabreTools.Serialization.Test
|
||||
Assert.NotNull(dat);
|
||||
Assert.NotNull(dat.Software);
|
||||
Assert.Equal(count, dat.Software.Length);
|
||||
|
||||
// Validate we're not missing any attributes or elements
|
||||
Assert.Null(dat.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(dat.ADDITIONAL_ELEMENTS);
|
||||
foreach (var software in dat.Software)
|
||||
{
|
||||
Assert.Null(software.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(software.ADDITIONAL_ELEMENTS);
|
||||
|
||||
foreach (var dump in software.Dump ?? Array.Empty<Models.OpenMSX.Dump>())
|
||||
{
|
||||
Assert.Null(dump.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(dump.ADDITIONAL_ELEMENTS);
|
||||
|
||||
if (dump.Original != null)
|
||||
{
|
||||
Assert.Null(dump.Original.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(dump.Original.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
|
||||
if (dump.Rom != null)
|
||||
{
|
||||
Assert.Null(dump.Rom.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(dump.Rom.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
[Theory]
|
||||
@@ -994,37 +262,6 @@ namespace SabreTools.Serialization.Test
|
||||
// Validate the values
|
||||
Assert.NotNull(dat?.Games?.Rom);
|
||||
Assert.Equal(count, dat.Games.Rom.Length);
|
||||
|
||||
// Validate we're not missing any attributes or elements
|
||||
Assert.NotNull(dat.ADDITIONAL_ELEMENTS);
|
||||
Assert.Empty(dat.ADDITIONAL_ELEMENTS);
|
||||
if (dat.Credits != null)
|
||||
{
|
||||
Assert.NotNull(dat.Credits.ADDITIONAL_ELEMENTS);
|
||||
Assert.Empty(dat.Credits.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
|
||||
if (dat.Dat != null)
|
||||
{
|
||||
Assert.NotNull(dat.Dat.ADDITIONAL_ELEMENTS);
|
||||
Assert.Empty(dat.Dat.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
|
||||
if (dat.Emulator != null)
|
||||
{
|
||||
Assert.NotNull(dat.Emulator.ADDITIONAL_ELEMENTS);
|
||||
Assert.Empty(dat.Emulator.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
|
||||
if (dat.Games != null)
|
||||
{
|
||||
Assert.NotNull(dat.Games.ADDITIONAL_ELEMENTS);
|
||||
Assert.Empty(dat.Games.ADDITIONAL_ELEMENTS);
|
||||
foreach (var rom in dat.Games.Rom ?? Array.Empty<Models.RomCenter.Rom>())
|
||||
{
|
||||
Assert.Null(rom.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
[Theory]
|
||||
@@ -1045,12 +282,6 @@ namespace SabreTools.Serialization.Test
|
||||
// Validate the values
|
||||
Assert.NotNull(dat?.Row);
|
||||
Assert.Equal(count, dat.Row.Length);
|
||||
|
||||
// Validate we're not missing any attributes or elements
|
||||
foreach (var rom in dat.Row ?? Array.Empty<Models.SeparatedValue.Row>())
|
||||
{
|
||||
Assert.Null(rom.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
}
|
||||
|
||||
[Theory]
|
||||
@@ -1069,77 +300,8 @@ namespace SabreTools.Serialization.Test
|
||||
Assert.NotNull(dat);
|
||||
Assert.NotNull(dat.Software);
|
||||
Assert.Equal(count, dat.Software.Length);
|
||||
|
||||
// Validate we're not missing any attributes or elements
|
||||
Assert.Null(dat.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(dat.ADDITIONAL_ELEMENTS);
|
||||
foreach (var software in dat.Software)
|
||||
{
|
||||
Assert.Null(software.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(software.ADDITIONAL_ELEMENTS);
|
||||
|
||||
foreach (var info in software.Info ?? Array.Empty<Models.SoftwareList.Info>())
|
||||
{
|
||||
Assert.Null(info.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(info.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
|
||||
foreach (var sharedfeat in software.SharedFeat ?? Array.Empty<Models.SoftwareList.SharedFeat>())
|
||||
{
|
||||
Assert.Null(sharedfeat.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(sharedfeat.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
|
||||
foreach (var part in software.Part ?? Array.Empty<Models.SoftwareList.Part>())
|
||||
{
|
||||
Assert.Null(part.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(part.ADDITIONAL_ELEMENTS);
|
||||
|
||||
foreach (var feature in part.Feature ?? Array.Empty<Models.SoftwareList.Feature>())
|
||||
{
|
||||
Assert.Null(feature.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(feature.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
|
||||
foreach (var dataarea in part.DataArea ?? Array.Empty<Models.SoftwareList.DataArea>())
|
||||
{
|
||||
Assert.Null(dataarea.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(dataarea.ADDITIONAL_ELEMENTS);
|
||||
|
||||
foreach (var rom in dataarea.Rom ?? Array.Empty<Models.SoftwareList.Rom>())
|
||||
{
|
||||
Assert.Null(rom.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(rom.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
}
|
||||
|
||||
foreach (var diskarea in part.DiskArea ?? Array.Empty<Models.SoftwareList.DiskArea>())
|
||||
{
|
||||
Assert.Null(diskarea.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(diskarea.ADDITIONAL_ELEMENTS);
|
||||
|
||||
foreach (var disk in diskarea.Disk ?? Array.Empty<Models.SoftwareList.Disk>())
|
||||
{
|
||||
Assert.Null(disk.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(disk.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
}
|
||||
|
||||
foreach (var dipswitch in part.DipSwitch ?? Array.Empty<Models.SoftwareList.DipSwitch>())
|
||||
{
|
||||
Assert.Null(dipswitch.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(dipswitch.ADDITIONAL_ELEMENTS);
|
||||
|
||||
foreach (var dipvalue in dipswitch.DipValue ?? Array.Empty<Models.SoftwareList.DipValue>())
|
||||
{
|
||||
Assert.Null(dipvalue.ADDITIONAL_ATTRIBUTES);
|
||||
Assert.Null(dipvalue.ADDITIONAL_ELEMENTS);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/// <summary>
|
||||
/// Get the path to the test file
|
||||
/// </summary>
|
||||
|
||||
@@ -63,9 +63,9 @@ namespace SabreTools.Serialization.Test
|
||||
|
||||
var dump = new Models.OpenMSX.Dump[]
|
||||
{
|
||||
new Models.OpenMSX.Dump { Original = original, Rom = rom },
|
||||
new Models.OpenMSX.Dump { Rom = megaRom },
|
||||
new Models.OpenMSX.Dump { Rom = sccPlusCart },
|
||||
new() { Original = original, Rom = rom },
|
||||
new() { Rom = megaRom },
|
||||
new() { Rom = sccPlusCart },
|
||||
};
|
||||
|
||||
var software = new Models.OpenMSX.Software
|
||||
|
||||
@@ -1,38 +1,38 @@
|
||||
<Project Sdk="Microsoft.NET.Sdk">
|
||||
<Project Sdk="Microsoft.NET.Sdk">
|
||||
|
||||
<PropertyGroup>
|
||||
<TargetFrameworks>net6.0;net8.0</TargetFrameworks>
|
||||
<IsPackable>false</IsPackable>
|
||||
<LangVersion>latest</LangVersion>
|
||||
<Nullable>enable</Nullable>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup>
|
||||
<TargetFrameworks>net6.0;net8.0;net9.0</TargetFrameworks>
|
||||
<IsPackable>false</IsPackable>
|
||||
<LangVersion>latest</LangVersion>
|
||||
<Nullable>enable</Nullable>
|
||||
</PropertyGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<ProjectReference Include="..\SabreTools.Serialization\SabreTools.Serialization.csproj" />
|
||||
</ItemGroup>
|
||||
<ItemGroup>
|
||||
<ProjectReference Include="..\SabreTools.Serialization\SabreTools.Serialization.csproj" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<None Remove="TestData\*" />
|
||||
</ItemGroup>
|
||||
<ItemGroup>
|
||||
<None Remove="TestData\*" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<Content Include="TestData\*">
|
||||
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
|
||||
</Content>
|
||||
</ItemGroup>
|
||||
<ItemGroup>
|
||||
<Content Include="TestData\*">
|
||||
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
|
||||
</Content>
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<PackageReference Include="coverlet.collector" Version="6.0.2">
|
||||
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
|
||||
<PrivateAssets>all</PrivateAssets>
|
||||
</PackageReference>
|
||||
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.9.0" />
|
||||
<PackageReference Include="SabreTools.Models" Version="1.4.8" />
|
||||
<PackageReference Include="xunit" Version="2.8.0" />
|
||||
<PackageReference Include="xunit.runner.visualstudio" Version="2.8.0">
|
||||
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
|
||||
<PrivateAssets>all</PrivateAssets>
|
||||
</PackageReference>
|
||||
</ItemGroup>
|
||||
<ItemGroup>
|
||||
<PackageReference Include="coverlet.collector" Version="6.0.2">
|
||||
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
|
||||
<PrivateAssets>all</PrivateAssets>
|
||||
</PackageReference>
|
||||
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.12.0" />
|
||||
<PackageReference Include="SabreTools.Hashing" Version="1.4.0" />
|
||||
<PackageReference Include="xunit" Version="2.9.2" />
|
||||
<PackageReference Include="xunit.runner.visualstudio" Version="2.8.2">
|
||||
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
|
||||
<PrivateAssets>all</PrivateAssets>
|
||||
</PackageReference>
|
||||
</ItemGroup>
|
||||
|
||||
</Project>
|
||||
</Project>
|
||||
@@ -5,7 +5,7 @@ VisualStudioVersion = 17.0.31903.59
|
||||
MinimumVisualStudioVersion = 10.0.40219.1
|
||||
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "SabreTools.Serialization", "SabreTools.Serialization\SabreTools.Serialization.csproj", "{5B688801-5F36-483E-B2E8-F219BA5923A2}"
|
||||
EndProject
|
||||
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Test", "Test\Test.csproj", "{F3DEE31A-4726-464C-A90C-C19D78F51898}"
|
||||
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "InfoPrint", "InfoPrint\InfoPrint.csproj", "{F3DEE31A-4726-464C-A90C-C19D78F51898}"
|
||||
EndProject
|
||||
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "SabreTools.Serialization.Test", "SabreTools.Serialization.Test\SabreTools.Serialization.Test.csproj", "{B8A04C5E-A14F-4842-9035-2F6871A1DA10}"
|
||||
EndProject
|
||||
|
||||
@@ -1,6 +1,5 @@
using System;
using System.Linq;
using SabreTools.Models.ArchiveDotOrg;
using System.Collections.Generic;
using SabreTools.Serialization.Interfaces;

namespace SabreTools.Serialization.CrossModel
@@ -16,14 +15,13 @@ namespace SabreTools.Serialization.CrossModel
var files = new Models.ArchiveDotOrg.Files();

var machines = obj.Read<Models.Metadata.Machine[]>(Models.Metadata.MetadataFile.MachineKey);
if (machines != null && machines.Any())
var items = new List<Models.ArchiveDotOrg.File>();
foreach (var machine in machines ?? [])
{
files.File = machines
.Where(m => m != null)
.SelectMany(ConvertFromInternalModel)
.ToArray();
items.AddRange(ConvertFromInternalModel(machine));
}

files.File = [.. items];
return files;
}

@@ -36,9 +34,7 @@ namespace SabreTools.Serialization.CrossModel
if (roms == null)
return [];

return roms
.Where(r => r != null)
.Select(ConvertFromInternalModel).ToArray();
return Array.ConvertAll(roms, ConvertFromInternalModel);
}

/// <summary>

@@ -1,4 +1,4 @@
using System.Linq;
using System;
using SabreTools.Serialization.Interfaces;

namespace SabreTools.Serialization.CrossModel
@@ -16,13 +16,10 @@ namespace SabreTools.Serialization.CrossModel
[Models.Metadata.MetadataFile.HeaderKey] = ConvertHeaderToInternalModel(item),
};

if (item?.File != null && item.File.Any())
if (item?.File != null && item.File.Length > 0)
{
metadataFile[Models.Metadata.MetadataFile.MachineKey] = item.File
.Where(f => f != null)
.Select(ConvertMachineToInternalModel)
.Where(m => m != null)
.ToArray();
metadataFile[Models.Metadata.MetadataFile.MachineKey]
= Array.ConvertAll(item.File, ConvertMachineToInternalModel);
}

return metadataFile;

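The two ArchiveDotOrg hunks above are representative of the change applied across these CrossModel converters: a Where/Select/ToArray pipeline is replaced by a single Array.ConvertAll call. The sketch below is illustrative only (the Stringify converter and the sample data are made up, not from the repository); note that Array.ConvertAll does not skip elements the way the removed .Where(x => x != null) filter did, so the converters are now expected to handle every element they are given.

using System;
using System.Linq;

class ConvertAllSketch
{
    // Hypothetical converter standing in for the ConvertFromInternalModel overloads.
    static string Stringify(int value) => $"item-{value}";

    static void Main()
    {
        int[] source = [1, 2, 3];

        // Old shape: filter, project, then materialize an array.
        string[] viaLinq = source.Where(v => v >= 0).Select(Stringify).ToArray();

        // New shape: one pre-sized pass over the array, no LINQ iterator allocations.
        string[] viaConvertAll = Array.ConvertAll(source, Stringify);

        Console.WriteLine(string.Join(", ", viaLinq));        // item-1, item-2, item-3
        Console.WriteLine(string.Join(", ", viaConvertAll));  // item-1, item-2, item-3
    }
}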
@@ -1,5 +1,5 @@
using System;
using System.Linq;
using System.Collections.Generic;
using SabreTools.Models.AttractMode;
using SabreTools.Serialization.Interfaces;

@@ -17,14 +17,13 @@ namespace SabreTools.Serialization.CrossModel
var metadataFile = header != null ? ConvertHeaderFromInternalModel(header) : new MetadataFile();

var machines = obj.Read<Models.Metadata.Machine[]>(Models.Metadata.MetadataFile.MachineKey);
if (machines != null && machines.Any())
var items = new List<Row>();
foreach (var machine in machines ?? [])
{
metadataFile.Row = machines
.Where(m => m != null)
.SelectMany(ConvertMachineFromInternalModel)
.ToArray();
items.AddRange(ConvertMachineFromInternalModel(machine));
}

metadataFile.Row = [.. items];
return metadataFile;
}

@@ -46,13 +45,10 @@ namespace SabreTools.Serialization.CrossModel
private static Row[] ConvertMachineFromInternalModel(Models.Metadata.Machine item)
{
var roms = item.Read<Models.Metadata.Rom[]>(Models.Metadata.Machine.RomKey);
if (roms == null || !roms.Any())
if (roms == null || roms.Length == 0)
return [];

return roms
.Where(r => r != null)
.Select(rom => ConvertFromInternalModel(rom, item))
.ToArray();
return Array.ConvertAll(roms, r => ConvertFromInternalModel(r, item));
}

/// <summary>

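Where a converter flattens one machine into several rows, the SelectMany pipeline is replaced by an explicit List<T> plus a collection expression, as in the ArchiveDotOrg and AttractMode hunks above. A minimal standalone sketch of the two shapes follows; the Expand helper and the integer data are assumptions for illustration, not code from the repository.

using System;
using System.Collections.Generic;
using System.Linq;

class SelectManySketch
{
    // Hypothetical expander standing in for ConvertMachineFromInternalModel.
    static int[] Expand(int machine) => [machine, machine * 10];

    static void Main()
    {
        int[]? machines = [1, 2];

        // Old shape: flatten straight into an array with LINQ.
        int[] oldRows = (machines ?? []).Where(m => m >= 0).SelectMany(Expand).ToArray();

        // New shape from the diff: accumulate explicitly, then spread into an array.
        var items = new List<int>();
        foreach (var machine in machines ?? [])
        {
            items.AddRange(Expand(machine));
        }
        int[] newRows = [.. items];

        Console.WriteLine(string.Join(", ", oldRows)); // 1, 10, 2, 20
        Console.WriteLine(string.Join(", ", newRows)); // 1, 10, 2, 20
    }
}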
@@ -1,4 +1,4 @@
using System.Linq;
using System;
using SabreTools.Models.AttractMode;
using SabreTools.Serialization.Interfaces;

@@ -17,13 +17,10 @@ namespace SabreTools.Serialization.CrossModel
[Models.Metadata.MetadataFile.HeaderKey] = ConvertHeaderToInternalModel(obj),
};

if (obj?.Row != null && obj.Row.Any())
if (obj?.Row != null && obj.Row.Length > 0)
{
metadataFile[Models.Metadata.MetadataFile.MachineKey] = obj.Row
.Where(r => r != null)
.Select(ConvertMachineToInternalModel)
.Where(m => m != null)
.ToArray();
metadataFile[Models.Metadata.MetadataFile.MachineKey]
= Array.ConvertAll(obj.Row, ConvertMachineToInternalModel);
}

return metadataFile;

@@ -1,4 +1,4 @@
using System.Linq;
using System;
using SabreTools.Models.ClrMamePro;
using SabreTools.Serialization.Interfaces;

@@ -22,12 +22,10 @@ namespace SabreTools.Serialization.CrossModel
metadataFile.ClrMamePro = ConvertHeaderFromInternalModel(header);

var machines = obj.Read<Models.Metadata.Machine[]>(Models.Metadata.MetadataFile.MachineKey);
if (machines != null && machines.Any())
if (machines != null && machines.Length > 0)
{
metadataFile.Game = machines
.Where(m => m != null)
.Select(machine => ConvertMachineFromInternalModel(machine, game))
.ToArray();
metadataFile.Game
= Array.ConvertAll(machines, m => ConvertMachineFromInternalModel(m));
}

return metadataFile;
@@ -76,85 +74,40 @@ namespace SabreTools.Serialization.CrossModel
gameBase.SampleOf = item.ReadString(Models.Metadata.Machine.SampleOfKey);

var releases = item.Read<Models.Metadata.Release[]>(Models.Metadata.Machine.ReleaseKey);
if (releases != null && releases.Any())
{
gameBase.Release = releases
.Where(r => r != null)
.Select(ConvertFromInternalModel)
.ToArray();
}
if (releases != null && releases.Length > 0)
gameBase.Release = Array.ConvertAll(releases, ConvertFromInternalModel);

var biosSets = item.Read<Models.Metadata.BiosSet[]>(Models.Metadata.Machine.BiosSetKey);
if (biosSets != null && biosSets.Any())
{
gameBase.BiosSet = biosSets
.Where(r => r != null)
.Select(ConvertFromInternalModel)
.ToArray();
}
if (biosSets != null && biosSets.Length > 0)
gameBase.BiosSet = Array.ConvertAll(biosSets, ConvertFromInternalModel);

var roms = item.Read<Models.Metadata.Rom[]>(Models.Metadata.Machine.RomKey);
if (roms != null && roms.Any())
{
gameBase.Rom = roms
.Where(r => r != null)
.Select(ConvertFromInternalModel)
.ToArray();
}
if (roms != null && roms.Length > 0)
gameBase.Rom = Array.ConvertAll(roms, ConvertFromInternalModel);

var disks = item.Read<Models.Metadata.Disk[]>(Models.Metadata.Machine.DiskKey);
if (disks != null && disks.Any())
{
gameBase.Disk = disks
.Where(d => d != null)
.Select(ConvertFromInternalModel)
.ToArray();
}
if (disks != null && disks.Length > 0)
gameBase.Disk = Array.ConvertAll(disks, ConvertFromInternalModel);

var medias = item.Read<Models.Metadata.Media[]>(Models.Metadata.Machine.MediaKey);
if (medias != null && medias.Any())
{
gameBase.Media = medias
.Where(m => m != null)
.Select(ConvertFromInternalModel)
.ToArray();
}
if (medias != null && medias.Length > 0)
gameBase.Media = Array.ConvertAll(medias, ConvertFromInternalModel);

var samples = item.Read<Models.Metadata.Sample[]>(Models.Metadata.Machine.SampleKey);
if (samples != null && samples.Any())
{
gameBase.Sample = samples
.Where(m => m != null)
.Select(ConvertFromInternalModel)
.ToArray();
}
if (samples != null && samples.Length > 0)
gameBase.Sample = Array.ConvertAll(samples, ConvertFromInternalModel);

var archives = item.Read<Models.Metadata.Archive[]>(Models.Metadata.Machine.ArchiveKey);
if (archives != null && archives.Any())
{
gameBase.Archive = archives
.Where(m => m != null)
.Select(ConvertFromInternalModel)
.ToArray();
}
if (archives != null && archives.Length > 0)
gameBase.Archive = Array.ConvertAll(archives, ConvertFromInternalModel);

var chips = item.Read<Models.Metadata.Chip[]>(Models.Metadata.Machine.ChipKey);
if (chips != null && chips.Any())
{
gameBase.Chip = chips
.Where(m => m != null)
.Select(ConvertFromInternalModel)
.ToArray();
}
if (chips != null && chips.Length > 0)
gameBase.Chip = Array.ConvertAll(chips, ConvertFromInternalModel);

var videos = item.Read<Models.Metadata.Video[]>(Models.Metadata.Machine.VideoKey);
if (videos != null && videos.Any())
{
gameBase.Video = videos
.Where(m => m != null)
.Select(ConvertFromInternalModel)
.ToArray();
}
if (videos != null && videos.Length > 0)
gameBase.Video = Array.ConvertAll(videos, ConvertFromInternalModel);

var sound = item.Read<Models.Metadata.Sound>(Models.Metadata.Machine.SoundKey);
if (sound != null)
@@ -165,13 +118,8 @@ namespace SabreTools.Serialization.CrossModel
gameBase.Input = ConvertFromInternalModel(input);

var dipSwitches = item.Read<Models.Metadata.DipSwitch[]>(Models.Metadata.Machine.DipSwitchKey);
if (dipSwitches != null && dipSwitches.Any())
{
gameBase.DipSwitch = dipSwitches
.Where(m => m != null)
.Select(ConvertFromInternalModel)
.ToArray();
}
if (dipSwitches != null && dipSwitches.Length > 0)
gameBase.DipSwitch = Array.ConvertAll(dipSwitches, ConvertFromInternalModel);

var driver = item.Read<Models.Metadata.Driver>(Models.Metadata.Machine.DriverKey);
if (driver != null)

@@ -1,4 +1,4 @@
using System.Linq;
using System;
using SabreTools.Models.ClrMamePro;
using SabreTools.Serialization.Interfaces;

@@ -17,13 +17,10 @@ namespace SabreTools.Serialization.CrossModel
if (obj?.ClrMamePro != null)
metadataFile[Models.Metadata.MetadataFile.HeaderKey] = ConvertHeaderToInternalModel(obj.ClrMamePro);

if (obj?.Game != null && obj.Game.Any())
if (obj?.Game != null && obj.Game.Length > 0)
{
metadataFile[Models.Metadata.MetadataFile.MachineKey] = obj.Game
.Where(g => g != null)
.Select(ConvertMachineToInternalModel)
.Where(m => m != null)
.ToArray();
metadataFile[Models.Metadata.MetadataFile.MachineKey]
= Array.ConvertAll(obj.Game, ConvertMachineToInternalModel);
}

return metadataFile;
@@ -75,76 +72,58 @@ namespace SabreTools.Serialization.CrossModel
[Models.Metadata.Machine.SampleOfKey] = item.SampleOf,
};

if (item.Release != null && item.Release.Any())
if (item.Release != null && item.Release.Length > 0)
{
machine[Models.Metadata.Machine.ReleaseKey] = item.Release
.Where(r => r != null)
.Select(ConvertToInternalModel)
.ToArray();
machine[Models.Metadata.Machine.ReleaseKey]
= Array.ConvertAll(item.Release, ConvertToInternalModel);
}

if (item.BiosSet != null && item.BiosSet.Any())
if (item.BiosSet != null && item.BiosSet.Length > 0)
{
machine[Models.Metadata.Machine.BiosSetKey] = item.BiosSet
.Where(b => b != null)
.Select(ConvertToInternalModel)
.ToArray();
machine[Models.Metadata.Machine.BiosSetKey]
= Array.ConvertAll(item.BiosSet, ConvertToInternalModel);
}

if (item.Rom != null && item.Rom.Any())
if (item.Rom != null && item.Rom.Length > 0)
{
machine[Models.Metadata.Machine.RomKey] = item.Rom
.Where(r => r != null)
.Select(ConvertToInternalModel)
.ToArray();
machine[Models.Metadata.Machine.RomKey]
= Array.ConvertAll(item.Rom, ConvertToInternalModel);
}

if (item.Disk != null && item.Disk.Any())
if (item.Disk != null && item.Disk.Length > 0)
{
machine[Models.Metadata.Machine.DiskKey] = item.Disk
.Where(d => d != null)
.Select(ConvertToInternalModel)
.ToArray();
machine[Models.Metadata.Machine.DiskKey]
= Array.ConvertAll(item.Disk, ConvertToInternalModel);
}

if (item.Media != null && item.Media.Any())
if (item.Media != null && item.Media.Length > 0)
{
machine[Models.Metadata.Machine.MediaKey] = item.Media
.Where(m => m != null)
.Select(ConvertToInternalModel)
.ToArray();
machine[Models.Metadata.Machine.MediaKey]
= Array.ConvertAll(item.Media, ConvertToInternalModel);
}

if (item.Sample != null && item.Sample.Any())
if (item.Sample != null && item.Sample.Length > 0)
{
machine[Models.Metadata.Machine.SampleKey] = item.Sample
.Where(s => s != null)
.Select(ConvertToInternalModel)
.ToArray();
machine[Models.Metadata.Machine.SampleKey]
= Array.ConvertAll(item.Sample, ConvertToInternalModel);
}

if (item.Archive != null && item.Archive.Any())
if (item.Archive != null && item.Archive.Length > 0)
{
machine[Models.Metadata.Machine.ArchiveKey] = item.Archive
.Where(a => a != null)
.Select(ConvertToInternalModel)
.ToArray();
machine[Models.Metadata.Machine.ArchiveKey]
= Array.ConvertAll(item.Archive, ConvertToInternalModel);
}

if (item.Chip != null && item.Chip.Any())
if (item.Chip != null && item.Chip.Length > 0)
{
machine[Models.Metadata.Machine.ChipKey] = item.Chip
.Where(c => c != null)
.Select(ConvertToInternalModel)
.ToArray();
machine[Models.Metadata.Machine.ChipKey]
= Array.ConvertAll(item.Chip, ConvertToInternalModel);
}

if (item.Video != null)
{
machine[Models.Metadata.Machine.VideoKey] = item.Video
.Where(v => v != null)
.Select(ConvertToInternalModel)
.ToArray();
machine[Models.Metadata.Machine.VideoKey]
= Array.ConvertAll(item.Video, ConvertToInternalModel);
}

if (item.Sound != null)
@@ -153,12 +132,10 @@ namespace SabreTools.Serialization.CrossModel
if (item.Input != null)
machine[Models.Metadata.Machine.InputKey] = ConvertToInternalModel(item.Input);

if (item.DipSwitch != null && item.DipSwitch.Any())
if (item.DipSwitch != null && item.DipSwitch.Length > 0)
{
machine[Models.Metadata.Machine.DipSwitchKey] = item.DipSwitch
.Where(d => d != null)
.Select(ConvertToInternalModel)
.ToArray();
machine[Models.Metadata.Machine.DipSwitchKey]
= Array.ConvertAll(item.DipSwitch, ConvertToInternalModel);
}

if (item.Driver != null)

@@ -1,4 +1,4 @@
using System.Linq;
using System;
using SabreTools.Models.DosCenter;
using SabreTools.Serialization.Interfaces;

@@ -19,13 +19,8 @@ namespace SabreTools.Serialization.CrossModel
metadataFile.DosCenter = ConvertHeaderFromInternalModel(header);

var machines = obj.Read<Models.Metadata.Machine[]>(Models.Metadata.MetadataFile.MachineKey);
if (machines != null && machines.Any())
{
metadataFile.Game = machines
.Where(m => m != null)
.Select(ConvertMachineFromInternalModel)
.ToArray();
}
if (machines != null && machines.Length > 0)
metadataFile.Game = Array.ConvertAll(machines, ConvertMachineFromInternalModel);

return metadataFile;
}
@@ -59,13 +54,8 @@ namespace SabreTools.Serialization.CrossModel
};

var roms = item.Read<Models.Metadata.Rom[]>(Models.Metadata.Machine.RomKey);
if (roms != null && roms.Any())
{
game.File = roms
.Where(r => r != null)
.Select(ConvertFromInternalModel)
.ToArray();
}
if (roms != null && roms.Length > 0)
game.File = Array.ConvertAll(roms, ConvertFromInternalModel);

return game;
}

@@ -1,4 +1,4 @@
using System.Linq;
using System;
using SabreTools.Models.DosCenter;
using SabreTools.Serialization.Interfaces;

@@ -17,12 +17,10 @@ namespace SabreTools.Serialization.CrossModel
if (obj?.DosCenter != null)
metadataFile[Models.Metadata.MetadataFile.HeaderKey] = ConvertHeaderToInternalModel(obj.DosCenter);

if (obj?.Game != null && obj.Game.Any())
if (obj?.Game != null && obj.Game.Length > 0)
{
metadataFile[Models.Metadata.MetadataFile.MachineKey] = obj.Game
.Where(g => g != null)
.Select(ConvertMachineToInternalModel)
.ToArray();
metadataFile[Models.Metadata.MetadataFile.MachineKey]
= Array.ConvertAll(obj.Game, ConvertMachineToInternalModel);
}

return metadataFile;
@@ -56,12 +54,10 @@ namespace SabreTools.Serialization.CrossModel
[Models.Metadata.Machine.NameKey] = item.Name,
};

if (item.File != null && item.File.Any())
if (item.File != null && item.File.Length > 0)
{
machine[Models.Metadata.Machine.RomKey] = item.File
.Where(f => f != null)
.Select(ConvertToInternalModel)
.ToArray();
machine[Models.Metadata.Machine.RomKey]
= Array.ConvertAll(item.File, ConvertToInternalModel);
}

return machine;

@@ -1,5 +1,5 @@
using System;
using System.Linq;
using System.Collections.Generic;
using SabreTools.Models.EverdriveSMDB;
using SabreTools.Serialization.Interfaces;

@@ -16,13 +16,13 @@ namespace SabreTools.Serialization.CrossModel
var metadataFile = new MetadataFile();

var machines = obj.Read<Models.Metadata.Machine[]>(Models.Metadata.MetadataFile.MachineKey);
if (machines != null && machines.Any())
{
metadataFile.Row = machines
.Where(m => m != null)
.SelectMany(ConvertMachineFromInternalModel)
.ToArray();
}
var items = new List<Row>();
foreach (var machine in machines ?? [])
{
items.AddRange(ConvertMachineFromInternalModel(machine));
}

metadataFile.Row = [.. items];

return metadataFile;
}
@@ -33,13 +33,10 @@ namespace SabreTools.Serialization.CrossModel
private static Row[] ConvertMachineFromInternalModel(Models.Metadata.Machine item)
{
var roms = item.Read<Models.Metadata.Rom[]>(Models.Metadata.Machine.RomKey);
if (roms == null || !roms.Any())
if (roms == null || roms.Length == 0)
return [];

return roms
.Where(r => r != null)
.Select(ConvertFromInternalModel)
.ToArray();
return Array.ConvertAll(roms, ConvertFromInternalModel);
}

/// <summary>

@@ -1,4 +1,4 @@
using System.Linq;
using System;
using SabreTools.Models.EverdriveSMDB;
using SabreTools.Serialization.Interfaces;

@@ -17,12 +17,10 @@ namespace SabreTools.Serialization.CrossModel
[Models.Metadata.MetadataFile.HeaderKey] = ConvertHeaderToInternalModel(),
};

if (obj?.Row != null && obj.Row.Any())
if (obj?.Row != null && obj.Row.Length > 0)
{
metadataFile[Models.Metadata.MetadataFile.MachineKey] = obj.Row
.Where(r => r != null)
.Select(ConvertMachineToInternalModel)
.ToArray();
metadataFile[Models.Metadata.MetadataFile.MachineKey]
= Array.ConvertAll(obj.Row, ConvertMachineToInternalModel);
}

return metadataFile;

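The remaining guards in these hunks follow one more recurring pattern: .Any() on arrays and lists gives way to Length and Count checks, which is what lets most of these files drop their using System.Linq directive. A trivial sketch of the equivalence, with made-up sample data:

using System;
using System.Linq;

class EmptyCheckSketch
{
    static void Main()
    {
        int[]? values = [42];

        // Old guard used throughout the converters.
        if (values != null && values.Any())
            Console.WriteLine("non-empty via LINQ");

        // New guard: same outcome, no enumerator allocation, no System.Linq required.
        if (values != null && values.Length > 0)
            Console.WriteLine("non-empty via Length");
    }
}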
@@ -1,5 +1,5 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using SabreTools.Hashing;
|
||||
using SabreTools.Models.Hashfile;
|
||||
using SabreTools.Serialization.Interfaces;
|
||||
@@ -18,12 +18,10 @@ namespace SabreTools.Serialization.CrossModel
|
||||
return null;
|
||||
|
||||
var machines = obj.Read<Models.Metadata.Machine[]>(Models.Metadata.MetadataFile.MachineKey);
|
||||
if (machines == null || !machines.Any())
|
||||
if (machines == null || machines.Length == 0)
|
||||
return null;
|
||||
|
||||
var hashfiles = machines
|
||||
.Where(m => m != null)
|
||||
.Select(machine => ConvertMachineFromInternalModel(machine, hash));
|
||||
var hashfiles = Array.ConvertAll(machines, m => ConvertMachineFromInternalModel(m, hash));
|
||||
|
||||
var sfvs = new List<SFV>();
|
||||
var md5s = new List<MD5>();
|
||||
@@ -35,38 +33,38 @@ namespace SabreTools.Serialization.CrossModel
|
||||
|
||||
foreach (var hashfile in hashfiles)
|
||||
{
|
||||
if (hashfile.SFV != null && hashfile.SFV.Any())
|
||||
if (hashfile.SFV != null && hashfile.SFV.Length > 0)
|
||||
sfvs.AddRange(hashfile.SFV);
|
||||
if (hashfile.MD5 != null && hashfile.MD5.Any())
|
||||
if (hashfile.MD5 != null && hashfile.MD5.Length > 0)
|
||||
md5s.AddRange(hashfile.MD5);
|
||||
if (hashfile.SHA1 != null && hashfile.SHA1.Any())
|
||||
if (hashfile.SHA1 != null && hashfile.SHA1.Length > 0)
|
||||
sha1s.AddRange(hashfile.SHA1);
|
||||
if (hashfile.SHA256 != null && hashfile.SHA256.Any())
|
||||
if (hashfile.SHA256 != null && hashfile.SHA256.Length > 0)
|
||||
sha256s.AddRange(hashfile.SHA256);
|
||||
if (hashfile.SHA384 != null && hashfile.SHA384.Any())
|
||||
if (hashfile.SHA384 != null && hashfile.SHA384.Length > 0)
|
||||
sha384s.AddRange(hashfile.SHA384);
|
||||
if (hashfile.SHA512 != null && hashfile.SHA512.Any())
|
||||
if (hashfile.SHA512 != null && hashfile.SHA512.Length > 0)
|
||||
sha512s.AddRange(hashfile.SHA512);
|
||||
if (hashfile.SpamSum != null && hashfile.SpamSum.Any())
|
||||
if (hashfile.SpamSum != null && hashfile.SpamSum.Length > 0)
|
||||
spamsums.AddRange(hashfile.SpamSum);
|
||||
}
|
||||
|
||||
var hashfileItem = new Models.Hashfile.Hashfile();
|
||||
|
||||
if (sfvs.Any())
|
||||
hashfileItem.SFV = sfvs.ToArray();
|
||||
if (md5s.Any())
|
||||
hashfileItem.MD5 = md5s.ToArray();
|
||||
if (sha1s.Any())
|
||||
hashfileItem.SHA1 = sha1s.ToArray();
|
||||
if (sha256s.Any())
|
||||
hashfileItem.SHA256 = sha256s.ToArray();
|
||||
if (sha384s.Any())
|
||||
hashfileItem.SHA384 = sha384s.ToArray();
|
||||
if (sha512s.Any())
|
||||
hashfileItem.SHA512 = sha512s.ToArray();
|
||||
if (spamsums.Any())
|
||||
hashfileItem.SpamSum = spamsums.ToArray();
|
||||
if (sfvs.Count > 0)
|
||||
hashfileItem.SFV = [.. sfvs];
|
||||
if (md5s.Count > 0)
|
||||
hashfileItem.MD5 = [.. md5s];
|
||||
if (sha1s.Count > 0)
|
||||
hashfileItem.SHA1 = [.. sha1s];
|
||||
if (sha256s.Count > 0)
|
||||
hashfileItem.SHA256 = [.. sha256s];
|
||||
if (sha384s.Count > 0)
|
||||
hashfileItem.SHA384 = [.. sha384s];
|
||||
if (sha512s.Count > 0)
|
||||
hashfileItem.SHA512 = [.. sha512s];
|
||||
if (spamsums.Count > 0)
|
||||
hashfileItem.SpamSum = [.. spamsums];
|
||||
|
||||
return hashfileItem;
|
||||
}
|
||||
@@ -80,13 +78,8 @@ namespace SabreTools.Serialization.CrossModel
|
||||
return null;
|
||||
|
||||
var machines = item.Read<Models.Metadata.Machine[]>(Models.Metadata.MetadataFile.MachineKey);
|
||||
if (machines != null && machines.Any())
|
||||
{
|
||||
return machines
|
||||
.Where(m => m != null)
|
||||
.Select(machine => ConvertMachineFromInternalModel(machine, hash))
|
||||
.ToArray();
|
||||
}
|
||||
if (machines != null && machines.Length > 0)
|
||||
return Array.ConvertAll(machines, m => ConvertMachineFromInternalModel(m, hash));
|
||||
|
||||
return null;
|
||||
}
|
||||
@@ -102,47 +95,26 @@ namespace SabreTools.Serialization.CrossModel
|
||||
|
||||
return new Models.Hashfile.Hashfile
|
||||
{
|
||||
SFV = hash == HashType.CRC32 || hash == HashType.CRC32_ISO || hash == HashType.CRC32_Naive || hash == HashType.CRC32_Optimized || hash == HashType.CRC32_Parallel
|
||||
? roms
|
||||
.Where(r => r != null)
|
||||
.Select(ConvertToSFV)
|
||||
.ToArray()
|
||||
SFV = hash == HashType.CRC32
|
||||
? Array.ConvertAll(roms, ConvertToSFV)
|
||||
: null,
|
||||
MD5 = hash == HashType.MD5
|
||||
? roms
|
||||
.Where(r => r != null)
|
||||
.Select(ConvertToMD5)
|
||||
.ToArray()
|
||||
? Array.ConvertAll(roms, ConvertToMD5)
|
||||
: null,
|
||||
SHA1 = hash == HashType.SHA1
|
||||
? roms
|
||||
.Where(r => r != null)
|
||||
.Select(ConvertToSHA1)
|
||||
.ToArray()
|
||||
? Array.ConvertAll(roms, ConvertToSHA1)
|
||||
: null,
|
||||
SHA256 = hash == HashType.SHA256
|
||||
? roms
|
||||
.Where(r => r != null)
|
||||
.Select(ConvertToSHA256)
|
||||
.ToArray()
|
||||
? Array.ConvertAll(roms, ConvertToSHA256)
|
||||
: null,
|
||||
SHA384 = hash == HashType.SHA384
|
||||
? roms
|
||||
.Where(r => r != null)
|
||||
.Select(ConvertToSHA384)
|
||||
.ToArray()
|
||||
? Array.ConvertAll(roms, ConvertToSHA384)
|
||||
: null,
|
||||
SHA512 = hash == HashType.SHA512
|
||||
? roms
|
||||
.Where(r => r != null)
|
||||
.Select(ConvertToSHA512)
|
||||
.ToArray()
|
||||
? Array.ConvertAll(roms, ConvertToSHA512)
|
||||
: null,
|
||||
SpamSum = hash == HashType.SpamSum
|
||||
? roms
|
||||
.Where(r => r != null)
|
||||
.Select(ConvertToSpamSum)
|
||||
.ToArray()
|
||||
? Array.ConvertAll(roms, ConvertToSpamSum)
|
||||
: null,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
using System.Linq;
|
||||
using System;
|
||||
using SabreTools.Models.Hashfile;
|
||||
using SabreTools.Serialization.Interfaces;
|
||||
|
||||
@@ -42,20 +42,20 @@ namespace SabreTools.Serialization.CrossModel
|
||||
{
|
||||
var machine = new Models.Metadata.Machine();
|
||||
|
||||
if (item.SFV != null && item.SFV.Any())
|
||||
machine[Models.Metadata.Machine.RomKey] = item.SFV.Select(ConvertToInternalModel).ToArray();
|
||||
else if (item.MD5 != null && item.MD5.Any())
|
||||
machine[Models.Metadata.Machine.RomKey] = item.MD5.Select(ConvertToInternalModel).ToArray();
|
||||
else if (item.SHA1 != null && item.SHA1.Any())
|
||||
machine[Models.Metadata.Machine.RomKey] = item.SHA1.Select(ConvertToInternalModel).ToArray();
|
||||
else if (item.SHA256 != null && item.SHA256.Any())
|
||||
machine[Models.Metadata.Machine.RomKey] = item.SHA256.Select(ConvertToInternalModel).ToArray();
|
||||
else if (item.SHA384 != null && item.SHA384.Any())
|
||||
machine[Models.Metadata.Machine.RomKey] = item.SHA384.Select(ConvertToInternalModel).ToArray();
|
||||
else if (item.SHA512 != null && item.SHA512.Any())
|
||||
machine[Models.Metadata.Machine.RomKey] = item.SHA512.Select(ConvertToInternalModel).ToArray();
|
||||
else if (item.SpamSum != null && item.SpamSum.Any())
|
||||
machine[Models.Metadata.Machine.RomKey] = item.SpamSum.Select(ConvertToInternalModel).ToArray();
|
||||
if (item.SFV != null && item.SFV.Length > 0)
|
||||
machine[Models.Metadata.Machine.RomKey] = Array.ConvertAll(item.SFV, ConvertToInternalModel);
|
||||
else if (item.MD5 != null && item.MD5.Length > 0)
|
||||
machine[Models.Metadata.Machine.RomKey] = Array.ConvertAll(item.MD5, ConvertToInternalModel);
|
||||
else if (item.SHA1 != null && item.SHA1.Length > 0)
|
||||
machine[Models.Metadata.Machine.RomKey] = Array.ConvertAll(item.SHA1, ConvertToInternalModel);
|
||||
else if (item.SHA256 != null && item.SHA256.Length > 0)
|
||||
machine[Models.Metadata.Machine.RomKey] = Array.ConvertAll(item.SHA256, ConvertToInternalModel);
|
||||
else if (item.SHA384 != null && item.SHA384.Length > 0)
|
||||
machine[Models.Metadata.Machine.RomKey] = Array.ConvertAll(item.SHA384, ConvertToInternalModel);
|
||||
else if (item.SHA512 != null && item.SHA512.Length > 0)
|
||||
machine[Models.Metadata.Machine.RomKey] = Array.ConvertAll(item.SHA512, ConvertToInternalModel);
|
||||
else if (item.SpamSum != null && item.SpamSum.Length > 0)
|
||||
machine[Models.Metadata.Machine.RomKey] = Array.ConvertAll(item.SpamSum, ConvertToInternalModel);
|
||||
|
||||
return machine;
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using SabreTools.Models.Listrom;
|
||||
using SabreTools.Serialization.Interfaces;
|
||||
|
||||
@@ -16,13 +16,8 @@ namespace SabreTools.Serialization.CrossModel
|
||||
var metadataFile = new MetadataFile();
|
||||
|
||||
var machines = obj.Read<Models.Metadata.Machine[]>(Models.Metadata.MetadataFile.MachineKey);
|
||||
if (machines != null && machines.Any())
|
||||
{
|
||||
metadataFile.Set = machines
|
||||
.Where(m => m != null)
|
||||
.Select(ConvertMachineFromInternalModel)
|
||||
.ToArray();
|
||||
}
|
||||
if (machines != null && machines.Length > 0)
|
||||
metadataFile.Set = Array.ConvertAll(machines, ConvertMachineFromInternalModel);
|
||||
|
||||
return metadataFile;
|
||||
}
|
||||
@@ -43,14 +38,14 @@ namespace SabreTools.Serialization.CrossModel
|
||||
var roms = item.Read<Models.Metadata.Rom[]>(Models.Metadata.Machine.RomKey);
|
||||
if (roms != null)
|
||||
{
|
||||
rowItems.AddRange(roms.Where(r => r != null).Select(ConvertFromInternalModel));
|
||||
rowItems.AddRange(Array.ConvertAll(roms, ConvertFromInternalModel));
|
||||
}
|
||||
|
||||
var disks = item.Read<Models.Metadata.Disk[]>(Models.Metadata.Machine.DiskKey);
|
||||
if (disks != null)
|
||||
rowItems.AddRange(disks.Where(d => d != null).Select(ConvertFromInternalModel));
|
||||
rowItems.AddRange(Array.ConvertAll(disks, ConvertFromInternalModel));
|
||||
|
||||
set.Row = rowItems.ToArray();
|
||||
set.Row = [.. rowItems];
|
||||
return set;
|
||||
}
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using SabreTools.Models.Listrom;
|
||||
using SabreTools.Serialization.Interfaces;
|
||||
|
||||
@@ -18,12 +18,10 @@ namespace SabreTools.Serialization.CrossModel
|
||||
[Models.Metadata.MetadataFile.HeaderKey] = ConvertHeaderToInternalModel(),
|
||||
};
|
||||
|
||||
if (obj?.Set != null && obj.Set.Any())
|
||||
if (obj?.Set != null && obj.Set.Length > 0)
|
||||
{
|
||||
metadataFile[Models.Metadata.MetadataFile.MachineKey] = obj.Set
|
||||
.Where(s => s != null)
|
||||
.Select(ConvertMachineToInternalModel)
|
||||
.ToArray();
|
||||
metadataFile[Models.Metadata.MetadataFile.MachineKey]
|
||||
= Array.ConvertAll(obj.Set, ConvertMachineToInternalModel);
|
||||
}
|
||||
|
||||
return metadataFile;
|
||||
@@ -57,16 +55,21 @@ namespace SabreTools.Serialization.CrossModel
|
||||
machine[Models.Metadata.Machine.NameKey] = item.Driver;
|
||||
}
|
||||
|
||||
if (item.Row != null && item.Row.Any())
|
||||
if (item.Row != null && item.Row.Length > 0)
|
||||
{
|
||||
var datItems = new List<Models.Metadata.DatItem>();
|
||||
var disks = new List<Models.Metadata.Disk>();
|
||||
var roms = new List<Models.Metadata.Rom>();
|
||||
foreach (var file in item.Row)
|
||||
{
|
||||
datItems.Add(ConvertToInternalModel(file));
|
||||
var datItem = ConvertToInternalModel(file);
|
||||
if (datItem is Models.Metadata.Disk disk)
|
||||
disks.Add(disk);
|
||||
else if (datItem is Models.Metadata.Rom rom)
|
||||
roms.Add(rom);
|
||||
}
|
||||
|
||||
machine[Models.Metadata.Machine.DiskKey] = datItems.Where(i => i.ReadString(Models.Metadata.DatItem.TypeKey) == "disk").Select(d => d as Models.Metadata.Disk).ToArray();
|
||||
machine[Models.Metadata.Machine.RomKey] = datItems.Where(i => i.ReadString(Models.Metadata.DatItem.TypeKey) == "rom").Select(d => d as Models.Metadata.Rom).ToArray();
|
||||
machine[Models.Metadata.Machine.DiskKey] = disks.ToArray();
|
||||
machine[Models.Metadata.Machine.RomKey] = roms.ToArray();
|
||||
}
|
||||
|
||||
return machine;
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
using System.Linq;
|
||||
using System;
|
||||
using SabreTools.Models.Listxml;
|
||||
using SabreTools.Serialization.Interfaces;
|
||||
|
||||
@@ -16,13 +16,8 @@ namespace SabreTools.Serialization.CrossModel
|
||||
var mame = header != null ? ConvertMameFromInternalModel(header) : new Mame();
|
||||
|
||||
var machines = obj.Read<Models.Metadata.Machine[]>(Models.Metadata.MetadataFile.MachineKey);
|
||||
if (machines != null && machines.Any())
|
||||
{
|
||||
mame.Game = machines
|
||||
.Where(m => m != null)
|
||||
.Select(ConvertMachineFromInternalModel)
|
||||
.ToArray();
|
||||
}
|
||||
if (machines != null && machines.Length > 0)
|
||||
mame.Game = Array.ConvertAll(machines, ConvertMachineFromInternalModel);
|
||||
|
||||
return mame;
|
||||
}
|
||||
@@ -39,13 +34,8 @@ namespace SabreTools.Serialization.CrossModel
|
||||
var mame = header != null ? ConvertMameFromInternalModel(header) : new Mame();
|
||||
|
||||
var machines = item.Read<Models.Metadata.Machine[]>(Models.Metadata.MetadataFile.MachineKey);
|
||||
if (machines != null && machines.Any())
|
||||
{
|
||||
mame.Game = machines
|
||||
.Where(m => m != null)
|
||||
.Select(ConvertMachineFromInternalModel)
|
||||
.ToArray();
|
||||
}
|
||||
if (machines != null && machines.Length > 0)
|
||||
mame.Game = Array.ConvertAll(machines, ConvertMachineFromInternalModel);
|
||||
|
||||
return mame;
|
||||
}
|
||||
@@ -88,76 +78,36 @@ namespace SabreTools.Serialization.CrossModel
|
||||
};
|
||||
|
||||
var biosSets = item.Read<Models.Metadata.BiosSet[]>(Models.Metadata.Machine.BiosSetKey);
|
||||
if (biosSets != null && biosSets.Any())
|
||||
{
|
||||
machine.BiosSet = biosSets
|
||||
.Where(b => b != null)
|
||||
.Select(ConvertFromInternalModel)
|
||||
.ToArray();
|
||||
}
|
||||
if (biosSets != null && biosSets.Length > 0)
|
||||
machine.BiosSet = Array.ConvertAll(biosSets, ConvertFromInternalModel);
|
||||
|
||||
var roms = item.Read<Models.Metadata.Rom[]>(Models.Metadata.Machine.RomKey);
|
||||
if (roms != null && roms.Any())
|
||||
{
|
||||
machine.Rom = roms
|
||||
.Where(r => r != null)
|
||||
.Select(ConvertFromInternalModel)
|
||||
.ToArray();
|
||||
}
|
||||
if (roms != null && roms.Length > 0)
|
||||
machine.Rom = Array.ConvertAll(roms, ConvertFromInternalModel);
|
||||
|
||||
var disks = item.Read<Models.Metadata.Disk[]>(Models.Metadata.Machine.DiskKey);
|
||||
if (disks != null && disks.Any())
|
||||
{
|
||||
machine.Disk = disks
|
||||
.Where(d => d != null)
|
||||
.Select(ConvertFromInternalModel)
|
||||
.ToArray();
|
||||
}
|
||||
if (disks != null && disks.Length > 0)
|
||||
machine.Disk = Array.ConvertAll(disks, ConvertFromInternalModel);
|
||||
|
||||
var deviceRefs = item.Read<Models.Metadata.DeviceRef[]>(Models.Metadata.Machine.DeviceRefKey);
|
||||
if (deviceRefs != null && deviceRefs.Any())
|
||||
{
|
||||
machine.DeviceRef = deviceRefs
|
||||
.Where(d => d != null)
|
||||
.Select(ConvertFromInternalModel)
|
||||
.ToArray();
|
||||
}
|
||||
if (deviceRefs != null && deviceRefs.Length > 0)
|
||||
machine.DeviceRef = Array.ConvertAll(deviceRefs, ConvertFromInternalModel);
|
||||
|
||||
var samples = item.Read<Models.Metadata.Sample[]>(Models.Metadata.Machine.SampleKey);
|
||||
if (samples != null && samples.Any())
|
||||
{
|
||||
machine.Sample = samples
|
||||
.Where(s => s != null)
|
||||
.Select(ConvertFromInternalModel)
|
||||
.ToArray();
|
||||
}
|
||||
if (samples != null && samples.Length > 0)
|
||||
machine.Sample = Array.ConvertAll(samples, ConvertFromInternalModel);
|
||||
|
||||
var chips = item.Read<Models.Metadata.Chip[]>(Models.Metadata.Machine.ChipKey);
|
||||
if (chips != null && chips.Any())
|
||||
{
|
||||
machine.Chip = chips
|
||||
.Where(c => c != null)
|
||||
.Select(ConvertFromInternalModel)
|
||||
.ToArray();
|
||||
}
|
||||
if (chips != null && chips.Length > 0)
|
||||
machine.Chip = Array.ConvertAll(chips, ConvertFromInternalModel);
|
||||
|
||||
var displays = item.Read<Models.Metadata.Display[]>(Models.Metadata.Machine.DisplayKey);
|
||||
if (displays != null && displays.Any())
|
||||
{
|
||||
machine.Display = displays
|
||||
.Where(d => d != null)
|
||||
.Select(ConvertFromInternalModel)
|
||||
.ToArray();
|
||||
}
|
||||
if (displays != null && displays.Length > 0)
|
||||
machine.Display = Array.ConvertAll(displays, ConvertFromInternalModel);
|
||||
|
||||
var videos = item.Read<Models.Metadata.Video[]>(Models.Metadata.Machine.VideoKey);
|
||||
if (videos != null && videos.Any())
|
||||
{
|
||||
machine.Video = videos
|
||||
.Where(d => d != null)
|
||||
.Select(ConvertFromInternalModel)
|
||||
.ToArray();
|
||||
}
|
||||
if (videos != null && videos.Length > 0)
|
||||
machine.Video = Array.ConvertAll(videos, ConvertFromInternalModel);
|
||||
|
||||
var sound = item.Read<Models.Metadata.Sound>(Models.Metadata.Machine.SoundKey);
|
||||
if (sound != null)
|
||||
@@ -168,89 +118,44 @@ namespace SabreTools.Serialization.CrossModel
|
||||
machine.Input = ConvertFromInternalModel(input);
|
||||
|
||||
var dipSwitches = item.Read<Models.Metadata.DipSwitch[]>(Models.Metadata.Machine.DipSwitchKey);
|
||||
if (dipSwitches != null && dipSwitches.Any())
|
||||
{
|
||||
machine.DipSwitch = dipSwitches
|
||||
.Where(d => d != null)
|
||||
.Select(ConvertFromInternalModel)
|
||||
.ToArray();
|
||||
}
|
||||
if (dipSwitches != null && dipSwitches.Length > 0)
|
||||
machine.DipSwitch = Array.ConvertAll(dipSwitches, ConvertFromInternalModel);
|
||||
|
||||
var configurations = item.Read<Models.Metadata.Configuration[]>(Models.Metadata.Machine.ConfigurationKey);
|
||||
if (configurations != null && configurations.Any())
|
||||
{
|
||||
machine.Configuration = configurations
|
||||
.Where(d => d != null)
|
||||
.Select(ConvertFromInternalModel)
|
||||
.ToArray();
|
||||
}
|
||||
if (configurations != null && configurations.Length > 0)
|
||||
machine.Configuration = Array.ConvertAll(configurations, ConvertFromInternalModel);
|
||||
|
||||
var ports = item.Read<Models.Metadata.Port[]>(Models.Metadata.Machine.PortKey);
|
||||
if (ports != null && ports.Any())
|
||||
{
|
||||
machine.Port = ports
|
||||
.Where(d => d != null)
|
||||
.Select(ConvertFromInternalModel)
|
||||
.ToArray();
|
||||
}
|
||||
if (ports != null && ports.Length > 0)
|
||||
machine.Port = Array.ConvertAll(ports, ConvertFromInternalModel);
|
||||
|
||||
var adjusters = item.Read<Models.Metadata.Adjuster[]>(Models.Metadata.Machine.AdjusterKey);
|
||||
if (adjusters != null && adjusters.Any())
|
||||
{
|
||||
machine.Adjuster = adjusters
|
||||
.Where(d => d != null)
|
||||
.Select(ConvertFromInternalModel)
|
||||
.ToArray();
|
||||
}
|
||||
if (adjusters != null && adjusters.Length > 0)
|
||||
machine.Adjuster = Array.ConvertAll(adjusters, ConvertFromInternalModel);
|
||||
|
||||
var driver = item.Read<Models.Metadata.Driver>(Models.Metadata.Machine.DriverKey);
|
||||
if (driver != null)
|
||||
machine.Driver = ConvertFromInternalModel(driver);
|
||||
|
||||
var features = item.Read<Models.Metadata.Feature[]>(Models.Metadata.Machine.FeatureKey);
|
||||
if (features != null && features.Any())
|
||||
{
|
||||
machine.Feature = features
|
||||
.Where(d => d != null)
|
||||
.Select(ConvertFromInternalModel)
|
||||
.ToArray();
|
||||
}
|
||||
if (features != null && features.Length > 0)
|
||||
machine.Feature = Array.ConvertAll(features, ConvertFromInternalModel);
|
||||
|
||||
var devices = item.Read<Models.Metadata.Device[]>(Models.Metadata.Machine.DeviceKey);
|
||||
if (devices != null && devices.Any())
|
||||
{
|
||||
machine.Device = devices
|
||||
.Where(d => d != null)
|
||||
.Select(ConvertFromInternalModel)
|
||||
.ToArray();
|
||||
}
|
||||
if (devices != null && devices.Length > 0)
|
||||
machine.Device = Array.ConvertAll(devices, ConvertFromInternalModel);
|
||||
|
||||
var slots = item.Read<Models.Metadata.Slot[]>(Models.Metadata.Machine.SlotKey);
|
||||
if (slots != null && slots.Any())
|
||||
{
|
||||
machine.Slot = slots
|
||||
.Where(d => d != null)
|
||||
.Select(ConvertFromInternalModel)
|
||||
.ToArray();
|
||||
}
|
||||
if (slots != null && slots.Length > 0)
|
||||
machine.Slot = Array.ConvertAll(slots, ConvertFromInternalModel);
|
||||
|
||||
var softwareLists = item.Read<Models.Metadata.SoftwareList[]>(Models.Metadata.Machine.SoftwareListKey);
|
||||
if (softwareLists != null && softwareLists.Any())
|
||||
{
|
||||
machine.SoftwareList = softwareLists
|
||||
.Where(d => d != null)
|
||||
.Select(ConvertFromInternalModel)
|
||||
.ToArray();
|
||||
}
|
||||
if (softwareLists != null && softwareLists.Length > 0)
|
||||
machine.SoftwareList = Array.ConvertAll(softwareLists, ConvertFromInternalModel);
|
||||
|
||||
var ramOptions = item.Read<Models.Metadata.RamOption[]>(Models.Metadata.Machine.RamOptionKey);
|
||||
if (ramOptions != null && ramOptions.Any())
|
||||
{
|
||||
machine.RamOption = ramOptions
|
||||
.Where(d => d != null)
|
||||
.Select(ConvertFromInternalModel)
|
||||
.ToArray();
|
||||
}
|
||||
if (ramOptions != null && ramOptions.Length > 0)
|
||||
machine.RamOption = Array.ConvertAll(ramOptions, ConvertFromInternalModel);
|
||||
|
||||
return machine;
|
||||
}
|
||||
@@ -347,22 +252,12 @@ namespace SabreTools.Serialization.CrossModel
|
||||
configuration.Condition = ConvertFromInternalModel(condition);
|
||||
|
||||
var confLocations = item.Read<Models.Metadata.ConfLocation[]>(Models.Metadata.Configuration.ConfLocationKey);
|
||||
if (confLocations != null && confLocations.Any())
|
||||
{
|
||||
configuration.ConfLocation = confLocations
|
||||
.Where(c => c != null)
|
||||
.Select(ConvertFromInternalModel)
|
||||
.ToArray();
|
||||
}
|
||||
if (confLocations != null && confLocations.Length > 0)
|
||||
configuration.ConfLocation = Array.ConvertAll(confLocations, ConvertFromInternalModel);
|
||||
|
||||
var confSettings = item.Read<Models.Metadata.ConfSetting[]>(Models.Metadata.Configuration.ConfSettingKey);
|
||||
if (confSettings != null && confSettings.Any())
|
||||
{
|
||||
configuration.ConfSetting = confSettings
|
||||
.Where(c => c != null)
|
||||
.Select(ConvertFromInternalModel)
|
||||
.ToArray();
|
||||
}
|
||||
if (confSettings != null && confSettings.Length > 0)
|
||||
configuration.ConfSetting = Array.ConvertAll(confSettings, ConvertFromInternalModel);
|
||||
|
||||
return configuration;
|
||||
}
|
||||
@@ -442,13 +337,8 @@ namespace SabreTools.Serialization.CrossModel
|
||||
device.Instance = ConvertFromInternalModel(instance);
|
||||
|
||||
var extensions = item.Read<Models.Metadata.Extension[]>(Models.Metadata.Device.ExtensionKey);
|
||||
if (extensions != null && extensions.Any())
|
||||
{
|
||||
device.Extension = extensions
|
||||
.Where(e => e != null)
|
||||
.Select(ConvertFromInternalModel)
|
||||
.ToArray();
|
||||
}
|
||||
if (extensions != null && extensions.Length > 0)
|
||||
device.Extension = Array.ConvertAll(extensions, ConvertFromInternalModel);
|
||||
|
||||
return device;
|
||||
}
|
||||
@@ -496,22 +386,12 @@ namespace SabreTools.Serialization.CrossModel
|
||||
dipSwitch.Condition = ConvertFromInternalModel(condition);
|
||||
|
||||
var dipLocations = item.Read<Models.Metadata.DipLocation[]>(Models.Metadata.DipSwitch.DipLocationKey);
|
||||
if (dipLocations != null && dipLocations.Any())
|
||||
{
|
||||
dipSwitch.DipLocation = dipLocations
|
||||
.Where(d => d != null)
|
||||
.Select(ConvertFromInternalModel)
|
||||
.ToArray();
|
||||
}
|
||||
if (dipLocations != null && dipLocations.Length > 0)
|
||||
dipSwitch.DipLocation = Array.ConvertAll(dipLocations, ConvertFromInternalModel);
|
||||
|
||||
var dipValues = item.Read<Models.Metadata.DipValue[]>(Models.Metadata.DipSwitch.DipValueKey);
|
||||
if (dipValues != null && dipValues.Any())
|
||||
{
|
||||
dipSwitch.DipValue = dipValues
|
||||
.Where(d => d != null)
|
||||
.Select(ConvertFromInternalModel)
|
||||
.ToArray();
|
||||
}
|
||||
if (dipValues != null && dipValues.Length > 0)
|
||||
dipSwitch.DipValue = Array.ConvertAll(dipValues, ConvertFromInternalModel);
|
||||
|
||||
return dipSwitch;
|
||||
}
|
||||
@@ -644,13 +524,8 @@ namespace SabreTools.Serialization.CrossModel
|
||||
};
|
||||
|
||||
var controls = item.Read<Models.Metadata.Control[]>(Models.Metadata.Input.ControlKey);
|
||||
if (controls != null && controls.Any())
|
||||
{
|
||||
input.Control = controls
|
||||
.Where(c => c != null)
|
||||
.Select(ConvertFromInternalModel)
|
||||
.ToArray();
|
||||
}
|
||||
if (controls != null && controls.Length > 0)
|
||||
input.Control = Array.ConvertAll(controls, ConvertFromInternalModel);
|
||||
|
||||
return input;
|
||||
}
|
||||
@@ -679,13 +554,8 @@ namespace SabreTools.Serialization.CrossModel
|
||||
};
|
||||
|
||||
var analogs = item.Read<Models.Metadata.Analog[]>(Models.Metadata.Port.AnalogKey);
|
||||
if (analogs != null && analogs.Any())
|
||||
{
|
||||
port.Analog = analogs
|
||||
.Where(a => a != null)
|
||||
.Select(ConvertFromInternalModel)
|
||||
.ToArray();
|
||||
}
|
||||
if (analogs != null && analogs.Length > 0)
|
||||
port.Analog = Array.ConvertAll(analogs, ConvertFromInternalModel);
|
||||
|
||||
return port;
|
||||
}
|
||||
@@ -750,13 +620,8 @@ namespace SabreTools.Serialization.CrossModel
|
||||
};
|
||||
|
||||
var slotOptions = item.Read<Models.Metadata.SlotOption[]>(Models.Metadata.Slot.SlotOptionKey);
|
||||
if (slotOptions != null && slotOptions.Any())
|
||||
{
|
||||
slot.SlotOption = slotOptions
|
||||
.Where(s => s != null)
|
||||
.Select(ConvertFromInternalModel)
|
||||
.ToArray();
|
||||
}
|
||||
if (slotOptions != null && slotOptions.Length > 0)
|
||||
slot.SlotOption = Array.ConvertAll(slotOptions, ConvertFromInternalModel);
|
||||
|
||||
return slot;
|
||||
}
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
using System.Linq;
|
||||
using System;
|
||||
using SabreTools.Models.Listxml;
|
||||
using SabreTools.Serialization.Interfaces;
|
||||
|
||||
@@ -17,12 +17,10 @@ namespace SabreTools.Serialization.CrossModel
|
||||
[Models.Metadata.MetadataFile.HeaderKey] = ConvertHeaderToInternalModel(item),
|
||||
};
|
||||
|
||||
if (item?.Game != null && item.Game.Any())
|
||||
if (item?.Game != null && item.Game.Length > 0)
|
||||
{
|
||||
metadataFile[Models.Metadata.MetadataFile.MachineKey] = item.Game
|
||||
.Where(g => g != null)
|
||||
.Select(ConvertMachineToInternalModel)
|
||||
.ToArray();
|
||||
metadataFile[Models.Metadata.MetadataFile.MachineKey]
|
||||
= Array.ConvertAll(item.Game, ConvertMachineToInternalModel);
|
||||
}
|
||||
|
||||
return metadataFile;
|
||||
@@ -64,68 +62,52 @@ namespace SabreTools.Serialization.CrossModel
|
||||
[Models.Metadata.Machine.HistoryKey] = item.History,
|
||||
};
|
||||
|
||||
if (item.BiosSet != null && item.BiosSet.Any())
|
||||
if (item.BiosSet != null && item.BiosSet.Length > 0)
|
||||
{
|
||||
machine[Models.Metadata.Machine.BiosSetKey] = item.BiosSet
|
||||
.Where(b => b != null)
|
||||
.Select(ConvertToInternalModel)
|
||||
.ToArray();
|
||||
machine[Models.Metadata.Machine.BiosSetKey]
|
||||
= Array.ConvertAll(item.BiosSet, ConvertToInternalModel);
|
||||
}
|
||||
|
||||
if (item.Rom != null && item.Rom.Any())
|
||||
if (item.Rom != null && item.Rom.Length > 0)
|
||||
{
|
||||
machine[Models.Metadata.Machine.RomKey] = item.Rom
|
||||
.Where(r => r != null)
|
||||
.Select(ConvertToInternalModel)
|
||||
.ToArray();
|
||||
machine[Models.Metadata.Machine.RomKey]
|
||||
= Array.ConvertAll(item.Rom, ConvertToInternalModel);
|
||||
}
|
||||
|
||||
if (item.Disk != null && item.Disk.Any())
|
||||
if (item.Disk != null && item.Disk.Length > 0)
|
||||
{
|
||||
machine[Models.Metadata.Machine.DiskKey] = item.Disk
|
||||
.Where(d => d != null)
|
||||
.Select(ConvertToInternalModel)
|
||||
.ToArray();
|
||||
machine[Models.Metadata.Machine.DiskKey]
|
||||
= Array.ConvertAll(item.Disk, ConvertToInternalModel);
|
||||
}
|
||||
|
||||
if (item.DeviceRef != null && item.DeviceRef.Any())
|
||||
if (item.DeviceRef != null && item.DeviceRef.Length > 0)
|
||||
{
|
||||
machine[Models.Metadata.Machine.DeviceRefKey] = item.DeviceRef
|
||||
.Where(d => d != null)
|
||||
.Select(ConvertToInternalModel)
|
||||
.ToArray();
|
||||
machine[Models.Metadata.Machine.DeviceRefKey]
|
||||
= Array.ConvertAll(item.DeviceRef, ConvertToInternalModel);
|
||||
}
|
||||
|
||||
if (item.Sample != null && item.Sample.Any())
|
||||
if (item.Sample != null && item.Sample.Length > 0)
|
||||
{
|
||||
machine[Models.Metadata.Machine.SampleKey] = item.Sample
|
||||
.Where(s => s != null)
|
||||
.Select(ConvertToInternalModel)
|
||||
.ToArray();
|
||||
machine[Models.Metadata.Machine.SampleKey]
|
||||
= Array.ConvertAll(item.Sample, ConvertToInternalModel);
|
||||
}
|
||||
|
||||
if (item.Chip != null && item.Chip.Any())
|
||||
if (item.Chip != null && item.Chip.Length > 0)
|
||||
{
|
||||
machine[Models.Metadata.Machine.ChipKey] = item.Chip
|
||||
.Where(c => c != null)
|
||||
.Select(ConvertToInternalModel)
|
||||
.ToArray();
|
||||
machine[Models.Metadata.Machine.ChipKey]
|
||||
= Array.ConvertAll(item.Chip, ConvertToInternalModel);
|
||||
}
|
||||
|
||||
if (item.Display != null && item.Display.Any())
|
||||
if (item.Display != null && item.Display.Length > 0)
|
||||
{
|
||||
machine[Models.Metadata.Machine.DisplayKey] = item.Display
|
||||
.Where(d => d != null)
|
||||
.Select(ConvertToInternalModel)
|
||||
.ToArray();
|
||||
machine[Models.Metadata.Machine.DisplayKey]
|
||||
= Array.ConvertAll(item.Display, ConvertToInternalModel);
|
||||
}
|
||||
|
||||
if (item.Video != null && item.Video.Any())
|
||||
if (item.Video != null && item.Video.Length > 0)
|
||||
{
|
||||
machine[Models.Metadata.Machine.VideoKey] = item.Video
|
||||
.Where(v => v != null)
|
||||
.Select(ConvertToInternalModel)
|
||||
.ToArray();
|
||||
machine[Models.Metadata.Machine.VideoKey]
|
||||
= Array.ConvertAll(item.Video, ConvertToInternalModel);
|
||||
}
|
||||
|
||||
if (item.Sound != null)
|
||||
@@ -134,79 +116,61 @@ namespace SabreTools.Serialization.CrossModel
|
||||
if (item.Input != null)
|
||||
machine[Models.Metadata.Machine.InputKey] = ConvertToInternalModel(item.Input);
|
||||
|
||||
if (item.DipSwitch != null && item.DipSwitch.Any())
|
||||
if (item.DipSwitch != null && item.DipSwitch.Length > 0)
|
||||
{
|
||||
machine[Models.Metadata.Machine.DipSwitchKey] = item.DipSwitch
|
||||
.Where(d => d != null)
|
||||
.Select(ConvertToInternalModel)
|
||||
.ToArray();
|
||||
machine[Models.Metadata.Machine.DipSwitchKey]
|
||||
= Array.ConvertAll(item.DipSwitch, ConvertToInternalModel);
|
||||
}
|
||||
|
||||
if (item.Configuration != null && item.Configuration.Any())
|
||||
if (item.Configuration != null && item.Configuration.Length > 0)
|
||||
{
|
||||
machine[Models.Metadata.Machine.ConfigurationKey] = item.Configuration
|
||||
.Where(c => c != null)
|
||||
.Select(ConvertToInternalModel)
|
||||
.ToArray();
|
||||
machine[Models.Metadata.Machine.ConfigurationKey]
|
||||
= Array.ConvertAll(item.Configuration, ConvertToInternalModel);
|
||||
}
|
||||
|
||||
if (item.Port != null && item.Port.Any())
|
||||
if (item.Port != null && item.Port.Length > 0)
|
||||
{
|
||||
machine[Models.Metadata.Machine.PortKey] = item.Port
|
||||
.Where(p => p != null)
|
||||
.Select(ConvertToInternalModel)
|
||||
.ToArray();
|
||||
machine[Models.Metadata.Machine.PortKey]
|
||||
= Array.ConvertAll(item.Port, ConvertToInternalModel);
|
||||
}
|
||||
|
||||
if (item.Adjuster != null && item.Adjuster.Any())
|
||||
if (item.Adjuster != null && item.Adjuster.Length > 0)
|
||||
{
|
||||
machine[Models.Metadata.Machine.AdjusterKey] = item.Adjuster
|
||||
.Where(a => a != null)
|
||||
.Select(ConvertToInternalModel)
|
||||
.ToArray();
|
||||
machine[Models.Metadata.Machine.AdjusterKey]
|
||||
= Array.ConvertAll(item.Adjuster, ConvertToInternalModel);
|
||||
}
|
||||
|
||||
if (item.Driver != null)
|
||||
machine[Models.Metadata.Machine.DriverKey] = ConvertToInternalModel(item.Driver);
|
||||
|
||||
if (item.Feature != null && item.Feature.Any())
|
||||
if (item.Feature != null && item.Feature.Length > 0)
|
||||
{
|
||||
machine[Models.Metadata.Machine.FeatureKey] = item.Feature
|
||||
.Where(f => f != null)
|
||||
.Select(ConvertToInternalModel)
|
||||
.ToArray();
|
||||
machine[Models.Metadata.Machine.FeatureKey]
|
||||
= Array.ConvertAll(item.Feature, ConvertToInternalModel);
|
||||
}
|
||||
|
||||
if (item.Device != null && item.Device.Any())
|
||||
if (item.Device != null && item.Device.Length > 0)
|
||||
{
|
||||
machine[Models.Metadata.Machine.DeviceKey] = item.Device
|
||||
.Where(d => d != null)
|
||||
.Select(ConvertToInternalModel)
|
||||
.ToArray();
|
||||
machine[Models.Metadata.Machine.DeviceKey]
|
||||
= Array.ConvertAll(item.Device, ConvertToInternalModel);
|
||||
}
|
||||
|
||||
if (item.Slot != null && item.Slot.Any())
|
||||
if (item.Slot != null && item.Slot.Length > 0)
|
||||
{
|
||||
machine[Models.Metadata.Machine.SlotKey] = item.Slot
|
||||
.Where(s => s != null)
|
||||
.Select(ConvertToInternalModel)
|
||||
.ToArray();
|
||||
machine[Models.Metadata.Machine.SlotKey]
|
||||
= Array.ConvertAll(item.Slot, ConvertToInternalModel);
|
||||
}
|
||||
|
||||
if (item.SoftwareList != null && item.SoftwareList.Any())
|
||||
if (item.SoftwareList != null && item.SoftwareList.Length > 0)
|
||||
{
|
||||
machine[Models.Metadata.Machine.SoftwareListKey] = item.SoftwareList
|
||||
.Where(s => s != null)
|
||||
.Select(ConvertToInternalModel)
|
||||
.ToArray();
|
||||
machine[Models.Metadata.Machine.SoftwareListKey]
|
||||
= Array.ConvertAll(item.SoftwareList, ConvertToInternalModel);
|
||||
}
|
||||
|
||||
if (item.RamOption != null && item.RamOption.Any())
|
||||
if (item.RamOption != null && item.RamOption.Length > 0)
|
||||
{
|
||||
machine[Models.Metadata.Machine.RamOptionKey] = item.RamOption
|
||||
.Where(r => r != null)
|
||||
.Select(ConvertToInternalModel)
|
||||
.ToArray();
|
||||
machine[Models.Metadata.Machine.RamOptionKey]
|
||||
= Array.ConvertAll(item.RamOption, ConvertToInternalModel);
|
||||
}
|
||||
|
||||
return machine;
|
||||
@@ -301,20 +265,16 @@ namespace SabreTools.Serialization.CrossModel
|
||||
if (item.Condition != null)
|
||||
configuration[Models.Metadata.Configuration.ConditionKey] = ConvertToInternalModel(item.Condition);
|
||||
|
||||
if (item.ConfLocation != null && item.ConfLocation.Any())
|
||||
if (item.ConfLocation != null && item.ConfLocation.Length > 0)
|
||||
{
|
||||
configuration[Models.Metadata.Configuration.ConfLocationKey] = item.ConfLocation
|
||||
.Where(c => c != null)
|
||||
.Select(ConvertToInternalModel)
|
||||
.ToArray();
|
||||
configuration[Models.Metadata.Configuration.ConfLocationKey]
|
||||
= Array.ConvertAll(item.ConfLocation, ConvertToInternalModel);
|
||||
}
|
||||
|
||||
if (item.ConfSetting != null && item.ConfSetting.Any())
|
||||
if (item.ConfSetting != null && item.ConfSetting.Length > 0)
|
||||
{
|
||||
configuration[Models.Metadata.Configuration.ConfSettingKey] = item.ConfSetting
|
||||
.Where(c => c != null)
|
||||
.Select(ConvertToInternalModel)
|
||||
.ToArray();
|
||||
configuration[Models.Metadata.Configuration.ConfSettingKey]
|
||||
= Array.ConvertAll(item.ConfSetting, ConvertToInternalModel);
|
||||
}
|
||||
|
||||
return configuration;
|
||||
@@ -392,12 +352,10 @@ namespace SabreTools.Serialization.CrossModel
|
||||
if (item.Instance != null)
|
||||
device[Models.Metadata.Device.InstanceKey] = ConvertToInternalModel(item.Instance);
|
||||
|
||||
if (item.Extension != null && item.Extension.Any())
|
||||
if (item.Extension != null && item.Extension.Length > 0)
|
||||
{
|
||||
device[Models.Metadata.Device.ExtensionKey] = item.Extension
|
||||
.Where(e => e != null)
|
||||
.Select(ConvertToInternalModel)
|
||||
.ToArray();
|
||||
device[Models.Metadata.Device.ExtensionKey]
|
||||
= Array.ConvertAll(item.Extension, ConvertToInternalModel);
|
||||
}
|
||||
|
||||
return device;
|
||||
@@ -444,20 +402,16 @@ namespace SabreTools.Serialization.CrossModel
|
||||
if (item.Condition != null)
|
||||
dipSwitch[Models.Metadata.DipSwitch.ConditionKey] = ConvertToInternalModel(item.Condition);
|
||||
|
||||
if (item.DipLocation != null && item.DipLocation.Any())
|
||||
if (item.DipLocation != null && item.DipLocation.Length > 0)
|
||||
{
|
||||
dipSwitch[Models.Metadata.DipSwitch.DipLocationKey] = item.DipLocation
|
||||
.Where(d => d != null)
|
||||
.Select(ConvertToInternalModel)
|
||||
.ToArray();
|
||||
dipSwitch[Models.Metadata.DipSwitch.DipLocationKey]
|
||||
= Array.ConvertAll(item.DipLocation, ConvertToInternalModel);
|
||||
}
|
||||
|
||||
if (item.DipValue != null && item.DipValue.Any())
|
||||
if (item.DipValue != null && item.DipValue.Length > 0)
|
||||
{
|
||||
dipSwitch[Models.Metadata.DipSwitch.DipValueKey] = item.DipValue
|
||||
.Where(d => d != null)
|
||||
.Select(ConvertToInternalModel)
|
||||
.ToArray();
|
||||
dipSwitch[Models.Metadata.DipSwitch.DipValueKey]
|
||||
= Array.ConvertAll(item.DipValue, ConvertToInternalModel);
|
||||
}
|
||||
|
||||
return dipSwitch;
|
||||
@@ -589,12 +543,10 @@ namespace SabreTools.Serialization.CrossModel
|
||||
[Models.Metadata.Input.CoinsKey] = item.Coins,
|
||||
};
|
||||
|
||||
if (item.Control != null && item.Control.Any())
|
||||
if (item.Control != null && item.Control.Length > 0)
|
||||
{
|
||||
input[Models.Metadata.Input.ControlKey] = item.Control
|
||||
.Where(c => c != null)
|
||||
.Select(ConvertToInternalModel)
|
||||
.ToArray();
|
||||
input[Models.Metadata.Input.ControlKey]
|
||||
= Array.ConvertAll(item.Control, ConvertToInternalModel);
|
||||
}
|
||||
|
||||
return input;
|
||||
@@ -623,12 +575,10 @@ namespace SabreTools.Serialization.CrossModel
|
||||
[Models.Metadata.Port.TagKey] = item.Tag,
|
||||
};
|
||||
|
||||
if (item.Analog != null && item.Analog.Any())
|
||||
if (item.Analog != null && item.Analog.Length > 0)
|
||||
{
|
||||
port[Models.Metadata.Port.AnalogKey] = item.Analog
|
||||
.Where(a => a != null)
|
||||
.Select(ConvertToInternalModel)
|
||||
.ToArray();
|
||||
port[Models.Metadata.Port.AnalogKey]
|
||||
= Array.ConvertAll(item.Analog, ConvertToInternalModel);
|
||||
}
|
||||
|
||||
return port;
|
||||
@@ -693,12 +643,10 @@ namespace SabreTools.Serialization.CrossModel
|
||||
[Models.Metadata.Slot.NameKey] = item.Name,
|
||||
};
|
||||
|
||||
if (item.SlotOption != null && item.SlotOption.Any())
|
||||
if (item.SlotOption != null && item.SlotOption.Length > 0)
|
||||
{
|
||||
slot[Models.Metadata.Slot.SlotOptionKey] = item.SlotOption
|
||||
.Where(s => s != null)
|
||||
.Select(ConvertToInternalModel)
|
||||
.ToArray();
|
||||
slot[Models.Metadata.Slot.SlotOptionKey]
|
||||
= Array.ConvertAll(item.SlotOption, ConvertToInternalModel);
|
||||
}
|
||||
|
||||
return slot;
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
using System.Linq;
|
||||
using System;
|
||||
using SabreTools.Models.Logiqx;
|
||||
using SabreTools.Serialization.Interfaces;
|
||||
|
||||
@@ -28,13 +28,8 @@ namespace SabreTools.Serialization.CrossModel
|
||||
|
||||
// TODO: Handle Dir items - Currently need to be generated from the machines
|
||||
var machines = obj.Read<Models.Metadata.Machine[]>(Models.Metadata.MetadataFile.MachineKey);
|
||||
if (machines != null && machines.Any())
|
||||
{
|
||||
datafile.Game = machines
|
||||
.Where(m => m != null)
|
||||
.Select(machine => ConvertMachineFromInternalModel(machine, game))
|
||||
.ToArray();
|
||||
}
|
||||
if (machines != null && machines.Length > 0)
|
||||
datafile.Game = Array.ConvertAll(machines, m => ConvertMachineFromInternalModel(m, game));
|
||||
|
||||
return datafile;
|
||||
}
|
||||
@@ -130,89 +125,44 @@ namespace SabreTools.Serialization.CrossModel
|
||||
gameBase.Trurip = trurip;
|
||||
|
||||
var releases = item.Read<Models.Metadata.Release[]>(Models.Metadata.Machine.ReleaseKey);
|
||||
if (releases != null && releases.Any())
|
||||
{
|
||||
gameBase.Release = releases
|
||||
.Where(r => r != null)
|
||||
.Select(ConvertFromInternalModel)
|
||||
.ToArray();
|
||||
}
|
||||
if (releases != null && releases.Length > 0)
|
||||
gameBase.Release = Array.ConvertAll(releases, ConvertFromInternalModel);
|
||||
|
||||
var biosSets = item.Read<Models.Metadata.BiosSet[]>(Models.Metadata.Machine.BiosSetKey);
|
||||
if (biosSets != null && biosSets.Any())
|
||||
{
|
||||
gameBase.BiosSet = biosSets
|
||||
.Where(b => b != null)
|
||||
.Select(ConvertFromInternalModel)
|
||||
.ToArray();
|
||||
}
|
||||
if (biosSets != null && biosSets.Length > 0)
|
||||
gameBase.BiosSet = Array.ConvertAll(biosSets, ConvertFromInternalModel);
|
||||
|
||||
var roms = item.Read<Models.Metadata.Rom[]>(Models.Metadata.Machine.RomKey);
|
||||
if (roms != null && roms.Any())
|
||||
{
|
||||
gameBase.Rom = roms
|
||||
.Where(r => r != null)
|
||||
.Select(ConvertFromInternalModel)
|
||||
.ToArray();
|
||||
}
|
||||
if (roms != null && roms.Length > 0)
|
||||
gameBase.Rom = Array.ConvertAll(roms, ConvertFromInternalModel);
|
||||
|
||||
var disks = item.Read<Models.Metadata.Disk[]>(Models.Metadata.Machine.DiskKey);
|
||||
if (disks != null && disks.Any())
|
||||
{
|
||||
gameBase.Disk = disks
|
||||
.Where(d => d != null)
|
||||
.Select(ConvertFromInternalModel)
|
||||
.ToArray();
|
||||
}
|
||||
if (disks != null && disks.Length > 0)
|
||||
gameBase.Disk = Array.ConvertAll(disks, ConvertFromInternalModel);
|
||||
|
||||
var medias = item.Read<Models.Metadata.Media[]>(Models.Metadata.Machine.MediaKey);
|
||||
if (medias != null && medias.Any())
|
||||
{
|
||||
gameBase.Media = medias
|
||||
.Where(m => m != null)
|
||||
.Select(ConvertFromInternalModel)
|
||||
.ToArray();
|
||||
}
|
||||
if (medias != null && medias.Length > 0)
|
||||
gameBase.Media = Array.ConvertAll(medias, ConvertFromInternalModel);
|
||||
|
||||
var deviceRefs = item.Read<Models.Metadata.DeviceRef[]>(Models.Metadata.Machine.DeviceRefKey);
|
||||
if (deviceRefs != null && deviceRefs.Any())
|
||||
{
|
||||
gameBase.DeviceRef = deviceRefs
|
||||
.Where(m => m != null)
|
||||
.Select(ConvertFromInternalModel)
|
||||
.ToArray();
|
||||
}
|
||||
if (deviceRefs != null && deviceRefs.Length > 0)
|
||||
gameBase.DeviceRef = Array.ConvertAll(deviceRefs, ConvertFromInternalModel);
|
||||
|
||||
var samples = item.Read<Models.Metadata.Sample[]>(Models.Metadata.Machine.SampleKey);
|
||||
if (samples != null && samples.Any())
|
||||
{
|
||||
gameBase.Sample = samples
|
||||
.Where(m => m != null)
|
||||
.Select(ConvertFromInternalModel)
|
||||
.ToArray();
|
||||
}
|
||||
if (samples != null && samples.Length > 0)
|
||||
gameBase.Sample = Array.ConvertAll(samples, ConvertFromInternalModel);
|
||||
|
||||
var archives = item.Read<Models.Metadata.Archive[]>(Models.Metadata.Machine.ArchiveKey);
|
||||
if (archives != null && archives.Any())
|
||||
{
|
||||
gameBase.Archive = archives
|
||||
.Where(m => m != null)
|
||||
.Select(ConvertFromInternalModel)
|
||||
.ToArray();
|
||||
}
|
||||
if (archives != null && archives.Length > 0)
|
||||
gameBase.Archive = Array.ConvertAll(archives, ConvertFromInternalModel);
|
||||
|
||||
var driver = item.Read<Models.Metadata.Driver>(Models.Metadata.Machine.DriverKey);
|
||||
if (driver != null)
|
||||
gameBase.Driver = ConvertFromInternalModel(driver);
|
||||
|
||||
var softwareLists = item.Read<Models.Metadata.SoftwareList[]>(Models.Metadata.Machine.SoftwareListKey);
|
||||
if (softwareLists != null && softwareLists.Any())
|
||||
{
|
||||
gameBase.SoftwareList = softwareLists
|
||||
.Where(m => m != null)
|
||||
.Select(ConvertFromInternalModel)
|
||||
.ToArray();
|
||||
}
|
||||
if (softwareLists != null && softwareLists.Length > 0)
|
||||
gameBase.SoftwareList = Array.ConvertAll(softwareLists, ConvertFromInternalModel);
|
||||
|
||||
return gameBase;
|
||||
}
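Every collection in this method now follows the same three-line shape: null check, length check, `Array.ConvertAll`. A small generic helper could fold that into one call; this is only a sketch with illustrative names, not something present in the library:

private static TOut[]? ConvertIfAny<TIn, TOut>(TIn[]? items, Converter<TIn, TOut> convert)
    => items != null && items.Length > 0 ? Array.ConvertAll(items, convert) : null;

// Usage sketch, assuming the target property tolerates null:
gameBase.Release = ConvertIfAny(releases, ConvertFromInternalModel);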
@@ -1,6 +1,5 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using SabreTools.Models.Logiqx;
|
||||
using SabreTools.Serialization.Interfaces;
|
||||
|
||||
@@ -20,21 +19,15 @@ namespace SabreTools.Serialization.CrossModel
|
||||
|
||||
var machines = new List<Models.Metadata.Machine>();
|
||||
|
||||
if (item.Game != null && item.Game.Any())
|
||||
if (item.Game != null && item.Game.Length > 0)
|
||||
machines.AddRange(Array.ConvertAll(item.Game, ConvertMachineToInternalModel));
|
||||
|
||||
foreach (var dir in item.Dir ?? [])
|
||||
{
|
||||
machines.AddRange(item.Game
|
||||
.Where(g => g != null)
|
||||
.Select(ConvertMachineToInternalModel));
|
||||
machines.AddRange(ConvertDirToInternalModel(dir));
|
||||
}
|
||||
|
||||
if (item.Dir != null && item.Dir.Any())
|
||||
{
|
||||
machines.AddRange(item.Dir
|
||||
.Where(d => d != null)
|
||||
.SelectMany(ConvertDirToInternalModel));
|
||||
}
|
||||
|
||||
if (machines.Any())
|
||||
if (machines.Count > 0)
|
||||
metadataFile[Models.Metadata.MetadataFile.MachineKey] = machines.ToArray();
|
||||
|
||||
return metadataFile;
|
||||
@@ -103,18 +96,15 @@ namespace SabreTools.Serialization.CrossModel
|
||||
/// </summary>
|
||||
private static Models.Metadata.Machine[] ConvertDirToInternalModel(Dir item)
|
||||
{
|
||||
if (item.Game == null || !item.Game.Any())
|
||||
if (item.Game == null || item.Game.Length == 0)
|
||||
return [];
|
||||
|
||||
return item.Game
|
||||
.Where(g => g != null)
|
||||
.Select(game =>
|
||||
return Array.ConvertAll(item.Game, g =>
|
||||
{
|
||||
var machine = ConvertMachineToInternalModel(game);
|
||||
var machine = ConvertMachineToInternalModel(g);
|
||||
machine[Models.Metadata.Machine.DirNameKey] = item.Name;
|
||||
return machine;
|
||||
})
|
||||
.ToArray();
|
||||
});
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
@@ -146,35 +136,35 @@ namespace SabreTools.Serialization.CrossModel
|
||||
[Models.Metadata.Machine.TruripKey] = item.Trurip,
|
||||
};
|
||||
|
||||
if (item.Release != null && item.Release.Any())
|
||||
machine[Models.Metadata.Machine.ReleaseKey] = item.Release.Select(ConvertToInternalModel).ToArray();
|
||||
if (item.Release != null && item.Release.Length > 0)
|
||||
machine[Models.Metadata.Machine.ReleaseKey] = Array.ConvertAll(item.Release, ConvertToInternalModel);
|
||||
|
||||
if (item.BiosSet != null && item.BiosSet.Any())
|
||||
machine[Models.Metadata.Machine.BiosSetKey] = item.BiosSet.Select(ConvertToInternalModel).ToArray();
|
||||
if (item.BiosSet != null && item.BiosSet.Length > 0)
|
||||
machine[Models.Metadata.Machine.BiosSetKey] = Array.ConvertAll(item.BiosSet, ConvertToInternalModel);
|
||||
|
||||
if (item.Rom != null && item.Rom.Any())
|
||||
machine[Models.Metadata.Machine.RomKey] = item.Rom.Select(ConvertToInternalModel).ToArray();
|
||||
if (item.Rom != null && item.Rom.Length > 0)
|
||||
machine[Models.Metadata.Machine.RomKey] = Array.ConvertAll(item.Rom, ConvertToInternalModel);
|
||||
|
||||
if (item.Disk != null && item.Disk.Any())
|
||||
machine[Models.Metadata.Machine.DiskKey] = item.Disk.Select(ConvertToInternalModel).ToArray();
|
||||
if (item.Disk != null && item.Disk.Length > 0)
|
||||
machine[Models.Metadata.Machine.DiskKey] = Array.ConvertAll(item.Disk, ConvertToInternalModel);
|
||||
|
||||
if (item.Media != null && item.Media.Any())
|
||||
machine[Models.Metadata.Machine.MediaKey] = item.Media.Select(ConvertToInternalModel).ToArray();
|
||||
if (item.Media != null && item.Media.Length > 0)
|
||||
machine[Models.Metadata.Machine.MediaKey] = Array.ConvertAll(item.Media, ConvertToInternalModel);
|
||||
|
||||
if (item.DeviceRef != null && item.DeviceRef.Any())
|
||||
machine[Models.Metadata.Machine.DeviceRefKey] = item.DeviceRef.Select(ConvertToInternalModel).ToArray();
|
||||
if (item.DeviceRef != null && item.DeviceRef.Length > 0)
|
||||
machine[Models.Metadata.Machine.DeviceRefKey] = Array.ConvertAll(item.DeviceRef, ConvertToInternalModel);
|
||||
|
||||
if (item.Sample != null && item.Sample.Any())
|
||||
machine[Models.Metadata.Machine.SampleKey] = item.Sample.Select(ConvertToInternalModel).ToArray();
|
||||
if (item.Sample != null && item.Sample.Length > 0)
|
||||
machine[Models.Metadata.Machine.SampleKey] = Array.ConvertAll(item.Sample, ConvertToInternalModel);
|
||||
|
||||
if (item.Archive != null && item.Archive.Any())
|
||||
machine[Models.Metadata.Machine.ArchiveKey] = item.Archive.Select(ConvertToInternalModel).ToArray();
|
||||
if (item.Archive != null && item.Archive.Length > 0)
|
||||
machine[Models.Metadata.Machine.ArchiveKey] = Array.ConvertAll(item.Archive, ConvertToInternalModel);
|
||||
|
||||
if (item.Driver != null)
|
||||
machine[Models.Metadata.Machine.DriverKey] = ConvertToInternalModel(item.Driver);
|
||||
|
||||
if (item.SoftwareList != null && item.SoftwareList.Any())
|
||||
machine[Models.Metadata.Machine.SoftwareListKey] = item.SoftwareList.Select(ConvertToInternalModel).ToArray();
|
||||
if (item.SoftwareList != null && item.SoftwareList.Length > 0)
|
||||
machine[Models.Metadata.Machine.SoftwareListKey] = Array.ConvertAll(item.SoftwareList, ConvertToInternalModel);
|
||||
|
||||
return machine;
|
||||
}
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
using System.Linq;
|
||||
using System;
|
||||
using SabreTools.Serialization.Interfaces;
|
||||
|
||||
namespace SabreTools.Serialization.CrossModel
|
||||
@@ -15,13 +15,8 @@ namespace SabreTools.Serialization.CrossModel
|
||||
var m1 = header != null ? ConvertM1FromInternalModel(header) : new Models.Listxml.M1();
|
||||
|
||||
var machines = obj.Read<Models.Metadata.Machine[]>(Models.Metadata.MetadataFile.MachineKey);
|
||||
if (machines != null && machines.Any())
|
||||
{
|
||||
m1.Game = machines
|
||||
.Where(m => m != null)
|
||||
.Select(Listxml.ConvertMachineFromInternalModel)
|
||||
.ToArray();
|
||||
}
|
||||
if (machines != null && machines.Length > 0)
|
||||
m1.Game = Array.ConvertAll(machines, Listxml.ConvertMachineFromInternalModel);
|
||||
|
||||
return m1;
|
||||
}
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
using System.Linq;
|
||||
using System;
|
||||
using SabreTools.Serialization.Interfaces;
|
||||
|
||||
namespace SabreTools.Serialization.CrossModel
|
||||
@@ -16,12 +16,10 @@ namespace SabreTools.Serialization.CrossModel
|
||||
[Models.Metadata.MetadataFile.HeaderKey] = ConvertHeaderToInternalModel(item),
|
||||
};
|
||||
|
||||
if (item?.Game != null && item.Game.Any())
|
||||
if (item?.Game != null && item.Game.Length > 0)
|
||||
{
|
||||
metadataFile[Models.Metadata.MetadataFile.MachineKey] = item.Game
|
||||
.Where(g => g != null)
|
||||
.Select(Listxml.ConvertMachineToInternalModel)
|
||||
.ToArray();
|
||||
metadataFile[Models.Metadata.MetadataFile.MachineKey]
|
||||
= Array.ConvertAll(item.Game, Listxml.ConvertMachineToInternalModel);
|
||||
}
|
||||
|
||||
return metadataFile;
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
using System.Linq;
|
||||
using System;
|
||||
using SabreTools.Models.OfflineList;
|
||||
using SabreTools.Serialization.Interfaces;
|
||||
|
||||
@@ -16,14 +16,11 @@ namespace SabreTools.Serialization.CrossModel
|
||||
var dat = header != null ? ConvertHeaderFromInternalModel(header) : new Dat();
|
||||
|
||||
var machines = obj.Read<Models.Metadata.Machine[]>(Models.Metadata.MetadataFile.MachineKey);
|
||||
if (machines != null && machines.Any())
|
||||
if (machines != null && machines.Length > 0)
|
||||
{
|
||||
dat.Games = new Games
|
||||
{
|
||||
Game = machines
|
||||
.Where(m => m != null)
|
||||
.Select(ConvertMachineFromInternalModel)
|
||||
.ToArray()
|
||||
Game = Array.ConvertAll(machines, ConvertMachineFromInternalModel),
|
||||
};
|
||||
}
|
||||
|
||||
@@ -101,17 +98,12 @@ namespace SabreTools.Serialization.CrossModel
|
||||
};
|
||||
|
||||
var roms = item.Read<Models.Metadata.Rom[]>(Models.Metadata.Machine.RomKey);
if (roms != null && roms.Any())
if (roms != null && roms.Length > 0)
{
game.RomSize = roms
.Select(rom => rom.ReadString(Models.Metadata.Rom.SizeKey))
.FirstOrDefault(s => s != null);

var romCRCs = roms
.Where(r => r != null)
.Select(ConvertFromInternalModel)
.ToArray();
var romSizes = Array.ConvertAll(roms, r => r.ReadLong(Models.Metadata.Rom.SizeKey) ?? -1);
game.RomSize = Array.Find(romSizes, s => s > -1).ToString();

var romCRCs = Array.ConvertAll(roms, ConvertFromInternalModel);
game.Files = new Models.OfflineList.Files { RomCRC = romCRCs };
}
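One subtlety in the rewritten size lookup above: `Array.Find` on a `long[]` returns `default(long)`, i.e. 0, when no element matches, so a game whose roms report no size ends up with RomSize = "0" where the previous `FirstOrDefault` produced null. If the old behavior matters, an index-based lookup keeps it (a sketch, not the committed code):

int sizeIndex = Array.FindIndex(romSizes, s => s > -1);
game.RomSize = sizeIndex > -1 ? romSizes[sizeIndex].ToString() : null;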
@@ -1,4 +1,4 @@
|
||||
using System.Linq;
|
||||
using System;
|
||||
using SabreTools.Models.OfflineList;
|
||||
using SabreTools.Serialization.Interfaces;
|
||||
|
||||
@@ -17,12 +17,10 @@ namespace SabreTools.Serialization.CrossModel
|
||||
[Models.Metadata.MetadataFile.HeaderKey] = ConvertHeaderToInternalModel(item),
|
||||
};
|
||||
|
||||
if (item?.Games?.Game != null && item.Games.Game.Any())
|
||||
if (item?.Games?.Game != null && item.Games.Game.Length > 0)
|
||||
{
|
||||
metadataFile[Models.Metadata.MetadataFile.MachineKey] = item.Games.Game
|
||||
.Where(g => g != null)
|
||||
.Select(ConvertMachineToInternalModel)
|
||||
.ToArray();
|
||||
metadataFile[Models.Metadata.MetadataFile.MachineKey]
|
||||
= Array.ConvertAll(item.Games.Game, ConvertMachineToInternalModel);
|
||||
}
|
||||
|
||||
return metadataFile;
|
||||
@@ -82,17 +80,15 @@ namespace SabreTools.Serialization.CrossModel
|
||||
[Models.Metadata.Machine.DuplicateIDKey] = item.DuplicateID,
|
||||
};
|
||||
|
||||
if (item.Files?.RomCRC != null && item.Files.RomCRC.Any())
|
||||
if (item.Files?.RomCRC != null && item.Files.RomCRC.Length > 0)
|
||||
{
|
||||
machine[Models.Metadata.Machine.RomKey] = item.Files.RomCRC
|
||||
.Where(r => r != null)
|
||||
.Select(romCRC =>
|
||||
{
|
||||
var rom = ConvertToInternalModel(romCRC);
|
||||
rom[Models.Metadata.Rom.SizeKey] = item.RomSize;
|
||||
return rom;
|
||||
})
|
||||
.ToArray();
|
||||
machine[Models.Metadata.Machine.RomKey]
|
||||
= Array.ConvertAll(item.Files.RomCRC, romCRC =>
|
||||
{
|
||||
var rom = ConvertToInternalModel(romCRC);
|
||||
rom[Models.Metadata.Rom.SizeKey] = item.RomSize;
|
||||
return rom;
|
||||
});
|
||||
}
|
||||
|
||||
return machine;
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
using System.Linq;
|
||||
using System;
|
||||
using SabreTools.Models.OpenMSX;
|
||||
using SabreTools.Serialization.Interfaces;
|
||||
|
||||
@@ -16,13 +16,8 @@ namespace SabreTools.Serialization.CrossModel
|
||||
var softwareDb = header != null ? ConvertHeaderFromInternalModel(header) : new SoftwareDb();
|
||||
|
||||
var machines = obj.Read<Models.Metadata.Machine[]>(Models.Metadata.MetadataFile.MachineKey);
|
||||
if (machines != null && machines.Any())
|
||||
{
|
||||
softwareDb.Software = machines
|
||||
.Where(m => m != null)
|
||||
.Select(ConvertMachineFromInternalModel)
|
||||
.ToArray();
|
||||
}
|
||||
if (machines != null && machines.Length > 0)
|
||||
softwareDb.Software = Array.ConvertAll(machines, ConvertMachineFromInternalModel);
|
||||
|
||||
return softwareDb;
|
||||
}
|
||||
@@ -55,13 +50,8 @@ namespace SabreTools.Serialization.CrossModel
|
||||
};
|
||||
|
||||
var dumps = item.Read<Models.Metadata.Dump[]>(Models.Metadata.Machine.DumpKey);
|
||||
if (dumps != null && dumps.Any())
|
||||
{
|
||||
game.Dump = dumps
|
||||
.Where(d => d != null)
|
||||
.Select(ConvertFromInternalModel)
|
||||
.ToArray();
|
||||
}
|
||||
if (dumps != null && dumps.Length > 0)
|
||||
game.Dump = Array.ConvertAll(dumps, ConvertFromInternalModel);
|
||||
|
||||
return game;
|
||||
}
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
using System.Linq;
|
||||
using System;
|
||||
using SabreTools.Models.OpenMSX;
|
||||
using SabreTools.Serialization.Interfaces;
|
||||
|
||||
@@ -17,12 +17,10 @@ namespace SabreTools.Serialization.CrossModel
|
||||
[Models.Metadata.MetadataFile.HeaderKey] = ConvertHeaderToInternalModel(item),
|
||||
};
|
||||
|
||||
if (item?.Software != null && item.Software.Any())
|
||||
if (item?.Software != null && item.Software.Length > 0)
|
||||
{
|
||||
metadataFile[Models.Metadata.MetadataFile.MachineKey] = item.Software
|
||||
.Where(s => s != null)
|
||||
.Select(ConvertMachineToInternalModel)
|
||||
.ToArray();
|
||||
metadataFile[Models.Metadata.MetadataFile.MachineKey]
|
||||
= Array.ConvertAll(item.Software, ConvertMachineToInternalModel);
|
||||
}
|
||||
|
||||
return metadataFile;
|
||||
@@ -55,12 +53,10 @@ namespace SabreTools.Serialization.CrossModel
|
||||
[Models.Metadata.Machine.CountryKey] = item.Country,
|
||||
};
|
||||
|
||||
if (item.Dump != null && item.Dump.Any())
|
||||
if (item.Dump != null && item.Dump.Length > 0)
|
||||
{
|
||||
machine[Models.Metadata.Machine.DumpKey] = item.Dump
|
||||
.Where(d => d != null)
|
||||
.Select(ConvertToInternalModel)
|
||||
.ToArray();
|
||||
machine[Models.Metadata.Machine.DumpKey]
|
||||
= Array.ConvertAll(item.Dump, ConvertToInternalModel);
|
||||
}
|
||||
|
||||
return machine;
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
using System;
|
||||
using System.Linq;
|
||||
using System.Collections.Generic;
|
||||
using SabreTools.Models.RomCenter;
|
||||
using SabreTools.Serialization.Interfaces;
|
||||
|
||||
@@ -17,17 +17,13 @@ namespace SabreTools.Serialization.CrossModel
|
||||
var metadataFile = header != null ? ConvertHeaderFromInternalModel(header) : new MetadataFile();
|
||||
|
||||
var machines = obj.Read<Models.Metadata.Machine[]>(Models.Metadata.MetadataFile.MachineKey);
if (machines != null && machines.Any())
var items = new List<Rom>();
foreach (var machine in machines ?? [])
{
metadataFile.Games = new Games
{
Rom = machines
.Where(m => m != null)
.SelectMany(ConvertMachineFromInternalModel)
.ToArray()
};
items.AddRange(ConvertMachineFromInternalModel(machine));
}

metadataFile.Games = new Games { Rom = [.. items] };
return metadataFile;
}
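`[.. items]` is the C# 12 collection-expression spread: for a `List<Rom>` flowing into an array-typed property it produces the same array as `items.ToArray()`. A tiny standalone illustration:

var numbers = new List<int> { 1, 2, 3 };
int[] copy = [.. numbers];   // equivalent to numbers.ToArray()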
@@ -93,10 +89,7 @@ namespace SabreTools.Serialization.CrossModel
|
||||
if (roms == null)
|
||||
return [];
|
||||
|
||||
return roms
|
||||
.Where(r => r != null)
|
||||
.Select(rom => ConvertFromInternalModel(rom, item))
|
||||
.ToArray();
|
||||
return Array.ConvertAll(roms, r => ConvertFromInternalModel(r, item));
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
using System.Linq;
|
||||
using System;
|
||||
using SabreTools.Models.RomCenter;
|
||||
using SabreTools.Serialization.Interfaces;
|
||||
|
||||
@@ -17,11 +17,10 @@ namespace SabreTools.Serialization.CrossModel
|
||||
[Models.Metadata.MetadataFile.HeaderKey] = ConvertHeaderToInternalModel(obj),
|
||||
};
|
||||
|
||||
if (obj?.Games?.Rom != null && obj.Games.Rom.Any())
|
||||
if (obj?.Games?.Rom != null && obj.Games.Rom.Length > 0)
|
||||
{
|
||||
metadataFile[Models.Metadata.MetadataFile.MachineKey] = obj.Games.Rom
|
||||
.Where(r => r != null)
|
||||
.Select(ConvertMachineToInternalModel).ToArray();
|
||||
metadataFile[Models.Metadata.MetadataFile.MachineKey]
|
||||
= Array.ConvertAll(obj.Games.Rom, ConvertMachineToInternalModel);
|
||||
}
|
||||
|
||||
return metadataFile;
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using SabreTools.Models.SeparatedValue;
|
||||
using SabreTools.Serialization.Interfaces;
|
||||
|
||||
@@ -17,14 +17,13 @@ namespace SabreTools.Serialization.CrossModel
|
||||
var metadataFile = header != null ? ConvertHeaderFromInternalModel(header) : new MetadataFile();
|
||||
|
||||
var machines = obj.Read<Models.Metadata.Machine[]>(Models.Metadata.MetadataFile.MachineKey);
|
||||
if (machines != null && machines.Any())
|
||||
var items = new List<Row>();
|
||||
foreach (var machine in machines ?? [])
|
||||
{
|
||||
metadataFile.Row = machines
|
||||
.Where(m => m != null)
|
||||
.SelectMany(m => ConvertMachineFromInternalModel(m, header))
|
||||
.ToArray();
|
||||
items.AddRange(ConvertMachineFromInternalModel(machine, header));
|
||||
}
|
||||
|
||||
metadataFile.Row = [.. items];
|
||||
return metadataFile;
|
||||
}
|
||||
|
||||
@@ -48,27 +47,24 @@ namespace SabreTools.Serialization.CrossModel
|
||||
var rowItems = new List<Row>();
|
||||
|
||||
var roms = item.Read<Models.Metadata.Rom[]>(Models.Metadata.Machine.RomKey);
|
||||
if (roms != null && roms.Any())
|
||||
if (roms != null && roms.Length > 0)
|
||||
{
|
||||
rowItems.AddRange(roms
|
||||
.Where(r => r != null)
|
||||
.Select(rom => ConvertFromInternalModel(rom, item, header)));
|
||||
rowItems.AddRange(
|
||||
Array.ConvertAll(roms, r => ConvertFromInternalModel(r, item, header)));
|
||||
}
|
||||
|
||||
var disks = item.Read<Models.Metadata.Disk[]>(Models.Metadata.Machine.DiskKey);
|
||||
if (disks != null && disks.Any())
|
||||
if (disks != null && disks.Length > 0)
|
||||
{
|
||||
rowItems.AddRange(disks
|
||||
.Where(d => d != null)
|
||||
.Select(disk => ConvertFromInternalModel(disk, item, header)));
|
||||
rowItems.AddRange(
|
||||
Array.ConvertAll(disks, d => ConvertFromInternalModel(d, item, header)));
|
||||
}
|
||||
|
||||
var media = item.Read<Models.Metadata.Media[]>(Models.Metadata.Machine.MediaKey);
|
||||
if (media != null && media.Any())
|
||||
if (media != null && media.Length > 0)
|
||||
{
|
||||
rowItems.AddRange(media
|
||||
.Where(m => m != null)
|
||||
.Select(medium => ConvertFromInternalModel(medium, item, header)));
|
||||
rowItems.AddRange(
|
||||
Array.ConvertAll(media, m => ConvertFromInternalModel(m, item, header)));
|
||||
}
|
||||
|
||||
return rowItems.ToArray();
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
using System.Linq;
|
||||
using System;
|
||||
using SabreTools.Models.SeparatedValue;
|
||||
using SabreTools.Serialization.Interfaces;
|
||||
|
||||
@@ -17,8 +17,11 @@ namespace SabreTools.Serialization.CrossModel
|
||||
[Models.Metadata.MetadataFile.HeaderKey] = ConvertHeaderToInternalModel(obj),
|
||||
};
|
||||
|
||||
if (obj?.Row != null && obj.Row.Any())
|
||||
metadataFile[Models.Metadata.MetadataFile.MachineKey] = obj.Row.Select(ConvertMachineToInternalModel).ToArray();
|
||||
if (obj?.Row != null && obj.Row.Length > 0)
|
||||
{
|
||||
metadataFile[Models.Metadata.MetadataFile.MachineKey]
|
||||
= Array.ConvertAll(obj.Row, ConvertMachineToInternalModel);
|
||||
}
|
||||
|
||||
return metadataFile;
|
||||
}
|
||||
@@ -33,7 +36,7 @@ namespace SabreTools.Serialization.CrossModel
|
||||
[Models.Metadata.Header.HeaderKey] = item.Header,
|
||||
};
|
||||
|
||||
if (item.Row != null && item.Row.Any())
|
||||
if (item.Row != null && item.Row.Length > 0)
|
||||
{
|
||||
var first = item.Row[0];
|
||||
//header[Models.Metadata.Header.FileNameKey] = first.FileName; // Not possible to map
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
using System.Linq;
|
||||
using System;
|
||||
using SabreTools.Models.SoftwareList;
|
||||
using SabreTools.Serialization.Interfaces;
|
||||
|
||||
@@ -16,13 +16,8 @@ namespace SabreTools.Serialization.CrossModel
|
||||
var metadataFile = header != null ? ConvertHeaderFromInternalModel(header) : new Models.SoftwareList.SoftwareList();
|
||||
|
||||
var machines = obj.Read<Models.Metadata.Machine[]>(Models.Metadata.MetadataFile.MachineKey);
|
||||
if (machines != null && machines.Any())
|
||||
{
|
||||
metadataFile.Software = machines
|
||||
.Where(m => m != null)
|
||||
.Select(ConvertMachineFromInternalModel)
|
||||
.ToArray();
|
||||
}
|
||||
if (machines != null && machines.Length > 0)
|
||||
metadataFile.Software = Array.ConvertAll(machines, ConvertMachineFromInternalModel);
|
||||
|
||||
return metadataFile;
|
||||
}
|
||||
@@ -58,31 +53,16 @@ namespace SabreTools.Serialization.CrossModel
|
||||
};
|
||||
|
||||
var infos = item.Read<Models.Metadata.Info[]>(Models.Metadata.Machine.InfoKey);
|
||||
if (infos != null && infos.Any())
|
||||
{
|
||||
software.Info = infos
|
||||
.Where(i => i != null)
|
||||
.Select(ConvertFromInternalModel)
|
||||
.ToArray();
|
||||
}
|
||||
if (infos != null && infos.Length > 0)
|
||||
software.Info = Array.ConvertAll(infos, ConvertFromInternalModel);
|
||||
|
||||
var sharedFeats = item.Read<Models.Metadata.SharedFeat[]>(Models.Metadata.Machine.SharedFeatKey);
|
||||
if (sharedFeats != null && sharedFeats.Any())
|
||||
{
|
||||
software.SharedFeat = sharedFeats
|
||||
.Where(s => s != null)
|
||||
.Select(ConvertFromInternalModel)
|
||||
.ToArray();
|
||||
}
|
||||
if (sharedFeats != null && sharedFeats.Length > 0)
|
||||
software.SharedFeat = Array.ConvertAll(sharedFeats, ConvertFromInternalModel);
|
||||
|
||||
var parts = item.Read<Models.Metadata.Part[]>(Models.Metadata.Machine.PartKey);
|
||||
if (parts != null && parts.Any())
|
||||
{
|
||||
software.Part = parts
|
||||
.Where(p => p != null)
|
||||
.Select(ConvertFromInternalModel)
|
||||
.ToArray();
|
||||
}
|
||||
if (parts != null && parts.Length > 0)
|
||||
software.Part = Array.ConvertAll(parts, ConvertFromInternalModel);
|
||||
|
||||
return software;
|
||||
}
|
||||
@@ -101,13 +81,8 @@ namespace SabreTools.Serialization.CrossModel
|
||||
};
|
||||
|
||||
var roms = item.Read<Models.Metadata.Rom[]>(Models.Metadata.DataArea.RomKey);
|
||||
if (roms != null && roms.Any())
|
||||
{
|
||||
dataArea.Rom = roms
|
||||
.Where(r => r != null)
|
||||
.Select(ConvertFromInternalModel)
|
||||
.ToArray();
|
||||
}
|
||||
if (roms != null && roms.Length > 0)
|
||||
dataArea.Rom = Array.ConvertAll(roms, ConvertFromInternalModel);
|
||||
|
||||
return dataArea;
|
||||
}
|
||||
@@ -125,13 +100,8 @@ namespace SabreTools.Serialization.CrossModel
|
||||
};
|
||||
|
||||
var dipValues = item.Read<Models.Metadata.DipValue[]>(Models.Metadata.DipSwitch.DipValueKey);
|
||||
if (dipValues != null && dipValues.Any())
|
||||
{
|
||||
dipSwitch.DipValue = dipValues
|
||||
.Where(d => d != null)
|
||||
.Select(ConvertFromInternalModel)
|
||||
.ToArray();
|
||||
}
|
||||
if (dipValues != null && dipValues.Length > 0)
|
||||
dipSwitch.DipValue = Array.ConvertAll(dipValues, ConvertFromInternalModel);
|
||||
|
||||
return dipSwitch;
|
||||
}
|
||||
@@ -177,13 +147,8 @@ namespace SabreTools.Serialization.CrossModel
|
||||
};
|
||||
|
||||
var disks = item.Read<Models.Metadata.Disk[]>(Models.Metadata.DiskArea.DiskKey);
|
||||
if (disks != null && disks.Any())
|
||||
{
|
||||
diskArea.Disk = disks
|
||||
.Where(d => d != null)
|
||||
.Select(ConvertFromInternalModel)
|
||||
.ToArray();
|
||||
}
|
||||
if (disks != null && disks.Length > 0)
|
||||
diskArea.Disk = Array.ConvertAll(disks, ConvertFromInternalModel);
|
||||
|
||||
return diskArea;
|
||||
}
|
||||
@@ -226,40 +191,20 @@ namespace SabreTools.Serialization.CrossModel
|
||||
};
|
||||
|
||||
var features = item.Read<Models.Metadata.Feature[]>(Models.Metadata.Part.FeatureKey);
|
||||
if (features != null && features.Any())
|
||||
{
|
||||
part.Feature = features
|
||||
.Where(f => f != null)
|
||||
.Select(ConvertFromInternalModel)
|
||||
.ToArray();
|
||||
}
|
||||
if (features != null && features.Length > 0)
|
||||
part.Feature = Array.ConvertAll(features, ConvertFromInternalModel);
|
||||
|
||||
var dataAreas = item.Read<Models.Metadata.DataArea[]>(Models.Metadata.Part.DataAreaKey);
|
||||
if (dataAreas != null && dataAreas.Any())
|
||||
{
|
||||
part.DataArea = dataAreas
|
||||
.Where(d => d != null)
|
||||
.Select(ConvertFromInternalModel)
|
||||
.ToArray();
|
||||
}
|
||||
if (dataAreas != null && dataAreas.Length > 0)
|
||||
part.DataArea = Array.ConvertAll(dataAreas, ConvertFromInternalModel);
|
||||
|
||||
var diskAreas = item.Read<Models.Metadata.DiskArea[]>(Models.Metadata.Part.DiskAreaKey);
|
||||
if (diskAreas != null && diskAreas.Any())
|
||||
{
|
||||
part.DiskArea = diskAreas
|
||||
.Where(d => d != null)
|
||||
.Select(ConvertFromInternalModel)
|
||||
.ToArray();
|
||||
}
|
||||
if (diskAreas != null && diskAreas.Length > 0)
|
||||
part.DiskArea = Array.ConvertAll(diskAreas, ConvertFromInternalModel);
|
||||
|
||||
var dipSwitches = item.Read<Models.Metadata.DipSwitch[]>(Models.Metadata.Part.DipSwitchKey);
|
||||
if (dipSwitches != null && dipSwitches.Any())
|
||||
{
|
||||
part.DipSwitch = dipSwitches
|
||||
.Where(d => d != null)
|
||||
.Select(ConvertFromInternalModel)
|
||||
.ToArray();
|
||||
}
|
||||
if (dipSwitches != null && dipSwitches.Length > 0)
|
||||
part.DipSwitch = Array.ConvertAll(dipSwitches, ConvertFromInternalModel);
|
||||
|
||||
return part;
|
||||
}
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
using System.Linq;
|
||||
using System;
|
||||
using SabreTools.Models.SoftwareList;
|
||||
using SabreTools.Serialization.Interfaces;
|
||||
|
||||
@@ -17,12 +17,10 @@ namespace SabreTools.Serialization.CrossModel
|
||||
[Models.Metadata.MetadataFile.HeaderKey] = ConvertHeaderToInternalModel(item),
|
||||
};
|
||||
|
||||
if (item?.Software != null && item.Software.Any())
|
||||
if (item?.Software != null && item.Software.Length > 0)
|
||||
{
|
||||
metadataFile[Models.Metadata.MetadataFile.MachineKey] = item.Software
|
||||
.Where(s => s != null)
|
||||
.Select(ConvertMachineToInternalModel)
|
||||
.ToArray();
|
||||
metadataFile[Models.Metadata.MetadataFile.MachineKey]
|
||||
= Array.ConvertAll(item.Software, ConvertMachineToInternalModel);
|
||||
}
|
||||
|
||||
return metadataFile;
|
||||
@@ -58,14 +56,14 @@ namespace SabreTools.Serialization.CrossModel
|
||||
[Models.Metadata.Machine.NotesKey] = item.Notes,
|
||||
};
|
||||
|
||||
if (item.Info != null && item.Info.Any())
|
||||
machine[Models.Metadata.Machine.InfoKey] = item.Info.Select(ConvertToInternalModel).ToArray();
|
||||
if (item.Info != null && item.Info.Length > 0)
|
||||
machine[Models.Metadata.Machine.InfoKey] = Array.ConvertAll(item.Info, ConvertToInternalModel);
|
||||
|
||||
if (item.SharedFeat != null && item.SharedFeat.Any())
|
||||
machine[Models.Metadata.Machine.SharedFeatKey] = item.SharedFeat.Select(ConvertToInternalModel).ToArray();
|
||||
if (item.SharedFeat != null && item.SharedFeat.Length > 0)
|
||||
machine[Models.Metadata.Machine.SharedFeatKey] = Array.ConvertAll(item.SharedFeat, ConvertToInternalModel);
|
||||
|
||||
if (item.Part != null && item.Part.Any())
|
||||
machine[Models.Metadata.Machine.PartKey] = item.Part.Select(ConvertToInternalModel).ToArray();
|
||||
if (item.Part != null && item.Part.Length > 0)
|
||||
machine[Models.Metadata.Machine.PartKey] = Array.ConvertAll(item.Part, ConvertToInternalModel);
|
||||
|
||||
return machine;
|
||||
}
|
||||
@@ -83,8 +81,8 @@ namespace SabreTools.Serialization.CrossModel
|
||||
[Models.Metadata.DataArea.EndiannessKey] = item.Endianness,
|
||||
};
|
||||
|
||||
if (item.Rom != null && item.Rom.Any())
|
||||
dataArea[Models.Metadata.DataArea.RomKey] = item.Rom.Select(ConvertToInternalModel).ToArray();
|
||||
if (item.Rom != null && item.Rom.Length > 0)
|
||||
dataArea[Models.Metadata.DataArea.RomKey] = Array.ConvertAll(item.Rom, ConvertToInternalModel);
|
||||
|
||||
return dataArea;
|
||||
}
|
||||
@@ -101,8 +99,8 @@ namespace SabreTools.Serialization.CrossModel
|
||||
[Models.Metadata.DipSwitch.MaskKey] = item.Mask,
|
||||
};
|
||||
|
||||
if (item.DipValue != null && item.DipValue.Any())
|
||||
dipSwitch[Models.Metadata.DipSwitch.DipValueKey] = item.DipValue.Select(ConvertToInternalModel).ToArray();
|
||||
if (item.DipValue != null && item.DipValue.Length > 0)
|
||||
dipSwitch[Models.Metadata.DipSwitch.DipValueKey] = Array.ConvertAll(item.DipValue, ConvertToInternalModel);
|
||||
|
||||
return dipSwitch;
|
||||
}
|
||||
@@ -147,8 +145,8 @@ namespace SabreTools.Serialization.CrossModel
|
||||
[Models.Metadata.DiskArea.NameKey] = item.Name,
|
||||
};
|
||||
|
||||
if (item.Disk != null && item.Disk.Any())
|
||||
diskArea[Models.Metadata.DiskArea.DiskKey] = item.Disk.Select(ConvertToInternalModel).ToArray();
|
||||
if (item.Disk != null && item.Disk.Length > 0)
|
||||
diskArea[Models.Metadata.DiskArea.DiskKey] = Array.ConvertAll(item.Disk, ConvertToInternalModel);
|
||||
|
||||
return diskArea;
|
||||
}
|
||||
@@ -190,17 +188,17 @@ namespace SabreTools.Serialization.CrossModel
|
||||
[Models.Metadata.Part.InterfaceKey] = item.Interface,
|
||||
};
|
||||
|
||||
if (item.Feature != null && item.Feature.Any())
|
||||
part[Models.Metadata.Part.FeatureKey] = item.Feature.Select(ConvertToInternalModel).ToArray();
|
||||
if (item.Feature != null && item.Feature.Length > 0)
|
||||
part[Models.Metadata.Part.FeatureKey] = Array.ConvertAll(item.Feature, ConvertToInternalModel);
|
||||
|
||||
if (item.DataArea != null && item.DataArea.Any())
|
||||
part[Models.Metadata.Part.DataAreaKey] = item.DataArea.Select(ConvertToInternalModel).ToArray();
|
||||
if (item.DataArea != null && item.DataArea.Length > 0)
|
||||
part[Models.Metadata.Part.DataAreaKey] = Array.ConvertAll(item.DataArea, ConvertToInternalModel);
|
||||
|
||||
if (item.DiskArea != null && item.DiskArea.Any())
|
||||
part[Models.Metadata.Part.DiskAreaKey] = item.DiskArea.Select(ConvertToInternalModel).ToArray();
|
||||
if (item.DiskArea != null && item.DiskArea.Length > 0)
|
||||
part[Models.Metadata.Part.DiskAreaKey] = Array.ConvertAll(item.DiskArea, ConvertToInternalModel);
|
||||
|
||||
if (item.DipSwitch != null && item.DipSwitch.Any())
|
||||
part[Models.Metadata.Part.DipSwitchKey] = item.DipSwitch.Select(ConvertToInternalModel).ToArray();
|
||||
if (item.DipSwitch != null && item.DipSwitch.Length > 0)
|
||||
part[Models.Metadata.Part.DipSwitchKey] = Array.ConvertAll(item.DipSwitch, ConvertToInternalModel);
|
||||
|
||||
return part;
|
||||
}
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Text;
|
||||
@@ -20,9 +19,6 @@ namespace SabreTools.Serialization.Deserializers
|
||||
if (data.Position < 0 || data.Position >= data.Length)
|
||||
return null;
|
||||
|
||||
// Cache the current offset
|
||||
int initialOffset = (int)data.Position;
|
||||
|
||||
// Create a new media key block to fill
|
||||
var mediaKeyBlock = new MediaKeyBlock();
|
||||
|
||||
@@ -59,7 +55,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
}
|
||||
|
||||
// Set the records
|
||||
mediaKeyBlock.Records = records.ToArray();
|
||||
mediaKeyBlock.Records = [.. records];
|
||||
|
||||
#endregion
|
||||
|
||||
@@ -73,38 +69,27 @@ namespace SabreTools.Serialization.Deserializers
|
||||
/// <returns>Filled record on success, null on error</returns>
|
||||
private static Record? ParseRecord(Stream data)
|
||||
{
|
||||
// TODO: Use marshalling here instead of building
|
||||
|
||||
// The first 4 bytes make up the type and length
|
||||
byte[]? typeAndLength = data.ReadBytes(4);
|
||||
if (typeAndLength == null)
|
||||
return null;
|
||||
|
||||
RecordType type = (RecordType)typeAndLength[0];
|
||||
|
||||
// Remove the first byte and parse as big-endian
|
||||
typeAndLength[0] = 0x00;
|
||||
Array.Reverse(typeAndLength);
|
||||
uint length = BitConverter.ToUInt32(typeAndLength, 0);
|
||||
RecordType type = (RecordType)data.ReadByteValue();
|
||||
uint length = data.ReadUInt24();
|
||||
|
||||
// Create a record based on the type
|
||||
switch (type)
|
||||
return type switch
|
||||
{
|
||||
// Recognized record types
|
||||
case RecordType.EndOfMediaKeyBlock: return ParseEndOfMediaKeyBlockRecord(data, type, length);
|
||||
case RecordType.ExplicitSubsetDifference: return ParseExplicitSubsetDifferenceRecord(data, type, length);
|
||||
case RecordType.MediaKeyData: return ParseMediaKeyDataRecord(data, type, length);
|
||||
case RecordType.SubsetDifferenceIndex: return ParseSubsetDifferenceIndexRecord(data, type, length);
|
||||
case RecordType.TypeAndVersion: return ParseTypeAndVersionRecord(data, type, length);
|
||||
case RecordType.DriveRevocationList: return ParseDriveRevocationListRecord(data, type, length);
|
||||
case RecordType.HostRevocationList: return ParseHostRevocationListRecord(data, type, length);
|
||||
case RecordType.VerifyMediaKey: return ParseVerifyMediaKeyRecord(data, type, length);
|
||||
case RecordType.Copyright: return ParseCopyrightRecord(data, type, length);
|
||||
|
||||
// Unrecognized record type
|
||||
default:
|
||||
return null;
|
||||
}
|
||||
// Known record types
|
||||
RecordType.EndOfMediaKeyBlock => ParseEndOfMediaKeyBlockRecord(data, type, length),
|
||||
RecordType.ExplicitSubsetDifference => ParseExplicitSubsetDifferenceRecord(data, type, length),
|
||||
RecordType.MediaKeyData => ParseMediaKeyDataRecord(data, type, length),
|
||||
RecordType.SubsetDifferenceIndex => ParseSubsetDifferenceIndexRecord(data, type, length),
|
||||
RecordType.TypeAndVersion => ParseTypeAndVersionRecord(data, type, length),
|
||||
RecordType.DriveRevocationList => ParseDriveRevocationListRecord(data, type, length),
|
||||
RecordType.HostRevocationList => ParseHostRevocationListRecord(data, type, length),
|
||||
RecordType.VerifyMediaKey => ParseVerifyMediaKeyRecord(data, type, length),
|
||||
RecordType.Copyright => ParseCopyrightRecord(data, type, length),
|
||||
|
||||
// Unknown record type
|
||||
_ => null,
|
||||
};
|
||||
}
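The removed block zeroed the type byte, reversed the buffer, and ran it through BitConverter.ToUInt32, i.e. it read the three length bytes as a big-endian value. The replacement assumes ReadUInt24 applies the same byte order; that is worth confirming, since a little-endian read would produce different lengths. For comparison, an explicit big-endian parse of those three bytes looks like this (a sketch, not the library call):

byte[]? lengthBytes = data.ReadBytes(3);
uint length = lengthBytes == null
    ? 0u
    : (uint)((lengthBytes[0] << 16) | (lengthBytes[1] << 8) | lengthBytes[2]);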
/// <summary>
|
||||
@@ -118,7 +103,6 @@ namespace SabreTools.Serialization.Deserializers
|
||||
if (type != RecordType.EndOfMediaKeyBlock)
|
||||
return null;
|
||||
|
||||
// TODO: Use marshalling here instead of building
|
||||
var record = new EndOfMediaKeyBlockRecord();
|
||||
|
||||
record.RecordType = type;
|
||||
@@ -140,7 +124,6 @@ namespace SabreTools.Serialization.Deserializers
|
||||
if (type != RecordType.ExplicitSubsetDifference)
|
||||
return null;
|
||||
|
||||
// TODO: Use marshalling here instead of building
|
||||
var record = new ExplicitSubsetDifferenceRecord();
|
||||
|
||||
record.RecordType = type;
|
||||
@@ -164,7 +147,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
}
|
||||
|
||||
// Set the subset differences
|
||||
record.SubsetDifferences = subsetDifferences.ToArray();
|
||||
record.SubsetDifferences = [.. subsetDifferences];
|
||||
|
||||
// If there's any data left, discard it
|
||||
if (data.Position < initialOffset + length)
|
||||
@@ -184,7 +167,6 @@ namespace SabreTools.Serialization.Deserializers
|
||||
if (type != RecordType.MediaKeyData)
|
||||
return null;
|
||||
|
||||
// TODO: Use marshalling here instead of building
|
||||
var record = new MediaKeyDataRecord();
|
||||
|
||||
record.RecordType = type;
|
||||
@@ -205,7 +187,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
}
|
||||
|
||||
// Set the media keys
|
||||
record.MediaKeyData = mediaKeys.ToArray();
|
||||
record.MediaKeyData = [.. mediaKeys];
|
||||
|
||||
return record;
|
||||
}
|
||||
@@ -221,7 +203,6 @@ namespace SabreTools.Serialization.Deserializers
|
||||
if (type != RecordType.SubsetDifferenceIndex)
|
||||
return null;
|
||||
|
||||
// TODO: Use marshalling here instead of building
|
||||
var record = new SubsetDifferenceIndexRecord();
|
||||
|
||||
record.RecordType = type;
|
||||
@@ -243,7 +224,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
}
|
||||
|
||||
// Set the offsets
|
||||
record.Offsets = offsets.ToArray();
|
||||
record.Offsets = [.. offsets];
|
||||
|
||||
return record;
|
||||
}
|
||||
@@ -259,7 +240,6 @@ namespace SabreTools.Serialization.Deserializers
|
||||
if (type != RecordType.TypeAndVersion)
|
||||
return null;
|
||||
|
||||
// TODO: Use marshalling here instead of building
|
||||
var record = new TypeAndVersionRecord();
|
||||
|
||||
record.RecordType = type;
|
||||
@@ -281,7 +261,6 @@ namespace SabreTools.Serialization.Deserializers
|
||||
if (type != RecordType.DriveRevocationList)
|
||||
return null;
|
||||
|
||||
// TODO: Use marshalling here instead of building
|
||||
var record = new DriveRevocationListRecord();
|
||||
|
||||
record.RecordType = type;
|
||||
@@ -322,7 +301,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
}
|
||||
|
||||
// Set the signature blocks
|
||||
record.SignatureBlocks = blocks.ToArray();
|
||||
record.SignatureBlocks = [.. blocks];
|
||||
|
||||
// If there's any data left, discard it
|
||||
if (data.Position < initialOffset + length)
|
||||
@@ -342,7 +321,6 @@ namespace SabreTools.Serialization.Deserializers
|
||||
if (type != RecordType.HostRevocationList)
|
||||
return null;
|
||||
|
||||
// TODO: Use marshalling here instead of building
|
||||
var record = new HostRevocationListRecord();
|
||||
|
||||
record.RecordType = type;
|
||||
@@ -383,7 +361,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
}
|
||||
|
||||
// Set the signature blocks
|
||||
record.SignatureBlocks = blocks.ToArray();
|
||||
record.SignatureBlocks = [.. blocks];
|
||||
|
||||
// If there's any data left, discard it
|
||||
if (data.Position < initialOffset + length)
|
||||
@@ -403,7 +381,6 @@ namespace SabreTools.Serialization.Deserializers
|
||||
if (type != RecordType.VerifyMediaKey)
|
||||
return null;
|
||||
|
||||
// TODO: Use marshalling here instead of building
|
||||
var record = new VerifyMediaKeyRecord();
|
||||
|
||||
record.RecordType = type;
|
||||
@@ -424,7 +401,6 @@ namespace SabreTools.Serialization.Deserializers
|
||||
if (type != RecordType.Copyright)
|
||||
return null;
|
||||
|
||||
// TODO: Use marshalling here instead of building
|
||||
var record = new CopyrightRecord();
|
||||
|
||||
record.RecordType = type;
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using SabreTools.IO.Readers;
|
||||
using SabreTools.Models.AttractMode;
|
||||
@@ -38,7 +37,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
if (!reader.ReadHeader() || reader.HeaderValues == null)
|
||||
return null;
|
||||
|
||||
dat.Header = reader.HeaderValues.ToArray();
|
||||
dat.Header = [.. reader.HeaderValues];
|
||||
|
||||
// Loop through the rows and parse out values
|
||||
var rows = new List<Row>();
|
||||
@@ -72,10 +71,6 @@ namespace SabreTools.Serialization.Deserializers
|
||||
Extra = reader.Line[15],
|
||||
Buttons = reader.Line[16],
|
||||
};
|
||||
|
||||
// If we have additional fields
|
||||
if (reader.Line.Count > HeaderWithoutRomnameCount)
|
||||
row.ADDITIONAL_ELEMENTS = reader.Line.Skip(HeaderWithoutRomnameCount).ToArray();
|
||||
}
|
||||
else
|
||||
{
|
||||
@@ -99,17 +94,13 @@ namespace SabreTools.Serialization.Deserializers
|
||||
Extra = reader.Line[15],
|
||||
Buttons = reader.Line[16],
|
||||
};
|
||||
|
||||
// If we have additional fields
|
||||
if (reader.Line.Count > HeaderWithRomnameCount)
|
||||
row.ADDITIONAL_ELEMENTS = reader.Line.Skip(HeaderWithRomnameCount).ToArray();
|
||||
}
|
||||
|
||||
rows.Add(row);
|
||||
}
|
||||
|
||||
// Assign the rows to the Dat and return
|
||||
dat.Row = rows.ToArray();
|
||||
dat.Row = [.. rows];
|
||||
return dat;
|
||||
}
|
||||
|
||||
|
||||
@@ -56,8 +56,8 @@ namespace SabreTools.Serialization.Deserializers

svm.Unknown2 = data.ReadBytes(4);
svm.Length = data.ReadUInt32();
// if (svm.Length > 0)
// svm.Data = data.ReadBytes((int)svm.Length);
if (svm.Length > 0)
svm.Data = data.ReadBytes((int)svm.Length);

return svm;
}
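svm.Length comes straight from the input, so a corrupt or truncated file can ask for a far larger read than the stream holds. A defensive variant of the newly enabled read would clamp to what remains; this is purely a sketch, not the committed change:

if (svm.Length > 0)
{
    // Never request more bytes than are left in the stream
    int toRead = (int)Math.Min((long)svm.Length, data.Length - data.Position);
    svm.Data = data.ReadBytes(toRead);
}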
@@ -40,24 +40,21 @@ namespace SabreTools.Serialization.Deserializers
|
||||
#region Files
|
||||
|
||||
// If we have any files
|
||||
if (header.Files > 0)
|
||||
var files = new FileEntry[header.Files];
|
||||
|
||||
// Read all entries in turn
|
||||
for (int i = 0; i < header.Files; i++)
|
||||
{
|
||||
var files = new FileEntry[header.Files];
|
||||
var file = ParseFileEntry(data);
|
||||
if (file == null)
|
||||
return null;
|
||||
|
||||
// Read all entries in turn
|
||||
for (int i = 0; i < header.Files; i++)
|
||||
{
|
||||
var file = ParseFileEntry(data);
|
||||
if (file == null)
|
||||
return null;
|
||||
|
||||
files[i] = file;
|
||||
}
|
||||
|
||||
// Set the files
|
||||
archive.Files = files;
|
||||
files[i] = file;
|
||||
}
|
||||
|
||||
// Set the files
|
||||
archive.Files = files;
|
||||
|
||||
#endregion
|
||||
|
||||
return archive;
|
||||
@@ -72,9 +69,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
{
|
||||
var header = data.ReadType<Header>();
|
||||
|
||||
if (header == null)
|
||||
return null;
|
||||
if (header.Magic != SignatureString)
|
||||
if (header?.Magic != SignatureString)
|
||||
return null;
|
||||
|
||||
return header;
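The merged check relies on null-conditional semantics: when header is null, `header?.Magic` evaluates to null, `null != SignatureString` is true, and the method returns null exactly as the removed two-step check did. Spelled out (illustrative only):

bool reject = header?.Magic != SignatureString;
// is equivalent to
bool rejectExpanded = header == null || header.Magic != SignatureString;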
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using SabreTools.IO.Extensions;
|
||||
using SabreTools.Models.BSP;
|
||||
@@ -7,10 +8,10 @@ using static SabreTools.Models.BSP.Constants;
|
||||
|
||||
namespace SabreTools.Serialization.Deserializers
|
||||
{
|
||||
public class BSP : BaseBinaryDeserializer<Models.BSP.File>
|
||||
public class BSP : BaseBinaryDeserializer<BspFile>
|
||||
{
|
||||
/// <inheritdoc/>
|
||||
public override Models.BSP.File? Deserialize(Stream? data)
|
||||
public override BspFile? Deserialize(Stream? data)
|
||||
{
|
||||
// If the data is invalid
|
||||
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
|
||||
@@ -20,17 +21,14 @@ namespace SabreTools.Serialization.Deserializers
|
||||
if (data.Position < 0 || data.Position >= data.Length)
|
||||
return null;
|
||||
|
||||
// Cache the current offset
|
||||
int initialOffset = (int)data.Position;
|
||||
|
||||
// Create a new Half-Life Level to fill
|
||||
var file = new Models.BSP.File();
|
||||
var file = new BspFile();
|
||||
|
||||
#region Header
|
||||
|
||||
// Try to parse the header
|
||||
var header = ParseHeader(data);
|
||||
if (header == null)
|
||||
if (header?.Lumps == null)
|
||||
return null;
|
||||
|
||||
// Set the level header
|
||||
@@ -40,59 +38,70 @@ namespace SabreTools.Serialization.Deserializers
|
||||
|
||||
#region Lumps
|
||||
|
||||
// Create the lump array
|
||||
file.Lumps = new Lump[HL_BSP_LUMP_COUNT];
|
||||
|
||||
// Try to parse the lumps
|
||||
for (int i = 0; i < HL_BSP_LUMP_COUNT; i++)
|
||||
for (int l = 0; l < BSP_HEADER_LUMPS; l++)
|
||||
{
|
||||
var lump = ParseLump(data);
|
||||
if (lump == null)
|
||||
return null;
|
||||
|
||||
file.Lumps[i] = lump;
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Texture header
|
||||
|
||||
// Try to get the texture header lump
|
||||
var textureDataLump = file.Lumps[HL_BSP_LUMP_TEXTUREDATA];
|
||||
if (textureDataLump == null || textureDataLump.Offset == 0 || textureDataLump.Length == 0)
|
||||
return null;
|
||||
|
||||
// Seek to the texture header
|
||||
data.Seek(textureDataLump.Offset, SeekOrigin.Begin);
|
||||
|
||||
// Try to parse the texture header
|
||||
var textureHeader = ParseTextureHeader(data);
|
||||
if (textureHeader == null)
|
||||
return null;
|
||||
|
||||
// Set the texture header
|
||||
file.TextureHeader = textureHeader;
|
||||
|
||||
#endregion
|
||||
|
||||
#region Textures
|
||||
|
||||
// Create the texture array
|
||||
file.Textures = new Texture[textureHeader.TextureCount];
|
||||
|
||||
// Try to parse the textures
|
||||
for (int i = 0; i < textureHeader.TextureCount; i++)
|
||||
{
|
||||
// Get the texture offset
|
||||
int offset = (int)(textureHeader.Offsets![i] + file.Lumps[HL_BSP_LUMP_TEXTUREDATA]!.Offset);
|
||||
if (offset < 0 || offset >= data.Length)
|
||||
// Get the next lump entry
|
||||
var lumpEntry = header.Lumps[l];
|
||||
if (lumpEntry == null)
|
||||
continue;
|
||||
if (lumpEntry.Offset == 0 || lumpEntry.Length == 0)
|
||||
continue;
|
||||
|
||||
// Seek to the texture
|
||||
data.Seek(offset, SeekOrigin.Begin);
|
||||
// Seek to the lump offset
|
||||
data.Seek(lumpEntry.Offset, SeekOrigin.Begin);
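// A possible extra guard (a sketch, not part of the change): lumpEntry.Offset and
// lumpEntry.Length come straight from the file header, so checking them against the
// stream length before the Seek keeps a corrupt header from reading past the end.
if (lumpEntry.Offset < 0 || (long)lumpEntry.Offset + lumpEntry.Length > data.Length)
    continue;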
|
||||
|
||||
var texture = ParseTexture(data);
|
||||
file.Textures[i] = texture;
|
||||
// Read according to the lump type
|
||||
switch ((LumpType)l)
|
||||
{
|
||||
case LumpType.LUMP_ENTITIES:
|
||||
file.Entities = ParseEntitiesLump(data, lumpEntry.Offset, lumpEntry.Length);
|
||||
break;
|
||||
case LumpType.LUMP_PLANES:
|
||||
file.PlanesLump = ParsePlanesLump(data, lumpEntry.Offset, lumpEntry.Length);
|
||||
break;
|
||||
case LumpType.LUMP_TEXTURES:
|
||||
file.TextureLump = ParseTextureLump(data, lumpEntry.Offset, lumpEntry.Length);
|
||||
break;
|
||||
case LumpType.LUMP_VERTICES:
|
||||
file.VerticesLump = ParseVerticesLump(data, lumpEntry.Offset, lumpEntry.Length);
|
||||
break;
|
||||
case LumpType.LUMP_VISIBILITY:
|
||||
file.VisibilityLump = ParseVisibilityLump(data, lumpEntry.Offset, lumpEntry.Length);
|
||||
break;
|
||||
case LumpType.LUMP_NODES:
|
||||
file.NodesLump = ParseNodesLump(data, lumpEntry.Offset, lumpEntry.Length);
|
||||
break;
|
||||
case LumpType.LUMP_TEXINFO:
|
||||
file.TexinfoLump = ParseTexinfoLump(data, lumpEntry.Offset, lumpEntry.Length);
|
||||
break;
|
||||
case LumpType.LUMP_FACES:
|
||||
file.FacesLump = ParseFacesLump(data, lumpEntry.Offset, lumpEntry.Length);
|
||||
break;
|
||||
case LumpType.LUMP_LIGHTING:
|
||||
file.LightmapLump = ParseLightmapLump(data, lumpEntry.Offset, lumpEntry.Length);
|
||||
break;
|
||||
case LumpType.LUMP_CLIPNODES:
|
||||
file.ClipnodesLump = ParseClipnodesLump(data, lumpEntry.Offset, lumpEntry.Length);
|
||||
break;
|
||||
case LumpType.LUMP_LEAVES:
|
||||
file.LeavesLump = ParseLeavesLump(data, lumpEntry.Offset, lumpEntry.Length);
|
||||
break;
|
||||
case LumpType.LUMP_MARKSURFACES:
|
||||
file.MarksurfacesLump = ParseMarksurfacesLump(data, lumpEntry.Offset, lumpEntry.Length);
|
||||
break;
|
||||
case LumpType.LUMP_EDGES:
|
||||
file.EdgesLump = ParseEdgesLump(data, lumpEntry.Offset, lumpEntry.Length);
|
||||
break;
|
||||
case LumpType.LUMP_SURFEDGES:
|
||||
file.SurfedgesLump = ParseSurfedgesLump(data, lumpEntry.Offset, lumpEntry.Length);
|
||||
break;
|
||||
case LumpType.LUMP_MODELS:
|
||||
file.ModelsLump = ParseModelsLump(data, lumpEntry.Offset, lumpEntry.Length);
|
||||
break;
|
||||
default:
|
||||
// Unsupported LumpType value, ignore
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
@@ -106,26 +115,98 @@ namespace SabreTools.Serialization.Deserializers
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled Half-Life Level header on success, null on error</returns>
|
||||
/// <remarks>Only recognized versions are 29 and 30</remarks>
|
||||
private static Header? ParseHeader(Stream data)
|
||||
private static BspHeader? ParseHeader(Stream data)
|
||||
{
|
||||
var header = data.ReadType<Header>();
|
||||
|
||||
var header = data.ReadType<BspHeader>();
|
||||
|
||||
if (header == null)
|
||||
return null;
|
||||
if (header.Version != 29 && header.Version != 30)
|
||||
if (header.Version < 29 || header.Version > 30)
|
||||
return null;
|
||||
|
||||
return header;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a lump
|
||||
/// Parse a Stream into LUMP_ENTITIES
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled lump on success, null on error</returns>
|
||||
private static Lump? ParseLump(Stream data)
|
||||
/// <returns>Filled LUMP_ENTITIES on success, null on error</returns>
|
||||
private static EntitiesLump? ParseEntitiesLump(Stream data, int offset, int length)
|
||||
{
|
||||
return data.ReadType<Lump>();
|
||||
var entities = new List<Entity>();
|
||||
|
||||
// Read the entire lump as text
|
||||
byte[] lumpData = data.ReadBytes(length);
|
||||
string lumpText = Encoding.ASCII.GetString(lumpData);
|
||||
|
||||
// Break the text by ending curly braces
|
||||
string[] lumpSections = lumpText.Split('}');
|
||||
Array.ForEach(lumpSections, s => s.Trim('{', '}'));
|
||||
|
||||
// Loop through all sections
|
||||
for (int i = 0; i < lumpSections.Length; i++)
|
||||
{
|
||||
// Prepare an attributes list
|
||||
var attributes = new List<KeyValuePair<string, string>>();
|
||||
|
||||
// Split the section by newlines
|
||||
string section = lumpSections[i];
|
||||
string[] lines = section.Split('\n');
|
||||
Array.ForEach(lines, l => l.Trim());
|
||||
|
||||
// Convert each line into a key-value pair and add
|
||||
for (int j = 0; j < lines.Length; j++)
|
||||
{
|
||||
// TODO: Split lines and add
|
||||
}
|
||||
|
||||
// Create a new entity and add
|
||||
var entity = new Entity { Attributes = attributes };
|
||||
entities.Add(entity);
|
||||
}
|
||||
|
||||
return new EntitiesLump { Entities = [.. entities] };
|
||||
}
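One thing to flag in the entity parsing above: `Array.ForEach(lumpSections, s => s.Trim('{', '}'))` and `Array.ForEach(lines, l => l.Trim())` discard their results, because `string.Trim` returns a new string rather than mutating in place, so the sections and lines keep their braces and whitespace. A version that keeps the trimmed values (a sketch, not the committed code), with the same idea applying to the per-section lines:

string[] lumpSections = lumpText.Split('}');
for (int i = 0; i < lumpSections.Length; i++)
    lumpSections[i] = lumpSections[i].Trim('{', '}');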
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into LUMP_PLANES
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled LUMP_PLANES on success, null on error</returns>
|
||||
private static PlanesLump? ParsePlanesLump(Stream data, int offset, int length)
|
||||
{
|
||||
var planes = new List<Plane>();
|
||||
while (data.Position < offset + length)
|
||||
{
|
||||
var plane = data.ReadType<Plane>();
|
||||
if (plane != null)
|
||||
planes.Add(plane);
|
||||
}
|
||||
|
||||
return new PlanesLump { Planes = [.. planes] };
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into LUMP_TEXTURES
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled LUMP_TEXTURES on success, null on error</returns>
|
||||
private static TextureLump? ParseTextureLump(Stream data, int offset, int length)
|
||||
{
|
||||
var lump = new TextureLump();
|
||||
|
||||
lump.Header = ParseTextureHeader(data);
|
||||
var textures = new List<MipTexture>();
|
||||
while (data.Position < offset + length)
|
||||
{
|
||||
var texture = data.ReadType<MipTexture>();
|
||||
if (texture != null)
|
||||
textures.Add(texture);
|
||||
}
|
||||
|
||||
lump.Textures = [.. textures];
|
||||
return lump;
|
||||
}
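ParsePlanesLump, ParseTextureLump, and the lump parsers that follow all repeat the same read-until-end-of-lump loop. A generic helper could factor that out; this is a sketch only, and it assumes `ReadType<T>` accepts the same reference types it is given above:

private static List<T> ReadLumpEntries<T>(Stream data, int offset, int length)
    where T : class
{
    var entries = new List<T>();
    while (data.Position < offset + length)
    {
        // Stop instead of spinning if a read fails before the lump end
        var entry = data.ReadType<T>();
        if (entry == null)
            break;
        entries.Add(entry);
    }
    return entries;
}

ParsePlanesLump, for example, would then reduce to `return new PlanesLump { Planes = [.. ReadLumpEntries<Plane>(data, offset, length)] };`.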
|
||||
|
||||
/// <summary>
|
||||
@@ -135,83 +216,231 @@ namespace SabreTools.Serialization.Deserializers
|
||||
/// <returns>Filled Half-Life Level texture header on success, null on error</returns>
|
||||
private static TextureHeader ParseTextureHeader(Stream data)
|
||||
{
|
||||
// TODO: Use marshalling here instead of building
|
||||
var textureHeader = new TextureHeader();
|
||||
|
||||
textureHeader.TextureCount = data.ReadUInt32();
|
||||
|
||||
var offsets = new uint[textureHeader.TextureCount];
|
||||
|
||||
for (int i = 0; i < textureHeader.TextureCount; i++)
|
||||
textureHeader.MipTextureCount = data.ReadUInt32();
|
||||
textureHeader.Offsets = new int[textureHeader.MipTextureCount];
|
||||
for (int i = 0; i < textureHeader.Offsets.Length; i++)
|
||||
{
|
||||
offsets[i] = data.ReadUInt32();
|
||||
if (data.Position >= data.Length)
|
||||
break;
|
||||
textureHeader.Offsets[i] = data.ReadInt32();
|
||||
}
|
||||
|
||||
textureHeader.Offsets = offsets;
|
||||
|
||||
return textureHeader;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a texture
|
||||
/// Parse a Stream into LUMP_VERTICES
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <param name="mipmap">Mipmap level</param>
|
||||
/// <returns>Filled texture on success, null on error</returns>
|
||||
private static Texture ParseTexture(Stream data, uint mipmap = 0)
|
||||
/// <returns>Filled LUMP_VERTICES on success, null on error</returns>
|
||||
private static VerticesLump? ParseVerticesLump(Stream data, int offset, int length)
|
||||
{
|
||||
// TODO: Use marshalling here instead of building
|
||||
var texture = new Texture();
|
||||
|
||||
byte[]? name = data.ReadBytes(16)?.TakeWhile(c => c != '\0')?.ToArray();
|
||||
if (name != null)
|
||||
texture.Name = Encoding.ASCII.GetString(name);
|
||||
texture.Width = data.ReadUInt32();
|
||||
texture.Height = data.ReadUInt32();
|
||||
texture.Offsets = new uint[4];
|
||||
for (int i = 0; i < 4; i++)
|
||||
var vertices = new List<Vector3D>();
|
||||
while (data.Position < offset + length)
|
||||
{
|
||||
texture.Offsets[i] = data.ReadUInt32();
|
||||
vertices.Add(data.ReadType<Vector3D>());
|
||||
}
|
||||
|
||||
// Get the size of the pixel data
|
||||
uint pixelSize = 0;
|
||||
for (int i = 0; i < HL_BSP_MIPMAP_COUNT; i++)
|
||||
return new VerticesLump { Vertices = [.. vertices] };
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into LUMP_VISIBILITY
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled LUMP_VISIBILITY on success, null on error</returns>
|
||||
private static VisibilityLump? ParseVisibilityLump(Stream data, int offset, int length)
|
||||
{
|
||||
var lump = new VisibilityLump();
|
||||
|
||||
lump.NumClusters = data.ReadInt32();
|
||||
lump.ByteOffsets = new int[lump.NumClusters][];
|
||||
for (int i = 0; i < lump.NumClusters; i++)
|
||||
{
|
||||
if (texture.Offsets[i] != 0)
|
||||
lump.ByteOffsets[i] = new int[2];
|
||||
for (int j = 0; j < 2; j++)
|
||||
{
|
||||
pixelSize += (texture.Width >> i) * (texture.Height >> i);
|
||||
lump.ByteOffsets[i][j] = data.ReadInt32();
|
||||
}
|
||||
}
|
||||
|
||||
// If we have no pixel data
|
||||
if (pixelSize == 0)
|
||||
return texture;
|
||||
return lump;
|
||||
}
|
||||
|
||||
texture.TextureData = data.ReadBytes((int)pixelSize);
|
||||
texture.PaletteSize = data.ReadUInt16();
|
||||
texture.PaletteData = data.ReadBytes((int)(texture.PaletteSize * 3));
|
||||
|
||||
// Adjust the dimensions based on mipmap level
|
||||
switch (mipmap)
|
||||
/// <summary>
|
||||
/// Parse a Stream into LUMP_NODES
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled LUMP_NODES on success, null on error</returns>
|
||||
private static BspNodesLump? ParseNodesLump(Stream data, int offset, int length)
|
||||
{
|
||||
var nodes = new List<BspNode>();
|
||||
while (data.Position < offset + length)
|
||||
{
|
||||
case 1:
|
||||
texture.Width /= 2;
|
||||
texture.Height /= 2;
|
||||
break;
|
||||
case 2:
|
||||
texture.Width /= 4;
|
||||
texture.Height /= 4;
|
||||
break;
|
||||
case 3:
|
||||
texture.Width /= 8;
|
||||
texture.Height /= 8;
|
||||
break;
|
||||
var node = data.ReadType<BspNode>();
|
||||
if (node != null)
|
||||
nodes.Add(node);
|
||||
}
|
||||
|
||||
return texture;
|
||||
return new BspNodesLump { Nodes = [.. nodes] };
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into LUMP_TEXINFO
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled LUMP_TEXINFO on success, null on error</returns>
|
||||
private static BspTexinfoLump? ParseTexinfoLump(Stream data, int offset, int length)
|
||||
{
|
||||
var texinfos = new List<BspTexinfo>();
|
||||
while (data.Position < offset + length)
|
||||
{
|
||||
var texinfo = data.ReadType<BspTexinfo>();
|
||||
if (texinfo != null)
|
||||
texinfos.Add(texinfo);
|
||||
}
|
||||
|
||||
return new BspTexinfoLump { Texinfos = [.. texinfos] };
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into LUMP_FACES
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled LUMP_FACES on success, null on error</returns>
|
||||
private static BspFacesLump? ParseFacesLump(Stream data, int offset, int length)
|
||||
{
|
||||
var faces = new List<BspFace>();
|
||||
while (data.Position < offset + length)
|
||||
{
|
||||
var face = data.ReadType<BspFace>();
|
||||
if (face != null)
|
||||
faces.Add(face);
|
||||
}
|
||||
|
||||
return new BspFacesLump { Faces = [.. faces] };
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into LUMP_LIGHTING
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled LUMP_LIGHTING on success, null on error</returns>
|
||||
private static LightmapLump? ParseLightmapLump(Stream data, int offset, int length)
|
||||
{
|
||||
var lump = new LightmapLump();
|
||||
lump.Lightmap = new byte[length / 3][];
|
||||
|
||||
for (int i = 0; i < length / 3; i++)
|
||||
{
|
||||
lump.Lightmap[i] = data.ReadBytes(3);
|
||||
}
|
||||
|
||||
return lump;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into LUMP_CLIPNODES
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled LUMP_CLIPNODES on success, null on error</returns>
|
||||
private static ClipnodesLump? ParseClipnodesLump(Stream data, int offset, int length)
|
||||
{
|
||||
var clipnodes = new List<Clipnode>();
|
||||
while (data.Position < offset + length)
|
||||
{
|
||||
var clipnode = data.ReadType<Clipnode>();
|
||||
if (clipnode != null)
|
||||
clipnodes.Add(clipnode);
|
||||
}
|
||||
|
||||
return new ClipnodesLump { Clipnodes = [.. clipnodes] };
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into LUMP_LEAVES
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled LUMP_LEAVES on success, null on error</returns>
|
||||
private static BspLeavesLump? ParseLeavesLump(Stream data, int offset, int length)
|
||||
{
|
||||
var leaves = new List<BspLeaf>();
|
||||
while (data.Position < offset + length)
|
||||
{
|
||||
var leaf = data.ReadType<BspLeaf>();
|
||||
if (leaf != null)
|
||||
leaves.Add(leaf);
|
||||
}
|
||||
|
||||
return new BspLeavesLump { Leaves = [.. leaves] };
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into LUMP_MARKSURFACES
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled LUMP_MARKSURFACES on success, null on error</returns>
|
||||
private static MarksurfacesLump? ParseMarksurfacesLump(Stream data, int offset, int length)
|
||||
{
|
||||
var marksurfaces = new List<ushort>();
|
||||
while (data.Position < offset + length)
|
||||
{
|
||||
marksurfaces.Add(data.ReadUInt16());
|
||||
}
|
||||
|
||||
return new MarksurfacesLump { Marksurfaces = [.. marksurfaces] };
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into LUMP_EDGES
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled LUMP_EDGES on success, null on error</returns>
|
||||
private static EdgesLump? ParseEdgesLump(Stream data, int offset, int length)
|
||||
{
|
||||
var edges = new List<Edge>();
|
||||
while (data.Position < offset + length)
|
||||
{
|
||||
var edge = data.ReadType<Edge>();
|
||||
if (edge != null)
|
||||
edges.Add(edge);
|
||||
}
|
||||
|
||||
return new EdgesLump { Edges = [.. edges] };
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into LUMP_SURFEDGES
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled LUMP_SURFEDGES on success, null on error</returns>
|
||||
private static SurfedgesLump? ParseSurfedgesLump(Stream data, int offset, int length)
|
||||
{
|
||||
var surfedges = new List<int>();
|
||||
while (data.Position < offset + length)
|
||||
{
|
||||
surfedges.Add(data.ReadInt32());
|
||||
}
|
||||
|
||||
return new SurfedgesLump { Surfedges = [.. surfedges] };
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into LUMP_MODELS
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled LUMP_MODELS on success, null on error</returns>
|
||||
private static BspModelsLump? ParseModelsLump(Stream data, int offset, int length)
|
||||
{
|
||||
var models = new List<BspModel>();
|
||||
while (data.Position < offset + length)
|
||||
{
|
||||
var model = data.ReadType<BspModel>();
|
||||
if (model != null)
|
||||
models.Add(model);
|
||||
}
|
||||
|
||||
return new BspModelsLump { Models = [.. models] };
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,7 +1,5 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Reflection;
|
||||
using SabreTools.Serialization.Interfaces;
|
||||
|
||||
@@ -108,12 +106,13 @@ namespace SabreTools.Serialization.Deserializers
|
||||
if (deserializerName == null)
|
||||
return default;
|
||||
|
||||
// If the deserializer has no model type
|
||||
Type? modelType = typeof(TDeserializer).GetGenericArguments()?.FirstOrDefault();
|
||||
if (modelType == null)
|
||||
// If the deserializer has no generic arguments
|
||||
var genericArgs = typeof(TDeserializer).GetGenericArguments();
|
||||
if (genericArgs.Length == 0)
|
||||
return default;
|
||||
|
||||
// Loop through all loaded assemblies
|
||||
Type modelType = genericArgs[0];
|
||||
foreach (var assembly in AppDomain.CurrentDomain.GetAssemblies())
|
||||
{
|
||||
// If the assembly is invalid
|
||||
@@ -121,19 +120,23 @@ namespace SabreTools.Serialization.Deserializers
|
||||
return default;
|
||||
|
||||
// If not all types can be loaded, use the ones that could be
|
||||
List<Type> assemblyTypes = [];
|
||||
Type?[] assemblyTypes = [];
|
||||
try
|
||||
{
|
||||
assemblyTypes = assembly.GetTypes().ToList<Type>();
|
||||
assemblyTypes = assembly.GetTypes();
|
||||
}
|
||||
catch (ReflectionTypeLoadException rtle)
|
||||
{
|
||||
assemblyTypes = rtle.Types.Where(t => t != null)!.ToList<Type>();
|
||||
assemblyTypes = rtle.Types ?? [];
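// Note: ReflectionTypeLoadException.Types contains a null slot for every type that
// failed to load, which is why the loop below has to skip null entries.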
|
||||
}
|
||||
|
||||
// Loop through all types
|
||||
foreach (Type type in assemblyTypes)
|
||||
foreach (Type? type in assemblyTypes)
|
||||
{
|
||||
// If the type is invalid
|
||||
if (type == null)
|
||||
continue;
|
||||
|
||||
// If the type isn't a class
|
||||
if (!type.IsClass)
|
||||
continue;
|
||||
|
||||
@@ -21,9 +21,6 @@ namespace SabreTools.Serialization.Deserializers
|
||||
if (data.Position < 0 || data.Position >= data.Length)
|
||||
return null;
|
||||
|
||||
// Cache the current offset
|
||||
int initialOffset = (int)data.Position;
|
||||
|
||||
// Create a new binary to fill
|
||||
var binary = new Binary();
|
||||
|
||||
@@ -77,7 +74,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
}
|
||||
|
||||
// Assign the DIFAT sectors table
|
||||
binary.DIFATSectorNumbers = difatSectors.ToArray();
|
||||
binary.DIFATSectorNumbers = [.. difatSectors];
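// Note: `[.. difatSectors]` is a C# 12 collection expression; spreading the list into an
// array-typed property is equivalent to the ToArray() call it replaces.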
|
||||
|
||||
#endregion
|
||||
|
||||
@@ -115,7 +112,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
}
|
||||
|
||||
// Assign the FAT sectors table
|
||||
binary.FATSectorNumbers = fatSectors.ToArray();
|
||||
binary.FATSectorNumbers = [.. fatSectors];
|
||||
|
||||
#endregion
|
||||
|
||||
@@ -153,7 +150,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
}
|
||||
|
||||
// Assign the mini FAT sectors table
|
||||
binary.MiniFATSectorNumbers = miniFatSectors.ToArray();
|
||||
binary.MiniFATSectorNumbers = [.. miniFatSectors];
|
||||
|
||||
#endregion
|
||||
|
||||
@@ -219,7 +216,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
}
|
||||
|
||||
// Assign the Directory sectors table
|
||||
binary.DirectoryEntries = directorySectors.ToArray();
|
||||
binary.DirectoryEntries = [.. directorySectors];
|
||||
|
||||
#endregion
|
||||
|
||||
@@ -235,9 +232,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
{
|
||||
var header = data.ReadType<FileHeader>();
|
||||
|
||||
if (header == null)
|
||||
return null;
|
||||
if (header.Signature != SignatureUInt64)
|
||||
if (header?.Signature != SignatureUInt64)
|
||||
return null;
|
||||
if (header.ByteOrder != 0xFFFE)
|
||||
return null;
|
||||
@@ -265,7 +260,6 @@ namespace SabreTools.Serialization.Deserializers
|
||||
/// <returns>Filled sector full of sector numbers on success, null on error</returns>
|
||||
private static SectorNumber[] ParseSectorNumbers(Stream data, ushort sectorShift)
|
||||
{
|
||||
// TODO: Use marshalling here instead of building
|
||||
int sectorCount = (int)(Math.Pow(2, sectorShift) / sizeof(uint));
|
||||
var sectorNumbers = new SectorNumber[sectorCount];
|
||||
|
||||
@@ -286,7 +280,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
/// <returns>Filled sector full of directory entries on success, null on error</returns>
|
||||
private static DirectoryEntry[]? ParseDirectoryEntries(Stream data, ushort sectorShift, ushort majorVersion)
|
||||
{
|
||||
// TODO: Use marshalling here instead of building
|
||||
// TODO: Fix the directory entry size const
|
||||
const int directoryEntrySize = 64 + 2 + 1 + 1 + 4 + 4 + 4 + 16 + 4 + 8 + 8 + 4 + 8;
|
||||
int sectorCount = (int)(Math.Pow(2, sectorShift) / directoryEntrySize);
|
||||
var directoryEntries = new DirectoryEntry[sectorCount];
|
||||
|
||||
253
SabreTools.Serialization/Deserializers/CHD.cs
Normal file
@@ -0,0 +1,253 @@
|
||||
using System;
|
||||
using System.IO;
|
||||
using System.Text;
|
||||
using SabreTools.IO.Extensions;
|
||||
using SabreTools.Models.CHD;
|
||||
|
||||
namespace SabreTools.Serialization.Deserializers
|
||||
{
|
||||
// TODO: Expand this to full CHD files eventually
|
||||
public class CHD : BaseBinaryDeserializer<Header>
|
||||
{
|
||||
/// <inheritdoc/>
|
||||
public override Header? Deserialize(Stream? data)
|
||||
{
|
||||
// If the data is invalid
|
||||
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
|
||||
return null;
|
||||
|
||||
// If the offset is out of bounds
|
||||
if (data.Position < 0 || data.Position >= data.Length)
|
||||
return null;
|
||||
|
||||
// Cache the current offset
|
||||
int initialOffset = (int)data.Position;
|
||||
|
||||
// Determine the header version
|
||||
uint version = GetVersion(data);
|
||||
|
||||
// Read and return the current CHD
|
||||
return version switch
|
||||
{
|
||||
1 => ParseHeaderV1(data),
|
||||
2 => ParseHeaderV2(data),
|
||||
3 => ParseHeaderV3(data),
|
||||
4 => ParseHeaderV4(data),
|
||||
5 => ParseHeaderV5(data),
|
||||
_ => null,
|
||||
};
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Get the matching CHD version, if possible
|
||||
/// </summary>
|
||||
/// <returns>Matching version, 0 if none</returns>
|
||||
private static uint GetVersion(Stream data)
|
||||
{
|
||||
// Read the header values
|
||||
byte[] tagBytes = data.ReadBytes(8);
|
||||
string tag = Encoding.ASCII.GetString(tagBytes);
|
||||
uint length = data.ReadUInt32BigEndian();
|
||||
uint version = data.ReadUInt32BigEndian();
|
||||
|
||||
// Seek back to start
|
||||
data.SeekIfPossible();
|
||||
|
||||
// Check the signature
|
||||
if (!string.Equals(tag, Constants.SignatureString, StringComparison.Ordinal))
|
||||
return 0;
|
||||
|
||||
// Match the version to header length
|
||||
#if NET472_OR_GREATER || NETCOREAPP
|
||||
return (version, length) switch
|
||||
{
|
||||
(1, Constants.HeaderV1Size) => version,
|
||||
(2, Constants.HeaderV2Size) => version,
|
||||
(3, Constants.HeaderV3Size) => version,
|
||||
(4, Constants.HeaderV4Size) => version,
|
||||
(5, Constants.HeaderV5Size) => version,
|
||||
_ => 0,
|
||||
};
|
||||
#else
|
||||
return version switch
|
||||
{
|
||||
1 => length == Constants.HeaderV1Size ? version : 0,
|
||||
2 => length == Constants.HeaderV2Size ? version : 0,
|
||||
3 => length == Constants.HeaderV3Size ? version : 0,
|
||||
4 => length == Constants.HeaderV4Size ? version : 0,
|
||||
5 => length == Constants.HeaderV5Size ? version : 0,
|
||||
_ => 0,
|
||||
};
|
||||
#endif
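// Note: the tuple-pattern switch in the first branch presumably needs System.ValueTuple,
// which the older target frameworks do not ship with, so the fallback branch expresses
// the same version/length check without tuples.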
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a V1 header
|
||||
/// </summary>
|
||||
private static HeaderV1? ParseHeaderV1(Stream data)
|
||||
{
|
||||
var header = new HeaderV1();
|
||||
|
||||
byte[] tagBytes = data.ReadBytes(8);
|
||||
header.Tag = Encoding.ASCII.GetString(tagBytes);
|
||||
if (header.Tag != Constants.SignatureString)
|
||||
return null;
|
||||
|
||||
header.Length = data.ReadUInt32BigEndian();
|
||||
if (header.Length != Constants.HeaderV1Size)
|
||||
return null;
|
||||
|
||||
header.Version = data.ReadUInt32BigEndian();
|
||||
header.Flags = (Flags)data.ReadUInt32BigEndian();
|
||||
header.Compression = (CompressionType)data.ReadUInt32BigEndian();
|
||||
if (header.Compression > CompressionType.CHDCOMPRESSION_ZLIB)
|
||||
return null;
|
||||
|
||||
header.HunkSize = data.ReadUInt32BigEndian();
|
||||
header.TotalHunks = data.ReadUInt32BigEndian();
|
||||
header.Cylinders = data.ReadUInt32BigEndian();
|
||||
header.Heads = data.ReadUInt32BigEndian();
|
||||
header.Sectors = data.ReadUInt32BigEndian();
|
||||
header.MD5 = data.ReadBytes(16);
|
||||
header.ParentMD5 = data.ReadBytes(16);
|
||||
|
||||
return header;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a V2 header
|
||||
/// </summary>
|
||||
private static HeaderV2? ParseHeaderV2(Stream data)
|
||||
{
|
||||
var header = new HeaderV2();
|
||||
|
||||
byte[] tagBytes = data.ReadBytes(8);
|
||||
header.Tag = Encoding.ASCII.GetString(tagBytes);
|
||||
if (header.Tag != Constants.SignatureString)
|
||||
return null;
|
||||
|
||||
header.Length = data.ReadUInt32BigEndian();
|
||||
if (header.Length != Constants.HeaderV2Size)
|
||||
return null;
|
||||
|
||||
header.Version = data.ReadUInt32BigEndian();
|
||||
header.Flags = (Flags)data.ReadUInt32BigEndian();
|
||||
header.Compression = (CompressionType)data.ReadUInt32BigEndian();
|
||||
if (header.Compression > CompressionType.CHDCOMPRESSION_ZLIB)
|
||||
return null;
|
||||
|
||||
header.HunkSize = data.ReadUInt32BigEndian();
|
||||
header.TotalHunks = data.ReadUInt32BigEndian();
|
||||
header.Cylinders = data.ReadUInt32BigEndian();
|
||||
header.Heads = data.ReadUInt32BigEndian();
|
||||
header.Sectors = data.ReadUInt32BigEndian();
|
||||
header.MD5 = data.ReadBytes(16);
|
||||
header.ParentMD5 = data.ReadBytes(16);
|
||||
header.BytesPerSector = data.ReadUInt32BigEndian();
|
||||
|
||||
return header;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a V3 header
|
||||
/// </summary>
|
||||
private static HeaderV3? ParseHeaderV3(Stream data)
|
||||
{
|
||||
var header = new HeaderV3();
|
||||
|
||||
byte[] tagBytes = data.ReadBytes(8);
|
||||
header.Tag = Encoding.ASCII.GetString(tagBytes);
|
||||
if (header.Tag != Constants.SignatureString)
|
||||
return null;
|
||||
|
||||
header.Length = data.ReadUInt32BigEndian();
|
||||
if (header.Length != Constants.HeaderV3Size)
|
||||
return null;
|
||||
|
||||
header.Version = data.ReadUInt32BigEndian();
|
||||
header.Flags = (Flags)data.ReadUInt32BigEndian();
|
||||
header.Compression = (CompressionType)data.ReadUInt32BigEndian();
|
||||
if (header.Compression > CompressionType.CHDCOMPRESSION_ZLIB_PLUS)
|
||||
return null;
|
||||
|
||||
header.TotalHunks = data.ReadUInt32BigEndian();
|
||||
header.LogicalBytes = data.ReadUInt64BigEndian();
|
||||
header.MetaOffset = data.ReadUInt64BigEndian();
|
||||
header.MD5 = data.ReadBytes(16);
|
||||
header.ParentMD5 = data.ReadBytes(16);
|
||||
header.HunkBytes = data.ReadUInt32BigEndian();
|
||||
header.SHA1 = data.ReadBytes(20);
|
||||
header.ParentSHA1 = data.ReadBytes(20);
|
||||
|
||||
return header;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a V4 header
|
||||
/// </summary>
|
||||
private static HeaderV4? ParseHeaderV4(Stream data)
|
||||
{
|
||||
var header = new HeaderV4();
|
||||
|
||||
byte[] tagBytes = data.ReadBytes(8);
|
||||
header.Tag = Encoding.ASCII.GetString(tagBytes);
|
||||
if (header.Tag != Constants.SignatureString)
|
||||
return null;
|
||||
|
||||
header.Length = data.ReadUInt32BigEndian();
|
||||
if (header.Length != Constants.HeaderV4Size)
|
||||
return null;
|
||||
|
||||
header.Version = data.ReadUInt32BigEndian();
|
||||
header.Flags = (Flags)data.ReadUInt32BigEndian();
|
||||
header.Compression = (CompressionType)data.ReadUInt32BigEndian();
|
||||
if (header.Compression > CompressionType.CHDCOMPRESSION_AV)
|
||||
return null;
|
||||
|
||||
header.TotalHunks = data.ReadUInt32BigEndian();
|
||||
header.LogicalBytes = data.ReadUInt64BigEndian();
|
||||
header.MetaOffset = data.ReadUInt64BigEndian();
|
||||
header.HunkBytes = data.ReadUInt32BigEndian();
|
||||
header.SHA1 = data.ReadBytes(20);
|
||||
header.ParentSHA1 = data.ReadBytes(20);
|
||||
header.RawSHA1 = data.ReadBytes(20);
|
||||
|
||||
return header;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a V5 header
|
||||
/// </summary>
|
||||
private static HeaderV5? ParseHeaderV5(Stream data)
|
||||
{
|
||||
var header = new HeaderV5();
|
||||
|
||||
byte[] tagBytes = data.ReadBytes(8);
|
||||
header.Tag = Encoding.ASCII.GetString(tagBytes);
|
||||
if (header.Tag != Constants.SignatureString)
|
||||
return null;
|
||||
|
||||
header.Length = data.ReadUInt32BigEndian();
|
||||
if (header.Length != Constants.HeaderV5Size)
|
||||
return null;
|
||||
|
||||
header.Version = data.ReadUInt32BigEndian();
|
||||
header.Compressors = new CodecType[4];
|
||||
for (int i = 0; i < header.Compressors.Length; i++)
|
||||
{
|
||||
header.Compressors[i] = (CodecType)data.ReadUInt32BigEndian();
|
||||
}
|
||||
|
||||
header.LogicalBytes = data.ReadUInt64BigEndian();
|
||||
header.MapOffset = data.ReadUInt64BigEndian();
|
||||
header.MetaOffset = data.ReadUInt64BigEndian();
|
||||
header.HunkBytes = data.ReadUInt32BigEndian();
|
||||
header.UnitBytes = data.ReadUInt32BigEndian();
|
||||
header.RawSHA1 = data.ReadBytes(20);
|
||||
header.SHA1 = data.ReadBytes(20);
|
||||
header.ParentSHA1 = data.ReadBytes(20);
|
||||
|
||||
return header;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -19,9 +19,6 @@ namespace SabreTools.Serialization.Deserializers
|
||||
if (data.Position < 0 || data.Position >= data.Length)
|
||||
return null;
|
||||
|
||||
// Cache the current offset
|
||||
int initialOffset = (int)data.Position;
|
||||
|
||||
// Create a new CIA archive to fill
|
||||
var cia = new Models.N3DS.CIA();
|
||||
|
||||
@@ -157,8 +154,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
/// <returns>Filled certificate on success, null on error</returns>
|
||||
public static Certificate? ParseCertificate(Stream data)
|
||||
{
|
||||
// TODO: Use marshalling here instead of building
|
||||
Certificate certificate = new Certificate();
|
||||
var certificate = new Certificate();
|
||||
|
||||
certificate.SignatureType = (SignatureType)data.ReadUInt32();
|
||||
switch (certificate.SignatureType)
|
||||
@@ -233,8 +229,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
/// <returns>Filled ticket on success, null on error</returns>
|
||||
public static Ticket? ParseTicket(Stream data, bool fromCdn = false)
|
||||
{
|
||||
// TODO: Use marshalling here instead of building
|
||||
Ticket ticket = new Ticket();
|
||||
var ticket = new Ticket();
|
||||
|
||||
ticket.SignatureType = (SignatureType)data.ReadUInt32();
|
||||
switch (ticket.SignatureType)
|
||||
@@ -338,7 +333,6 @@ namespace SabreTools.Serialization.Deserializers
|
||||
/// <returns>Filled title metadata on success, null on error</returns>
|
||||
public static TitleMetadata? ParseTitleMetadata(Stream data, bool fromCdn = false)
|
||||
{
|
||||
// TODO: Use marshalling here instead of building
|
||||
var titleMetadata = new TitleMetadata();
|
||||
|
||||
titleMetadata.SignatureType = (SignatureType)data.ReadUInt32();
|
||||
|
||||
@@ -70,7 +70,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
var deserializer = new ClrMamePro();
|
||||
return deserializer.Deserialize(data, quotes);
|
||||
}
|
||||
|
||||
|
||||
/// <inheritdoc/>
|
||||
public override MetadataFile? Deserialize(Stream? data)
|
||||
=> Deserialize(data, true);
|
||||
@@ -102,9 +102,6 @@ namespace SabreTools.Serialization.Deserializers
|
||||
var videos = new List<Video>();
|
||||
var dipSwitches = new List<DipSwitch>();
|
||||
|
||||
var additional = new List<string>();
|
||||
var headerAdditional = new List<string>();
|
||||
var gameAdditional = new List<string>();
|
||||
while (!reader.EndOfStream)
|
||||
{
|
||||
// If we have no next line
|
||||
@@ -120,12 +117,6 @@ namespace SabreTools.Serialization.Deserializers
|
||||
case CmpRowType.EndTopLevel:
|
||||
switch (lastTopLevel)
|
||||
{
|
||||
case "doscenter":
|
||||
if (dat.ClrMamePro != null)
|
||||
dat.ClrMamePro.ADDITIONAL_ELEMENTS = [.. headerAdditional];
|
||||
|
||||
headerAdditional.Clear();
|
||||
break;
|
||||
case "game":
|
||||
case "machine":
|
||||
case "resource":
|
||||
@@ -142,7 +133,6 @@ namespace SabreTools.Serialization.Deserializers
|
||||
game.Chip = [.. chips];
|
||||
game.Video = [.. videos];
|
||||
game.DipSwitch = [.. dipSwitches];
|
||||
game.ADDITIONAL_ELEMENTS = [.. gameAdditional];
|
||||
|
||||
games.Add(game);
|
||||
game = null;
|
||||
@@ -158,10 +148,6 @@ namespace SabreTools.Serialization.Deserializers
|
||||
chips.Clear();
|
||||
videos.Clear();
|
||||
dipSwitches.Clear();
|
||||
gameAdditional.Clear();
|
||||
break;
|
||||
default:
|
||||
// No-op
|
||||
break;
|
||||
}
|
||||
continue;
|
||||
@@ -188,10 +174,6 @@ namespace SabreTools.Serialization.Deserializers
|
||||
case "set":
|
||||
game = new Set();
|
||||
break;
|
||||
default:
|
||||
if (reader.CurrentLine != null)
|
||||
additional.Add(reader.CurrentLine);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -249,10 +231,6 @@ namespace SabreTools.Serialization.Deserializers
|
||||
case "forcepacking":
|
||||
dat.ClrMamePro.ForcePacking = reader.Standalone?.Value;
|
||||
break;
|
||||
default:
|
||||
if (reader.CurrentLine != null)
|
||||
headerAdditional.Add(reader.CurrentLine);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -303,14 +281,9 @@ namespace SabreTools.Serialization.Deserializers
|
||||
var sample = new Sample
|
||||
{
|
||||
Name = reader.Standalone?.Value ?? string.Empty,
|
||||
ADDITIONAL_ELEMENTS = [],
|
||||
};
|
||||
samples.Add(sample);
|
||||
break;
|
||||
default:
|
||||
if (reader.CurrentLine != null)
|
||||
gameAdditional.Add(reader.CurrentLine);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -391,22 +364,13 @@ namespace SabreTools.Serialization.Deserializers
|
||||
game.Driver = driver;
|
||||
break;
|
||||
default:
|
||||
if (reader.CurrentLine != null)
|
||||
gameAdditional.Add(reader.CurrentLine);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
else
|
||||
{
|
||||
if (reader.CurrentLine != null)
|
||||
additional.Add(reader.CurrentLine);
|
||||
}
|
||||
}
|
||||
|
||||
// Add extra pieces and return
|
||||
dat.Game = [.. games];
|
||||
dat.ADDITIONAL_ELEMENTS = [.. additional];
|
||||
return dat;
|
||||
}
|
||||
|
||||
@@ -447,7 +411,6 @@ namespace SabreTools.Serialization.Deserializers
|
||||
}
|
||||
}
|
||||
|
||||
release.ADDITIONAL_ELEMENTS = [.. itemAdditional];
|
||||
return release;
|
||||
}
|
||||
|
||||
@@ -461,7 +424,6 @@ namespace SabreTools.Serialization.Deserializers
|
||||
if (reader.Internal == null)
|
||||
return null;
|
||||
|
||||
var itemAdditional = new List<string>();
|
||||
var biosset = new BiosSet();
|
||||
foreach (var kvp in reader.Internal)
|
||||
{
|
||||
@@ -476,13 +438,9 @@ namespace SabreTools.Serialization.Deserializers
|
||||
case "default":
|
||||
biosset.Default = kvp.Value;
|
||||
break;
|
||||
default:
|
||||
itemAdditional.Add($"{kvp.Key}: {kvp.Value}");
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
biosset.ADDITIONAL_ELEMENTS = [.. itemAdditional];
|
||||
return biosset;
|
||||
}
|
||||
|
||||
@@ -496,7 +454,6 @@ namespace SabreTools.Serialization.Deserializers
|
||||
if (reader.Internal == null)
|
||||
return null;
|
||||
|
||||
var itemAdditional = new List<string>();
|
||||
var rom = new Rom();
|
||||
foreach (var kvp in reader.Internal)
|
||||
{
|
||||
@@ -565,13 +522,9 @@ namespace SabreTools.Serialization.Deserializers
|
||||
case "mia":
|
||||
rom.MIA = kvp.Value;
|
||||
break;
|
||||
default:
|
||||
itemAdditional.Add($"{kvp.Key}: {kvp.Value}");
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
rom.ADDITIONAL_ELEMENTS = [.. itemAdditional];
|
||||
return rom;
|
||||
}
|
||||
|
||||
@@ -585,7 +538,6 @@ namespace SabreTools.Serialization.Deserializers
|
||||
if (reader.Internal == null)
|
||||
return null;
|
||||
|
||||
var itemAdditional = new List<string>();
|
||||
var disk = new Disk();
|
||||
foreach (var kvp in reader.Internal)
|
||||
{
|
||||
@@ -609,13 +561,9 @@ namespace SabreTools.Serialization.Deserializers
|
||||
case "flags":
|
||||
disk.Flags = kvp.Value;
|
||||
break;
|
||||
default:
|
||||
itemAdditional.Add($"{kvp.Key}: {kvp.Value}");
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
disk.ADDITIONAL_ELEMENTS = [.. itemAdditional];
|
||||
return disk;
|
||||
}
|
||||
|
||||
@@ -629,7 +577,6 @@ namespace SabreTools.Serialization.Deserializers
|
||||
if (reader.Internal == null)
|
||||
return null;
|
||||
|
||||
var itemAdditional = new List<string>();
|
||||
var media = new Media();
|
||||
foreach (var kvp in reader.Internal)
|
||||
{
|
||||
@@ -650,13 +597,9 @@ namespace SabreTools.Serialization.Deserializers
|
||||
case "spamsum":
|
||||
media.SpamSum = kvp.Value;
|
||||
break;
|
||||
default:
|
||||
itemAdditional.Add($"{kvp.Key}: {kvp.Value}");
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
media.ADDITIONAL_ELEMENTS = [.. itemAdditional];
|
||||
return media;
|
||||
}
|
||||
|
||||
@@ -670,7 +613,6 @@ namespace SabreTools.Serialization.Deserializers
|
||||
if (reader.Internal == null)
|
||||
return null;
|
||||
|
||||
var itemAdditional = new List<string>();
|
||||
var sample = new Sample();
|
||||
foreach (var kvp in reader.Internal)
|
||||
{
|
||||
@@ -679,13 +621,9 @@ namespace SabreTools.Serialization.Deserializers
|
||||
case "name":
|
||||
sample.Name = kvp.Value;
|
||||
break;
|
||||
default:
|
||||
itemAdditional.Add($"{kvp.Key}: {kvp.Value}");
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
sample.ADDITIONAL_ELEMENTS = [.. itemAdditional];
|
||||
return sample;
|
||||
}
|
||||
|
||||
@@ -699,7 +637,6 @@ namespace SabreTools.Serialization.Deserializers
|
||||
if (reader.Internal == null)
|
||||
return null;
|
||||
|
||||
var itemAdditional = new List<string>();
|
||||
var archive = new Archive();
|
||||
foreach (var kvp in reader.Internal)
|
||||
{
|
||||
@@ -708,13 +645,9 @@ namespace SabreTools.Serialization.Deserializers
|
||||
case "name":
|
||||
archive.Name = kvp.Value;
|
||||
break;
|
||||
default:
|
||||
itemAdditional.Add($"{kvp.Key}: {kvp.Value}");
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
archive.ADDITIONAL_ELEMENTS = [.. itemAdditional];
|
||||
return archive;
|
||||
}
|
||||
|
||||
@@ -728,7 +661,6 @@ namespace SabreTools.Serialization.Deserializers
|
||||
if (reader.Internal == null)
|
||||
return null;
|
||||
|
||||
var itemAdditional = new List<string>();
|
||||
var chip = new Chip();
|
||||
foreach (var kvp in reader.Internal)
|
||||
{
|
||||
@@ -746,13 +678,9 @@ namespace SabreTools.Serialization.Deserializers
|
||||
case "clock":
|
||||
chip.Clock = kvp.Value;
|
||||
break;
|
||||
default:
|
||||
itemAdditional.Add($"{kvp.Key}: {kvp.Value}");
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
chip.ADDITIONAL_ELEMENTS = itemAdditional.ToArray();
|
||||
return chip;
|
||||
}
|
||||
|
||||
@@ -766,7 +694,6 @@ namespace SabreTools.Serialization.Deserializers
|
||||
if (reader.Internal == null)
|
||||
return null;
|
||||
|
||||
var itemAdditional = new List<string>();
|
||||
var video = new Video();
|
||||
foreach (var kvp in reader.Internal)
|
||||
{
|
||||
@@ -793,13 +720,9 @@ namespace SabreTools.Serialization.Deserializers
|
||||
case "freq":
|
||||
video.Freq = kvp.Value;
|
||||
break;
|
||||
default:
|
||||
itemAdditional.Add($"{kvp.Key}: {kvp.Value}");
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
video.ADDITIONAL_ELEMENTS = itemAdditional.ToArray();
|
||||
return video;
|
||||
}
|
||||
|
||||
@@ -813,7 +736,6 @@ namespace SabreTools.Serialization.Deserializers
|
||||
if (reader.Internal == null)
|
||||
return null;
|
||||
|
||||
var itemAdditional = new List<string>();
|
||||
var sound = new Sound();
|
||||
foreach (var kvp in reader.Internal)
|
||||
{
|
||||
@@ -822,13 +744,9 @@ namespace SabreTools.Serialization.Deserializers
|
||||
case "channels":
|
||||
sound.Channels = kvp.Value;
|
||||
break;
|
||||
default:
|
||||
itemAdditional.Add($"{kvp.Key}: {kvp.Value}");
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
sound.ADDITIONAL_ELEMENTS = itemAdditional.ToArray();
|
||||
return sound;
|
||||
}
|
||||
|
||||
@@ -842,7 +760,6 @@ namespace SabreTools.Serialization.Deserializers
|
||||
if (reader.Internal == null)
|
||||
return null;
|
||||
|
||||
var itemAdditional = new List<string>();
|
||||
var input = new Input();
|
||||
foreach (var kvp in reader.Internal)
|
||||
{
|
||||
@@ -866,13 +783,9 @@ namespace SabreTools.Serialization.Deserializers
|
||||
case "service":
|
||||
input.Service = kvp.Value;
|
||||
break;
|
||||
default:
|
||||
itemAdditional.Add($"{kvp.Key}: {kvp.Value}");
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
input.ADDITIONAL_ELEMENTS = itemAdditional.ToArray();
|
||||
return input;
|
||||
}
|
||||
|
||||
@@ -886,7 +799,6 @@ namespace SabreTools.Serialization.Deserializers
|
||||
if (reader.Internal == null)
|
||||
return null;
|
||||
|
||||
var itemAdditional = new List<string>();
|
||||
var dipswitch = new DipSwitch();
|
||||
var entries = new List<string>();
|
||||
foreach (var kvp in reader.Internal)
|
||||
@@ -902,14 +814,10 @@ namespace SabreTools.Serialization.Deserializers
|
||||
case "default":
|
||||
dipswitch.Default = kvp.Value;
|
||||
break;
|
||||
default:
|
||||
itemAdditional.Add($"{kvp.Key}: {kvp.Value}");
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
dipswitch.Entry = [.. entries];
|
||||
dipswitch.ADDITIONAL_ELEMENTS = itemAdditional.ToArray();
|
||||
return dipswitch;
|
||||
}
|
||||
|
||||
@@ -923,7 +831,6 @@ namespace SabreTools.Serialization.Deserializers
|
||||
if (reader.Internal == null)
|
||||
return null;
|
||||
|
||||
var itemAdditional = new List<string>();
|
||||
var driver = new Driver();
|
||||
foreach (var kvp in reader.Internal)
|
||||
{
|
||||
@@ -944,13 +851,9 @@ namespace SabreTools.Serialization.Deserializers
|
||||
case "blit":
|
||||
driver.Blit = kvp.Value;
|
||||
break;
|
||||
default:
|
||||
itemAdditional.Add($"{kvp.Key}: {kvp.Value}");
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
driver.ADDITIONAL_ELEMENTS = itemAdditional.ToArray();
|
||||
return driver;
|
||||
}
|
||||
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text.RegularExpressions;
|
||||
using SabreTools.IO.Extensions;
|
||||
using SabreTools.Models.CueSheets;
|
||||
@@ -44,11 +43,13 @@ namespace SabreTools.Serialization.Deserializers
|
||||
continue;
|
||||
|
||||
// http://stackoverflow.com/questions/554013/regular-expression-to-split-on-spaces-unless-in-quotes
|
||||
string[] splitLine = Regex
|
||||
.Matches(line, @"[^\s""]+|""[^""]*""")
|
||||
.Cast<Match>()
|
||||
.Select(m => m.Groups[0].Value)
|
||||
.ToArray();
|
||||
var matchCol = Regex.Matches(line, @"[^\s""]+|""[^""]*""");
|
||||
var splitLine = new List<string>();
|
||||
foreach (Match? match in matchCol)
|
||||
{
|
||||
if (match != null)
|
||||
splitLine.Add(match.Groups[0].Value);
|
||||
}
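// Example (illustrative): for a line such as FILE "track 01.bin" BINARY the pattern
// above yields the tokens FILE, "track 01.bin", and BINARY; quoted spans stay intact
// while unquoted text splits on whitespace.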
|
||||
|
||||
switch (splitLine[0])
|
||||
{
|
||||
@@ -59,7 +60,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
|
||||
// Read MCN
|
||||
case "CATALOG":
|
||||
if (splitLine.Length < 2)
|
||||
if (splitLine.Count < 2)
|
||||
throw new FormatException($"CATALOG line malformed: {line}");
|
||||
|
||||
cueSheet.Catalog = splitLine[1].Trim('"');
|
||||
@@ -67,7 +68,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
|
||||
// Read external CD-Text file path
|
||||
case "CDTEXTFILE":
|
||||
if (splitLine.Length < 2)
|
||||
if (splitLine.Count < 2)
|
||||
throw new FormatException($"CDTEXTFILE line malformed: {line}");
|
||||
|
||||
cueSheet.CdTextFile = splitLine[1].Trim('"');
|
||||
@@ -75,7 +76,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
|
||||
// Read CD-Text enhanced performer
|
||||
case "PERFORMER":
|
||||
if (splitLine.Length < 2)
|
||||
if (splitLine.Count < 2)
|
||||
throw new FormatException($"PERFORMER line malformed: {line}");
|
||||
|
||||
cueSheet.Performer = splitLine[1].Trim('"');
|
||||
@@ -83,7 +84,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
|
||||
// Read CD-Text enhanced songwriter
|
||||
case "SONGWRITER":
|
||||
if (splitLine.Length < 2)
|
||||
if (splitLine.Count < 2)
|
||||
throw new FormatException($"SONGWRITER line malformed: {line}");
|
||||
|
||||
cueSheet.Songwriter = splitLine[1].Trim('"');
|
||||
@@ -91,7 +92,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
|
||||
// Read CD-Text enhanced title
|
||||
case "TITLE":
|
||||
if (splitLine.Length < 2)
|
||||
if (splitLine.Count < 2)
|
||||
throw new FormatException($"TITLE line malformed: {line}");
|
||||
|
||||
cueSheet.Title = splitLine[1].Trim('"');
|
||||
@@ -99,7 +100,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
|
||||
// Read file information
|
||||
case "FILE":
|
||||
if (splitLine.Length < 3)
|
||||
if (splitLine.Count < 3)
|
||||
throw new FormatException($"FILE line malformed: {line}");
|
||||
|
||||
var file = CreateCueFile(splitLine[1], splitLine[2], data, out lastLine);
|
||||
@@ -152,11 +153,13 @@ namespace SabreTools.Serialization.Deserializers
|
||||
continue;
|
||||
|
||||
// http://stackoverflow.com/questions/554013/regular-expression-to-split-on-spaces-unless-in-quotes
|
||||
string[] splitLine = Regex
|
||||
.Matches(line, @"[^\s""]+|""[^""]*""")
|
||||
.Cast<Match>()
|
||||
.Select(m => m.Groups[0].Value)
|
||||
.ToArray();
|
||||
var matchCol = Regex.Matches(line, @"[^\s""]+|""[^""]*""");
|
||||
var splitLine = new List<string>();
|
||||
foreach (Match? match in matchCol)
|
||||
{
|
||||
if (match != null)
|
||||
splitLine.Add(match.Groups[0].Value);
|
||||
}
|
||||
|
||||
switch (splitLine[0])
|
||||
{
|
||||
@@ -167,7 +170,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
|
||||
// Read track information
|
||||
case "TRACK":
|
||||
if (splitLine.Length < 3)
|
||||
if (splitLine.Count < 3)
|
||||
throw new FormatException($"TRACK line malformed: {line}");
|
||||
|
||||
var track = CreateCueTrack(splitLine[1], splitLine[2], data, out lastLine);
|
||||
@@ -237,12 +240,13 @@ namespace SabreTools.Serialization.Deserializers
|
||||
continue;
|
||||
|
||||
// http://stackoverflow.com/questions/554013/regular-expression-to-split-on-spaces-unless-in-quotes
|
||||
string[] splitLine = Regex
|
||||
.Matches(line, @"[^\s""]+|""[^""]*""")
|
||||
.Cast<Match>()
|
||||
.Select(m => m.Groups[0].Value)
|
||||
.ToArray();
|
||||
|
||||
var matchCol = Regex.Matches(line, @"[^\s""]+|""[^""]*""");
|
||||
var splitLine = new List<string>();
|
||||
foreach (Match? match in matchCol)
|
||||
{
|
||||
if (match != null)
|
||||
splitLine.Add(match.Groups[0].Value);
|
||||
}
|
||||
switch (splitLine[0])
|
||||
{
|
||||
// Read comments
|
||||
@@ -252,15 +256,15 @@ namespace SabreTools.Serialization.Deserializers
|
||||
|
||||
// Read flag information
|
||||
case "FLAGS":
|
||||
if (splitLine.Length < 2)
|
||||
if (splitLine.Count < 2)
|
||||
throw new FormatException($"FLAGS line malformed: {line}");
|
||||
|
||||
cueTrack.Flags = GetFlags(splitLine);
|
||||
cueTrack.Flags = GetFlags([.. splitLine]);
|
||||
break;
|
||||
|
||||
// Read International Standard Recording Code
|
||||
case "ISRC":
|
||||
if (splitLine.Length < 2)
|
||||
if (splitLine.Count < 2)
|
||||
throw new FormatException($"ISRC line malformed: {line}");
|
||||
|
||||
cueTrack.ISRC = splitLine[1].Trim('"');
|
||||
@@ -268,7 +272,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
|
||||
// Read CD-Text enhanced performer
|
||||
case "PERFORMER":
|
||||
if (splitLine.Length < 2)
|
||||
if (splitLine.Count < 2)
|
||||
throw new FormatException($"PERFORMER line malformed: {line}");
|
||||
|
||||
cueTrack.Performer = splitLine[1].Trim('"');
|
||||
@@ -276,7 +280,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
|
||||
// Read CD-Text enhanced songwriter
|
||||
case "SONGWRITER":
|
||||
if (splitLine.Length < 2)
|
||||
if (splitLine.Count < 2)
|
||||
throw new FormatException($"SONGWRITER line malformed: {line}");
|
||||
|
||||
cueTrack.Songwriter = splitLine[1].Trim('"');
|
||||
@@ -284,7 +288,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
|
||||
// Read CD-Text enhanced title
|
||||
case "TITLE":
|
||||
if (splitLine.Length < 2)
|
||||
if (splitLine.Count < 2)
|
||||
throw new FormatException($"TITLE line malformed: {line}");
|
||||
|
||||
cueTrack.Title = splitLine[1].Trim('"');
|
||||
@@ -292,7 +296,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
|
||||
// Read pregap information
|
||||
case "PREGAP":
|
||||
if (splitLine.Length < 2)
|
||||
if (splitLine.Count < 2)
|
||||
throw new FormatException($"PREGAP line malformed: {line}");
|
||||
|
||||
var pregap = CreatePreGap(splitLine[1]);
|
||||
@@ -304,7 +308,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
|
||||
// Read index information
|
||||
case "INDEX":
|
||||
if (splitLine.Length < 3)
|
||||
if (splitLine.Count < 3)
|
||||
throw new FormatException($"INDEX line malformed: {line}");
|
||||
|
||||
var index = CreateCueIndex(splitLine[1], splitLine[2]);
|
||||
@@ -316,7 +320,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
|
||||
// Read postgap information
|
||||
case "POSTGAP":
|
||||
if (splitLine.Length < 2)
|
||||
if (splitLine.Count < 2)
|
||||
throw new FormatException($"POSTGAP line malformed: {line}");
|
||||
|
||||
var postgap = CreatePostGap(splitLine[1]);
|
||||
@@ -356,7 +360,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
throw new ArgumentException("Length was null or whitespace");
|
||||
|
||||
// Ignore lines that don't contain the correct information
|
||||
if (length!.Length != 8 || length.Count(c => c == ':') != 2)
|
||||
if (length!.Length != 8)
|
||||
throw new FormatException($"Length was not in a recognized format: {length}");
|
||||
|
||||
// Split the line
|
||||
@@ -413,7 +417,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
throw new ArgumentException("Start time was null or whitespace");
|
||||
|
||||
// Ignore lines that don't contain the correct information
|
||||
if (startTime!.Length != 8 || startTime.Count(c => c == ':') != 2)
|
||||
if (startTime!.Length != 8)
|
||||
throw new FormatException($"Start time was not in a recognized format: {startTime}");
|
||||
|
||||
// Split the line
|
||||
@@ -464,7 +468,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
throw new ArgumentException("Length was null or whitespace");
|
||||
|
||||
// Ignore lines that don't contain the correct information
|
||||
if (length!.Length != 8 || length.Count(c => c == ':') != 2)
|
||||
if (length!.Length != 8)
|
||||
throw new FormatException($"Length was not in a recognized format: {length}");
|
||||
|
||||
// Split the line
|
||||
|
||||
@@ -26,9 +26,6 @@ namespace SabreTools.Serialization.Deserializers
|
||||
var games = new List<Game>();
|
||||
var files = new List<Models.DosCenter.File>();
|
||||
|
||||
var additional = new List<string>();
|
||||
var headerAdditional = new List<string>();
|
||||
var gameAdditional = new List<string>();
|
||||
while (!reader.EndOfStream)
|
||||
{
|
||||
// If we have no next line
|
||||
@@ -44,26 +41,15 @@ namespace SabreTools.Serialization.Deserializers
|
||||
case CmpRowType.EndTopLevel:
|
||||
switch (lastTopLevel)
|
||||
{
|
||||
case "doscenter":
|
||||
if (dat.DosCenter != null)
|
||||
dat.DosCenter.ADDITIONAL_ELEMENTS = headerAdditional.ToArray();
|
||||
|
||||
headerAdditional.Clear();
|
||||
break;
|
||||
case "game":
|
||||
if (game != null)
|
||||
{
|
||||
game.File = files.ToArray();
|
||||
game.ADDITIONAL_ELEMENTS = gameAdditional.ToArray();
|
||||
game.File = [.. files];
|
||||
games.Add(game);
|
||||
}
|
||||
|
||||
game = null;
|
||||
files.Clear();
|
||||
gameAdditional.Clear();
|
||||
break;
|
||||
default:
|
||||
// No-op
|
||||
break;
|
||||
}
|
||||
continue;
|
||||
@@ -81,10 +67,6 @@ namespace SabreTools.Serialization.Deserializers
|
||||
case "game":
|
||||
game = new Game();
|
||||
break;
|
||||
default:
|
||||
if (reader.CurrentLine != null)
|
||||
additional.Add(reader.CurrentLine);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -117,10 +99,6 @@ namespace SabreTools.Serialization.Deserializers
|
||||
case "comment:":
|
||||
dat.DosCenter.Comment = reader.Standalone?.Value;
|
||||
break;
|
||||
default:
|
||||
if (reader.CurrentLine != null)
|
||||
headerAdditional.Add(item: reader.CurrentLine);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -135,10 +113,6 @@ namespace SabreTools.Serialization.Deserializers
|
||||
case "name":
|
||||
game.Name = reader.Standalone?.Value;
|
||||
break;
|
||||
default:
|
||||
if (reader.CurrentLine != null)
|
||||
gameAdditional.Add(item: reader.CurrentLine);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -147,28 +121,17 @@ namespace SabreTools.Serialization.Deserializers
|
||||
{
|
||||
// If we have an unknown type, log it
|
||||
if (reader.InternalName != "file")
|
||||
{
|
||||
if (reader.CurrentLine != null)
|
||||
gameAdditional.Add(reader.CurrentLine);
|
||||
continue;
|
||||
}
|
||||
|
||||
// Create the file and add to the list
|
||||
var file = CreateFile(reader);
|
||||
if (file != null)
|
||||
files.Add(file);
|
||||
}
|
||||
|
||||
else
|
||||
{
|
||||
if (reader.CurrentLine != null)
|
||||
additional.Add(item: reader.CurrentLine);
|
||||
}
|
||||
}
|
||||
|
||||
// Add extra pieces and return
|
||||
dat.Game = games.ToArray();
|
||||
dat.ADDITIONAL_ELEMENTS = additional.ToArray();
|
||||
dat.Game = [.. games];
|
||||
return dat;
|
||||
}
|
||||
|
||||
@@ -182,7 +145,6 @@ namespace SabreTools.Serialization.Deserializers
|
||||
if (reader.Internal == null)
|
||||
return null;
|
||||
|
||||
var itemAdditional = new List<string>();
|
||||
var file = new Models.DosCenter.File();
|
||||
foreach (var kvp in reader.Internal)
|
||||
{
|
||||
@@ -200,14 +162,9 @@ namespace SabreTools.Serialization.Deserializers
|
||||
case "date":
|
||||
file.Date = kvp.Value;
|
||||
break;
|
||||
default:
|
||||
if (reader.CurrentLine != null)
|
||||
itemAdditional.Add(item: reader.CurrentLine);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
file.ADDITIONAL_ELEMENTS = itemAdditional.ToArray();
|
||||
return file;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using SabreTools.IO.Readers;
|
||||
using SabreTools.Models.EverdriveSMDB;
|
||||
@@ -47,15 +46,11 @@ namespace SabreTools.Serialization.Deserializers
|
||||
if (reader.Line.Count > 5)
|
||||
row.Size = reader.Line[5];
|
||||
|
||||
// If we have additional fields
|
||||
if (reader.Line.Count > 6)
|
||||
row.ADDITIONAL_ELEMENTS = reader.Line.Skip(5).ToArray();
|
||||
|
||||
rows.Add(row);
|
||||
}
|
||||
|
||||
// Assign the rows to the Dat and return
|
||||
dat.Row = rows.ToArray();
|
||||
dat.Row = [.. rows];
|
||||
return dat;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Text;
|
||||
using SabreTools.IO.Extensions;
|
||||
@@ -173,7 +172,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
long directoryNamesEnd = data.Position + directoryHeader.NameSize;
|
||||
|
||||
// Create the string dictionary
|
||||
file.DirectoryNames = new Dictionary<long, string?>();
|
||||
file.DirectoryNames = [];
|
||||
|
||||
// Loop and read the null-terminated strings
|
||||
while (data.Position < directoryNamesEnd)
|
||||
@@ -184,10 +183,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
{
|
||||
data.Seek(-directoryName?.Length ?? 0, SeekOrigin.Current);
|
||||
byte[]? endingData = data.ReadBytes((int)(directoryNamesEnd - data.Position));
|
||||
if (endingData != null)
|
||||
directoryName = Encoding.ASCII.GetString(endingData);
|
||||
else
|
||||
directoryName = null;
|
||||
directoryName = endingData != null ? Encoding.ASCII.GetString(endingData) : null;
|
||||
}
|
||||
|
||||
file.DirectoryNames[nameOffset] = directoryName;
|
||||
@@ -612,7 +608,6 @@ namespace SabreTools.Serialization.Deserializers
|
||||
/// <returns>Filled Half-Life Game Cache data block header on success, null on error</returns>
|
||||
private static DataBlockHeader? ParseDataBlockHeader(Stream data, uint minorVersion)
|
||||
{
|
||||
// TODO: Use marshalling here instead of building
|
||||
var dataBlockHeader = new DataBlockHeader();
|
||||
|
||||
// In version 3 the DataBlockHeader is missing the LastVersionPlayed field.
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using SabreTools.Hashing;
|
||||
using SabreTools.Models.Hashfile;
|
||||
|
||||
@@ -60,7 +59,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
using var stream = PathProcessor.OpenStream(path);
|
||||
return DeserializeStream(stream, hash);
|
||||
}
|
||||
|
||||
|
||||
#endregion
|
||||
|
||||
#region IStreamDeserializer
|
||||
@@ -71,7 +70,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
var deserializer = new Hashfile();
|
||||
return deserializer.Deserialize(data, hash);
|
||||
}
|
||||
|
||||
|
||||
/// <inheritdoc/>
|
||||
public override Models.Hashfile.Hashfile? Deserialize(Stream? data)
|
||||
=> Deserialize(data, HashType.CRC32);
|
||||
@@ -86,19 +85,24 @@ namespace SabreTools.Serialization.Deserializers
|
||||
// Setup the reader and output
|
||||
var reader = new StreamReader(data);
|
||||
var dat = new Models.Hashfile.Hashfile();
|
||||
var additional = new List<string>();
|
||||
|
||||
// Create lists for each hash type
|
||||
var sfvList = new List<SFV>();
|
||||
var md2List = new List<MD2>();
|
||||
var md4List = new List<MD4>();
|
||||
var md5List = new List<MD5>();
|
||||
var sha1List = new List<SHA1>();
|
||||
var sha256List = new List<SHA256>();
|
||||
var sha384List = new List<SHA384>();
|
||||
var sha512List = new List<SHA512>();
|
||||
var spamsumList = new List<SpamSum>();
|
||||
|
||||
// Loop through the rows and parse out values
|
||||
var hashes = new List<object>();
|
||||
while (!reader.EndOfStream)
|
||||
{
|
||||
// Read and split the line
|
||||
string? line = reader.ReadLine();
|
||||
#if NETFRAMEWORK || NETCOREAPP3_1
|
||||
string[]? lineParts = line?.Split(new char[] { ' ' }, StringSplitOptions.RemoveEmptyEntries);
|
||||
#else
|
||||
string[]? lineParts = line?.Split(' ', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries);
|
||||
#endif
|
||||
string[]? lineParts = line?.Split([' '], StringSplitOptions.RemoveEmptyEntries);
|
||||
if (lineParts == null)
|
||||
continue;
|
||||
|
||||
@@ -106,93 +110,76 @@ namespace SabreTools.Serialization.Deserializers
|
||||
switch (hash)
|
||||
{
|
||||
case HashType.CRC32:
|
||||
case HashType.CRC32_ISO:
|
||||
case HashType.CRC32_Naive:
|
||||
case HashType.CRC32_Optimized:
|
||||
case HashType.CRC32_Parallel:
|
||||
var sfv = new SFV
|
||||
{
|
||||
#if NETFRAMEWORK
|
||||
File = string.Join(" ", lineParts.Take(lineParts.Length - 1).ToArray()),
|
||||
File = string.Join(" ", lineParts, 0, lineParts.Length - 1),
|
||||
Hash = lineParts[lineParts.Length - 1],
|
||||
#else
|
||||
File = string.Join(" ", lineParts[..^1]),
|
||||
Hash = lineParts[^1],
|
||||
#endif
|
||||
};
|
||||
hashes.Add(sfv);
|
||||
sfvList.Add(sfv);
|
||||
break;
|
||||
case HashType.MD2:
|
||||
var md2 = new MD2
|
||||
{
|
||||
Hash = lineParts[0],
|
||||
File = string.Join(" ", lineParts, 1, lineParts.Length - 1),
|
||||
};
|
||||
md2List.Add(md2);
|
||||
break;
|
||||
case HashType.MD4:
|
||||
var md4 = new MD4
|
||||
{
|
||||
Hash = lineParts[0],
|
||||
File = string.Join(" ", lineParts, 1, lineParts.Length - 1),
|
||||
};
|
||||
md4List.Add(md4);
|
||||
break;
|
||||
case HashType.MD5:
|
||||
var md5 = new MD5
|
||||
{
|
||||
Hash = lineParts[0],
|
||||
#if NETFRAMEWORK
|
||||
File = string.Join(" ", lineParts.Skip(1).ToArray()),
|
||||
#else
|
||||
File = string.Join(" ", lineParts[1..]),
|
||||
#endif
|
||||
File = string.Join(" ", lineParts, 1, lineParts.Length - 1),
|
||||
};
|
||||
hashes.Add(md5);
|
||||
md5List.Add(md5);
|
||||
break;
|
||||
case HashType.SHA1:
|
||||
var sha1 = new SHA1
|
||||
{
|
||||
Hash = lineParts[0],
|
||||
#if NETFRAMEWORK
|
||||
File = string.Join(" ", lineParts.Skip(1).ToArray()),
|
||||
#else
|
||||
File = string.Join(" ", lineParts[1..]),
|
||||
#endif
|
||||
File = string.Join(" ", lineParts, 1, lineParts.Length - 1),
|
||||
};
|
||||
hashes.Add(sha1);
|
||||
sha1List.Add(sha1);
|
||||
break;
|
||||
case HashType.SHA256:
|
||||
var sha256 = new SHA256
|
||||
{
|
||||
Hash = lineParts[0],
|
||||
#if NETFRAMEWORK
|
||||
File = string.Join(" ", lineParts.Skip(1).ToArray()),
|
||||
#else
|
||||
File = string.Join(" ", lineParts[1..]),
|
||||
#endif
|
||||
File = string.Join(" ", lineParts, 1, lineParts.Length - 1),
|
||||
};
|
||||
hashes.Add(sha256);
|
||||
sha256List.Add(sha256);
|
||||
break;
|
||||
case HashType.SHA384:
|
||||
var sha384 = new SHA384
|
||||
{
|
||||
Hash = lineParts[0],
|
||||
#if NETFRAMEWORK
|
||||
File = string.Join(" ", lineParts.Skip(1).ToArray()),
|
||||
#else
|
||||
File = string.Join(" ", lineParts[1..]),
|
||||
#endif
|
||||
File = string.Join(" ", lineParts, 1, lineParts.Length - 1),
|
||||
};
|
||||
hashes.Add(sha384);
|
||||
sha384List.Add(sha384);
|
||||
break;
|
||||
case HashType.SHA512:
|
||||
var sha512 = new SHA512
|
||||
{
|
||||
Hash = lineParts[0],
|
||||
#if NETFRAMEWORK
|
||||
File = string.Join(" ", lineParts.Skip(1).ToArray()),
|
||||
#else
|
||||
File = string.Join(" ", lineParts[1..]),
|
||||
#endif
|
||||
File = string.Join(" ", lineParts, 1, lineParts.Length - 1),
|
||||
};
|
||||
hashes.Add(sha512);
|
||||
sha512List.Add(sha512);
|
||||
break;
|
||||
case HashType.SpamSum:
|
||||
var spamSum = new SpamSum
|
||||
{
|
||||
Hash = lineParts[0],
|
||||
#if NETFRAMEWORK
|
||||
File = string.Join(" ", lineParts.Skip(1).ToArray()),
|
||||
#else
|
||||
File = string.Join(" ", lineParts[1..]),
|
||||
#endif
|
||||
File = string.Join(" ", lineParts, 1, lineParts.Length - 1),
|
||||
};
|
||||
hashes.Add(spamSum);
|
||||
spamsumList.Add(spamSum);
|
||||
break;
|
||||
}
|
||||
}
|
||||
@@ -201,32 +188,34 @@ namespace SabreTools.Serialization.Deserializers
|
||||
switch (hash)
|
||||
{
|
||||
case HashType.CRC32:
|
||||
case HashType.CRC32_ISO:
|
||||
case HashType.CRC32_Naive:
|
||||
case HashType.CRC32_Optimized:
|
||||
case HashType.CRC32_Parallel:
|
||||
dat.SFV = hashes.Cast<SFV>().ToArray();
|
||||
dat.SFV = [.. sfvList];
|
||||
break;
|
||||
case HashType.MD2:
|
||||
dat.MD2 = [.. md2List];
|
||||
break;
|
||||
case HashType.MD4:
|
||||
dat.MD4 = [.. md4List];
|
||||
break;
|
||||
case HashType.MD5:
|
||||
dat.MD5 = hashes.Cast<MD5>().ToArray();
|
||||
dat.MD5 = [.. md5List];
|
||||
break;
|
||||
case HashType.SHA1:
|
||||
dat.SHA1 = hashes.Cast<SHA1>().ToArray();
|
||||
dat.SHA1 = [.. sha1List];
|
||||
break;
|
||||
case HashType.SHA256:
|
||||
dat.SHA256 = hashes.Cast<SHA256>().ToArray();
|
||||
dat.SHA256 = [.. sha256List];
|
||||
break;
|
||||
case HashType.SHA384:
|
||||
dat.SHA384 = hashes.Cast<SHA384>().ToArray();
|
||||
dat.SHA384 = [.. sha384List];
|
||||
break;
|
||||
case HashType.SHA512:
|
||||
dat.SHA512 = hashes.Cast<SHA512>().ToArray();
|
||||
dat.SHA512 = [.. sha512List];
|
||||
break;
|
||||
case HashType.SpamSum:
|
||||
dat.SpamSum = hashes.Cast<SpamSum>().ToArray();
|
||||
dat.SpamSum = [.. spamsumList];
|
||||
break;
|
||||
}
|
||||
dat.ADDITIONAL_ELEMENTS = [.. additional];
|
||||
|
||||
return dat;
|
||||
}
|
||||
|
||||
|
||||
@@ -17,9 +17,6 @@ namespace SabreTools.Serialization.Deserializers
|
||||
if (data.Position < 0 || data.Position >= data.Length)
|
||||
return null;
|
||||
|
||||
// Cache the current offset
|
||||
int initialOffset = (int)data.Position;
|
||||
|
||||
// Create a new IRD to fill
|
||||
var ird = new Models.IRD.File();
|
||||
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Text;
|
||||
using SabreTools.IO.Extensions;
|
||||
using SabreTools.Models.InstallShieldArchiveV3;
|
||||
|
||||
@@ -17,9 +18,6 @@ namespace SabreTools.Serialization.Deserializers
|
||||
if (data.Position < 0 || data.Position >= data.Length)
|
||||
return null;
|
||||
|
||||
// Cache the current offset
|
||||
int initialOffset = (int)data.Position;
|
||||
|
||||
// Create a new archive to fill
|
||||
var archive = new Archive();
|
||||
|
||||
@@ -99,7 +97,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
|
||||
if (header == null)
|
||||
return null;
|
||||
if (header.Signature1 != 0x8C655D13) // TODO: Move constant to Models
|
||||
if (header?.Signature1 != Constants.HeaderSignature)
|
||||
return null;
|
||||
if (header.TocAddress >= data.Length)
|
||||
return null;
|
||||
@@ -114,7 +112,17 @@ namespace SabreTools.Serialization.Deserializers
/// <returns>Filled directory on success, null on error</returns>
public static Models.InstallShieldArchiveV3.Directory? ParseDirectory(Stream data)
{
return data.ReadType<Models.InstallShieldArchiveV3.Directory>();
var directory = new Models.InstallShieldArchiveV3.Directory();

directory.FileCount = data.ReadUInt16();
directory.ChunkSize = data.ReadUInt16();

// TODO: Is there any equivalent automatic type for UInt16-prefixed ANSI?
ushort nameLength = data.ReadUInt16();
byte[] nameBytes = data.ReadBytes(nameLength);
directory.Name = Encoding.ASCII.GetString(nameBytes);

return directory;
}

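On the TODO above: SabreTools.IO does not appear to expose a one-call reader for a UInt16-length-prefixed ANSI string, so ParseDirectory reads the pieces by hand. A sketch of what such a helper could look like (the extension method name is ours, not part of the library):

using System.IO;
using System.Text;

public static class PrefixedStringSketch
{
    // Hypothetical helper: read a little-endian UInt16 length, then that many
    // ANSI bytes, and decode them the same way ParseDirectory does above.
    // (A production version would loop until all requested bytes are read.)
    public static string ReadPrefixedAnsiString(this Stream data)
    {
        byte[] lengthBytes = new byte[2];
        data.Read(lengthBytes, 0, 2);
        ushort length = (ushort)(lengthBytes[0] | (lengthBytes[1] << 8));

        byte[] nameBytes = new byte[length];
        data.Read(nameBytes, 0, length);
        return Encoding.ASCII.GetString(nameBytes);
    }
}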
/// <summary>
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using SabreTools.IO.Extensions;
|
||||
@@ -20,9 +21,6 @@ namespace SabreTools.Serialization.Deserializers
|
||||
if (data.Position < 0 || data.Position >= data.Length)
|
||||
return null;
|
||||
|
||||
// Cache the current offset
|
||||
int initialOffset = (int)data.Position;
|
||||
|
||||
// Create a new cabinet to fill
|
||||
var cabinet = new Cabinet();
|
||||
|
||||
@@ -336,9 +334,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
{
|
||||
var commonHeader = data.ReadType<CommonHeader>();
|
||||
|
||||
if (commonHeader == null)
|
||||
return null;
|
||||
if (commonHeader.Signature != SignatureString)
|
||||
if (commonHeader?.Signature != SignatureString)
|
||||
return null;
|
||||
|
||||
return commonHeader;
|
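The pattern in ParseCommonHeader above recurs throughout this set of changes: the explicit null check is folded into the signature comparison, because the null-conditional operator makes header?.Signature evaluate to null on a failed read, which also fails the equality test. A standalone illustration with stand-in types (the signature value here is only an example):

public class HeaderStandIn
{
    public string? Signature { get; set; }
}

public static class SignatureCheckSketch
{
    private const string ExpectedSignature = "EXAMPLE";

    public static HeaderStandIn? Validate(HeaderStandIn? header)
    {
        // True when header is null (failed read) and when the magic is wrong,
        // so one comparison replaces the old two-step check
        if (header?.Signature != ExpectedSignature)
            return null;

        return header;
    }
}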
||||
@@ -352,7 +348,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
/// <returns>Filled volume header on success, null on error</returns>
|
||||
public static VolumeHeader ParseVolumeHeader(Stream data, int majorVersion)
|
||||
{
|
||||
VolumeHeader volumeHeader = new VolumeHeader();
|
||||
var volumeHeader = new VolumeHeader();
|
||||
|
||||
// Read the descriptor based on version
|
||||
if (majorVersion <= 5)
|
||||
@@ -447,14 +443,9 @@ namespace SabreTools.Serialization.Deserializers
|
||||
var fileGroup = new FileGroup();
|
||||
|
||||
fileGroup.NameOffset = data.ReadUInt32();
|
||||
|
||||
fileGroup.ExpandedSize = data.ReadUInt32();
|
||||
fileGroup.Reserved0 = data.ReadBytes(4);
|
||||
fileGroup.CompressedSize = data.ReadUInt32();
|
||||
fileGroup.Reserved1 = data.ReadBytes(4);
|
||||
fileGroup.Reserved2 = data.ReadBytes(2);
|
||||
fileGroup.Attribute1 = data.ReadUInt16();
|
||||
fileGroup.Attribute2 = data.ReadUInt16();
|
||||
fileGroup.Attributes = (FileGroupAttributes)data.ReadUInt16();
|
||||
|
||||
// TODO: Figure out what data lives in this area for V5 and below
|
||||
if (majorVersion <= 5)
|
||||
@@ -462,19 +453,19 @@ namespace SabreTools.Serialization.Deserializers
|
||||
|
||||
fileGroup.FirstFile = data.ReadUInt32();
|
||||
fileGroup.LastFile = data.ReadUInt32();
|
||||
fileGroup.UnknownOffset = data.ReadUInt32();
|
||||
fileGroup.Var4Offset = data.ReadUInt32();
|
||||
fileGroup.Var1Offset = data.ReadUInt32();
|
||||
fileGroup.UnknownStringOffset = data.ReadUInt32();
|
||||
fileGroup.OperatingSystemOffset = data.ReadUInt32();
|
||||
fileGroup.LanguageOffset = data.ReadUInt32();
|
||||
fileGroup.HTTPLocationOffset = data.ReadUInt32();
|
||||
fileGroup.FTPLocationOffset = data.ReadUInt32();
|
||||
fileGroup.MiscOffset = data.ReadUInt32();
|
||||
fileGroup.Var2Offset = data.ReadUInt32();
|
||||
fileGroup.TargetDirectoryOffset = data.ReadUInt32();
|
||||
fileGroup.Reserved3 = data.ReadBytes(2);
|
||||
fileGroup.Reserved4 = data.ReadBytes(2);
|
||||
fileGroup.Reserved5 = data.ReadBytes(2);
|
||||
fileGroup.Reserved6 = data.ReadBytes(2);
|
||||
fileGroup.Reserved7 = data.ReadBytes(2);
|
||||
fileGroup.OverwriteFlags = (FileGroupFlags)data.ReadUInt32();
|
||||
fileGroup.Reserved = new uint[4];
|
||||
for (int i = 0; i < fileGroup.Reserved.Length; i++)
|
||||
{
|
||||
fileGroup.Reserved[i] = data.ReadUInt32();
|
||||
}
|
||||
|
||||
// Cache the current position
|
||||
long currentPosition = data.Position;
|
||||
@@ -489,7 +480,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
if (majorVersion >= 17)
|
||||
fileGroup.Name = data.ReadNullTerminatedUnicodeString();
|
||||
else
|
||||
fileGroup.Name = data.ReadNullTerminatedUnicodeString();
|
||||
fileGroup.Name = data.ReadNullTerminatedAnsiString();
|
||||
}
|
||||
|
||||
// Seek back to the correct offset
|
||||
@@ -512,15 +503,19 @@ namespace SabreTools.Serialization.Deserializers
|
||||
component.IdentifierOffset = data.ReadUInt32();
|
||||
component.DescriptorOffset = data.ReadUInt32();
|
||||
component.DisplayNameOffset = data.ReadUInt32();
|
||||
component.Reserved0 = data.ReadUInt16();
|
||||
component.ReservedOffset0 = data.ReadUInt32();
|
||||
component.ReservedOffset1 = data.ReadUInt32();
|
||||
component.Status = (ComponentStatus)data.ReadUInt16();
|
||||
component.PasswordOffset = data.ReadUInt32();
|
||||
component.MiscOffset = data.ReadUInt32();
|
||||
component.ComponentIndex = data.ReadUInt16();
|
||||
component.NameOffset = data.ReadUInt32();
|
||||
component.ReservedOffset2 = data.ReadUInt32();
|
||||
component.ReservedOffset3 = data.ReadUInt32();
|
||||
component.ReservedOffset4 = data.ReadUInt32();
|
||||
component.Reserved1 = data.ReadBytes(32);
|
||||
component.CDRomFolderOffset = data.ReadUInt32();
|
||||
component.HTTPLocationOffset = data.ReadUInt32();
|
||||
component.FTPLocationOffset = data.ReadUInt32();
|
||||
component.Guid = new Guid[2];
|
||||
for (int i = 0; i < component.Guid.Length; i++)
|
||||
{
|
||||
component.Guid[i] = data.ReadGuid();
|
||||
}
|
||||
component.CLSIDOffset = data.ReadUInt32();
|
||||
component.Reserved2 = data.ReadBytes(28);
|
||||
component.Reserved3 = data.ReadBytes(majorVersion <= 5 ? 2 : 1);
|
||||
@@ -533,10 +528,10 @@ namespace SabreTools.Serialization.Deserializers
|
||||
component.SubComponentsCount = data.ReadUInt16();
|
||||
component.SubComponentsOffset = data.ReadUInt32();
|
||||
component.NextComponentOffset = data.ReadUInt32();
|
||||
component.ReservedOffset5 = data.ReadUInt32();
|
||||
component.ReservedOffset6 = data.ReadUInt32();
|
||||
component.ReservedOffset7 = data.ReadUInt32();
|
||||
component.ReservedOffset8 = data.ReadUInt32();
|
||||
component.OnInstallingOffset = data.ReadUInt32();
|
||||
component.OnInstalledOffset = data.ReadUInt32();
|
||||
component.OnUninstallingOffset = data.ReadUInt32();
|
||||
component.OnUninstalledOffset = data.ReadUInt32();
|
||||
|
||||
// Cache the current position
|
||||
long currentPosition = data.Position;
|
||||
@@ -649,7 +644,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
/// <returns>Filled file descriptor on success, null on error</returns>
|
||||
public static FileDescriptor ParseFileDescriptor(Stream data, int majorVersion, uint descriptorOffset)
|
||||
{
|
||||
FileDescriptor fileDescriptor = new FileDescriptor();
|
||||
var fileDescriptor = new FileDescriptor();
|
||||
|
||||
// Read the descriptor based on version
|
||||
if (majorVersion <= 5)
|
||||
|
||||
@@ -427,9 +427,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
{
|
||||
var informationBlock = data.ReadType<InformationBlock>();
|
||||
|
||||
if (informationBlock == null)
|
||||
return null;
|
||||
if (informationBlock.Signature != LESignatureString && informationBlock.Signature != LXSignatureString)
|
||||
if (informationBlock?.Signature != LESignatureString && informationBlock?.Signature != LXSignatureString)
|
||||
return null;
|
||||
|
||||
return informationBlock;
|
||||
@@ -472,15 +470,13 @@ namespace SabreTools.Serialization.Deserializers
|
||||
/// <returns>Filled resident names table entry on success, null on error</returns>
|
||||
public static ResidentNamesTableEntry ParseResidentNamesTableEntry(Stream data)
|
||||
{
|
||||
// TODO: Use marshalling here instead of building
|
||||
var entry = new ResidentNamesTableEntry();
|
||||
|
||||
entry.Length = data.ReadByteValue();
|
||||
if (entry.Length > 0 && data.Position + entry.Length <= data.Length)
|
||||
{
|
||||
byte[]? name = data.ReadBytes(entry.Length);
|
||||
if (name != null)
|
||||
entry.Name = Encoding.ASCII.GetString(name).TrimEnd('\0');
|
||||
byte[] name = data.ReadBytes(entry.Length);
|
||||
entry.Name = Encoding.ASCII.GetString(name).TrimEnd('\0');
|
||||
}
|
||||
entry.OrdinalNumber = data.ReadUInt16();
|
||||
|
||||
|
||||
@@ -23,7 +23,6 @@ namespace SabreTools.Serialization.Deserializers
|
||||
var sets = new List<Set>();
|
||||
var rows = new List<Row>();
|
||||
|
||||
var additional = new List<string>();
|
||||
while (!reader.EndOfStream)
|
||||
{
|
||||
// Read the line and don't split yet
|
||||
@@ -33,7 +32,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
// If we have a set to process
|
||||
if (set != null)
|
||||
{
|
||||
set.Row = rows.ToArray();
|
||||
set.Row = [.. rows];
|
||||
sets.Add(set);
|
||||
set = null;
|
||||
rows.Clear();
|
||||
@@ -215,7 +214,6 @@ namespace SabreTools.Serialization.Deserializers
|
||||
|
||||
default:
|
||||
row = null;
|
||||
additional.Add(line);
|
||||
break;
|
||||
}
|
||||
|
||||
@@ -226,15 +224,14 @@ namespace SabreTools.Serialization.Deserializers
|
||||
// If we have a set to process
|
||||
if (set != null)
|
||||
{
|
||||
set.Row = rows.ToArray();
|
||||
set.Row = [.. rows];
|
||||
sets.Add(set);
|
||||
set = null;
|
||||
rows.Clear();
|
||||
}
|
||||
|
||||
// Add extra pieces and return
|
||||
dat.Set = sets.ToArray();
|
||||
dat.ADDITIONAL_ELEMENTS = additional.ToArray();
|
||||
dat.Set = [.. sets];
|
||||
return dat;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -21,9 +21,6 @@ namespace SabreTools.Serialization.Deserializers
|
||||
if (data.Position < 0 || data.Position >= data.Length)
|
||||
return null;
|
||||
|
||||
// Cache the current offset
|
||||
int initialOffset = (int)data.Position;
|
||||
|
||||
// Create a new archive to fill
|
||||
var archive = new Archive();
|
||||
|
||||
@@ -32,7 +29,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
// Check for User Data
|
||||
uint possibleSignature = data.ReadUInt32();
|
||||
data.Seek(-4, SeekOrigin.Current);
|
||||
if (possibleSignature == 0x1B51504D)
|
||||
if (possibleSignature == UserDataSignatureUInt32)
|
||||
{
|
||||
// Save the current position for offset correction
|
||||
long basePtr = data.Position;
|
||||
@@ -56,7 +53,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
// Check for the Header
|
||||
possibleSignature = data.ReadUInt32();
|
||||
data.Seek(-4, SeekOrigin.Current);
|
||||
if (possibleSignature == 0x1A51504D)
|
||||
if (possibleSignature == ArchiveHeaderSignatureUInt32)
|
||||
{
|
||||
// Try to parse the archive header
|
||||
var archiveHeader = ParseArchiveHeader(data);
|
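The MoPaQ checks above peek a 32-bit value and immediately seek back four bytes before deciding which structure to parse next. A sketch of that peek-and-rewind step as a helper (name and shape are ours):

using System.IO;

public static class PeekSketch
{
    // Read four bytes, rewind exactly what was read, and return the
    // little-endian value, mirroring the ReadUInt32 + Seek(-4) pair above
    public static uint PeekUInt32(Stream data)
    {
        byte[] buffer = new byte[4];
        int read = data.Read(buffer, 0, 4);
        data.Seek(-read, SeekOrigin.Current);

        return (uint)(buffer[0] | (buffer[1] << 8) | (buffer[2] << 16) | (buffer[3] << 24));
    }
}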
||||
@@ -102,7 +99,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
hashTable.Add(hashEntry);
|
||||
}
|
||||
|
||||
archive.HashTable = hashTable.ToArray();
|
||||
archive.HashTable = [.. hashTable];
|
||||
}
|
||||
}
|
||||
|
||||
@@ -131,7 +128,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
hashTable.Add(hashEntry);
|
||||
}
|
||||
|
||||
archive.HashTable = hashTable.ToArray();
|
||||
archive.HashTable = [.. hashTable];
|
||||
}
|
||||
}
|
||||
|
||||
@@ -160,7 +157,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
hashTable.Add(hashEntry);
|
||||
}
|
||||
|
||||
archive.HashTable = hashTable.ToArray();
|
||||
archive.HashTable = [.. hashTable];
|
||||
}
|
||||
}
|
||||
|
||||
@@ -193,7 +190,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
blockTable.Add(blockEntry);
|
||||
}
|
||||
|
||||
archive.BlockTable = blockTable.ToArray();
|
||||
archive.BlockTable = [.. blockTable];
|
||||
}
|
||||
}
|
||||
|
||||
@@ -222,7 +219,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
blockTable.Add(blockEntry);
|
||||
}
|
||||
|
||||
archive.BlockTable = blockTable.ToArray();
|
||||
archive.BlockTable = [.. blockTable];
|
||||
}
|
||||
}
|
||||
|
||||
@@ -251,7 +248,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
blockTable.Add(blockEntry);
|
||||
}
|
||||
|
||||
archive.BlockTable = blockTable.ToArray();
|
||||
archive.BlockTable = [.. blockTable];
|
||||
}
|
||||
}
|
||||
|
||||
@@ -278,7 +275,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
hiBlockTable.Add(hiBlockEntry);
|
||||
}
|
||||
|
||||
archive.HiBlockTable = hiBlockTable.ToArray();
|
||||
archive.HiBlockTable = [.. hiBlockTable];
|
||||
}
|
||||
}
|
||||
|
||||
@@ -406,9 +403,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
{
|
||||
var userData = data.ReadType<UserData>();
|
||||
|
||||
if (userData == null)
|
||||
return null;
|
||||
if (userData.Signature != UserDataSignatureString)
|
||||
if (userData?.Signature != UserDataSignatureString)
|
||||
return null;
|
||||
|
||||
return userData;
|
||||
|
||||
@@ -20,9 +20,6 @@ namespace SabreTools.Serialization.Deserializers
|
||||
if (data.Position < 0 || data.Position >= data.Length)
|
||||
return null;
|
||||
|
||||
// Cache the current offset
|
||||
int initialOffset = (int)data.Position;
|
||||
|
||||
// Create a new cart image to fill
|
||||
var cart = new Cart();
|
||||
|
||||
@@ -62,118 +59,74 @@ namespace SabreTools.Serialization.Deserializers
|
||||
|
||||
#endregion
|
||||
|
||||
#region Partitions
|
||||
|
||||
// Create the partition table
|
||||
cart.Partitions = new NCCHHeader[8];
|
||||
|
||||
// Iterate and build the partitions
|
||||
for (int i = 0; i < 8; i++)
|
||||
{
|
||||
cart.Partitions[i] = ParseNCCHHeader(data);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
// Cache the media unit size for further use
|
||||
long mediaUnitSize = 0;
|
||||
if (header.PartitionFlags != null)
|
||||
mediaUnitSize = (uint)(0x200 * Math.Pow(2, header.PartitionFlags[(int)NCSDFlags.MediaUnitSize]));
|
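The media unit size cached above is 0x200 bytes scaled by a power of two taken from the partition flags, so a flag value of 0 gives 0x200 (512) bytes and 1 gives 0x400. A small sketch of the same computation with a left shift instead of Math.Pow, plus the byte-offset step used when seeking to each partition (helper names are ours):

public static class MediaUnitSketch
{
    // Same result as (uint)(0x200 * Math.Pow(2, flag)) for the small flag
    // values seen in practice, without the double round-trip:
    // flag 0 -> 0x200, flag 1 -> 0x400, flag 2 -> 0x800, ...
    public static long GetMediaUnitSize(byte mediaUnitSizeFlag)
        => 0x200L << mediaUnitSizeFlag;

    // Partition table offsets are stored in media units, so the byte offset to
    // seek to is the table value times the unit size, as done in the loop above
    public static long ToByteOffset(uint offsetInMediaUnits, long mediaUnitSize)
        => offsetInMediaUnits * mediaUnitSize;
}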
||||
|
||||
#region Extended Headers
|
||||
#region Partitions
|
||||
|
||||
// Create the extended header table
|
||||
cart.ExtendedHeaders = new NCCHExtendedHeader[8];
|
||||
// Create the tables
|
||||
cart.Partitions = new NCCHHeader[8];
|
||||
cart.ExtendedHeaders = new NCCHExtendedHeader?[8];
|
||||
cart.ExeFSHeaders = new ExeFSHeader?[8];
|
||||
cart.RomFSHeaders = new RomFSHeader?[8];
|
||||
|
||||
// Iterate and build the extended headers
|
||||
// Iterate and build the partitions
|
||||
for (int i = 0; i < 8; i++)
|
||||
{
|
||||
// If we have an encrypted or invalid partition
|
||||
if (cart.Partitions[i]!.MagicID != NCCHMagicNumber)
|
||||
// Find the offset to the partition
|
||||
long partitionOffset = cart.Header.PartitionsTable?[i]?.Offset ?? 0;
|
||||
partitionOffset *= mediaUnitSize;
|
||||
if (partitionOffset == 0)
|
||||
continue;
|
||||
|
||||
// If we have no partitions table
|
||||
if (cart.Header!.PartitionsTable == null)
|
||||
// Seek to the start of the partition
|
||||
data.Seek(partitionOffset, SeekOrigin.Begin);
|
||||
|
||||
// Handle the normal header
|
||||
var partition = ParseNCCHHeader(data);
|
||||
if (partition == null || partition.MagicID != NCCHMagicNumber)
|
||||
continue;
|
||||
|
||||
// Get the extended header offset
|
||||
long offset = (cart.Header.PartitionsTable[i]!.Offset * mediaUnitSize) + 0x200;
|
||||
if (offset < 0 || offset >= data.Length)
|
||||
continue;
|
||||
// Set the normal header
|
||||
cart.Partitions[i] = partition;
|
||||
|
||||
// Seek to the extended header
|
||||
data.Seek(offset, SeekOrigin.Begin);
|
||||
// Handle the extended header, if it exists
|
||||
if (partition.ExtendedHeaderSizeInBytes > 0)
|
||||
{
|
||||
var extendedHeader = ParseNCCHExtendedHeader(data);
|
||||
if (extendedHeader != null)
|
||||
cart.ExtendedHeaders[i] = extendedHeader;
|
||||
}
|
||||
|
||||
// Parse the extended header
|
||||
var extendedHeader = ParseNCCHExtendedHeader(data);
|
||||
if (extendedHeader != null)
|
||||
cart.ExtendedHeaders[i] = extendedHeader;
|
||||
}
|
||||
// Handle the ExeFS, if it exists
|
||||
if (partition.ExeFSSizeInMediaUnits > 0)
|
||||
{
|
||||
long offset = partition.ExeFSOffsetInMediaUnits * mediaUnitSize;
|
||||
data.Seek(partitionOffset + offset, SeekOrigin.Begin);
|
||||
|
||||
#endregion
|
||||
var exeFsHeader = ParseExeFSHeader(data);
|
||||
if (exeFsHeader == null)
|
||||
return null;
|
||||
|
||||
#region ExeFS Headers
|
||||
cart.ExeFSHeaders[i] = exeFsHeader;
|
||||
}
|
||||
|
||||
// Create the ExeFS header table
|
||||
cart.ExeFSHeaders = new ExeFSHeader[8];
|
||||
// Handle the RomFS, if it exists
|
||||
if (partition.RomFSSizeInMediaUnits > 0)
|
||||
{
|
||||
long offset = partition.RomFSOffsetInMediaUnits * mediaUnitSize;
|
||||
data.Seek(partitionOffset + offset, SeekOrigin.Begin);
|
||||
|
||||
// Iterate and build the ExeFS headers
|
||||
for (int i = 0; i < 8; i++)
|
||||
{
|
||||
// If we have an encrypted or invalid partition
|
||||
if (cart.Partitions[i]!.MagicID != NCCHMagicNumber)
|
||||
continue;
|
||||
var romFsHeader = ParseRomFSHeader(data);
|
||||
if (romFsHeader == null)
|
||||
continue;
|
||||
else if (romFsHeader.MagicString != RomFSMagicNumber || romFsHeader.MagicNumber != RomFSSecondMagicNumber)
|
||||
continue;
|
||||
|
||||
// If we have no partitions table
|
||||
if (cart.Header!.PartitionsTable == null)
|
||||
continue;
|
||||
|
||||
// Get the ExeFS header offset
|
||||
long offset = (cart.Header.PartitionsTable[i]!.Offset + cart.Partitions[i]!.ExeFSOffsetInMediaUnits) * mediaUnitSize;
|
||||
if (offset < 0 || offset >= data.Length)
|
||||
continue;
|
||||
|
||||
// Seek to the ExeFS header
|
||||
data.Seek(offset, SeekOrigin.Begin);
|
||||
|
||||
// Parse the ExeFS header
|
||||
var exeFsHeader = ParseExeFSHeader(data);
|
||||
if (exeFsHeader == null)
|
||||
return null;
|
||||
|
||||
cart.ExeFSHeaders[i] = exeFsHeader;
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region RomFS Headers
|
||||
|
||||
// Create the RomFS header table
|
||||
cart.RomFSHeaders = new RomFSHeader[8];
|
||||
|
||||
// Iterate and build the RomFS headers
|
||||
for (int i = 0; i < 8; i++)
|
||||
{
|
||||
// If we have an encrypted or invalid partition
|
||||
if (cart.Partitions[i]!.MagicID != NCCHMagicNumber)
|
||||
continue;
|
||||
|
||||
// If we have no partitions table
|
||||
if (cart.Header!.PartitionsTable == null)
|
||||
continue;
|
||||
|
||||
// Get the RomFS header offset
|
||||
long offset = (cart.Header.PartitionsTable[i]!.Offset + cart.Partitions[i]!.RomFSOffsetInMediaUnits) * mediaUnitSize;
|
||||
if (offset < 0 || offset >= data.Length)
|
||||
continue;
|
||||
|
||||
// Seek to the RomFS header
|
||||
data.Seek(offset, SeekOrigin.Begin);
|
||||
|
||||
// Parse the RomFS header
|
||||
var romFsHeader = ParseRomFSHeader(data);
|
||||
if (romFsHeader != null)
|
||||
cart.RomFSHeaders[i] = romFsHeader;
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
@@ -188,14 +141,10 @@ namespace SabreTools.Serialization.Deserializers
|
||||
/// <returns>Filled NCSD header on success, null on error</returns>
|
||||
public static NCSDHeader? ParseNCSDHeader(Stream data)
|
||||
{
|
||||
// TODO: Use marshalling here instead of building
|
||||
var header = new NCSDHeader();
|
||||
|
||||
header.RSA2048Signature = data.ReadBytes(0x100);
|
||||
byte[]? magicNumber = data.ReadBytes(4);
|
||||
if (magicNumber == null)
|
||||
return null;
|
||||
|
||||
byte[] magicNumber = data.ReadBytes(4);
|
||||
header.MagicNumber = Encoding.ASCII.GetString(magicNumber).TrimEnd('\0');
|
||||
if (header.MagicNumber != NCSDMagicNumber)
|
||||
return null;
|
||||
@@ -259,7 +208,43 @@ namespace SabreTools.Serialization.Deserializers
|
||||
/// <returns>Filled card info header on success, null on error</returns>
|
||||
public static CardInfoHeader? ParseCardInfoHeader(Stream data)
|
||||
{
|
||||
return data.ReadType<CardInfoHeader>();
|
||||
// TODO: Use marshalling here instead of building
|
||||
var header = new CardInfoHeader();
|
||||
|
||||
header.WritableAddressMediaUnits = data.ReadUInt32();
|
||||
header.CardInfoBitmask = data.ReadUInt32();
|
||||
header.Reserved1 = data.ReadBytes(0xF8);
|
||||
header.FilledSize = data.ReadUInt32();
|
||||
header.Reserved2 = data.ReadBytes(0x0C);
|
||||
header.TitleVersion = data.ReadUInt16();
|
||||
header.CardRevision = data.ReadUInt16();
|
||||
header.Reserved3 = data.ReadBytes(0x0C);
|
||||
header.CVerTitleID = data.ReadBytes(0x08);
|
||||
header.CVerVersionNumber = data.ReadUInt16();
|
||||
header.Reserved4 = data.ReadBytes(0xCD6);
|
||||
header.InitialData = ParseInitialData(data);
|
||||
|
||||
return header;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into initial data
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled initial data on success, null on error</returns>
|
||||
public static InitialData? ParseInitialData(Stream data)
|
||||
{
|
||||
// TODO: Use marshalling here instead of building
|
||||
var id = new InitialData();
|
||||
|
||||
id.CardSeedKeyY = data.ReadBytes(0x10);
|
||||
id.EncryptedCardSeed = data.ReadBytes(0x10);
|
||||
id.CardSeedAESMAC = data.ReadBytes(0x10);
|
||||
id.CardSeedNonce = data.ReadBytes(0x0C);
|
||||
id.Reserved = data.ReadBytes(0xC4);
|
||||
id.BackupHeader = ParseNCCHHeader(data, skipSignature: true);
|
||||
|
||||
return id;
|
||||
}
|
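A quick size check on ParseInitialData above: the fixed-size fields read before the backup header add up to 0x100 bytes, which is handy when sanity-checking offsets by hand.

public static class InitialDataSizeCheck
{
    public static void Main()
    {
        // 0x10 + 0x10 + 0x10 + 0x0C + 0xC4 = 0x100 bytes ahead of the backup NCCH header
        int fixedPrefix = 0x10   // CardSeedKeyY
                        + 0x10   // EncryptedCardSeed
                        + 0x10   // CardSeedAESMAC
                        + 0x0C   // CardSeedNonce
                        + 0xC4;  // Reserved
        System.Console.WriteLine(fixedPrefix.ToString("X")); // prints "100"
    }
}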
||||
|
||||
/// <summary>
|
||||
@@ -339,7 +324,51 @@ namespace SabreTools.Serialization.Deserializers
|
||||
/// <returns>Filled NCCH extended header on success, null on error</returns>
|
||||
public static NCCHExtendedHeader? ParseNCCHExtendedHeader(Stream data)
|
||||
{
|
||||
return data.ReadType<NCCHExtendedHeader>();
|
||||
// TODO: Replace with `data.ReadType<NCCHExtendedHeader>();` when enum serialization fixed
|
||||
var header = new NCCHExtendedHeader();
|
||||
|
||||
header.SCI = data.ReadType<SystemControlInfo>();
|
||||
header.ACI = ParseAccessControlInfo(data);
|
||||
header.AccessDescSignature = data.ReadBytes(0x100);
|
||||
header.NCCHHDRPublicKey = data.ReadBytes(0x100);
|
||||
header.ACIForLimitations = ParseAccessControlInfo(data);
|
||||
|
||||
return header;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into an access control info
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled access control info on success, null on error</returns>
|
||||
public static AccessControlInfo? ParseAccessControlInfo(Stream data)
|
||||
{
|
||||
var aci = new AccessControlInfo();
|
||||
|
||||
aci.ARM11LocalSystemCapabilities = data.ReadType<ARM11LocalSystemCapabilities>();
|
||||
aci.ARM11KernelCapabilities = data.ReadType<ARM11KernelCapabilities>();
|
||||
aci.ARM9AccessControl = ParseARM9AccessControl(data);
|
||||
|
||||
return aci;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into an ARM9 access control
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled ARM9 access control on success, null on error</returns>
|
||||
public static ARM9AccessControl? ParseARM9AccessControl(Stream data)
|
||||
{
|
||||
var a9ac = new ARM9AccessControl();
|
||||
|
||||
a9ac.Descriptors = new ARM9AccessControlDescriptors[15];
|
||||
for (int i = 0; i < a9ac.Descriptors.Length; i++)
|
||||
{
|
||||
a9ac.Descriptors[i] = (ARM9AccessControlDescriptors)data.ReadByteValue();
|
||||
}
|
||||
a9ac.DescriptorVersion = data.ReadByteValue();
|
||||
|
||||
return a9ac;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
|
||||
@@ -91,10 +91,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
{
|
||||
data.Seek(-directoryName?.Length ?? 0, SeekOrigin.Current);
|
||||
byte[]? endingData = data.ReadBytes((int)(directoryNamesEnd - data.Position));
|
||||
if (endingData != null)
|
||||
directoryName = Encoding.ASCII.GetString(endingData);
|
||||
else
|
||||
directoryName = null;
|
||||
directoryName = endingData != null ? Encoding.ASCII.GetString(endingData) : null;
|
||||
}
|
||||
|
||||
file.DirectoryNames[nameOffset] = directoryName;
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using SabreTools.IO.Extensions;
|
||||
using SabreTools.Models.NewExecutable;
|
||||
using static SabreTools.Models.NewExecutable.Constants;
|
||||
@@ -215,9 +214,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
{
|
||||
var header = data.ReadType<ExecutableHeader>();
|
||||
|
||||
if (header == null)
|
||||
return null;
|
||||
if (header.Magic != SignatureString)
|
||||
if (header?.Magic != SignatureString)
|
||||
return null;
|
||||
|
||||
return header;
|
||||
@@ -303,19 +300,34 @@ namespace SabreTools.Serialization.Deserializers
|
||||
resourceTable.ResourceTypes = [.. resourceTypes];
|
||||
|
||||
// Get the full list of unique string offsets
|
||||
var stringOffsets = resourceTable.ResourceTypes
|
||||
.Where(rt => rt != null)
|
||||
.Where(rt => !rt!.IsIntegerType() && rt!.TypeID != 0)
|
||||
.Select(rt => rt!.TypeID)
|
||||
.Union(resourceTable.ResourceTypes
|
||||
.Where(rt => rt != null && rt!.TypeID != 0)
|
||||
.SelectMany(rt => rt!.Resources ?? [])
|
||||
.Where(r => !r!.IsIntegerType())
|
||||
.Select(r => r!.ResourceID))
|
||||
.Distinct()
|
||||
.Where(o => o != 0)
|
||||
.OrderBy(o => o)
|
||||
.ToList();
|
||||
var stringOffsets = new List<ushort>();
|
||||
foreach (var rtie in resourceTable.ResourceTypes)
|
||||
{
|
||||
// Skip invalid entries
|
||||
if (rtie == null || rtie.TypeID == 0)
|
||||
continue;
|
||||
|
||||
// Handle offset types
|
||||
if (!rtie.IsIntegerType() && !stringOffsets.Contains(rtie.TypeID))
|
||||
stringOffsets.Add(rtie.TypeID);
|
||||
|
||||
// Handle types with resources
|
||||
foreach (var rtre in rtie.Resources ?? [])
|
||||
{
|
||||
// Skip invalid entries
|
||||
if (rtre == null || rtre.IsIntegerType() || rtre.ResourceID == 0)
|
||||
continue;
|
||||
|
||||
// Skip already added entries
|
||||
if (stringOffsets.Contains(rtre.ResourceID))
|
||||
continue;
|
||||
|
||||
stringOffsets.Add(rtre.ResourceID);
|
||||
}
|
||||
}
|
||||
|
||||
// Order the offsets list
|
||||
stringOffsets.Sort();
|
||||
|
||||
// Populate the type and name string dictionary
|
||||
resourceTable.TypeAndNameStrings = [];
|
||||
|
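The rewritten offset collection above trades the old LINQ pipeline (Where/Select/Union/Distinct/OrderBy) for an explicit loop plus List<T>.Sort(), which avoids System.Linq entirely and so also compiles for the oldest targets (compare the #if NET20 fallbacks later in this diff). A condensed sketch of the equivalent loop shape:

using System.Collections.Generic;

public static class UniqueOffsetSketch
{
    // Collect distinct, non-zero IDs and return them sorted, LINQ-free
    public static ushort[] UniqueSortedNonZero(IEnumerable<ushort> ids)
    {
        var result = new List<ushort>();
        foreach (ushort id in ids)
        {
            if (id != 0 && !result.Contains(id))
                result.Add(id);
        }

        result.Sort();
        return result.ToArray();
    }
}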
||||
@@ -19,9 +19,6 @@ namespace SabreTools.Serialization.Deserializers
|
||||
if (data.Position < 0 || data.Position >= data.Length)
|
||||
return null;
|
||||
|
||||
// Cache the current offset
|
||||
int initialOffset = (int)data.Position;
|
||||
|
||||
// Create a new cart image to fill
|
||||
var cart = new Cart();
|
||||
|
||||
@@ -110,7 +107,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
}
|
||||
|
||||
// Set the file allocation table
|
||||
cart.FileAllocationTable = fileAllocationTable.ToArray();
|
||||
cart.FileAllocationTable = [.. fileAllocationTable];
|
||||
|
||||
#endregion
|
||||
|
||||
@@ -170,7 +167,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
}
|
||||
|
||||
// Assign the folder allocation table
|
||||
nameTable.FolderAllocationTable = folderAllocationTable.ToArray();
|
||||
nameTable.FolderAllocationTable = [.. folderAllocationTable];
|
||||
|
||||
// Create a variable-length table
|
||||
var nameList = new List<NameListEntry>();
|
||||
@@ -184,7 +181,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
}
|
||||
|
||||
// Assign the name list
|
||||
nameTable.NameList = nameList.ToArray();
|
||||
nameTable.NameList = [.. nameList];
|
||||
|
||||
return nameTable;
|
||||
}
|
||||
|
||||
@@ -18,9 +18,6 @@ namespace SabreTools.Serialization.Deserializers
|
||||
if (data.Position < 0 || data.Position >= data.Length)
|
||||
return null;
|
||||
|
||||
// Cache the current offset
|
||||
long initialOffset = data.Position;
|
||||
|
||||
// Create a new Half-Life Package to fill
|
||||
var file = new Models.PAK.File();
|
||||
|
||||
@@ -73,9 +70,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
{
|
||||
var header = data.ReadType<Header>();
|
||||
|
||||
if (header == null)
|
||||
return null;
|
||||
if (header.Signature != SignatureString)
|
||||
if (header?.Signature != SignatureString)
|
||||
return null;
|
||||
|
||||
return header;
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
using System.IO;
|
||||
using System.Runtime.InteropServices;
|
||||
using System.Text;
|
||||
using SabreTools.IO.Extensions;
|
||||
using SabreTools.Models.PFF;
|
||||
@@ -20,9 +19,6 @@ namespace SabreTools.Serialization.Deserializers
|
||||
if (data.Position < 0 || data.Position >= data.Length)
|
||||
return null;
|
||||
|
||||
// Cache the current offset
|
||||
int initialOffset = (int)data.Position;
|
||||
|
||||
// Create a new archive to fill
|
||||
var archive = new Archive();
|
||||
|
||||
@@ -56,7 +52,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
{
|
||||
var file = ParseSegment(data, header.FileSegmentSize);
|
||||
if (file == null)
|
||||
return null;
|
||||
continue;
|
||||
|
||||
archive.Segments[i] = file;
|
||||
}
|
||||
|
||||
@@ -43,7 +43,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
}
|
||||
|
||||
// Assign the units and return
|
||||
di.Units = diUnits.ToArray();
|
||||
di.Units = [.. diUnits];
|
||||
return di;
|
||||
}
|
||||
|
||||
|
||||
@@ -168,7 +168,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
#region Local File
|
||||
|
||||
// Setup all of the collections
|
||||
var localFileHeaders = new List<LocalFileHeader?>();
|
||||
var localFileHeaders = new List<LocalFileHeader>();
|
||||
var encryptionHeaders = new List<byte[]?>();
|
||||
var fileData = new List<byte[]>(); // TODO: Should this data be read here?
|
||||
var dataDescriptors = new List<DataDescriptor?>();
|
||||
@@ -200,7 +200,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
if (localFileHeader == null)
|
||||
{
|
||||
// Add a placeholder null item
|
||||
localFileHeaders.Add(null);
|
||||
localFileHeaders.Add(new LocalFileHeader());
|
||||
encryptionHeaders.Add(null);
|
||||
fileData.Add([]);
|
||||
dataDescriptors.Add(null);
|
||||
|
||||
@@ -303,7 +303,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
private static UnknownBlock1 ParseUnknownBlock1(Stream data)
|
||||
{
|
||||
// TODO: Use marshalling here instead of building
|
||||
UnknownBlock1 unknownBlock1 = new UnknownBlock1();
|
||||
var unknownBlock1 = new UnknownBlock1();
|
||||
|
||||
unknownBlock1.Length = data.ReadUInt32();
|
||||
unknownBlock1.Data = data.ReadBytes((int)unknownBlock1.Length);
|
||||
@@ -319,7 +319,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
private static UnknownBlock3 ParseUnknownBlock3(Stream data)
|
||||
{
|
||||
// TODO: Use marshalling here instead of building
|
||||
UnknownBlock3 unknownBlock3 = new UnknownBlock3();
|
||||
var unknownBlock3 = new UnknownBlock3();
|
||||
|
||||
// No-op because we don't even know the length
|
||||
|
||||
@@ -334,7 +334,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
private static DataFile ParseDataFile(Stream data)
|
||||
{
|
||||
// TODO: Use marshalling here instead of building
|
||||
DataFile dataFile = new DataFile();
|
||||
var dataFile = new DataFile();
|
||||
|
||||
dataFile.FileNameLength = data.ReadUInt16();
|
||||
byte[]? fileName = data.ReadBytes(dataFile.FileNameLength);
|
||||
|
||||
@@ -17,9 +17,6 @@ namespace SabreTools.Serialization.Deserializers
|
||||
if (data.Position < 0 || data.Position >= data.Length)
|
||||
return null;
|
||||
|
||||
// Cache the current offset
|
||||
int initialOffset = (int)data.Position;
|
||||
|
||||
// Create a new playlist to fill
|
||||
var playlist = new Playlist();
|
||||
|
||||
@@ -46,7 +43,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
long currentOffset = data.Position;
|
||||
var entryHeader = PlayJAudio.DeserializeStream(data, currentOffset);
|
||||
if (entryHeader == null)
|
||||
return null;
|
||||
continue;
|
||||
|
||||
playlist.AudioFiles[i] = entryHeader;
|
||||
}
|
||||
@@ -64,7 +61,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
private static PlaylistHeader ParsePlaylistHeader(Stream data)
|
||||
{
|
||||
// TODO: Use marshalling here instead of building
|
||||
PlaylistHeader playlistHeader = new PlaylistHeader();
|
||||
var playlistHeader = new PlaylistHeader();
|
||||
|
||||
playlistHeader.TrackCount = data.ReadUInt32();
|
||||
playlistHeader.Data = data.ReadBytes(52);
|
||||
|
||||
@@ -1,7 +1,9 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
#if NET35_OR_GREATER || NETCOREAPP
|
||||
using System.Linq;
|
||||
#endif
|
||||
using System.Text;
|
||||
using SabreTools.IO.Extensions;
|
||||
using SabreTools.Models.PortableExecutable;
|
||||
@@ -678,7 +680,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
_ = data.ReadByteValue();
|
||||
}
|
||||
|
||||
return attributeCertificateTable.ToArray();
|
||||
return [.. attributeCertificateTable];
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
@@ -913,7 +915,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
break;
|
||||
}
|
||||
|
||||
importTable.ImportDirectoryTable = importDirectoryTable.ToArray();
|
||||
importTable.ImportDirectoryTable = [.. importDirectoryTable];
|
||||
|
||||
// Names
|
||||
for (int i = 0; i < importTable.ImportDirectoryTable.Length; i++)
|
||||
@@ -981,7 +983,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
break;
|
||||
}
|
||||
|
||||
importLookupTables[i] = entryLookupTable.ToArray();
|
||||
importLookupTables[i] = [.. entryLookupTable];
|
||||
}
|
||||
|
||||
importTable.ImportLookupTables = importLookupTables;
|
||||
@@ -1035,7 +1037,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
break;
|
||||
}
|
||||
|
||||
importAddressTables[i] = addressLookupTable.ToArray();
|
||||
importAddressTables[i] = [.. addressLookupTable];
|
||||
}
|
||||
|
||||
importTable.ImportAddressTables = importAddressTables;
|
||||
@@ -1047,38 +1049,72 @@ namespace SabreTools.Serialization.Deserializers
|
||||
|| importTable.ImportAddressTables != null && importTable.ImportAddressTables.Count > 0)
|
||||
{
|
||||
// Get the addresses of the hint/name table entries
|
||||
List<int> hintNameTableEntryAddresses = new List<int>();
|
||||
var hintNameTableEntryAddresses = new List<int>();
|
||||
|
||||
// If we have import lookup tables
|
||||
if (importTable.ImportLookupTables != null && importLookupTables.Count > 0)
|
||||
{
|
||||
#if NET20
|
||||
var addresses = new List<int>();
|
||||
foreach (var kvp in importTable.ImportLookupTables)
|
||||
{
|
||||
if (kvp.Value == null)
|
||||
continue;
|
||||
|
||||
var vaddrs = Array.ConvertAll(kvp.Value, ilte => ilte == null
|
||||
? 0
|
||||
: (int)ilte.HintNameTableRVA.ConvertVirtualAddress(sections));
|
||||
addresses.AddRange(vaddrs);
|
||||
}
|
||||
#else
|
||||
var addresses = importTable.ImportLookupTables
|
||||
.Where(kvp => kvp.Value != null)
|
||||
.SelectMany(kvp => kvp.Value!)
|
||||
.SelectMany(kvp => kvp.Value ?? [])
|
||||
.Where(ilte => ilte != null)
|
||||
.Select(ilte => (int)ilte!.HintNameTableRVA.ConvertVirtualAddress(sections));
|
||||
#endif
|
||||
hintNameTableEntryAddresses.AddRange(addresses);
|
||||
}
|
||||
|
||||
// If we have import address tables
|
||||
if (importTable.ImportAddressTables != null && importTable.ImportAddressTables.Count > 0)
|
||||
{
|
||||
#if NET20
|
||||
var addresses = new List<int>();
|
||||
foreach (var kvp in importTable.ImportAddressTables)
|
||||
{
|
||||
if (kvp.Value == null)
|
||||
continue;
|
||||
|
||||
var vaddrs = Array.ConvertAll(kvp.Value, iate => iate == null
|
||||
? 0
|
||||
: (int)iate.HintNameTableRVA.ConvertVirtualAddress(sections));
|
||||
addresses.AddRange(vaddrs);
|
||||
}
|
||||
#else
|
||||
var addresses = importTable.ImportAddressTables
|
||||
.Where(kvp => kvp.Value != null)
|
||||
.SelectMany(kvp => kvp.Value!)
|
||||
.SelectMany(kvp => kvp.Value ?? [])
|
||||
.Where(iate => iate != null)
|
||||
.Select(iate => (int)iate!.HintNameTableRVA.ConvertVirtualAddress(sections));
|
||||
#endif
|
||||
hintNameTableEntryAddresses.AddRange(addresses);
|
||||
}
|
||||
|
||||
// Sanitize the addresses
|
||||
hintNameTableEntryAddresses = hintNameTableEntryAddresses.Where(addr => addr != 0)
|
||||
.Distinct()
|
||||
.OrderBy(a => a)
|
||||
.ToList();
|
||||
hintNameTableEntryAddresses = hintNameTableEntryAddresses.FindAll(addr => addr != 0);
|
||||
#if NET20
|
||||
var temp = new List<int>();
|
||||
foreach (int value in hintNameTableEntryAddresses)
|
||||
{
|
||||
if (!temp.Contains(value))
|
||||
temp.Add(value);
|
||||
}
|
||||
#else
|
||||
hintNameTableEntryAddresses = hintNameTableEntryAddresses.Distinct().ToList();
|
||||
#endif
|
||||
hintNameTableEntryAddresses.Sort();
|
||||
|
||||
// If we have any addresses, add them to the table
|
||||
if (hintNameTableEntryAddresses.Any())
|
||||
if (hintNameTableEntryAddresses.Count > 0)
|
||||
{
|
||||
for (int i = 0; i < hintNameTableEntryAddresses.Count; i++)
|
||||
{
|
||||
@@ -1216,11 +1252,12 @@ namespace SabreTools.Serialization.Deserializers
|
||||
return resourceDirectoryTable;
|
||||
|
||||
// If we're not aligned to a section
|
||||
if (!sections.Any(s => s != null && s.PointerToRawData == initialOffset))
|
||||
var firstSection = Array.Find(sections, s => s != null && s.PointerToRawData == initialOffset);
|
||||
if (firstSection == null)
|
||||
return resourceDirectoryTable;
|
||||
|
||||
// Get the section size
|
||||
int size = (int)sections.First(s => s != null && s.PointerToRawData == initialOffset)!.SizeOfRawData;
|
||||
int size = (int)firstSection.SizeOfRawData;
|
||||
|
||||
// Align to the 512-byte boundary, stopping early if we find the start of an MS-DOS header or reach the end of the file
|
||||
while (data.Position - initialOffset < size && data.Position % 0x200 != 0 && data.Position < data.Length - 1)
|
||||
|
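The section lookup above replaces an Any/First pair with a single Array.Find, which scans once and returns the first match or null. A small stand-alone illustration (SectionStandIn is not the real PE model):

using System;

public class SectionStandIn
{
    public uint PointerToRawData;
    public uint SizeOfRawData;
}

public static class SectionFindSketch
{
    public static int? GetAlignedSectionSize(SectionStandIn?[] sections, long initialOffset)
    {
        // One pass: returns the first matching element, or null if none match
        var firstSection = Array.Find(sections, s => s != null && s.PointerToRawData == initialOffset);
        if (firstSection == null)
            return null;

        return (int)firstSection.SizeOfRawData;
    }
}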
||||
@@ -19,9 +19,6 @@ namespace SabreTools.Serialization.Deserializers
|
||||
if (data.Position < 0 || data.Position >= data.Length)
|
||||
return null;
|
||||
|
||||
// Cache the current offset
|
||||
int initialOffset = (int)data.Position;
|
||||
|
||||
// Create a new archive to fill
|
||||
var archive = new Archive();
|
||||
|
||||
@@ -40,24 +37,21 @@ namespace SabreTools.Serialization.Deserializers
|
||||
#region File List
|
||||
|
||||
// If we have any files
|
||||
if (header.FileCount > 0)
|
||||
var fileDescriptors = new FileDescriptor[header.FileCount];
|
||||
|
||||
// Read all entries in turn
|
||||
for (int i = 0; i < header.FileCount; i++)
|
||||
{
|
||||
var fileDescriptors = new FileDescriptor[header.FileCount];
|
||||
var file = ParseFileDescriptor(data, header.MinorVersion);
|
||||
if (file == null)
|
||||
return null;
|
||||
|
||||
// Read all entries in turn
|
||||
for (int i = 0; i < header.FileCount; i++)
|
||||
{
|
||||
var file = ParseFileDescriptor(data, header.MinorVersion);
|
||||
if (file == null)
|
||||
return null;
|
||||
|
||||
fileDescriptors[i] = file;
|
||||
}
|
||||
|
||||
// Set the file list
|
||||
archive.FileList = fileDescriptors;
|
||||
fileDescriptors[i] = file;
|
||||
}
|
||||
|
||||
// Set the file list
|
||||
archive.FileList = fileDescriptors;
|
||||
|
||||
#endregion
|
||||
|
||||
// Cache the compressed data offset
|
||||
@@ -75,9 +69,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
{
|
||||
var header = data.ReadType<Header>();
|
||||
|
||||
if (header == null)
|
||||
return null;
|
||||
if (header.Signature != SignatureString)
|
||||
if (header?.Signature != SignatureString)
|
||||
return null;
|
||||
|
||||
return header;
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using SabreTools.IO.Readers;
|
||||
using SabreTools.Models.RomCenter;
|
||||
@@ -25,11 +24,6 @@ namespace SabreTools.Serialization.Deserializers
|
||||
|
||||
// Loop through and parse out the values
|
||||
var roms = new List<Rom>();
|
||||
var additional = new List<string>();
|
||||
var creditsAdditional = new List<string>();
|
||||
var datAdditional = new List<string>();
|
||||
var emulatorAdditional = new List<string>();
|
||||
var gamesAdditional = new List<string>();
|
||||
while (!reader.EndOfStream)
|
||||
{
|
||||
// If we have no next line
|
||||
@@ -57,10 +51,6 @@ namespace SabreTools.Serialization.Deserializers
|
||||
case "games":
|
||||
dat.Games ??= new Games();
|
||||
break;
|
||||
default:
|
||||
if (reader.CurrentLine != null)
|
||||
additional.Add(reader.CurrentLine);
|
||||
break;
|
||||
}
|
||||
continue;
|
||||
}
|
||||
@@ -94,10 +84,6 @@ namespace SabreTools.Serialization.Deserializers
|
||||
case "comment":
|
||||
dat.Credits.Comment = reader.KeyValuePair?.Value;
|
||||
break;
|
||||
default:
|
||||
if (reader.CurrentLine != null)
|
||||
creditsAdditional.Add(reader.CurrentLine);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -121,10 +107,6 @@ namespace SabreTools.Serialization.Deserializers
|
||||
case "merge":
|
||||
dat.Dat.Merge = reader.KeyValuePair?.Value;
|
||||
break;
|
||||
default:
|
||||
if (reader.CurrentLine != null)
|
||||
datAdditional.Add(reader.CurrentLine);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -142,10 +124,6 @@ namespace SabreTools.Serialization.Deserializers
|
||||
case "version":
|
||||
dat.Emulator.Version = reader.KeyValuePair?.Value;
|
||||
break;
|
||||
default:
|
||||
if (reader.CurrentLine != null)
|
||||
emulatorAdditional.Add(reader.CurrentLine);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -156,13 +134,12 @@ namespace SabreTools.Serialization.Deserializers
|
||||
dat.Games ??= new Games();
|
||||
|
||||
// If the line doesn't contain the delimiter
|
||||
#if NETFRAMEWORK
|
||||
if (!(reader.CurrentLine?.Contains("¬") ?? false))
|
||||
#else
|
||||
if (!(reader.CurrentLine?.Contains('¬') ?? false))
|
||||
{
|
||||
if (reader.CurrentLine != null)
|
||||
gamesAdditional.Add(reader.CurrentLine);
|
||||
|
||||
#endif
|
||||
continue;
|
||||
}
|
||||
|
||||
// Otherwise, separate out the line
|
||||
string[] splitLine = reader.CurrentLine.Split('¬');
|
||||
@@ -181,32 +158,14 @@ namespace SabreTools.Serialization.Deserializers
|
||||
// EMPTY = splitLine[10]
|
||||
};
|
||||
|
||||
if (splitLine.Length > 11)
|
||||
rom.ADDITIONAL_ELEMENTS = splitLine.Skip(11).ToArray();
|
||||
|
||||
roms.Add(rom);
|
||||
}
|
||||
|
||||
else
|
||||
{
|
||||
if (reader.CurrentLine != null)
|
||||
additional.Add(reader.CurrentLine);
|
||||
}
|
||||
}
|
||||
|
||||
// Add extra pieces and return
|
||||
dat.ADDITIONAL_ELEMENTS = additional.Where(s => s != null).ToArray();
|
||||
if (dat.Credits != null)
|
||||
dat.Credits.ADDITIONAL_ELEMENTS = creditsAdditional.Where(s => s != null).ToArray();
|
||||
if (dat.Dat != null)
|
||||
dat.Dat.ADDITIONAL_ELEMENTS = datAdditional.Where(s => s != null).ToArray();
|
||||
if (dat.Emulator != null)
|
||||
dat.Emulator.ADDITIONAL_ELEMENTS = emulatorAdditional.Where(s => s != null).ToArray();
|
||||
if (dat.Games != null)
|
||||
{
|
||||
dat.Games.Rom = roms.ToArray();
|
||||
dat.Games.ADDITIONAL_ELEMENTS = gamesAdditional.Where(s => s != null).Select(s => s).ToArray();
|
||||
}
|
||||
dat.Games.Rom = [.. roms];
|
||||
|
||||
return dat;
|
||||
}
|
||||
}
|
||||
|
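For reference on the RomCenter games handling above: each game line is '¬'-delimited, the first eleven fields feed the Rom model, and anything past index 10 is preserved in ADDITIONAL_ELEMENTS. A tiny sketch with an invented sample line:

public static class RomCenterLineSketch
{
    public static void Main()
    {
        // Invented sample line; real files carry the same '¬' separators
        string line = "¬parent¬¬game¬Some Game¬rom.bin¬00000000¬1024¬¬¬¬extra1¬extra2";
        string[] splitLine = line.Split('¬');

        // Fields beyond index 10 are what Skip(11).ToArray() collects above
        string[] extras = splitLine.Length > 11
            ? splitLine[11..]
            : System.Array.Empty<string>();

        System.Console.WriteLine(extras.Length); // 2 for the sample line
    }
}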
||||
@@ -17,9 +17,6 @@ namespace SabreTools.Serialization.Deserializers
|
||||
if (data.Position < 0 || data.Position >= data.Length)
|
||||
return null;
|
||||
|
||||
// Cache the current offset
|
||||
int initialOffset = (int)data.Position;
|
||||
|
||||
// Deserialize the SFB
|
||||
var sfb = data.ReadType<Models.PlayStation3.SFB>();
|
||||
if (sfb == null)
|
||||
|
||||
@@ -17,9 +17,6 @@ namespace SabreTools.Serialization.Deserializers
|
||||
if (data.Position < 0 || data.Position >= data.Length)
|
||||
return null;
|
||||
|
||||
// Cache the current offset
|
||||
int initialOffset = (int)data.Position;
|
||||
|
||||
// Create a new SFO to fill
|
||||
var sfo = new Models.PlayStation3.SFO();
|
||||
|
||||
|
||||
@@ -20,9 +20,6 @@ namespace SabreTools.Serialization.Deserializers
|
||||
if (data.Position < 0 || data.Position >= data.Length)
|
||||
return null;
|
||||
|
||||
// Cache the current offset
|
||||
long initialOffset = data.Position;
|
||||
|
||||
// Create a new SGA to fill
|
||||
var file = new Models.SGA.File();
|
||||
|
||||
@@ -78,7 +75,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
// Versions 4 and 5 share the same header
|
||||
case 4:
|
||||
case 5:
|
||||
Header4 header4 = new Header4();
|
||||
var header4 = new Header4();
|
||||
|
||||
header4.Signature = signature;
|
||||
header4.MajorVersion = majorVersion;
|
||||
@@ -97,7 +94,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
// Versions 6 and 7 share the same header
|
||||
case 6:
|
||||
case 7:
|
||||
Header6 header6 = new Header6();
|
||||
var header6 = new Header6();
|
||||
|
||||
header6.Signature = signature;
|
||||
header6.MajorVersion = majorVersion;
|
||||
@@ -125,20 +122,24 @@ namespace SabreTools.Serialization.Deserializers
|
||||
/// <returns>Filled SGA directory on success, null on error</returns>
|
||||
private static Models.SGA.Directory? ParseDirectory(Stream data, ushort majorVersion)
|
||||
{
|
||||
#region Directory
|
||||
|
||||
// Create the appropriate type of directory
|
||||
Models.SGA.Directory directory;
|
||||
switch (majorVersion)
|
||||
return majorVersion switch
|
||||
{
|
||||
case 4: directory = new Directory4(); break;
|
||||
case 5: directory = new Directory5(); break;
|
||||
case 6: directory = new Directory6(); break;
|
||||
case 7: directory = new Directory7(); break;
|
||||
default: return null;
|
||||
}
|
||||
4 => ParseDirectory4(data),
|
||||
5 => ParseDirectory5(data),
|
||||
6 => ParseDirectory6(data),
|
||||
7 => ParseDirectory7(data),
|
||||
_ => null,
|
||||
};
|
||||
}
|
||||
|
||||
#endregion
|
||||
/// <summary>
|
||||
/// Parse a Stream into an SGA directory
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled SGA directory on success, null on error</returns>
|
||||
private static Directory4? ParseDirectory4(Stream data)
|
||||
{
|
||||
var directory = new Directory4();
|
||||
|
||||
// Cache the current offset
|
||||
long currentOffset = data.Position;
|
||||
@@ -146,36 +147,19 @@ namespace SabreTools.Serialization.Deserializers
|
||||
#region Directory Header
|
||||
|
||||
// Try to parse the directory header
|
||||
var directoryHeader = ParseDirectoryHeader(data, majorVersion);
|
||||
var directoryHeader = ParseDirectory4Header(data);
|
||||
if (directoryHeader == null)
|
||||
return null;
|
||||
|
||||
// Set the directory header
|
||||
switch (majorVersion)
|
||||
{
|
||||
case 4: (directory as Directory4)!.DirectoryHeader = directoryHeader as DirectoryHeader4; break;
|
||||
case 5: (directory as Directory5)!.DirectoryHeader = directoryHeader as DirectoryHeader5; break;
|
||||
case 6: (directory as Directory6)!.DirectoryHeader = directoryHeader as DirectoryHeader5; break;
|
||||
case 7: (directory as Directory7)!.DirectoryHeader = directoryHeader as DirectoryHeader7; break;
|
||||
default: return null;
|
||||
}
|
||||
directory.DirectoryHeader = directoryHeader;
|
||||
|
||||
#endregion
|
||||
|
||||
#region Sections
|
||||
|
||||
// Get the sections offset
|
||||
long sectionOffset;
|
||||
switch (majorVersion)
|
||||
{
|
||||
case 4: sectionOffset = (directoryHeader as DirectoryHeader4)!.SectionOffset; break;
|
||||
case 5:
|
||||
case 6: sectionOffset = (directoryHeader as DirectoryHeader5)!.SectionOffset; break;
|
||||
case 7: sectionOffset = (directoryHeader as DirectoryHeader7)!.SectionOffset; break;
|
||||
default: return null;
|
||||
}
|
||||
|
||||
// Adjust the sections offset based on the directory
|
||||
// Get and adjust the sections offset
|
||||
long sectionOffset = directoryHeader.SectionOffset;
|
||||
sectionOffset += currentOffset;
|
||||
|
||||
// Validate the offset
|
||||
@@ -185,67 +169,21 @@ namespace SabreTools.Serialization.Deserializers
|
||||
// Seek to the sections
|
||||
data.Seek(sectionOffset, SeekOrigin.Begin);
|
||||
|
||||
// Get the section count
|
||||
uint sectionCount;
|
||||
switch (majorVersion)
|
||||
{
|
||||
case 4: sectionCount = (directoryHeader as DirectoryHeader4)!.SectionCount; break;
|
||||
case 5:
|
||||
case 6: sectionCount = (directoryHeader as DirectoryHeader5)!.SectionCount; break;
|
||||
case 7: sectionCount = (directoryHeader as DirectoryHeader7)!.SectionCount; break;
|
||||
default: return null;
|
||||
}
|
||||
|
||||
// Create the sections array
|
||||
object[] sections;
|
||||
switch (majorVersion)
|
||||
{
|
||||
case 4: sections = new Section4[sectionCount]; break;
|
||||
case 5:
|
||||
case 6:
|
||||
case 7: sections = new Section5[sectionCount]; break;
|
||||
default: return null;
|
||||
}
|
||||
directory.Sections = new Section4[directoryHeader.SectionCount];
|
||||
|
||||
// Try to parse the sections
|
||||
for (int i = 0; i < sections.Length; i++)
|
||||
for (int i = 0; i < directory.Sections.Length; i++)
|
||||
{
|
||||
switch (majorVersion)
|
||||
{
|
||||
case 4: sections[i] = ParseSection4(data); break;
|
||||
case 5:
|
||||
case 6:
|
||||
case 7: sections[i] = ParseSection5(data); break;
|
||||
default: return null;
|
||||
}
|
||||
}
|
||||
|
||||
// Assign the sections
|
||||
switch (majorVersion)
|
||||
{
|
||||
case 4: (directory as Directory4)!.Sections = sections as Section4[]; break;
|
||||
case 5: (directory as Directory5)!.Sections = sections as Section5[]; break;
|
||||
case 6: (directory as Directory6)!.Sections = sections as Section5[]; break;
|
||||
case 7: (directory as Directory7)!.Sections = sections as Section5[]; break;
|
||||
default: return null;
|
||||
directory.Sections[i] = ParseSection4(data);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Folders
|
||||
|
||||
// Get the folders offset
|
||||
long folderOffset;
|
||||
switch (majorVersion)
|
||||
{
|
||||
case 4: folderOffset = (directoryHeader as DirectoryHeader4)!.FolderOffset; break;
|
||||
case 5: folderOffset = (directoryHeader as DirectoryHeader5)!.FolderOffset; break;
|
||||
case 6: folderOffset = (directoryHeader as DirectoryHeader5)!.FolderOffset; break;
|
||||
case 7: folderOffset = (directoryHeader as DirectoryHeader7)!.FolderOffset; break;
|
||||
default: return null;
|
||||
}
|
||||
|
||||
// Adjust the folders offset based on the directory
|
||||
// Get and adjust the folders offset
|
||||
long folderOffset = directoryHeader.FolderOffset;
|
||||
folderOffset += currentOffset;
|
||||
|
||||
// Validate the offset
|
||||
@@ -255,67 +193,21 @@ namespace SabreTools.Serialization.Deserializers
|
||||
// Seek to the folders
|
||||
data.Seek(folderOffset, SeekOrigin.Begin);
|
||||
|
||||
// Get the folder count
|
||||
uint folderCount;
|
||||
switch (majorVersion)
|
||||
{
|
||||
case 4: folderCount = (directoryHeader as DirectoryHeader4)!.FolderCount; break;
|
||||
case 5: folderCount = (directoryHeader as DirectoryHeader5)!.FolderCount; break;
|
||||
case 6: folderCount = (directoryHeader as DirectoryHeader5)!.FolderCount; break;
|
||||
case 7: folderCount = (directoryHeader as DirectoryHeader7)!.FolderCount; break;
|
||||
default: return null;
|
||||
}
|
||||
|
||||
// Create the folders array
|
||||
object[] folders;
|
||||
switch (majorVersion)
|
||||
{
|
||||
case 4: folders = new Folder4[folderCount]; break;
|
||||
case 5: folders = new Folder5[folderCount]; break;
|
||||
case 6: folders = new Folder5[folderCount]; break;
|
||||
case 7: folders = new Folder5[folderCount]; break;
|
||||
default: return null;
|
||||
}
|
||||
directory.Folders = new Folder4[directoryHeader.FolderCount];
|
||||
|
||||
// Try to parse the folders
|
||||
for (int i = 0; i < folders.Length; i++)
|
||||
for (int i = 0; i < directory.Folders.Length; i++)
|
||||
{
|
||||
switch (majorVersion)
|
||||
{
|
||||
case 4: folders[i] = ParseFolder4(data); break;
|
||||
case 5: folders[i] = ParseFolder5(data); break;
|
||||
case 6: folders[i] = ParseFolder5(data); break;
|
||||
case 7: folders[i] = ParseFolder5(data); break;
|
||||
default: return null;
|
||||
}
|
||||
}
|
||||
|
||||
// Assign the folders
|
||||
switch (majorVersion)
|
||||
{
|
||||
case 4: (directory as Directory4)!.Folders = folders as Folder4[]; break;
|
||||
case 5: (directory as Directory5)!.Folders = folders as Folder5[]; break;
|
||||
case 6: (directory as Directory6)!.Folders = folders as Folder5[]; break;
|
||||
case 7: (directory as Directory7)!.Folders = folders as Folder5[]; break;
|
||||
default: return null;
|
||||
directory.Folders[i] = ParseFolder4(data);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Files
|
||||
|
||||
// Get the files offset
|
||||
long fileOffset;
|
||||
switch (majorVersion)
|
||||
{
|
||||
case 4: fileOffset = (directoryHeader as DirectoryHeader4)!.FileOffset; break;
|
||||
case 5: fileOffset = (directoryHeader as DirectoryHeader5)!.FileOffset; break;
|
||||
case 6: fileOffset = (directoryHeader as DirectoryHeader5)!.FileOffset; break;
|
||||
case 7: fileOffset = (directoryHeader as DirectoryHeader7)!.FileOffset; break;
|
||||
default: return null;
|
||||
}
|
||||
|
||||
// Adjust the files offset based on the directory
|
||||
// Get and adjust the files offset
|
||||
long fileOffset = directoryHeader.FileOffset;
|
||||
fileOffset += currentOffset;
|
||||
|
||||
// Validate the offset
|
||||
@@ -326,66 +218,23 @@ namespace SabreTools.Serialization.Deserializers
|
||||
data.Seek(fileOffset, SeekOrigin.Begin);
|
||||
|
||||
// Get the file count
|
||||
uint fileCount;
|
||||
switch (majorVersion)
|
||||
{
|
||||
case 4: fileCount = (directoryHeader as DirectoryHeader4)!.FileCount; break;
|
||||
case 5: fileCount = (directoryHeader as DirectoryHeader5)!.FileCount; break;
|
||||
case 6: fileCount = (directoryHeader as DirectoryHeader5)!.FileCount; break;
|
||||
case 7: fileCount = (directoryHeader as DirectoryHeader7)!.FileCount; break;
|
||||
default: return null;
|
||||
}
|
||||
uint fileCount = directoryHeader.FileCount;
|
||||
|
||||
// Create the files array
|
||||
object[] files;
|
||||
switch (majorVersion)
|
||||
{
|
||||
case 4: files = new File4[fileCount]; break;
|
||||
case 5: files = new File4[fileCount]; break;
|
||||
case 6: files = new File6[fileCount]; break;
|
||||
case 7: files = new File7[fileCount]; break;
|
||||
default: return null;
|
||||
}
|
||||
directory.Files = new File4[fileCount];
|
||||
|
||||
// Try to parse the files
|
||||
for (int i = 0; i < files.Length; i++)
|
||||
for (int i = 0; i < directory.Files.Length; i++)
|
||||
{
|
||||
switch (majorVersion)
|
||||
{
|
||||
case 4: files[i] = ParseFile4(data); break;
|
||||
case 5: files[i] = ParseFile4(data); break;
|
||||
case 6: files[i] = ParseFile6(data); break;
|
||||
case 7: files[i] = ParseFile7(data); break;
|
||||
default: return null;
|
||||
}
|
||||
}
|
||||
|
||||
// Assign the files
|
||||
switch (majorVersion)
|
||||
{
|
||||
case 4: (directory as Directory4)!.Files = files as File4[]; break;
|
||||
case 5: (directory as Directory5)!.Files = files as File4[]; break;
|
||||
case 6: (directory as Directory6)!.Files = files as File6[]; break;
|
||||
case 7: (directory as Directory7)!.Files = files as File7[]; break;
|
||||
default: return null;
|
||||
directory.Files[i] = ParseFile4(data);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region String Table
|
||||
|
||||
// Get the string table offset
|
||||
long stringTableOffset;
|
||||
switch (majorVersion)
|
||||
{
|
||||
case 4: stringTableOffset = (directoryHeader as DirectoryHeader4)!.StringTableOffset; break;
|
||||
case 5: stringTableOffset = (directoryHeader as DirectoryHeader5)!.StringTableOffset; break;
|
||||
case 6: stringTableOffset = (directoryHeader as DirectoryHeader5)!.StringTableOffset; break;
|
||||
case 7: stringTableOffset = (directoryHeader as DirectoryHeader7)!.StringTableOffset; break;
|
||||
default: return null;
|
||||
}
|
||||
|
||||
// Adjust the string table offset based on the directory
|
||||
// Get and adjust the string table offset
|
||||
long stringTableOffset = directoryHeader.StringTableOffset;
|
||||
stringTableOffset += currentOffset;
|
||||
|
||||
// Validate the offset
|
||||
@@ -395,87 +244,40 @@ namespace SabreTools.Serialization.Deserializers
|
||||
// Seek to the string table
|
||||
data.Seek(stringTableOffset, SeekOrigin.Begin);
|
||||
|
||||
// Get the string table count
|
||||
uint stringCount;
|
||||
switch (majorVersion)
|
||||
{
|
||||
case 4: stringCount = (directoryHeader as DirectoryHeader4)!.StringTableCount; break;
|
||||
case 5: stringCount = (directoryHeader as DirectoryHeader5)!.StringTableCount; break;
|
||||
case 6: stringCount = (directoryHeader as DirectoryHeader5)!.StringTableCount; break;
|
||||
case 7: stringCount = (directoryHeader as DirectoryHeader7)!.StringTableCount; break;
|
||||
default: return null;
|
||||
}
|
||||
|
||||
// TODO: Are these strings actually indexed by number and not position?
|
||||
// TODO: If indexed by position, I think it needs to be adjusted by start of table
|
||||
|
||||
// Create the strings dictionary
|
||||
Dictionary<long, string?> strings = new Dictionary<long, string?>((int)stringCount);
|
||||
directory.StringTable = new Dictionary<long, string?>((int)directoryHeader.StringTableCount);
|
||||
|
||||
// Get the current position to adjust the offsets
|
||||
long stringTableStart = data.Position;
|
||||
|
||||
// Try to parse the strings
|
||||
for (int i = 0; i < stringCount; i++)
|
||||
for (int i = 0; i < directoryHeader.StringTableCount; i++)
|
||||
{
|
||||
long currentPosition = data.Position - stringTableStart;
|
||||
strings[currentPosition] = data.ReadNullTerminatedAnsiString();
|
||||
}
|
||||
|
||||
// Assign the files
|
||||
switch (majorVersion)
|
||||
{
|
||||
case 4: (directory as Directory4)!.StringTable = strings; break;
|
||||
case 5: (directory as Directory5)!.StringTable = strings; break;
|
||||
case 6: (directory as Directory6)!.StringTable = strings; break;
|
||||
case 7: (directory as Directory7)!.StringTable = strings; break;
|
||||
default: return null;
|
||||
directory.StringTable[currentPosition] = data.ReadNullTerminatedAnsiString();
|
||||
}
|
||||
|
||||
// Loop through all folders to assign names
|
||||
for (int i = 0; i < folderCount; i++)
|
||||
for (int i = 0; i < directory.Folders.Length; i++)
|
||||
{
|
||||
uint nameOffset;
|
||||
switch (majorVersion)
|
||||
{
|
||||
case 4: nameOffset = (directory as Directory4)!.Folders![i]!.NameOffset; break;
|
||||
case 5: nameOffset = (directory as Directory5)!.Folders![i]!.NameOffset; break;
|
||||
case 6: nameOffset = (directory as Directory6)!.Folders![i]!.NameOffset; break;
|
||||
case 7: nameOffset = (directory as Directory7)!.Folders![i]!.NameOffset; break;
|
||||
default: return null;
|
||||
}
|
||||
var folder = directory.Folders[i];
|
||||
if (folder == null)
|
||||
continue;
|
||||
|
||||
switch (majorVersion)
|
||||
{
|
||||
case 4: (directory as Directory4)!.Folders![i]!.Name = strings[nameOffset]; break;
|
||||
case 5: (directory as Directory5)!.Folders![i]!.Name = strings[nameOffset]; break;
|
||||
case 6: (directory as Directory6)!.Folders![i]!.Name = strings[nameOffset]; break;
|
||||
case 7: (directory as Directory7)!.Folders![i]!.Name = strings[nameOffset]; break;
|
||||
default: return null;
|
||||
}
|
||||
folder.Name = directory.StringTable[folder.NameOffset];
|
||||
}
|
||||
|
||||
// Loop through all files to assign names
|
||||
for (int i = 0; i < fileCount; i++)
|
||||
for (int i = 0; i < directory.Files.Length; i++)
|
||||
{
|
||||
uint nameOffset;
|
||||
switch (majorVersion)
|
||||
{
|
||||
case 4: nameOffset = (directory as Directory4)!.Files![i]!.NameOffset; break;
|
||||
case 5: nameOffset = (directory as Directory5)!.Files![i]!.NameOffset; break;
|
||||
case 6: nameOffset = (directory as Directory6)!.Files![i]!.NameOffset; break;
|
||||
case 7: nameOffset = (directory as Directory7)!.Files![i]!.NameOffset; break;
|
||||
default: return null;
|
||||
}
|
||||
var file = directory.Files[i];
|
||||
if (file == null)
|
||||
continue;
|
||||
|
||||
switch (majorVersion)
|
||||
{
|
||||
case 4: (directory as Directory4)!.Files![i]!.Name = strings[nameOffset]; break;
|
||||
case 5: (directory as Directory5)!.Files![i]!.Name = strings[nameOffset]; break;
|
||||
case 6: (directory as Directory6)!.Files![i]!.Name = strings[nameOffset]; break;
|
||||
case 7: (directory as Directory7)!.Files![i]!.Name = strings[nameOffset]; break;
|
||||
default: return null;
|
||||
}
|
||||
file.Name = directory.StringTable[file.NameOffset];
|
||||
}
|
||||
|
||||
#endregion
|
||||
@@ -484,21 +286,453 @@ namespace SabreTools.Serialization.Deserializers
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into an SGA directory header
|
||||
/// Parse a Stream into an SGA directory
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <param name="majorVersion">SGA major version</param>
|
||||
/// <returns>Filled SGA directory header on success, null on error</returns>
|
||||
private static object? ParseDirectoryHeader(Stream data, ushort majorVersion)
|
||||
/// <returns>Filled SGA directory on success, null on error</returns>
|
||||
private static Directory5? ParseDirectory5(Stream data)
|
||||
{
|
||||
switch (majorVersion)
|
||||
var directory = new Directory5();
|
||||
|
||||
// Cache the current offset
|
||||
long currentOffset = data.Position;
|
||||
|
||||
#region Directory Header
|
||||
|
||||
// Try to parse the directory header
|
||||
var directoryHeader = ParseDirectory5Header(data);
|
||||
if (directoryHeader == null)
|
||||
return null;
|
||||
|
||||
// Set the directory header
|
||||
directory.DirectoryHeader = directoryHeader;
|
||||
|
||||
#endregion
|
||||
|
||||
#region Sections
|
||||
|
||||
// Get and adjust the sections offset
|
||||
long sectionOffset = directoryHeader.SectionOffset;
|
||||
sectionOffset += currentOffset;
|
||||
|
||||
// Validate the offset
|
||||
if (sectionOffset < 0 || sectionOffset >= data.Length)
|
||||
return null;
|
||||
|
||||
// Seek to the sections
|
||||
data.Seek(sectionOffset, SeekOrigin.Begin);
|
||||
|
||||
// Create the sections array
|
||||
directory.Sections = new Section5[directoryHeader.SectionCount];
|
||||
|
||||
// Try to parse the sections
|
||||
for (int i = 0; i < directory.Sections.Length; i++)
|
||||
{
|
||||
case 4: return ParseDirectory4Header(data);
|
||||
case 5: return ParseDirectory5Header(data);
|
||||
case 6: return ParseDirectory5Header(data);
|
||||
case 7: return ParseDirectory7Header(data);
|
||||
default: return null;
|
||||
directory.Sections[i] = ParseSection5(data);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Folders
|
||||
|
||||
// Get and adjust the folders offset
|
||||
long folderOffset = directoryHeader.FolderOffset;
|
||||
folderOffset += currentOffset;
|
||||
|
||||
// Validate the offset
|
||||
if (folderOffset < 0 || folderOffset >= data.Length)
|
||||
return null;
|
||||
|
||||
// Seek to the folders
|
||||
data.Seek(folderOffset, SeekOrigin.Begin);
|
||||
|
||||
// Create the folders array
|
||||
directory.Folders = new Folder5[directoryHeader.FolderCount];
|
||||
|
||||
// Try to parse the folders
|
||||
for (int i = 0; i < directory.Folders.Length; i++)
|
||||
{
|
||||
directory.Folders[i] = ParseFolder5(data);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Files
|
||||
|
||||
// Get and adjust the files offset
|
||||
long fileOffset = directoryHeader.FileOffset;
|
||||
fileOffset += currentOffset;
|
||||
|
||||
// Validate the offset
|
||||
if (fileOffset < 0 || fileOffset >= data.Length)
|
||||
return null;
|
||||
|
||||
// Seek to the files
|
||||
data.Seek(fileOffset, SeekOrigin.Begin);
|
||||
|
||||
// Create the files array
|
||||
directory.Files = new File4[directoryHeader.FileCount];
|
||||
|
||||
// Try to parse the files
|
||||
for (int i = 0; i < directory.Files.Length; i++)
|
||||
{
|
||||
directory.Files[i] = ParseFile4(data);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region String Table
|
||||
|
||||
// Get and adjust the string table offset
|
||||
long stringTableOffset = directoryHeader.StringTableOffset;
|
||||
stringTableOffset += currentOffset;
|
||||
|
||||
// Validate the offset
|
||||
if (stringTableOffset < 0 || stringTableOffset >= data.Length)
|
||||
return null;
|
||||
|
||||
// Seek to the string table
|
||||
data.Seek(stringTableOffset, SeekOrigin.Begin);
|
||||
|
||||
// TODO: Are these strings actually indexed by number and not position?
|
||||
// TODO: If indexed by position, I think it needs to be adjusted by start of table
|
||||
|
||||
// Create the strings dictionary
|
||||
directory.StringTable = new Dictionary<long, string?>((int)directoryHeader.StringTableCount);
|
||||
|
||||
// Get the current position to adjust the offsets
|
||||
long stringTableStart = data.Position;
|
||||
|
||||
// Try to parse the strings
|
||||
for (int i = 0; i < directoryHeader.StringTableCount; i++)
|
||||
{
|
||||
long currentPosition = data.Position - stringTableStart;
|
||||
directory.StringTable[currentPosition] = data.ReadNullTerminatedAnsiString();
|
||||
}
|
||||
|
||||
// Loop through all folders to assign names
|
||||
for (int i = 0; i < directory.Folders.Length; i++)
|
||||
{
|
||||
var folder = directory.Folders[i];
|
||||
if (folder == null)
|
||||
continue;
|
||||
|
||||
folder.Name = directory.StringTable[folder.NameOffset];
|
||||
}
|
||||
|
||||
// Loop through all files to assign names
|
||||
for (int i = 0; i < directory.Files.Length; i++)
|
||||
{
|
||||
var file = directory.Files[i];
|
||||
if (file == null)
|
||||
continue;
|
||||
|
||||
file.Name = directory.StringTable[file.NameOffset];
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
return directory;
}

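ParseDirectory5, ParseDirectory6, and ParseDirectory7 repeat one pattern for every table: the offsets stored in the directory header are relative to where the header begins, so each is rebased against currentOffset, bounds-checked, and used as a seek target before count items are read. A minimal sketch of that shared step; ReadAll and parseOne are illustrative names, not part of the library:

    // Requires: using System; using System.IO;
    // Sketch only: rebase a header-relative offset, validate it, seek, and parse 'count' items.
    private static T[]? ReadAll<T>(Stream data, long currentOffset, long relativeOffset, uint count, Func<Stream, T> parseOne)
    {
        long absolute = relativeOffset + currentOffset;
        if (absolute < 0 || absolute >= data.Length)
            return null;

        data.Seek(absolute, SeekOrigin.Begin);
        var items = new T[count];
        for (int i = 0; i < items.Length; i++)
        {
            items[i] = parseOne(data);
        }

        return items;
    }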
/// <summary>
|
||||
/// Parse a Stream into an SGA directory
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled SGA directory on success, null on error</returns>
|
||||
private static Directory6? ParseDirectory6(Stream data)
|
||||
{
|
||||
var directory = new Directory6();
|
||||
|
||||
// Cache the current offset
|
||||
long currentOffset = data.Position;
|
||||
|
||||
#region Directory Header
|
||||
|
||||
// Try to parse the directory header
|
||||
var directoryHeader = ParseDirectory5Header(data);
|
||||
if (directoryHeader == null)
|
||||
return null;
|
||||
|
||||
// Set the directory header
|
||||
directory.DirectoryHeader = directoryHeader;
|
||||
|
||||
#endregion
|
||||
|
||||
#region Sections
|
||||
|
||||
// Get and adjust the sections offset
|
||||
long sectionOffset = directoryHeader.SectionOffset;
|
||||
sectionOffset += currentOffset;
|
||||
|
||||
// Validate the offset
|
||||
if (sectionOffset < 0 || sectionOffset >= data.Length)
|
||||
return null;
|
||||
|
||||
// Seek to the sections
|
||||
data.Seek(sectionOffset, SeekOrigin.Begin);
|
||||
|
||||
// Create the sections array
|
||||
directory.Sections = new Section5[directoryHeader.SectionCount];
|
||||
|
||||
// Try to parse the sections
|
||||
for (int i = 0; i < directory.Sections.Length; i++)
|
||||
{
|
||||
directory.Sections[i] = ParseSection5(data);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Folders
|
||||
|
||||
// Get and adjust the folders offset
|
||||
long folderOffset = directoryHeader.FolderOffset;
|
||||
folderOffset += currentOffset;
|
||||
|
||||
// Validate the offset
|
||||
if (folderOffset < 0 || folderOffset >= data.Length)
|
||||
return null;
|
||||
|
||||
// Seek to the folders
|
||||
data.Seek(folderOffset, SeekOrigin.Begin);
|
||||
|
||||
// Create the folders array
|
||||
directory.Folders = new Folder5[directoryHeader.FolderCount];
|
||||
|
||||
// Try to parse the folders
|
||||
for (int i = 0; i < directory.Folders.Length; i++)
|
||||
{
|
||||
directory.Folders[i] = ParseFolder5(data);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Files
|
||||
|
||||
// Get and adjust the files offset
|
||||
long fileOffset = directoryHeader.FileOffset;
|
||||
fileOffset += currentOffset;
|
||||
|
||||
// Validate the offset
|
||||
if (fileOffset < 0 || fileOffset >= data.Length)
|
||||
return null;
|
||||
|
||||
// Seek to the files
|
||||
data.Seek(fileOffset, SeekOrigin.Begin);
|
||||
|
||||
// Create the files array
|
||||
directory.Files = new File6[directoryHeader.FileCount];
|
||||
|
||||
// Try to parse the files
|
||||
for (int i = 0; i < directory.Files.Length; i++)
|
||||
{
|
||||
directory.Files[i] = ParseFile6(data);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region String Table
|
||||
|
||||
// Get and adjust the string table offset
|
||||
long stringTableOffset = directoryHeader.StringTableOffset;
|
||||
stringTableOffset += currentOffset;
|
||||
|
||||
// Validate the offset
|
||||
if (stringTableOffset < 0 || stringTableOffset >= data.Length)
|
||||
return null;
|
||||
|
||||
// Seek to the string table
|
||||
data.Seek(stringTableOffset, SeekOrigin.Begin);
|
||||
|
||||
// TODO: Are these strings actually indexed by number and not position?
|
||||
// TODO: If indexed by position, I think it needs to be adjusted by start of table
|
||||
|
||||
// Create the strings dictionary
|
||||
directory.StringTable = new Dictionary<long, string?>((int)directoryHeader.StringTableCount);
|
||||
|
||||
// Get the current position to adjust the offsets
|
||||
long stringTableStart = data.Position;
|
||||
|
||||
// Try to parse the strings
|
||||
for (int i = 0; i < directoryHeader.StringTableCount; i++)
|
||||
{
|
||||
long currentPosition = data.Position - stringTableStart;
|
||||
directory.StringTable[currentPosition] = data.ReadNullTerminatedAnsiString();
|
||||
}
|
||||
|
||||
// Loop through all folders to assign names
|
||||
for (int i = 0; i < directory.Folders.Length; i++)
|
||||
{
|
||||
var folder = directory.Folders[i];
|
||||
if (folder == null)
|
||||
continue;
|
||||
|
||||
folder.Name = directory.StringTable[folder.NameOffset];
|
||||
}
|
||||
|
||||
// Loop through all files to assign names
|
||||
for (int i = 0; i < directory.Files.Length; i++)
|
||||
{
|
||||
var file = directory.Files[i];
|
||||
if (file == null)
|
||||
continue;
|
||||
|
||||
file.Name = directory.StringTable[file.NameOffset];
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
return directory;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into an SGA directory
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled SGA directory on success, null on error</returns>
|
||||
private static Directory7? ParseDirectory7(Stream data)
|
||||
{
|
||||
var directory = new Directory7();
|
||||
|
||||
// Cache the current offset
|
||||
long currentOffset = data.Position;
|
||||
|
||||
#region Directory Header
|
||||
|
||||
// Try to parse the directory header
|
||||
var directoryHeader = ParseDirectory7Header(data);
|
||||
if (directoryHeader == null)
|
||||
return null;
|
||||
|
||||
// Set the directory header
|
||||
directory.DirectoryHeader = directoryHeader;
|
||||
|
||||
#endregion
|
||||
|
||||
#region Sections
|
||||
|
||||
// Get and adjust the sections offset
|
||||
long sectionOffset = directoryHeader.SectionOffset;
|
||||
sectionOffset += currentOffset;
|
||||
|
||||
// Validate the offset
|
||||
if (sectionOffset < 0 || sectionOffset >= data.Length)
|
||||
return null;
|
||||
|
||||
// Seek to the sections
|
||||
data.Seek(sectionOffset, SeekOrigin.Begin);
|
||||
|
||||
// Create the sections array
|
||||
directory.Sections = new Section5[directoryHeader.SectionCount];
|
||||
|
||||
// Try to parse the sections
|
||||
for (int i = 0; i < directory.Sections.Length; i++)
|
||||
{
|
||||
directory.Sections[i] = ParseSection5(data);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Folders
|
||||
|
||||
// Get and adjust the folders offset
|
||||
long folderOffset = directoryHeader.FolderOffset;
|
||||
folderOffset += currentOffset;
|
||||
|
||||
// Validate the offset
|
||||
if (folderOffset < 0 || folderOffset >= data.Length)
|
||||
return null;
|
||||
|
||||
// Seek to the folders
|
||||
data.Seek(folderOffset, SeekOrigin.Begin);
|
||||
|
||||
// Create the folders array
|
||||
directory.Folders = new Folder5[directoryHeader.FolderCount];
|
||||
|
||||
// Try to parse the folders
|
||||
for (int i = 0; i < directory.Folders.Length; i++)
|
||||
{
|
||||
directory.Folders[i] = ParseFolder5(data);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Files
|
||||
|
||||
// Get and adjust the files offset
|
||||
long fileOffset = directoryHeader.FileOffset;
|
||||
fileOffset += currentOffset;
|
||||
|
||||
// Validate the offset
|
||||
if (fileOffset < 0 || fileOffset >= data.Length)
|
||||
return null;
|
||||
|
||||
// Seek to the files
|
||||
data.Seek(fileOffset, SeekOrigin.Begin);
|
||||
|
||||
// Create the files array
|
||||
directory.Files = new File7[directoryHeader.FileCount];
|
||||
|
||||
// Try to parse the files
|
||||
for (int i = 0; i < directory.Files.Length; i++)
|
||||
{
|
||||
directory.Files[i] = ParseFile7(data);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region String Table
|
||||
|
||||
// Get and adjust the string table offset
|
||||
long stringTableOffset = directoryHeader.StringTableOffset;
|
||||
stringTableOffset += currentOffset;
|
||||
|
||||
// Validate the offset
|
||||
if (stringTableOffset < 0 || stringTableOffset >= data.Length)
|
||||
return null;
|
||||
|
||||
// Seek to the string table
|
||||
data.Seek(stringTableOffset, SeekOrigin.Begin);
|
||||
|
||||
// TODO: Are these strings actually indexed by number and not position?
|
||||
// TODO: If indexed by position, I think it needs to be adjusted by start of table
|
||||
|
||||
// Create the strings dictionary
|
||||
directory.StringTable = new Dictionary<long, string?>((int)directoryHeader.StringTableCount);
|
||||
|
||||
// Get the current position to adjust the offsets
|
||||
long stringTableStart = data.Position;
|
||||
|
||||
// Try to parse the strings
|
||||
for (int i = 0; i < directoryHeader.StringTableCount; i++)
|
||||
{
|
||||
long currentPosition = data.Position - stringTableStart;
|
||||
directory.StringTable[currentPosition] = data.ReadNullTerminatedAnsiString();
|
||||
}
|
||||
|
||||
// Loop through all folders to assign names
|
||||
for (int i = 0; i < directory.Folders.Length; i++)
|
||||
{
|
||||
var folder = directory.Folders[i];
|
||||
if (folder == null)
|
||||
continue;
|
||||
|
||||
folder.Name = directory.StringTable[folder.NameOffset];
|
||||
}
|
||||
|
||||
// Loop through all files to assign names
|
||||
for (int i = 0; i < directory.Files.Length; i++)
|
||||
{
|
||||
var file = directory.Files[i];
|
||||
if (file == null)
|
||||
continue;
|
||||
|
||||
file.Name = directory.StringTable[file.NameOffset];
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
return directory;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
@@ -508,7 +742,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
/// <returns>Filled SGA directory header version 4 on success, null on error</returns>
|
||||
private static DirectoryHeader4 ParseDirectory4Header(Stream data)
|
||||
{
|
||||
DirectoryHeader4 directoryHeader4 = new DirectoryHeader4();
|
||||
var directoryHeader4 = new DirectoryHeader4();
|
||||
|
||||
directoryHeader4.SectionOffset = data.ReadUInt32();
|
||||
directoryHeader4.SectionCount = data.ReadUInt16();
|
||||
@@ -529,7 +763,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
/// <returns>Filled SGA directory header version 5 on success, null on error</returns>
|
||||
private static DirectoryHeader5 ParseDirectory5Header(Stream data)
|
||||
{
|
||||
DirectoryHeader5 directoryHeader5 = new DirectoryHeader5();
|
||||
var directoryHeader5 = new DirectoryHeader5();
|
||||
|
||||
directoryHeader5.SectionOffset = data.ReadUInt32();
|
||||
directoryHeader5.SectionCount = data.ReadUInt32();
|
||||
@@ -550,7 +784,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
/// <returns>Filled SGA directory header version 7 on success, null on error</returns>
|
||||
private static DirectoryHeader7 ParseDirectory7Header(Stream data)
|
||||
{
|
||||
DirectoryHeader7 directoryHeader7 = new DirectoryHeader7();
|
||||
var directoryHeader7 = new DirectoryHeader7();
|
||||
|
||||
directoryHeader7.SectionOffset = data.ReadUInt32();
|
||||
directoryHeader7.SectionCount = data.ReadUInt32();
|
||||
@@ -574,7 +808,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
/// <returns>Filled SGA section version 4 on success, null on error</returns>
|
||||
private static Section4 ParseSection4(Stream data)
|
||||
{
|
||||
Section4 section4 = new Section4();
|
||||
var section4 = new Section4();
|
||||
|
||||
byte[]? section4Alias = data.ReadBytes(64);
|
||||
if (section4Alias != null)
|
||||
@@ -599,7 +833,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
/// <returns>Filled SGA section version 5 on success, null on error</returns>
|
||||
private static Section5 ParseSection5(Stream data)
|
||||
{
|
||||
Section5 section5 = new Section5();
|
||||
var section5 = new Section5();
|
||||
|
||||
byte[]? section5Alias = data.ReadBytes(64);
|
||||
if (section5Alias != null)
|
||||
@@ -624,7 +858,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
/// <returns>Filled SGA folder version 4 on success, null on error</returns>
|
||||
private static Folder4 ParseFolder4(Stream data)
|
||||
{
|
||||
Folder4 folder4 = new Folder4();
|
||||
var folder4 = new Folder4();
|
||||
|
||||
folder4.NameOffset = data.ReadUInt32();
|
||||
folder4.Name = null; // Read from string table
|
||||
@@ -644,7 +878,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
/// <returns>Filled SGA folder version 5 on success, null on error</returns>
|
||||
private static Folder5 ParseFolder5(Stream data)
|
||||
{
|
||||
Folder5 folder5 = new Folder5();
|
||||
var folder5 = new Folder5();
|
||||
|
||||
folder5.NameOffset = data.ReadUInt32();
|
||||
folder5.Name = null; // Read from string table
|
||||
@@ -664,7 +898,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
/// <returns>Filled SGA file version 4 on success, null on error</returns>
|
||||
private static File4 ParseFile4(Stream data)
|
||||
{
|
||||
File4 file4 = new File4();
|
||||
var file4 = new File4();
|
||||
|
||||
file4.NameOffset = data.ReadUInt32();
|
||||
file4.Name = null; // Read from string table
|
||||
@@ -686,7 +920,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
/// <returns>Filled SGA file version 6 on success, null on error</returns>
|
||||
private static File6 ParseFile6(Stream data)
|
||||
{
|
||||
File6 file6 = new File6();
|
||||
var file6 = new File6();
|
||||
|
||||
file6.NameOffset = data.ReadUInt32();
|
||||
file6.Name = null; // Read from string table
|
||||
@@ -709,7 +943,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
/// <returns>Filled SGA file version 7 on success, null on error</returns>
|
||||
private static File7 ParseFile7(Stream data)
|
||||
{
|
||||
File7 file7 = new File7();
|
||||
var file7 = new File7();
|
||||
|
||||
file7.NameOffset = data.ReadUInt32();
|
||||
file7.Name = null; // Read from string table
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using SabreTools.IO.Readers;
|
||||
using SabreTools.Models.SeparatedValue;
|
||||
@@ -104,7 +103,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
if (!reader.ReadHeader() || reader.HeaderValues == null)
|
||||
return null;
|
||||
|
||||
dat.Header = reader.HeaderValues.ToArray();
|
||||
dat.Header = [.. reader.HeaderValues];
|
||||
|
||||
// Loop through the rows and parse out values
|
||||
var rows = new List<Row>();
|
||||
@@ -135,10 +134,6 @@ namespace SabreTools.Serialization.Deserializers
|
||||
SHA256 = reader.Line[12],
|
||||
Status = reader.Line[13],
|
||||
};
|
||||
|
||||
// If we have additional fields
|
||||
if (reader.Line.Count > HeaderWithoutExtendedHashesCount)
|
||||
row.ADDITIONAL_ELEMENTS = reader.Line.Skip(HeaderWithoutExtendedHashesCount).ToArray();
|
||||
}
|
||||
else
|
||||
{
|
||||
@@ -162,16 +157,12 @@ namespace SabreTools.Serialization.Deserializers
|
||||
SpamSum = reader.Line[15],
|
||||
Status = reader.Line[16],
|
||||
};
|
||||
|
||||
// If we have additional fields
|
||||
if (reader.Line.Count > HeaderWithExtendedHashesCount)
|
||||
row.ADDITIONAL_ELEMENTS = reader.Line.Skip(HeaderWithExtendedHashesCount).ToArray();
|
||||
}
|
||||
rows.Add(row);
|
||||
}
|
||||
|
||||
// Assign the rows to the Dat and return
|
||||
dat.Row = rows.ToArray();
|
||||
dat.Row = [.. rows];
|
||||
return dat;
}

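The recurring dat.Header = [.. reader.HeaderValues]; and dat.Row = [.. rows]; changes replace LINQ's ToArray() with C# 12 collection expressions; when the target type is an array, the spread form produces the same result. A small illustrative check, not taken from the repository:

    // Requires C# 12; using System; using System.Collections.Generic; using System.Linq;
    var rows = new List<string> { "a", "b", "c" };
    string[] viaToArray = rows.ToArray();
    string[] viaSpread = [.. rows];                         // collection expression with spread
    Console.WriteLine(viaToArray.SequenceEqual(viaSpread)); // True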
File diff suppressed because it is too large
@@ -40,7 +40,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
|
||||
#region Extended Header
|
||||
|
||||
if (header?.Version == 2)
|
||||
if (header.Version == 2)
|
||||
{
|
||||
// Try to parse the extended header
|
||||
var extendedHeader = ParseExtendedHeader(data);
|
||||
@@ -69,8 +69,8 @@ namespace SabreTools.Serialization.Deserializers
|
||||
|
||||
if (header?.Version == 2
|
||||
&& file.ExtendedHeader != null
|
||||
&& file.ExtendedHeader.ArchiveHashLength > 0
|
||||
&& data.Position + file.ExtendedHeader.ArchiveHashLength <= data.Length)
|
||||
&& file.ExtendedHeader.ArchiveMD5SectionSize > 0
|
||||
&& data.Position + file.ExtendedHeader.ArchiveMD5SectionSize <= data.Length)
|
||||
{
|
||||
// Create the archive hashes list
|
||||
var archiveHashes = new List<ArchiveHash>();
|
||||
@@ -79,7 +79,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
initialOffset = data.Position;
|
||||
|
||||
// Try to parse the directory items
|
||||
while (data.Position < initialOffset + file.ExtendedHeader.ArchiveHashLength)
|
||||
while (data.Position < initialOffset + file.ExtendedHeader.ArchiveMD5SectionSize)
|
||||
{
|
||||
var archiveHash = ParseArchiveHash(data);
|
||||
if (archiveHash == null)
|
||||
@@ -195,7 +195,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
}
|
||||
}
|
||||
|
||||
return directoryItems.ToArray();
|
||||
return [.. directoryItems];
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
|
||||
@@ -1,226 +0,0 @@
|
||||
using System.IO;
|
||||
using System.Text;
|
||||
using SabreTools.IO.Extensions;
|
||||
using SabreTools.Models.WAD;
|
||||
using static SabreTools.Models.WAD.Constants;
|
||||
|
||||
namespace SabreTools.Serialization.Deserializers
|
||||
{
|
||||
public class WAD : BaseBinaryDeserializer<Models.WAD.File>
|
||||
{
|
||||
/// <inheritdoc/>
|
||||
public override Models.WAD.File? Deserialize(Stream? data)
|
||||
{
|
||||
// If the data is invalid
|
||||
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
|
||||
return null;
|
||||
|
||||
// If the offset is out of bounds
|
||||
if (data.Position < 0 || data.Position >= data.Length)
|
||||
return null;
|
||||
|
||||
// Cache the current offset
|
||||
long initialOffset = data.Position;
|
||||
|
||||
// Create a new Half-Life Texture Package to fill
|
||||
var file = new Models.WAD.File();
|
||||
|
||||
#region Header
|
||||
|
||||
// Try to parse the header
|
||||
var header = ParseHeader(data);
|
||||
if (header == null)
|
||||
return null;
|
||||
|
||||
// Set the package header
|
||||
file.Header = header;
|
||||
|
||||
#endregion
|
||||
|
||||
#region Lumps
|
||||
|
||||
// Get the lump offset
|
||||
uint lumpOffset = header.LumpOffset;
|
||||
if (lumpOffset < 0 || lumpOffset >= data.Length)
|
||||
return null;
|
||||
|
||||
// Seek to the lump offset
|
||||
data.Seek(lumpOffset, SeekOrigin.Begin);
|
||||
|
||||
// Create the lump array
|
||||
file.Lumps = new Lump[header.LumpCount];
|
||||
for (int i = 0; i < header.LumpCount; i++)
|
||||
{
|
||||
var lump = ParseLump(data);
|
||||
if (lump == null)
|
||||
return null;
|
||||
|
||||
file.Lumps[i] = lump;
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Lump Infos
|
||||
|
||||
// Create the lump info array
|
||||
file.LumpInfos = new LumpInfo?[header.LumpCount];
|
||||
for (int i = 0; i < header.LumpCount; i++)
|
||||
{
|
||||
var lump = file.Lumps[i];
|
||||
if (lump == null)
|
||||
{
|
||||
file.LumpInfos[i] = null;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (lump.Compression != 0)
|
||||
{
|
||||
file.LumpInfos[i] = null;
|
||||
continue;
|
||||
}
|
||||
|
||||
// Get the lump info offset
|
||||
uint lumpInfoOffset = lump.Offset;
|
||||
if (lumpInfoOffset < 0 || lumpInfoOffset >= data.Length)
|
||||
{
|
||||
file.LumpInfos[i] = null;
|
||||
continue;
|
||||
}
|
||||
|
||||
// Seek to the lump info offset
|
||||
data.Seek(lumpInfoOffset, SeekOrigin.Begin);
|
||||
|
||||
// Try to parse the lump info -- TODO: Do we ever set the mipmap level?
|
||||
var lumpInfo = ParseLumpInfo(data, lump.Type);
|
||||
file.LumpInfos[i] = lumpInfo;
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
return file;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a Half-Life Texture Package header
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled Half-Life Texture Package header on success, null on error</returns>
|
||||
private static Header? ParseHeader(Stream data)
|
||||
{
|
||||
var header = data.ReadType<Header>();
|
||||
|
||||
if (header == null)
|
||||
return null;
|
||||
if (header.Signature != SignatureString)
|
||||
return null;
|
||||
|
||||
return header;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a Half-Life Texture Package lump
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled Half-Life Texture Package lump on success, null on error</returns>
|
||||
private static Lump? ParseLump(Stream data)
|
||||
{
|
||||
return data.ReadType<Lump>();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a Half-Life Texture Package lump info
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <param name="type">Lump type</param>
|
||||
/// <param name="mipmap">Mipmap level</param>
|
||||
/// <returns>Filled Half-Life Texture Package lump info on success, null on error</returns>
|
||||
private static LumpInfo? ParseLumpInfo(Stream data, byte type, uint mipmap = 0)
|
||||
{
|
||||
// TODO: Use marshalling here instead of building
|
||||
LumpInfo lumpInfo = new LumpInfo();
|
||||
|
||||
// Cache the initial offset
|
||||
long initialOffset = data.Position;
|
||||
|
||||
// Type 0x42 has no name, type 0x43 does. Are these flags?
|
||||
if (type == 0x42)
|
||||
{
|
||||
if (mipmap > 0)
|
||||
return null;
|
||||
|
||||
lumpInfo.Width = data.ReadUInt32();
|
||||
lumpInfo.Height = data.ReadUInt32();
|
||||
lumpInfo.PixelData = data.ReadBytes((int)(lumpInfo.Width * lumpInfo.Height));
|
||||
lumpInfo.PaletteSize = data.ReadUInt16();
|
||||
}
|
||||
else if (type == 0x43)
|
||||
{
|
||||
if (mipmap > 3)
|
||||
return null;
|
||||
|
||||
byte[]? name = data.ReadBytes(16);
|
||||
if (name != null)
|
||||
lumpInfo.Name = Encoding.ASCII.GetString(name);
|
||||
lumpInfo.Width = data.ReadUInt32();
|
||||
lumpInfo.Height = data.ReadUInt32();
|
||||
lumpInfo.PixelOffset = data.ReadUInt32();
|
||||
lumpInfo.UnknownData = data.ReadBytes(12);
|
||||
|
||||
// Cache the current offset
|
||||
long currentOffset = data.Position;
|
||||
|
||||
// Seek to the pixel data
|
||||
data.Seek(initialOffset + lumpInfo.PixelOffset, SeekOrigin.Begin);
|
||||
|
||||
// Read the pixel data
|
||||
lumpInfo.PixelData = data.ReadBytes((int)(lumpInfo.Width * lumpInfo.Height));
|
||||
|
||||
// Seek back to the offset
|
||||
data.Seek(currentOffset, SeekOrigin.Begin);
|
||||
|
||||
uint pixelSize = lumpInfo.Width * lumpInfo.Height;
|
||||
|
||||
// Mipmap data -- TODO: How do we determine this during initial parsing?
|
||||
switch (mipmap)
|
||||
{
|
||||
case 1: _ = data.ReadBytes((int)pixelSize); break;
|
||||
case 2: _ = data.ReadBytes((int)(pixelSize + (pixelSize / 4))); break;
|
||||
case 3: _ = data.ReadBytes((int)(pixelSize + (pixelSize / 4) + (pixelSize / 16))); break;
|
||||
default: return null;
|
||||
}
|
||||
|
||||
_ = data.ReadBytes((int)(pixelSize + (pixelSize / 4) + (pixelSize / 16) + (pixelSize / 64))); // Pixel data
|
||||
lumpInfo.PaletteSize = data.ReadUInt16();
|
||||
lumpInfo.PaletteData = data.ReadBytes((int)lumpInfo.PaletteSize * 3);
|
||||
}
|
||||
else
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
// Adjust based on mipmap level
|
||||
switch (mipmap)
|
||||
{
|
||||
case 1:
|
||||
lumpInfo.Width /= 2;
|
||||
lumpInfo.Height /= 2;
|
||||
break;
|
||||
|
||||
case 2:
|
||||
lumpInfo.Width /= 4;
|
||||
lumpInfo.Height /= 4;
|
||||
break;
|
||||
|
||||
case 3:
|
||||
lumpInfo.Width /= 8;
|
||||
lumpInfo.Height /= 8;
|
||||
break;
|
||||
|
||||
default:
|
||||
return null;
|
||||
}
|
||||
|
||||
return lumpInfo;
|
||||
}
|
||||
}
|
||||
}
|
||||
263
SabreTools.Serialization/Deserializers/WAD3.cs
Normal file
@@ -0,0 +1,263 @@
|
||||
using System.IO;
|
||||
using System.Text;
|
||||
using SabreTools.IO.Extensions;
|
||||
using SabreTools.Models.WAD3;
|
||||
using static SabreTools.Models.WAD3.Constants;
|
||||
|
||||
namespace SabreTools.Serialization.Deserializers
|
||||
{
|
||||
public class WAD3 : BaseBinaryDeserializer<Models.WAD3.File>
|
||||
{
|
||||
/// <inheritdoc/>
|
||||
public override Models.WAD3.File? Deserialize(Stream? data)
|
||||
{
|
||||
// If the data is invalid
|
||||
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
|
||||
return null;
|
||||
|
||||
// If the offset is out of bounds
|
||||
if (data.Position < 0 || data.Position >= data.Length)
|
||||
return null;
|
||||
|
||||
// Create a new Half-Life Texture Package to fill
|
||||
var file = new Models.WAD3.File();
|
||||
|
||||
#region Header
|
||||
|
||||
// Try to parse the header
|
||||
var header = ParseHeader(data);
|
||||
if (header == null)
|
||||
return null;
|
||||
|
||||
// Set the package header
|
||||
file.Header = header;
|
||||
|
||||
#endregion
|
||||
|
||||
#region Directory Entries
|
||||
|
||||
// Get the directory offset
|
||||
uint dirOffset = header.DirOffset;
|
||||
if (dirOffset < 0 || dirOffset >= data.Length)
|
||||
return null;
|
||||
|
||||
// Seek to the lump offset
|
||||
data.Seek(dirOffset, SeekOrigin.Begin);
|
||||
|
||||
// Create the lump array
|
||||
file.DirEntries = new DirEntry[header.NumDirs];
|
||||
for (int i = 0; i < header.NumDirs; i++)
|
||||
{
|
||||
var lump = ParseDirEntry(data);
|
||||
if (lump == null)
|
||||
return null;
|
||||
|
||||
file.DirEntries[i] = lump;
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region File Entries
|
||||
|
||||
// Create the file entry array
|
||||
file.FileEntries = new FileEntry?[header.NumDirs];
|
||||
for (int i = 0; i < header.NumDirs; i++)
|
||||
{
|
||||
var dirEntry = file.DirEntries[i];
|
||||
if (dirEntry == null)
|
||||
continue;
|
||||
|
||||
// TODO: Handle compressed entries
|
||||
if (dirEntry.Compression != 0)
|
||||
continue;
|
||||
|
||||
// Get the file entry offset
|
||||
uint fileEntryOffset = dirEntry.Offset;
|
||||
if (fileEntryOffset < 0 || fileEntryOffset >= data.Length)
|
||||
continue;
|
||||
|
||||
// Seek to the file entry offset
|
||||
data.Seek(fileEntryOffset, SeekOrigin.Begin);
|
||||
|
||||
// Try to parse the file entry
|
||||
var fileEntry = ParseFileEntry(data, dirEntry.Type);
|
||||
if (fileEntry != null)
|
||||
file.FileEntries[i] = fileEntry;
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
return file;
}

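Assuming the WAD3 class is constructed with its default constructor, like the other deserializers in this compare, a package could be read roughly as follows. The path is hypothetical; Type, Offset, and Compression are the directory entry fields the deserializer itself uses:

    // Sketch: deserialize a WAD3 package and list its directory entries.
    using var stream = System.IO.File.OpenRead("textures.wad"); // hypothetical path
    var package = new WAD3().Deserialize(stream);
    if (package?.DirEntries != null)
    {
        foreach (var entry in package.DirEntries)
        {
            System.Console.WriteLine($"type={entry.Type} offset=0x{entry.Offset:X8} compressed={entry.Compression != 0}");
        }
    }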
/// <summary>
/// Parse a Stream into a Half-Life Texture Package header
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Texture Package header on success, null on error</returns>
private static Header? ParseHeader(Stream data)
{
var header = data.ReadType<Header>();

if (header == null)
return null;
if (header.Signature != SignatureString)
return null;

return header;
}

/// <summary>
/// Parse a Stream into a Half-Life Texture Package directory entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled Half-Life Texture Package directory entry on success, null on error</returns>
private static DirEntry? ParseDirEntry(Stream data)
{
return data.ReadType<DirEntry>();
}

/// <summary>
/// Parse a Stream into a Half-Life Texture Package file entry
/// </summary>
/// <param name="data">Stream to parse</param>
/// <param name="type">File entry type</param>
/// <returns>Filled Half-Life Texture Package file entry on success, null on error</returns>
private static FileEntry? ParseFileEntry(Stream data, FileType type)
{
return type switch
{
FileType.Spraydecal
or FileType.Miptex => ParseMipTex(data),
FileType.Qpic => ParseQpicImage(data),
FileType.Font => ParseFont(data),
_ => null,
};
}

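The `or` pattern in this switch expression is C# 9 pattern matching: one arm covers both Spraydecal and Miptex, which the deserializer treats as the same MipTex layout. For readers on older language versions, the equivalent classic switch would be (illustrative only):

    switch (type)
    {
        case FileType.Spraydecal:
        case FileType.Miptex:
            return ParseMipTex(data);
        case FileType.Qpic:
            return ParseQpicImage(data);
        case FileType.Font:
            return ParseFont(data);
        default:
            return null;
    }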
/// <summary>
|
||||
/// Parse a Stream into a Half-Life Texture Package MipTex
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled Half-Life Texture Package MipTex on success, null on error</returns>
|
||||
private static MipTex ParseMipTex(Stream data)
|
||||
{
|
||||
var miptex = new MipTex();
|
||||
|
||||
byte[] nameBytes = data.ReadBytes(16);
|
||||
miptex.Name = Encoding.ASCII.GetString(nameBytes).TrimEnd('\0');
|
||||
miptex.Width = data.ReadUInt32();
|
||||
miptex.Height = data.ReadUInt32();
|
||||
miptex.MipOffsets = new uint[4];
|
||||
for (int i = 0; i < miptex.MipOffsets.Length; i++)
|
||||
{
|
||||
miptex.MipOffsets[i] = data.ReadUInt32();
|
||||
}
|
||||
miptex.MipImages = new MipMap[4];
|
||||
for (int i = 0; i < miptex.MipImages.Length; i++)
|
||||
{
|
||||
miptex.MipImages[i] = ParseMipMap(data, miptex.Width, miptex.Height);
|
||||
}
|
||||
miptex.ColorsUsed = data.ReadUInt16();
|
||||
miptex.Palette = new byte[miptex.ColorsUsed][];
|
||||
for (int i = 0; i < miptex.ColorsUsed; i++)
|
||||
{
|
||||
miptex.Palette[i] = data.ReadBytes(3);
|
||||
}
|
||||
|
||||
return miptex;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a Half-Life Texture Package MipMap
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled Half-Life Texture Package MipMap on success, null on error</returns>
|
||||
private static MipMap ParseMipMap(Stream data, uint width, uint height)
|
||||
{
|
||||
var mipmap = new MipMap();
|
||||
|
||||
mipmap.Data = new byte[width][];
|
||||
for (int i = 0; i < width; i++)
|
||||
{
|
||||
mipmap.Data[i] = data.ReadBytes((int)height);
|
||||
}
|
||||
|
||||
return mipmap;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a Half-Life Texture Package Qpic image
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled Half-Life Texture Package Qpic image on success, null on error</returns>
|
||||
private static QpicImage ParseQpicImage(Stream data)
|
||||
{
|
||||
var qpic = new QpicImage();
|
||||
|
||||
qpic.Width = data.ReadUInt32();
|
||||
qpic.Height = data.ReadUInt32();
|
||||
qpic.Data = new byte[qpic.Height][];
|
||||
for (int i = 0; i < qpic.Height; i++)
|
||||
{
|
||||
qpic.Data[i] = data.ReadBytes((int)qpic.Width);
|
||||
}
|
||||
qpic.ColorsUsed = data.ReadUInt16();
|
||||
qpic.Palette = new byte[qpic.ColorsUsed][];
|
||||
for (int i = 0; i < qpic.ColorsUsed; i++)
|
||||
{
|
||||
qpic.Palette[i] = data.ReadBytes(3);
|
||||
}
|
||||
|
||||
return qpic;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a Half-Life Texture Package font
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled Half-Life Texture Package font on success, null on error</returns>
|
||||
private static Font ParseFont(Stream data)
|
||||
{
|
||||
var font = new Font();
|
||||
|
||||
font.Width = data.ReadUInt32();
|
||||
font.Height = data.ReadUInt32();
|
||||
font.RowCount = data.ReadUInt32();
|
||||
font.RowHeight = data.ReadUInt32();
|
||||
font.FontInfo = new CharInfo[256];
|
||||
for (int i = 0; i < font.FontInfo.Length; i++)
|
||||
{
|
||||
font.FontInfo[i] = ParseCharInfo(data);
|
||||
}
|
||||
font.Data = new byte[font.Height][];
|
||||
for (int i = 0; i < font.Height; i++)
|
||||
{
|
||||
font.Data[i] = data.ReadBytes((int)font.Width);
|
||||
}
|
||||
font.ColorsUsed = data.ReadUInt16();
|
||||
font.Palette = new byte[font.ColorsUsed][];
|
||||
for (int i = 0; i < font.ColorsUsed; i++)
|
||||
{
|
||||
font.Palette[i] = data.ReadBytes(3);
|
||||
}
|
||||
|
||||
return font;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse a Stream into a Half-Life Texture Package CharInfo
|
||||
/// </summary>
|
||||
/// <param name="data">Stream to parse</param>
|
||||
/// <returns>Filled Half-Life Texture Package CharInfo on success, null on error</returns>
|
||||
private static CharInfo ParseCharInfo(Stream data)
|
||||
{
|
||||
var charinfo = new CharInfo();
|
||||
|
||||
charinfo.StartOffset = data.ReadUInt16();
|
||||
charinfo.CharWidth = data.ReadUInt16();
|
||||
|
||||
return charinfo;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,5 +1,4 @@
|
||||
using System.IO;
|
||||
using System.Text;
|
||||
using SabreTools.IO.Extensions;
|
||||
using SabreTools.Models.XZP;
|
||||
using static SabreTools.Models.XZP.Constants;
|
||||
@@ -19,9 +18,6 @@ namespace SabreTools.Serialization.Deserializers
|
||||
if (data.Position < 0 || data.Position >= data.Length)
|
||||
return null;
|
||||
|
||||
// Cache the current offset
|
||||
long initialOffset = data.Position;
|
||||
|
||||
// Create a new XBox Package File to fill
|
||||
var file = new Models.XZP.File();
|
||||
|
||||
@@ -43,11 +39,11 @@ namespace SabreTools.Serialization.Deserializers
|
||||
file.DirectoryEntries = new DirectoryEntry[header.DirectoryEntryCount];
|
||||
|
||||
// Try to parse the directory entries
|
||||
for (int i = 0; i < header.DirectoryEntryCount; i++)
|
||||
for (int i = 0; i < file.DirectoryEntries.Length; i++)
|
||||
{
|
||||
var directoryEntry = ParseDirectoryEntry(data);
|
||||
if (directoryEntry == null)
|
||||
return null;
|
||||
continue;
|
||||
|
||||
file.DirectoryEntries[i] = directoryEntry;
|
||||
}
|
||||
@@ -62,11 +58,11 @@ namespace SabreTools.Serialization.Deserializers
|
||||
file.PreloadDirectoryEntries = new DirectoryEntry[header.PreloadDirectoryEntryCount];
|
||||
|
||||
// Try to parse the preload directory entries
|
||||
for (int i = 0; i < header.PreloadDirectoryEntryCount; i++)
|
||||
for (int i = 0; i < file.PreloadDirectoryEntries.Length; i++)
|
||||
{
|
||||
var directoryEntry = ParseDirectoryEntry(data);
|
||||
if (directoryEntry == null)
|
||||
return null;
|
||||
continue;
|
||||
|
||||
file.PreloadDirectoryEntries[i] = directoryEntry;
|
||||
}
|
||||
@@ -82,11 +78,11 @@ namespace SabreTools.Serialization.Deserializers
|
||||
file.PreloadDirectoryMappings = new DirectoryMapping[header.PreloadDirectoryEntryCount];
|
||||
|
||||
// Try to parse the preload directory mappings
|
||||
for (int i = 0; i < header.PreloadDirectoryEntryCount; i++)
|
||||
for (int i = 0; i < file.PreloadDirectoryMappings.Length; i++)
|
||||
{
|
||||
var directoryMapping = ParseDirectoryMapping(data);
|
||||
if (directoryMapping == null)
|
||||
return null;
|
||||
continue;
|
||||
|
||||
file.PreloadDirectoryMappings[i] = directoryMapping;
|
||||
}
|
||||
@@ -110,7 +106,7 @@ namespace SabreTools.Serialization.Deserializers
|
||||
file.DirectoryItems = new DirectoryItem[header.DirectoryItemCount];
|
||||
|
||||
// Try to parse the directory items
|
||||
for (int i = 0; i < header.DirectoryItemCount; i++)
|
||||
for (int i = 0; i < file.DirectoryItems.Length; i++)
|
||||
{
|
||||
var directoryItem = ParseDirectoryItem(data);
|
||||
file.DirectoryItems[i] = directoryItem;
|
||||
@@ -146,9 +142,7 @@ namespace SabreTools.Serialization.Deserializers
{
var header = data.ReadType<Header>();

if (header == null)
return null;
if (header.Signature != HeaderSignatureString)
if (header?.Signature != HeaderSignatureString)
return null;
if (header.Version != 6)
return null;
@@ -184,7 +178,7 @@ namespace SabreTools.Serialization.Deserializers
private static DirectoryItem ParseDirectoryItem(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryItem directoryItem = new DirectoryItem();
var directoryItem = new DirectoryItem();

directoryItem.FileNameCRC = data.ReadUInt32();
directoryItem.NameOffset = data.ReadUInt32();
@@ -214,9 +208,7 @@ namespace SabreTools.Serialization.Deserializers
{
var footer = data.ReadType<Footer>();

if (footer == null)
return null;
if (footer.Signature != FooterSignatureString)
if (footer?.Signature != FooterSignatureString)
return null;

return footer;

9
SabreTools.Serialization/ExtensionAttribute.cs
Normal file
@@ -0,0 +1,9 @@
#if NET20

namespace System.Runtime.CompilerServices
{
[AttributeUsage(AttributeTargets.Assembly | AttributeTargets.Class | AttributeTargets.Method)]
internal sealed class ExtensionAttribute : Attribute {}
}

#endif
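This shim works because the C# compiler only needs a type named System.Runtime.CompilerServices.ExtensionAttribute to exist in order to emit extension methods; declaring it internally lets the net20 target keep extension-method syntax even though that framework never shipped the attribute. A hypothetical helper, not from the repository, that would then compile under NET20:

    internal static class StreamHelpers
    {
        // Extension-method syntax is usable on net20 once ExtensionAttribute is defined.
        public static bool IsAtEnd(this System.IO.Stream stream)
            => stream.Position >= stream.Length;
    }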
@@ -1,7 +1,6 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Xml.Serialization;
|
||||
using SabreTools.IO.Extensions;
|
||||
@@ -28,7 +27,7 @@ namespace SabreTools.Serialization
return 0;

// If the RVA matches a section start exactly, use that
var matchingSection = sections.FirstOrDefault(s => s != null && s.VirtualAddress == rva);
var matchingSection = Array.Find(sections, s => s != null && s.VirtualAddress == rva);
if (matchingSection != null)
return rva - matchingSection.VirtualAddress + matchingSection.PointerToRawData;

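The return value is the standard PE conversion from a relative virtual address to a file offset: subtract the owning section's VirtualAddress, then add its PointerToRawData. A worked example with made-up section values:

    // Hypothetical section: VirtualAddress = 0x2000, PointerToRawData = 0x600
    uint rva = 0x2430;
    uint fileOffset = rva - 0x2000 + 0x600; // 0x0A30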
@@ -165,7 +164,9 @@ namespace SabreTools.Serialization
if (string.IsNullOrEmpty(addD.Version))
offset = originalOffset + 0x10;

addD.Build = data.ReadBytes(ref offset, 4)?.Select(b => (char)b)?.ToArray();
var buildBytes = data.ReadBytes(ref offset, 4);
var buildChars = Array.ConvertAll(buildBytes, b => (char)b);
addD.Build = buildChars;

// Distinguish between v1 and v2
int bytesToRead = 112; // v2

@@ -39,6 +39,7 @@ namespace SabreTools.Serialization
|
||||
Wrapper.BFPK item => item.PrettyPrint(),
|
||||
Wrapper.BSP item => item.PrettyPrint(),
|
||||
Wrapper.CFB item => item.PrettyPrint(),
|
||||
Wrapper.CHD item => item.PrettyPrint(),
|
||||
Wrapper.CIA item => item.PrettyPrint(),
|
||||
Wrapper.GCF item => item.PrettyPrint(),
|
||||
Wrapper.InstallShieldCabinet item => item.PrettyPrint(),
|
||||
@@ -62,7 +63,7 @@ namespace SabreTools.Serialization
|
||||
Wrapper.SGA item => item.PrettyPrint(),
|
||||
Wrapper.VBSP item => item.PrettyPrint(),
|
||||
Wrapper.VPK item => item.PrettyPrint(),
|
||||
Wrapper.WAD item => item.PrettyPrint(),
|
||||
Wrapper.WAD3 item => item.PrettyPrint(),
|
||||
Wrapper.XeMID item => item.PrettyPrint(),
|
||||
Wrapper.XMID item => item.PrettyPrint(),
|
||||
Wrapper.XZP item => item.PrettyPrint(),
|
||||
@@ -83,6 +84,7 @@ namespace SabreTools.Serialization
|
||||
Wrapper.BFPK item => item.ExportJSON(),
|
||||
Wrapper.BSP item => item.ExportJSON(),
|
||||
Wrapper.CFB item => item.ExportJSON(),
|
||||
Wrapper.CHD item => item.ExportJSON(),
|
||||
Wrapper.CIA item => item.ExportJSON(),
|
||||
Wrapper.GCF item => item.ExportJSON(),
|
||||
Wrapper.InstallShieldCabinet item => item.ExportJSON(),
|
||||
@@ -106,7 +108,7 @@ namespace SabreTools.Serialization
|
||||
Wrapper.SGA item => item.ExportJSON(),
|
||||
Wrapper.VBSP item => item.ExportJSON(),
|
||||
Wrapper.VPK item => item.ExportJSON(),
|
||||
Wrapper.WAD item => item.ExportJSON(),
|
||||
Wrapper.WAD3 item => item.ExportJSON(),
|
||||
Wrapper.XeMID item => item.ExportJSON(),
|
||||
Wrapper.XMID item => item.ExportJSON(),
|
||||
Wrapper.XZP item => item.ExportJSON(),
|
||||
@@ -167,6 +169,16 @@ namespace SabreTools.Serialization
|
||||
return builder;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Export the item information as pretty-printed text
|
||||
/// </summary>
|
||||
private static StringBuilder PrettyPrint(this Wrapper.CHD item)
|
||||
{
|
||||
var builder = new StringBuilder();
|
||||
CHD.Print(builder, item.Model);
|
||||
return builder;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Export the item information as pretty-printed text
|
||||
/// </summary>
|
||||
@@ -316,7 +328,7 @@ namespace SabreTools.Serialization
|
||||
PIC.Print(builder, item.Model);
|
||||
return builder;
|
||||
}
|
||||
|
||||
|
||||
/// <summary>
|
||||
/// Export the item information as pretty-printed text
|
||||
/// </summary>
|
||||
@@ -400,10 +412,10 @@ namespace SabreTools.Serialization
|
||||
/// <summary>
|
||||
/// Export the item information as pretty-printed text
|
||||
/// </summary>
|
||||
private static StringBuilder PrettyPrint(this Wrapper.WAD item)
|
||||
private static StringBuilder PrettyPrint(this Wrapper.WAD3 item)
|
||||
{
|
||||
var builder = new StringBuilder();
|
||||
WAD.Print(builder, item.Model);
|
||||
WAD3.Print(builder, item.Model);
|
||||
return builder;
|
||||
}
|
||||
|
||||
|
||||
@@ -21,7 +21,7 @@ namespace SabreTools.Serialization.Printers
|
||||
builder.AppendLine(svm.Day, "Day");
|
||||
builder.AppendLine(svm.Unknown2, "Unknown 2");
|
||||
builder.AppendLine(svm.Length, "Length");
|
||||
//builder.AppendLine(svm.Data, "Data");
|
||||
builder.AppendLine(svm.Length, "Data skipped...");
|
||||
builder.AppendLine();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,29 +1,26 @@
|
||||
using System.Text;
|
||||
using SabreTools.Models.BSP;
|
||||
using SabreTools.Serialization.Interfaces;
|
||||
using static SabreTools.Models.BSP.Constants;
|
||||
|
||||
namespace SabreTools.Serialization.Printers
|
||||
{
|
||||
public class BSP : IPrinter<File>
|
||||
public class BSP : IPrinter<BspFile>
|
||||
{
|
||||
/// <inheritdoc/>
|
||||
public void PrintInformation(StringBuilder builder, File model)
|
||||
public void PrintInformation(StringBuilder builder, BspFile model)
|
||||
=> Print(builder, model);
|
||||
|
||||
public static void Print(StringBuilder builder, File file)
|
||||
public static void Print(StringBuilder builder, BspFile file)
|
||||
{
|
||||
builder.AppendLine("BSP Information:");
|
||||
builder.AppendLine("-------------------------");
|
||||
builder.AppendLine();
|
||||
|
||||
Print(builder, file.Header);
|
||||
Print(builder, file.Lumps);
|
||||
Print(builder, file.TextureHeader);
|
||||
Print(builder, file.Textures);
|
||||
PrintLumps(builder, file);
|
||||
}
|
||||
|
||||
private static void Print(StringBuilder builder, Header? header)
|
||||
private static void Print(StringBuilder builder, BspHeader? header)
|
||||
{
|
||||
builder.AppendLine(" Header Information:");
|
||||
builder.AppendLine(" -------------------------");
|
||||
@@ -38,30 +35,21 @@ namespace SabreTools.Serialization.Printers
|
||||
builder.AppendLine();
|
||||
}
|
||||
|
||||
private static void Print(StringBuilder builder, Lump?[]? lumps)
|
||||
private static void PrintLumps(StringBuilder builder, BspFile? model)
|
||||
{
|
||||
builder.AppendLine(" Lumps Information:");
|
||||
builder.AppendLine(" -------------------------");
|
||||
if (lumps == null || lumps.Length == 0)
|
||||
if (model?.Header?.Lumps == null || model.Header.Lumps.Length == 0)
|
||||
{
|
||||
builder.AppendLine(" No lumps");
|
||||
builder.AppendLine();
|
||||
return;
|
||||
}
|
||||
|
||||
for (int i = 0; i < lumps.Length; i++)
|
||||
for (int i = 0; i < model.Header.Lumps.Length; i++)
|
||||
{
|
||||
var lump = lumps[i];
|
||||
string specialLumpName = string.Empty;
|
||||
switch (i)
|
||||
{
|
||||
case HL_BSP_LUMP_ENTITIES:
|
||||
specialLumpName = " (entities)";
|
||||
break;
|
||||
case HL_BSP_LUMP_TEXTUREDATA:
|
||||
specialLumpName = " (texture data)";
|
||||
break;
|
||||
}
|
||||
var lump = model.Header.Lumps[i];
|
||||
string specialLumpName = GetLumpName(i);
|
||||
|
||||
builder.AppendLine($" Lump {i}{specialLumpName}");
|
||||
if (lump == null)
|
||||
@@ -72,79 +60,360 @@ namespace SabreTools.Serialization.Printers
|
||||
|
||||
builder.AppendLine(lump.Offset, " Offset");
|
||||
builder.AppendLine(lump.Length, " Length");
|
||||
switch ((LumpType)i)
|
||||
{
|
||||
case LumpType.LUMP_ENTITIES:
|
||||
Print(builder, model.Entities);
|
||||
break;
|
||||
case LumpType.LUMP_PLANES:
|
||||
Print(builder, model.PlanesLump);
|
||||
break;
|
||||
case LumpType.LUMP_TEXTURES:
|
||||
Print(builder, model.TextureLump);
|
||||
break;
|
||||
case LumpType.LUMP_VERTICES:
|
||||
Print(builder, model.VerticesLump);
|
||||
break;
|
||||
case LumpType.LUMP_VISIBILITY:
|
||||
Print(builder, model.VisibilityLump);
|
||||
break;
|
||||
case LumpType.LUMP_NODES:
|
||||
Print(builder, model.NodesLump);
|
||||
break;
|
||||
case LumpType.LUMP_TEXINFO:
|
||||
Print(builder, model.TexinfoLump);
|
||||
break;
|
||||
case LumpType.LUMP_FACES:
|
||||
Print(builder, model.FacesLump);
|
||||
break;
|
||||
case LumpType.LUMP_LIGHTING:
|
||||
Print(builder, model.LightmapLump);
|
||||
break;
|
||||
case LumpType.LUMP_CLIPNODES:
|
||||
Print(builder, model.ClipnodesLump);
|
||||
break;
|
||||
case LumpType.LUMP_LEAVES:
|
||||
Print(builder, model.LeavesLump);
|
||||
break;
|
||||
case LumpType.LUMP_MARKSURFACES:
|
||||
Print(builder, model.MarksurfacesLump);
|
||||
break;
|
||||
case LumpType.LUMP_EDGES:
|
||||
Print(builder, model.EdgesLump);
|
||||
break;
|
||||
case LumpType.LUMP_SURFEDGES:
|
||||
Print(builder, model.SurfedgesLump);
|
||||
break;
|
||||
case LumpType.LUMP_MODELS:
|
||||
Print(builder, model.ModelsLump);
|
||||
break;
|
||||
default:
|
||||
builder.AppendLine($" Unsupported lump type: {(LumpType)i} (0x{i:X4})");
|
||||
break;
|
||||
}
|
||||
}
|
||||
builder.AppendLine();
|
||||
}
|
||||
|
||||
        private static void Print(StringBuilder builder, TextureHeader? header)
        {
            builder.AppendLine("  Texture Header Information:");
            builder.AppendLine("  -------------------------");
            if (header == null)
            {
                builder.AppendLine("  No texture header");
                builder.AppendLine();
                return;
            }

            builder.AppendLine(header.TextureCount, "  Texture count");
            builder.AppendLine("  Offsets:");
            if (header.Offsets == null || header.Offsets.Length == 0)
            {
                builder.AppendLine("  No offsets");
                builder.AppendLine();
                return;
            }

            for (int i = 0; i < header.Offsets.Length; i++)
            {
                builder.AppendLine(header.Offsets[i], $"    Offset {i}");
            }
            builder.AppendLine();
        }

        private static string GetLumpName(int i)
        {
            return (LumpType)i switch
            {
                LumpType.LUMP_ENTITIES => " - LUMP_ENTITIES",
                LumpType.LUMP_PLANES => " - LUMP_PLANES",
                LumpType.LUMP_TEXTURES => " - LUMP_TEXTURES",
                LumpType.LUMP_VERTICES => " - LUMP_VERTICES",
                LumpType.LUMP_VISIBILITY => " - LUMP_VISIBILITY",
                LumpType.LUMP_NODES => " - LUMP_NODES",
                LumpType.LUMP_TEXINFO => " - LUMP_TEXINFO",
                LumpType.LUMP_FACES => " - LUMP_FACES",
                LumpType.LUMP_LIGHTING => " - LUMP_LIGHTING",
                LumpType.LUMP_CLIPNODES => " - LUMP_CLIPNODES",
                LumpType.LUMP_LEAVES => " - LUMP_LEAVES",
                LumpType.LUMP_MARKSURFACES => " - LUMP_MARKSURFACES",
                LumpType.LUMP_EDGES => " - LUMP_EDGES",
                LumpType.LUMP_SURFEDGES => " - LUMP_SURFEDGES",
                LumpType.LUMP_MODELS => " - LUMP_MODELS",
                _ => string.Empty,
            };
        }

        private static void Print(StringBuilder builder, Texture?[]? textures)
        {
            builder.AppendLine("  Textures Information:");
            builder.AppendLine("  -------------------------");
            if (textures == null || textures.Length == 0)
            {
                builder.AppendLine("  No textures");
                builder.AppendLine();
                return;
            }

            for (int i = 0; i < textures.Length; i++)
            {
                var texture = textures[i];
                builder.AppendLine($"  Texture {i}");
                if (texture == null)
                {
                    builder.AppendLine("    [NULL]");
                    continue;
                }

                builder.AppendLine(texture.Name, "    Name");
                builder.AppendLine(texture.Width, "    Width");
                builder.AppendLine(texture.Height, "    Height");
                builder.AppendLine("    Offsets:");
                if (texture.Offsets == null || texture.Offsets.Length == 0)
                {
                    builder.AppendLine("      No offsets");
                    continue;
                }
                else
                {
                    for (int j = 0; j < texture.Offsets.Length; j++)
                    {
                        builder.AppendLine(texture.Offsets[j], $"      Offset {j}");
                    }
                }
                // Skip texture data
                builder.AppendLine(texture.PaletteSize, "    Palette size");
                // Skip palette data
            }
        }

        private static void Print(StringBuilder builder, EntitiesLump? lump)
        {
            if (lump?.Entities == null || lump.Entities.Length == 0)
            {
                builder.AppendLine("    No data");
                return;
            }

            for (int j = 0; j < lump.Entities.Length; j++)
            {
                // TODO: Implement entity printing
                var entity = lump.Entities[j];
                builder.AppendLine($"    Entity {j}");
                builder.AppendLine("      Entity data is not parsed properly");
            }
        }
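        // Hedged illustration (not part of the diff): GetLumpName maps a raw lump index to a
        // symbolic suffix so a lump directory line can be labelled; GetLumpLabel is a
        // hypothetical helper, and the exact index-to-name pairing depends on the LumpType
        // enum values defined in SabreTools.Models.BSP.
        private static string GetLumpLabel(int lumpIndex)
            => $"  Lump {lumpIndex}{GetLumpName(lumpIndex)}";
        // GetLumpLabel(0) yields "  Lump 0 - LUMP_ENTITIES" when index 0 maps to LumpType.LUMP_ENTITIES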
|
||||
|
||||
private static void Print(StringBuilder builder, PlanesLump? lump)
|
||||
{
|
||||
if (lump?.Planes == null || lump.Planes.Length == 0)
|
||||
{
|
||||
builder.AppendLine(" No data");
|
||||
return;
|
||||
}
|
||||
|
||||
for (int j = 0; j < lump.Planes.Length; j++)
|
||||
{
|
||||
var plane = lump.Planes[j];
|
||||
builder.AppendLine($" Plane {j}");
|
||||
builder.AppendLine($" Normal vector: ({plane.NormalVector.X}, {plane.NormalVector.Y}, {plane.NormalVector.Z})");
|
||||
builder.AppendLine(plane.Distance, " Distance");
|
||||
builder.AppendLine($" Plane type: {plane.PlaneType} (0x{plane.PlaneType:X})");
|
||||
}
|
||||
}
|
||||
|
||||
private static void Print(StringBuilder builder, TextureLump? lump)
|
||||
{
|
||||
if (lump == null)
|
||||
{
|
||||
builder.AppendLine(" No data");
|
||||
return;
|
||||
}
|
||||
|
||||
if (lump?.Header == null)
|
||||
{
|
||||
builder.AppendLine(" No texture header");
|
||||
}
|
||||
else
|
||||
{
|
||||
builder.AppendLine(" Texture Header:");
|
||||
builder.AppendLine(lump.Header.MipTextureCount, " MipTexture count");
|
||||
builder.AppendLine(lump.Header.Offsets, " Offsets");
|
||||
}
|
||||
|
||||
if (lump?.Textures == null || lump.Textures.Length == 0)
|
||||
{
|
||||
builder.AppendLine(" No texture data");
|
||||
}
|
||||
else
|
||||
{
|
||||
builder.AppendLine(" Textures:");
|
||||
for (int j = 0; j < lump.Textures.Length; j++)
|
||||
{
|
||||
var texture = lump.Textures[j];
|
||||
builder.AppendLine($" Texture {j}");
|
||||
builder.AppendLine(texture.Name, " Name");
|
||||
builder.AppendLine(texture.Width, " Width");
|
||||
builder.AppendLine(texture.Height, " Height");
|
||||
builder.AppendLine(texture.Offsets, " Offsets");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private static void Print(StringBuilder builder, VerticesLump? lump)
|
||||
{
|
||||
if (lump?.Vertices == null || lump.Vertices.Length == 0)
|
||||
{
|
||||
builder.AppendLine(" No data");
|
||||
return;
|
||||
}
|
||||
|
||||
for (int j = 0; j < lump.Vertices.Length; j++)
|
||||
{
|
||||
var vertex = lump.Vertices[j];
|
||||
builder.AppendLine($" Vertex {j}: ({vertex.X}, {vertex.Y}, {vertex.Z})");
|
||||
}
|
||||
}
|
||||
|
||||
private static void Print(StringBuilder builder, VisibilityLump? lump)
|
||||
{
|
||||
if (lump == null)
|
||||
{
|
||||
builder.AppendLine(" No data");
|
||||
return;
|
||||
}
|
||||
|
||||
builder.AppendLine(lump.NumClusters, " Cluster count");
|
||||
builder.AppendLine(" Byte offsets skipped...");
|
||||
}
|
||||
|
||||
private static void Print(StringBuilder builder, BspNodesLump? lump)
|
||||
{
|
||||
if (lump?.Nodes == null || lump.Nodes.Length == 0)
|
||||
{
|
||||
builder.AppendLine(" No data");
|
||||
return;
|
||||
}
|
||||
|
||||
for (int j = 0; j < lump.Nodes.Length; j++)
|
||||
{
|
||||
var node = lump.Nodes[j];
|
||||
builder.AppendLine($" Node {j}");
|
||||
builder.AppendLine(node.Children, " Children");
|
||||
builder.AppendLine(node.Mins, " Mins");
|
||||
builder.AppendLine(node.Maxs, " Maxs");
|
||||
builder.AppendLine(node.FirstFace, " First face index");
|
||||
builder.AppendLine(node.FaceCount, " Count of faces");
|
||||
}
|
||||
}
|
||||
|
||||
private static void Print(StringBuilder builder, BspTexinfoLump? lump)
|
||||
{
|
||||
if (lump?.Texinfos == null || lump.Texinfos.Length == 0)
|
||||
{
|
||||
builder.AppendLine(" No data");
|
||||
return;
|
||||
}
|
||||
|
||||
for (int j = 0; j < lump.Texinfos.Length; j++)
|
||||
{
|
||||
var texinfo = lump.Texinfos[j];
|
||||
builder.AppendLine($" Texinfo {j}");
|
||||
builder.AppendLine($" S-Vector: ({texinfo.SVector.X}, {texinfo.SVector.Y}, {texinfo.SVector.Z})");
|
||||
builder.AppendLine(texinfo.TextureSShift, " Texture shift in S direction");
|
||||
builder.AppendLine($" T-Vector: ({texinfo.TVector.X}, {texinfo.TVector.Y}, {texinfo.TVector.Z})");
|
||||
builder.AppendLine(texinfo.TextureTShift, " Texture shift in T direction");
|
||||
builder.AppendLine(texinfo.MiptexIndex, " Miptex index");
|
||||
builder.AppendLine($" Flags: {texinfo.Flags} (0x{texinfo.Flags:X})");
|
||||
}
|
||||
}
|
||||
|
||||
private static void Print(StringBuilder builder, BspFacesLump? lump)
|
||||
{
|
||||
if (lump?.Faces == null || lump.Faces.Length == 0)
|
||||
{
|
||||
builder.AppendLine(" No data");
|
||||
return;
|
||||
}
|
||||
|
||||
for (int j = 0; j < lump.Faces.Length; j++)
|
||||
{
|
||||
var face = lump.Faces[j];
|
||||
builder.AppendLine($" Face {j}");
|
||||
builder.AppendLine(face.PlaneIndex, " Plane index");
|
||||
builder.AppendLine(face.PlaneSideCount, " Plane side count");
|
||||
builder.AppendLine(face.FirstEdgeIndex, " First surfedge index");
|
||||
builder.AppendLine(face.NumberOfEdges, " Surfedge count");
|
||||
builder.AppendLine(face.TextureInfoIndex, " Texture info index");
|
||||
builder.AppendLine(face.LightingStyles, " Lighting styles");
|
||||
builder.AppendLine(face.LightmapOffset, " Lightmap offset");
|
||||
}
|
||||
}
|
||||
|
||||
private static void Print(StringBuilder builder, LightmapLump? lump)
|
||||
{
|
||||
if (lump?.Lightmap == null || lump.Lightmap.Length == 0)
|
||||
builder.AppendLine(" No data");
|
||||
else
|
||||
builder.AppendLine(" Lightmap data skipped...");
|
||||
}
|
||||
|
||||
private static void Print(StringBuilder builder, ClipnodesLump? lump)
|
||||
{
|
||||
if (lump?.Clipnodes == null || lump.Clipnodes.Length == 0)
|
||||
{
|
||||
builder.AppendLine(" No data");
|
||||
return;
|
||||
}
|
||||
|
||||
for (int j = 0; j < lump.Clipnodes.Length; j++)
|
||||
{
|
||||
var clipnode = lump.Clipnodes[j];
|
||||
builder.AppendLine($" Clipnode {j}");
|
||||
builder.AppendLine(clipnode.PlaneIndex, " Plane index");
|
||||
builder.AppendLine(clipnode.ChildrenIndices, " Children indices");
|
||||
}
|
||||
}
|
||||
|
||||
private static void Print(StringBuilder builder, BspLeavesLump? lump)
|
||||
{
|
||||
if (lump?.Leaves == null || lump.Leaves.Length == 0)
|
||||
{
|
||||
builder.AppendLine(" No data");
|
||||
return;
|
||||
}
|
||||
|
||||
for (int j = 0; j < lump.Leaves.Length; j++)
|
||||
{
|
||||
var leaf = lump.Leaves[j];
|
||||
builder.AppendLine($" Leaf {j}");
|
||||
builder.AppendLine($" Contents: {leaf.Contents} (0x{leaf.Contents:X})");
|
||||
builder.AppendLine(leaf.VisOffset, " Visibility offset");
|
||||
builder.AppendLine(leaf.Mins, " Mins");
|
||||
builder.AppendLine(leaf.Maxs, " Maxs");
|
||||
builder.AppendLine(leaf.FirstMarkSurfaceIndex, " First marksurface index");
|
||||
builder.AppendLine(leaf.MarkSurfacesCount, " Marksurfaces count");
|
||||
builder.AppendLine(leaf.AmbientLevels, " Ambient sound levels");
|
||||
}
|
||||
}
|
||||
|
||||
private static void Print(StringBuilder builder, MarksurfacesLump? lump)
|
||||
{
|
||||
if (lump?.Marksurfaces == null || lump.Marksurfaces.Length == 0)
|
||||
{
|
||||
builder.AppendLine(" No data");
|
||||
return;
|
||||
}
|
||||
|
||||
for (int j = 0; j < lump.Marksurfaces.Length; j++)
|
||||
{
|
||||
var marksurface = lump.Marksurfaces[j];
|
||||
builder.AppendLine(marksurface, $" Marksurface {j}");
|
||||
}
|
||||
}
|
||||
|
||||
private static void Print(StringBuilder builder, EdgesLump? lump)
|
||||
{
|
||||
if (lump?.Edges == null || lump.Edges.Length == 0)
|
||||
{
|
||||
builder.AppendLine(" No data");
|
||||
return;
|
||||
}
|
||||
|
||||
for (int j = 0; j < lump.Edges.Length; j++)
|
||||
{
|
||||
var edge = lump.Edges[j];
|
||||
builder.AppendLine($" Edge {j}");
|
||||
builder.AppendLine(edge.VertexIndices, " Vertex indices");
|
||||
}
|
||||
}
|
||||
|
||||
private static void Print(StringBuilder builder, SurfedgesLump? lump)
|
||||
{
|
||||
if (lump?.Surfedges == null || lump.Surfedges.Length == 0)
|
||||
{
|
||||
builder.AppendLine(" No data");
|
||||
return;
|
||||
}
|
||||
|
||||
for (int j = 0; j < lump.Surfedges.Length; j++)
|
||||
{
|
||||
var surfedge = lump.Surfedges[j];
|
||||
builder.AppendLine(surfedge, $" Surfedge {j}");
|
||||
}
|
||||
}
|
||||
|
||||
private static void Print(StringBuilder builder, BspModelsLump? lump)
|
||||
{
|
||||
if (lump?.Models == null || lump.Models.Length == 0)
|
||||
{
|
||||
builder.AppendLine(" No data");
|
||||
return;
|
||||
}
|
||||
|
||||
for (int j = 0; j < lump.Models.Length; j++)
|
||||
{
|
||||
var bmodel = lump.Models[j];
|
||||
builder.AppendLine($" Model {j}");
|
||||
builder.AppendLine($" Mins: {bmodel.Mins.X}, {bmodel.Mins.Y}, {bmodel.Mins.Z}");
|
||||
builder.AppendLine($" Maxs: {bmodel.Maxs.X}, {bmodel.Maxs.Y}, {bmodel.Maxs.Z}");
|
||||
builder.AppendLine($" Origin vector: {bmodel.OriginVector.X}, {bmodel.OriginVector.Y}, {bmodel.OriginVector.Z}");
|
||||
builder.AppendLine(bmodel.HeadnodesIndex, " Headnodes index");
|
||||
builder.AppendLine(bmodel.VisLeafsCount, " ??? (VisLeafsCount)");
|
||||
builder.AppendLine(bmodel.FirstFaceIndex, " First face index");
|
||||
builder.AppendLine(bmodel.FacesCount, " Faces count");
|
||||
}
|
||||
builder.AppendLine();
|
||||
}
|
||||
}
|
||||
}
|
||||
159
SabreTools.Serialization/Printers/CHD.cs
Normal file
@@ -0,0 +1,159 @@
|
||||
using System;
using System.Collections.Generic;
using System.Text;
using SabreTools.Models.CHD;
using SabreTools.Serialization.Interfaces;

namespace SabreTools.Serialization.Printers
{
    public class CHD : IPrinter<Header>
    {
        /// <inheritdoc/>
        public void PrintInformation(StringBuilder builder, Header model)
            => Print(builder, model);

        public static void Print(StringBuilder builder, Header header)
        {
            builder.AppendLine("CHD Header Information:");
            builder.AppendLine("-------------------------");

            if (header == null)
            {
                builder.AppendLine("No header");
                builder.AppendLine();
                return;
            }

            switch (header)
            {
                case HeaderV1 v1:
                    Print(builder, v1);
                    break;
                case HeaderV2 v2:
                    Print(builder, v2);
                    break;
                case HeaderV3 v3:
                    Print(builder, v3);
                    break;
                case HeaderV4 v4:
                    Print(builder, v4);
                    break;
                case HeaderV5 v5:
                    Print(builder, v5);
                    break;
                default:
                    builder.AppendLine("Unrecognized header type");
                    builder.AppendLine();
                    break;
            }
        }
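        // Hedged sketch (not part of the diff): the same runtime-type dispatch can also be
        // written as a pattern-matching switch expression; DescribeVersion is a hypothetical
        // helper shown only to illustrate the technique used by Print above.
        private static string DescribeVersion(Header header) => header switch
        {
            HeaderV1 => "CHD v1 header",
            HeaderV2 => "CHD v2 header",
            HeaderV3 => "CHD v3 header",
            HeaderV4 => "CHD v4 header",
            HeaderV5 => "CHD v5 header",
            _ => "Unrecognized header type",
        };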
|
||||
|
||||
private static void Print(StringBuilder builder, HeaderV1 header)
|
||||
{
|
||||
builder.AppendLine(header.Tag, $"Tag");
|
||||
builder.AppendLine(header.Length, $"Length");
|
||||
builder.AppendLine(header.Version, $"Version");
|
||||
builder.AppendLine($" Flags: {header.Flags} (0x{header.Flags:X})");
|
||||
builder.AppendLine($" Compression: {header.Compression} (0x{header.Compression:X})");
|
||||
builder.AppendLine(header.HunkSize, $"Hunk size");
|
||||
builder.AppendLine(header.TotalHunks, $"Total hunks");
|
||||
builder.AppendLine(header.Cylinders, $"Cylinders");
|
||||
builder.AppendLine(header.Heads, $"Heads");
|
||||
builder.AppendLine(header.Sectors, $"Sectors");
|
||||
builder.AppendLine(header.MD5, $"MD5");
|
||||
builder.AppendLine(header.ParentMD5, $"Parent MD5");
|
||||
builder.AppendLine();
|
||||
}
|
||||
|
||||
private static void Print(StringBuilder builder, HeaderV2 header)
|
||||
{
|
||||
builder.AppendLine(header.Tag, $"Tag");
|
||||
builder.AppendLine(header.Length, $"Length");
|
||||
builder.AppendLine(header.Version, $"Version");
|
||||
builder.AppendLine($" Flags: {header.Flags} (0x{header.Flags:X})");
|
||||
builder.AppendLine($" Compression: {header.Compression} (0x{header.Compression:X})");
|
||||
builder.AppendLine(header.HunkSize, $"Hunk size");
|
||||
builder.AppendLine(header.TotalHunks, $"Total hunks");
|
||||
builder.AppendLine(header.Cylinders, $"Cylinders");
|
||||
builder.AppendLine(header.Heads, $"Heads");
|
||||
builder.AppendLine(header.Sectors, $"Sectors");
|
||||
builder.AppendLine(header.MD5, $"MD5");
|
||||
builder.AppendLine(header.ParentMD5, $"Parent MD5");
|
||||
builder.AppendLine(header.BytesPerSector, $"Bytes per sector");
|
||||
builder.AppendLine();
|
||||
}
|
||||
|
||||
private static void Print(StringBuilder builder, HeaderV3 header)
|
||||
{
|
||||
builder.AppendLine(header.Tag, $"Tag");
|
||||
builder.AppendLine(header.Length, $"Length");
|
||||
builder.AppendLine(header.Version, $"Version");
|
||||
builder.AppendLine($" Flags: {header.Flags} (0x{header.Flags:X})");
|
||||
builder.AppendLine($" Compression: {header.Compression} (0x{header.Compression:X})");
|
||||
builder.AppendLine(header.TotalHunks, $"Total hunks");
|
||||
builder.AppendLine(header.LogicalBytes, $"Logical bytes");
|
||||
builder.AppendLine(header.MetaOffset, $"Meta offset");
|
||||
builder.AppendLine(header.MD5, $"MD5");
|
||||
builder.AppendLine(header.ParentMD5, $"Parent MD5");
|
||||
builder.AppendLine(header.HunkBytes, $"Hunk bytes");
|
||||
builder.AppendLine(header.SHA1, $"SHA-1");
|
||||
builder.AppendLine(header.ParentSHA1, $"Parent SHA-1");
|
||||
builder.AppendLine();
|
||||
}
|
||||
|
||||
private static void Print(StringBuilder builder, HeaderV4 header)
|
||||
{
|
||||
builder.AppendLine(header.Tag, $"Tag");
|
||||
builder.AppendLine(header.Length, $"Length");
|
||||
builder.AppendLine(header.Version, $"Version");
|
||||
builder.AppendLine($" Flags: {header.Flags} (0x{header.Flags:X})");
|
||||
builder.AppendLine($" Compression: {header.Compression} (0x{header.Compression:X})");
|
||||
builder.AppendLine(header.TotalHunks, $"Total hunks");
|
||||
builder.AppendLine(header.LogicalBytes, $"Logical bytes");
|
||||
builder.AppendLine(header.MetaOffset, $"Meta offset");
|
||||
builder.AppendLine(header.HunkBytes, $"Hunk bytes");
|
||||
builder.AppendLine(header.SHA1, $"SHA-1");
|
||||
builder.AppendLine(header.ParentSHA1, $"Parent SHA-1");
|
||||
builder.AppendLine(header.RawSHA1, $"Raw SHA-1");
|
||||
builder.AppendLine();
|
||||
}
|
||||
|
||||
        private static void Print(StringBuilder builder, HeaderV5 header)
        {
            builder.AppendLine(header.Tag, $"Tag");
            builder.AppendLine(header.Length, $"Length");
            builder.AppendLine(header.Version, $"Version");

            string compressorsLine = "Compressors: ";
            if (header.Compressors == null)
            {
                compressorsLine += "[NULL]";
            }
            else
            {
                var compressors = new List<string>();
                for (int i = 0; i < header.Compressors.Length; i++)
                {
                    uint compressor = (uint)header.Compressors[i];
                    byte[] compressorBytes = BitConverter.GetBytes(compressor);
                    Array.Reverse(compressorBytes);
                    string compressorString = Encoding.ASCII.GetString(compressorBytes);
                    compressors.Add(compressorString);
                }

                compressorsLine += string.Join(", ", [.. compressors]);
            }
            builder.AppendLine(compressorsLine);

            builder.AppendLine(header.LogicalBytes, $"Logical bytes");
            builder.AppendLine(header.MapOffset, $"Map offset");
            builder.AppendLine(header.MetaOffset, $"Meta offset");
            builder.AppendLine(header.HunkBytes, $"Hunk bytes");
            builder.AppendLine(header.UnitBytes, $"Unit bytes");
            builder.AppendLine(header.RawSHA1, $"Raw SHA-1");
            builder.AppendLine(header.SHA1, $"SHA-1");
            builder.AppendLine(header.ParentSHA1, $"Parent SHA-1");
            builder.AppendLine();
        }
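        // Hedged sketch (not part of the diff): the compressor loop above decodes each tag as a
        // big-endian FourCC, so the little-endian bytes from BitConverter must be reversed before
        // ASCII decoding; DecodeFourCC is a hypothetical helper restating that step.
        private static string DecodeFourCC(uint compressor)
        {
            byte[] bytes = BitConverter.GetBytes(compressor); // little-endian byte order on most hosts
            Array.Reverse(bytes);                             // restore big-endian (on-disk) byte order
            return Encoding.ASCII.GetString(bytes);           // e.g. 0x7A6C6962 decodes to "zlib"
        }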
|
||||
}
|
||||
}
|
||||
@@ -315,27 +315,19 @@ namespace SabreTools.Serialization.Printers
|
||||
builder.AppendLine(fileGroup.NameOffset, " Name offset");
|
||||
builder.AppendLine(fileGroup.Name, " Name");
|
||||
builder.AppendLine(fileGroup.ExpandedSize, " Expanded size");
|
||||
builder.AppendLine(fileGroup.Reserved0, " Reserved 0");
|
||||
builder.AppendLine(fileGroup.CompressedSize, " Compressed size");
|
||||
builder.AppendLine(fileGroup.Reserved1, " Reserved 1");
|
||||
builder.AppendLine(fileGroup.Reserved2, " Reserved 2");
|
||||
builder.AppendLine(fileGroup.Attribute1, " Attribute 1");
|
||||
builder.AppendLine(fileGroup.Attribute2, " Attribute 2");
|
||||
builder.AppendLine($" Attributes: {fileGroup.Attributes} (0x{fileGroup.Attributes:X})");
|
||||
builder.AppendLine(fileGroup.FirstFile, " First file");
|
||||
builder.AppendLine(fileGroup.LastFile, " Last file");
|
||||
builder.AppendLine(fileGroup.UnknownOffset, " Unknown offset");
|
||||
builder.AppendLine(fileGroup.Var4Offset, " Var 4 offset");
|
||||
builder.AppendLine(fileGroup.Var1Offset, " Var 1 offset");
|
||||
builder.AppendLine(fileGroup.UnknownStringOffset, " Unknown string offset");
|
||||
builder.AppendLine(fileGroup.OperatingSystemOffset, " Operating system offset");
|
||||
builder.AppendLine(fileGroup.LanguageOffset, " Language offset");
|
||||
builder.AppendLine(fileGroup.HTTPLocationOffset, " HTTP location offset");
|
||||
builder.AppendLine(fileGroup.FTPLocationOffset, " FTP location offset");
|
||||
builder.AppendLine(fileGroup.MiscOffset, " Misc. offset");
|
||||
builder.AppendLine(fileGroup.Var2Offset, " Var 2 offset");
|
||||
builder.AppendLine(fileGroup.TargetDirectoryOffset, " Target directory offset");
|
||||
builder.AppendLine(fileGroup.Reserved3, " Reserved 3");
|
||||
builder.AppendLine(fileGroup.Reserved4, " Reserved 4");
|
||||
builder.AppendLine(fileGroup.Reserved5, " Reserved 5");
|
||||
builder.AppendLine(fileGroup.Reserved6, " Reserved 6");
|
||||
builder.AppendLine(fileGroup.Reserved7, " Reserved 7");
|
||||
builder.AppendLine($" Overwrite flags: {fileGroup.OverwriteFlags} (0x{fileGroup.OverwriteFlags:X})");
|
||||
builder.AppendLine(fileGroup.Reserved, " Reserved");
|
||||
}
|
||||
builder.AppendLine();
|
||||
}
|
||||
@@ -366,16 +358,16 @@ namespace SabreTools.Serialization.Printers
|
||||
builder.AppendLine(component.DescriptorOffset, " Descriptor offset");
|
||||
builder.AppendLine(component.DisplayNameOffset, " Display name offset");
|
||||
builder.AppendLine(component.DisplayName, " Display name");
|
||||
builder.AppendLine(component.Reserved0, " Reserved 0");
|
||||
builder.AppendLine(component.ReservedOffset0, " Reserved offset 0");
|
||||
builder.AppendLine(component.ReservedOffset1, " Reserved offset 1");
|
||||
builder.AppendLine($" Status: {component.Status} (0x{component.Status:X})");
|
||||
builder.AppendLine(component.PasswordOffset, " Password offset");
|
||||
builder.AppendLine(component.MiscOffset, " Misc. offset");
|
||||
builder.AppendLine(component.ComponentIndex, " Component index");
|
||||
builder.AppendLine(component.NameOffset, " Name offset");
|
||||
builder.AppendLine(component.Name, " Name");
|
||||
builder.AppendLine(component.ReservedOffset2, " Reserved offset 2");
|
||||
builder.AppendLine(component.ReservedOffset3, " Reserved offset 3");
|
||||
builder.AppendLine(component.ReservedOffset4, " Reserved offset 4");
|
||||
builder.AppendLine(component.Reserved1, " Reserved 1");
|
||||
builder.AppendLine(component.CDRomFolderOffset, " CD-ROM folder offset");
|
||||
builder.AppendLine(component.HTTPLocationOffset, " HTTP location offset");
|
||||
builder.AppendLine(component.FTPLocationOffset, " FTP location offset");
|
||||
builder.AppendLine(component.Guid, " GUIDs");
|
||||
builder.AppendLine(component.CLSIDOffset, " CLSID offset");
|
||||
builder.AppendLine(component.CLSID, " CLSID");
|
||||
builder.AppendLine(component.Reserved2, " Reserved 2");
|
||||
@@ -406,10 +398,10 @@ namespace SabreTools.Serialization.Printers
|
||||
builder.AppendLine(component.SubComponentsCount, " Sub-components count");
|
||||
builder.AppendLine(component.SubComponentsOffset, " Sub-components offset");
|
||||
builder.AppendLine(component.NextComponentOffset, " Next component offset");
|
||||
builder.AppendLine(component.ReservedOffset5, " Reserved offset 5");
|
||||
builder.AppendLine(component.ReservedOffset6, " Reserved offset 6");
|
||||
builder.AppendLine(component.ReservedOffset7, " Reserved offset 7");
|
||||
builder.AppendLine(component.ReservedOffset8, " Reserved offset 8");
|
||||
builder.AppendLine(component.OnInstallingOffset, " On installing offset");
|
||||
builder.AppendLine(component.OnInstalledOffset, " On installed offset");
|
||||
builder.AppendLine(component.OnUninstallingOffset, " On uninstalling offset");
|
||||
builder.AppendLine(component.OnUninstalledOffset, " On uninstalled offset");
|
||||
}
|
||||
builder.AppendLine();
|
||||
}
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
using System;
|
||||
using System.Text;
|
||||
using SabreTools.Models.N3DS;
|
||||
using SabreTools.Serialization.Interfaces;
|
||||
@@ -44,26 +45,26 @@ namespace SabreTools.Serialization.Printers
|
||||
builder.AppendLine(header.PartitionsCryptType, " Partitions crypt type");
|
||||
builder.AppendLine();
|
||||
|
||||
builder.AppendLine(" Partition table:");
|
||||
builder.AppendLine(" -------------------------");
|
||||
builder.AppendLine(" Partition table:");
|
||||
builder.AppendLine(" -------------------------");
|
||||
if (header.PartitionsTable == null || header.PartitionsTable.Length == 0)
|
||||
{
|
||||
builder.AppendLine(" No partition table entries");
|
||||
builder.AppendLine(" No partition table entries");
|
||||
}
|
||||
else
|
||||
{
|
||||
for (int i = 0; i < header.PartitionsTable.Length; i++)
|
||||
{
|
||||
var partitionTableEntry = header.PartitionsTable[i];
|
||||
builder.AppendLine($" Partition table entry {i}");
|
||||
builder.AppendLine($" Partition table entry {i}");
|
||||
if (partitionTableEntry == null)
|
||||
{
|
||||
builder.AppendLine(" [NULL]");
|
||||
builder.AppendLine(" [NULL]");
|
||||
continue;
|
||||
}
|
||||
|
||||
builder.AppendLine(partitionTableEntry.Offset, " Offset");
|
||||
builder.AppendLine(partitionTableEntry.Length, " Length");
|
||||
builder.AppendLine(partitionTableEntry.Offset, " Offset");
|
||||
builder.AppendLine(partitionTableEntry.Length, " Length");
|
||||
}
|
||||
}
|
||||
builder.AppendLine();
|
||||
@@ -77,17 +78,17 @@ namespace SabreTools.Serialization.Printers
|
||||
builder.AppendLine(header.PartitionFlags, " Partition flags");
|
||||
builder.AppendLine();
|
||||
|
||||
builder.AppendLine(" Partition ID table:");
|
||||
builder.AppendLine(" -------------------------");
|
||||
builder.AppendLine(" Partition ID table:");
|
||||
builder.AppendLine(" -------------------------");
|
||||
if (header.PartitionIdTable == null || header.PartitionIdTable.Length == 0)
|
||||
{
|
||||
builder.AppendLine(" No partition ID table entries");
|
||||
builder.AppendLine(" No partition ID table entries");
|
||||
}
|
||||
else
|
||||
{
|
||||
for (int i = 0; i < header.PartitionIdTable.Length; i++)
|
||||
{
|
||||
builder.AppendLine(header.PartitionIdTable[i], $" Partition {i} ID");
|
||||
builder.AppendLine(header.PartitionIdTable[i], $" Partition {i} ID");
|
||||
}
|
||||
}
|
||||
builder.AppendLine();
|
||||
@@ -131,6 +132,8 @@ namespace SabreTools.Serialization.Printers
|
||||
builder.AppendLine(header.CVerVersionNumber, " Version number of CVer in included update partition");
|
||||
builder.AppendLine(header.Reserved4, " Reserved 4");
|
||||
builder.AppendLine();
|
||||
|
||||
Print(builder, header.InitialData);
|
||||
}
|
||||
|
||||
private static void Print(StringBuilder builder, DevelopmentCardInfoHeader? header)
|
||||
@@ -144,62 +147,6 @@ namespace SabreTools.Serialization.Printers
|
||||
return;
|
||||
}
|
||||
|
||||
builder.AppendLine();
|
||||
builder.AppendLine(" Initial Data:");
|
||||
builder.AppendLine(" -------------------------");
|
||||
if (header.InitialData == null)
|
||||
{
|
||||
builder.AppendLine(" No initial data");
|
||||
}
|
||||
else
|
||||
{
|
||||
builder.AppendLine(header.InitialData.CardSeedKeyY, " Card seed keyY");
|
||||
builder.AppendLine(header.InitialData.EncryptedCardSeed, " Encrypted card seed");
|
||||
builder.AppendLine(header.InitialData.CardSeedAESMAC, " Card seed AES-MAC");
|
||||
builder.AppendLine(header.InitialData.CardSeedNonce, " Card seed nonce");
|
||||
builder.AppendLine(header.InitialData.Reserved, " Reserved");
|
||||
builder.AppendLine();
|
||||
|
||||
builder.AppendLine(" Backup Header:");
|
||||
builder.AppendLine(" -------------------------");
|
||||
if (header.InitialData.BackupHeader == null)
|
||||
{
|
||||
builder.AppendLine(" No backup header");
|
||||
}
|
||||
else
|
||||
{
|
||||
builder.AppendLine(header.InitialData.BackupHeader.MagicID, " Magic ID");
|
||||
builder.AppendLine(header.InitialData.BackupHeader.ContentSizeInMediaUnits, " Content size in media units");
|
||||
builder.AppendLine(header.InitialData.BackupHeader.PartitionId, " Partition ID");
|
||||
builder.AppendLine(header.InitialData.BackupHeader.MakerCode, " Maker code");
|
||||
builder.AppendLine(header.InitialData.BackupHeader.Version, " Version");
|
||||
builder.AppendLine(header.InitialData.BackupHeader.VerificationHash, " Verification hash");
|
||||
builder.AppendLine(header.InitialData.BackupHeader.ProgramId, " Program ID");
|
||||
builder.AppendLine(header.InitialData.BackupHeader.Reserved1, " Reserved 1");
|
||||
builder.AppendLine(header.InitialData.BackupHeader.LogoRegionHash, " Logo region SHA-256 hash");
|
||||
builder.AppendLine(header.InitialData.BackupHeader.ProductCode, " Product code");
|
||||
builder.AppendLine(header.InitialData.BackupHeader.ExtendedHeaderHash, " Extended header SHA-256 hash");
|
||||
builder.AppendLine(header.InitialData.BackupHeader.ExtendedHeaderSizeInBytes, " Extended header size in bytes");
|
||||
builder.AppendLine(header.InitialData.BackupHeader.Reserved2, " Reserved 2");
|
||||
builder.AppendLine($" Flags: {header.InitialData.BackupHeader.Flags} (0x{header.InitialData.BackupHeader.Flags:X})");
|
||||
builder.AppendLine(header.InitialData.BackupHeader.PlainRegionOffsetInMediaUnits, " Plain region offset, in media units");
|
||||
builder.AppendLine(header.InitialData.BackupHeader.PlainRegionSizeInMediaUnits, " Plain region size, in media units");
|
||||
builder.AppendLine(header.InitialData.BackupHeader.LogoRegionOffsetInMediaUnits, " Logo region offset, in media units");
|
||||
builder.AppendLine(header.InitialData.BackupHeader.LogoRegionSizeInMediaUnits, " Logo region size, in media units");
|
||||
builder.AppendLine(header.InitialData.BackupHeader.ExeFSOffsetInMediaUnits, " ExeFS offset, in media units");
|
||||
builder.AppendLine(header.InitialData.BackupHeader.ExeFSSizeInMediaUnits, " ExeFS size, in media units");
|
||||
builder.AppendLine(header.InitialData.BackupHeader.ExeFSHashRegionSizeInMediaUnits, " ExeFS hash region size, in media units");
|
||||
builder.AppendLine(header.InitialData.BackupHeader.Reserved3, " Reserved 3");
|
||||
builder.AppendLine(header.InitialData.BackupHeader.RomFSOffsetInMediaUnits, " RomFS offset, in media units");
|
||||
builder.AppendLine(header.InitialData.BackupHeader.RomFSSizeInMediaUnits, " RomFS size, in media units");
|
||||
builder.AppendLine(header.InitialData.BackupHeader.RomFSHashRegionSizeInMediaUnits, " RomFS hash region size, in media units");
|
||||
builder.AppendLine(header.InitialData.BackupHeader.Reserved4, " Reserved 4");
|
||||
builder.AppendLine(header.InitialData.BackupHeader.ExeFSSuperblockHash, " ExeFS superblock SHA-256 hash");
|
||||
builder.AppendLine(header.InitialData.BackupHeader.RomFSSuperblockHash, " RomFS superblock SHA-256 hash");
|
||||
}
|
||||
}
|
||||
builder.AppendLine();
|
||||
|
||||
builder.AppendLine(header.CardDeviceReserved1, " Card device reserved 1");
|
||||
builder.AppendLine(header.TitleKey, " Title key");
|
||||
builder.AppendLine(header.CardDeviceReserved2, " Card device reserved 2");
|
||||
@@ -227,6 +174,96 @@ namespace SabreTools.Serialization.Printers
|
||||
builder.AppendLine();
|
||||
}
|
||||
|
||||
private static void Print(StringBuilder builder, InitialData? id)
|
||||
{
|
||||
builder.AppendLine(" Initial Data Information:");
|
||||
builder.AppendLine(" -------------------------");
|
||||
if (id == null)
|
||||
{
|
||||
builder.AppendLine(" No initial data");
|
||||
builder.AppendLine();
|
||||
return;
|
||||
}
|
||||
|
||||
builder.AppendLine(id.CardSeedKeyY, " Card seed KeyY");
|
||||
builder.AppendLine(id.EncryptedCardSeed, " Encrypted card seed");
|
||||
builder.AppendLine(id.CardSeedAESMAC, " Card seed AES-MAC");
|
||||
builder.AppendLine(id.CardSeedNonce, " Card seed nonce");
|
||||
builder.AppendLine(id.Reserved, " Reserved");
|
||||
builder.AppendLine();
|
||||
|
||||
PrintBackup(builder, id.BackupHeader);
|
||||
}
|
||||
|
||||
private static void PrintBackup(StringBuilder builder, NCCHHeader? header)
|
||||
{
|
||||
builder.AppendLine(" Backup NCCH Header Information:");
|
||||
builder.AppendLine(" -------------------------");
|
||||
if (header == null)
|
||||
{
|
||||
builder.AppendLine(" No backup NCCH header");
|
||||
builder.AppendLine();
|
||||
return;
|
||||
}
|
||||
|
||||
if (header.MagicID == string.Empty)
|
||||
{
|
||||
builder.AppendLine(" Empty backup header, no data can be parsed");
|
||||
}
|
||||
else if (header.MagicID != Constants.NCCHMagicNumber)
|
||||
{
|
||||
builder.AppendLine(" Unrecognized backup header, no data can be parsed");
|
||||
}
|
||||
else
|
||||
{
|
||||
// Backup header omits RSA signature
|
||||
builder.AppendLine(header.MagicID, " Magic ID");
|
||||
builder.AppendLine(header.ContentSizeInMediaUnits, " Content size in media units");
|
||||
builder.AppendLine(header.PartitionId, " Partition ID");
|
||||
builder.AppendLine(header.MakerCode, " Maker code");
|
||||
builder.AppendLine(header.Version, " Version");
|
||||
builder.AppendLine(header.VerificationHash, " Verification hash");
|
||||
builder.AppendLine(header.ProgramId, " Program ID");
|
||||
builder.AppendLine(header.Reserved1, " Reserved 1");
|
||||
builder.AppendLine(header.LogoRegionHash, " Logo region SHA-256 hash");
|
||||
builder.AppendLine(header.ProductCode, " Product code");
|
||||
builder.AppendLine(header.ExtendedHeaderHash, " Extended header SHA-256 hash");
|
||||
builder.AppendLine(header.ExtendedHeaderSizeInBytes, " Extended header size in bytes");
|
||||
builder.AppendLine(header.Reserved2, " Reserved 2");
|
||||
builder.AppendLine(" Flags:");
|
||||
if (header.Flags == null)
|
||||
{
|
||||
builder.AppendLine(" [NULL]");
|
||||
}
|
||||
else
|
||||
{
|
||||
builder.AppendLine(header.Flags.Reserved0, " Reserved 0");
|
||||
builder.AppendLine(header.Flags.Reserved1, " Reserved 1");
|
||||
builder.AppendLine(header.Flags.Reserved2, " Reserved 2");
|
||||
builder.AppendLine($" Crypto method: {header.Flags.CryptoMethod} (0x{header.Flags.CryptoMethod:X})");
|
||||
builder.AppendLine($" Content platform: {header.Flags.ContentPlatform} (0x{header.Flags.ContentPlatform:X})");
|
||||
builder.AppendLine($" Content type: {header.Flags.MediaPlatformIndex} (0x{header.Flags.MediaPlatformIndex:X})");
|
||||
builder.AppendLine(header.Flags.ContentUnitSize, " Content unit size");
|
||||
builder.AppendLine($" Bitmasks: {header.Flags.BitMasks} (0x{header.Flags.BitMasks:X})");
|
||||
}
|
||||
builder.AppendLine(header.PlainRegionOffsetInMediaUnits, " Plain region offset, in media units");
|
||||
builder.AppendLine(header.PlainRegionSizeInMediaUnits, " Plain region size, in media units");
|
||||
builder.AppendLine(header.LogoRegionOffsetInMediaUnits, " Logo region offset, in media units");
|
||||
builder.AppendLine(header.LogoRegionSizeInMediaUnits, " Logo region size, in media units");
|
||||
builder.AppendLine(header.ExeFSOffsetInMediaUnits, " ExeFS offset, in media units");
|
||||
builder.AppendLine(header.ExeFSSizeInMediaUnits, " ExeFS size, in media units");
|
||||
builder.AppendLine(header.ExeFSHashRegionSizeInMediaUnits, " ExeFS hash region size, in media units");
|
||||
builder.AppendLine(header.Reserved3, " Reserved 3");
|
||||
builder.AppendLine(header.RomFSOffsetInMediaUnits, " RomFS offset, in media units");
|
||||
builder.AppendLine(header.RomFSSizeInMediaUnits, " RomFS size, in media units");
|
||||
builder.AppendLine(header.RomFSHashRegionSizeInMediaUnits, " RomFS hash region size, in media units");
|
||||
builder.AppendLine(header.Reserved4, " Reserved 4");
|
||||
builder.AppendLine(header.ExeFSSuperblockHash, " ExeFS superblock SHA-256 hash");
|
||||
builder.AppendLine(header.RomFSSuperblockHash, " RomFS superblock SHA-256 hash");
|
||||
}
|
||||
builder.AppendLine();
|
||||
}
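// Hedged sketch (not part of the diff): PrintBackup above only walks the backup NCCH fields when
// the magic ID matches the expected constant; CanParseBackupHeader is a hypothetical helper that
// restates that gate, assuming Constants.NCCHMagicNumber holds the "NCCH" magic string.
private static bool CanParseBackupHeader(NCCHHeader header)
    => !string.IsNullOrEmpty(header.MagicID) && header.MagicID == Constants.NCCHMagicNumber;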
|
||||
|
||||
private static void Print(StringBuilder builder, NCCHHeader?[]? entries)
|
||||
{
|
||||
builder.AppendLine(" NCCH Partition Header Information:");
|
||||
@@ -455,14 +492,20 @@ namespace SabreTools.Serialization.Printers
|
||||
}
|
||||
else
|
||||
{
|
||||
builder.AppendLine(entry.ACI.ARM9AccessControl.Descriptors, " Descriptors");
|
||||
string descriptorsStr = "[NULL]";
|
||||
if (entry.ACI.ARM9AccessControl.Descriptors != null)
|
||||
{
|
||||
var descriptors = Array.ConvertAll(entry.ACI.ARM9AccessControl.Descriptors, d => d.ToString());
|
||||
descriptorsStr = string.Join(", ", descriptors);
|
||||
}
|
||||
builder.AppendLine(descriptorsStr, " Descriptors");
|
||||
builder.AppendLine(entry.ACI.ARM9AccessControl.DescriptorVersion, " Descriptor version");
|
||||
}
|
||||
|
||||
builder.AppendLine(entry.AccessDescSignature, " AccessDec signature (RSA-2048-SHA256)");
|
||||
builder.AppendLine(entry.NCCHHDRPublicKey, " NCCH HDR RSA-2048 public key");
|
||||
}
|
||||
|
||||
builder.AppendLine(entry.AccessDescSignature, " AccessDec signature (RSA-2048-SHA256)");
|
||||
builder.AppendLine(entry.NCCHHDRPublicKey, " NCCH HDR RSA-2048 public key");
|
||||
|
||||
builder.AppendLine(" Access control info (for limitations of first ACI):");
|
||||
if (entry.ACIForLimitations == null)
|
||||
{
|
||||
@@ -523,7 +566,13 @@ namespace SabreTools.Serialization.Printers
|
||||
}
|
||||
else
|
||||
{
|
||||
builder.AppendLine(entry.ACIForLimitations.ARM9AccessControl.Descriptors, " Descriptors");
|
||||
string descriptorsStr = "[NULL]";
|
||||
if (entry.ACIForLimitations.ARM9AccessControl.Descriptors != null)
|
||||
{
|
||||
var descriptors = Array.ConvertAll(entry.ACIForLimitations.ARM9AccessControl.Descriptors, d => d.ToString());
|
||||
descriptorsStr = string.Join(", ", descriptors);
|
||||
}
|
||||
builder.AppendLine(descriptorsStr, " Descriptors");
|
||||
builder.AppendLine(entry.ACIForLimitations.ARM9AccessControl.DescriptorVersion, " Descriptor version");
|
||||
}
|
||||
}
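// Hedged sketch (not part of the diff): both hunks above replace a direct AppendLine of the
// descriptor array with a stringify-then-join step; the values below are illustrative only.
int[] exampleValues = { 2, 3, 5 };
string joined = string.Join(", ", Array.ConvertAll(exampleValues, d => d.ToString()));
// joined is "2, 3, 5"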
|
||||
|
||||
@@ -1,13 +1,11 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Xml;
|
||||
using SabreTools.ASN1;
|
||||
using SabreTools.IO.Extensions;
|
||||
using SabreTools.Models.PortableExecutable;
|
||||
using SabreTools.Serialization.Interfaces;
|
||||
using static SabreTools.Serialization.Extensions;
|
||||
|
||||
namespace SabreTools.Serialization.Printers
|
||||
{
|
||||
@@ -254,7 +252,7 @@ namespace SabreTools.Serialization.Printers
|
||||
}
|
||||
if (header.DelayImportDescriptor != null)
|
||||
{
|
||||
builder.AppendLine(" Delay Import Descriptior (14)");
|
||||
builder.AppendLine(" Delay Import Descriptor (14)");
|
||||
builder.AppendLine(header.DelayImportDescriptor.VirtualAddress, " Virtual address");
|
||||
builder.AppendLine(header.DelayImportDescriptor.VirtualAddress.ConvertVirtualAddress(table), " Physical address");
|
||||
builder.AppendLine(header.DelayImportDescriptor.Size, " Size");
|
||||
@@ -999,7 +997,8 @@ namespace SabreTools.Serialization.Printers
|
||||
string padding = new(' ', (level + 1) * 2);
|
||||
|
||||
// TODO: Use ordered list of base types to determine the shape of the data
|
||||
builder.AppendLine($"{padding}Base types: {string.Join(", ", types.Select(t => t.ToString()).ToArray())}");
|
||||
var baseTypes = Array.ConvertAll(types.ToArray(), t => t.ToString());
|
||||
builder.AppendLine($"{padding}Base types: {string.Join(", ", baseTypes)}");
|
||||
|
||||
builder.AppendLine(level, $"{padding}Entry level");
|
||||
builder.AppendLine(entry.DataRVA, $"{padding}Data RVA");
|
||||
@@ -1401,6 +1400,18 @@ namespace SabreTools.Serialization.Printers
|
||||
{
|
||||
builder.AppendLine($"{padding}Data: [Embedded OLE Library File]"); // TODO: Parse this out and print separately
|
||||
}
|
||||
else if (magic[0] == 0x50 && magic[1] == 0x4B && magic[2] == 0x03 && magic[3] == 0x04)
|
||||
{
|
||||
builder.AppendLine($"{padding}Data: [Embedded PKZIP file]"); // TODO: Parse this out and print separately
|
||||
}
|
||||
else if (magic[0] == 0x50 && magic[1] == 0x4B && magic[2] == 0x05 && magic[3] == 0x06)
|
||||
{
|
||||
builder.AppendLine($"{padding}Data: [Embedded empty PKZIP file]"); // TODO: Parse this out and print separately
|
||||
}
|
||||
else if (magic[0] == 0x50 && magic[1] == 0x4B && magic[2] == 0x07 && magic[3] == 0x08)
|
||||
{
|
||||
builder.AppendLine($"{padding}Data: [Embedded spanned PKZIP file]"); // TODO: Parse this out and print separately
|
||||
}
|
||||
else
|
||||
{
|
||||
builder.AppendLine(magic, $"{padding}Data");
|
||||
@@ -1912,6 +1923,18 @@ namespace SabreTools.Serialization.Printers
|
||||
{
|
||||
builder.AppendLine($"{padding}Data: [Embedded OLE Library File]"); // TODO: Parse this out and print separately
|
||||
}
|
||||
else if (magic[0] == 0x50 && magic[1] == 0x4B && magic[2] == 0x03 && magic[3] == 0x04)
|
||||
{
|
||||
builder.AppendLine($"{padding}Data: [Embedded PKZIP file]"); // TODO: Parse this out and print separately
|
||||
}
|
||||
else if (magic[0] == 0x50 && magic[1] == 0x4B && magic[2] == 0x05 && magic[3] == 0x06)
|
||||
{
|
||||
builder.AppendLine($"{padding}Data: [Embedded empty PKZIP file]"); // TODO: Parse this out and print separately
|
||||
}
|
||||
else if (magic[0] == 0x50 && magic[1] == 0x4B && magic[2] == 0x07 && magic[3] == 0x08)
|
||||
{
|
||||
builder.AppendLine($"{padding}Data: [Embedded spanned PKZIP file]"); // TODO: Parse this out and print separately
|
||||
}
|
||||
else
|
||||
{
|
||||
builder.AppendLine(magic, $"{padding}Data");
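// Hedged sketch (not part of the diff): the embedded-PKZIP branches above differ only in the last
// two magic bytes; DescribePkzipMagic is a hypothetical helper summarizing the same
// PK\x03\x04 / PK\x05\x06 / PK\x07\x08 signature checks.
private static string? DescribePkzipMagic(byte[] magic)
{
    if (magic.Length < 4 || magic[0] != 0x50 || magic[1] != 0x4B)
        return null;

    return (magic[2], magic[3]) switch
    {
        (0x03, 0x04) => "Embedded PKZIP file",
        (0x05, 0x06) => "Embedded empty PKZIP file",
        (0x07, 0x08) => "Embedded spanned PKZIP file",
        _ => null,
    };
}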
|
||||
|
||||
File diff suppressed because it is too large
@@ -35,7 +35,7 @@ namespace SabreTools.Serialization.Printers
|
||||
|
||||
builder.AppendLine(header.Signature, " Signature");
|
||||
builder.AppendLine(header.Version, " Version");
|
||||
builder.AppendLine(header.DirectoryLength, " Directory length");
|
||||
builder.AppendLine(header.TreeSize, " Tree size");
|
||||
builder.AppendLine();
|
||||
}
|
||||
|
||||
@@ -50,10 +50,10 @@ namespace SabreTools.Serialization.Printers
|
||||
return;
|
||||
}
|
||||
|
||||
builder.AppendLine(header.Dummy0, " Dummy 0");
|
||||
builder.AppendLine(header.ArchiveHashLength, " Archive hash length");
|
||||
builder.AppendLine(header.ExtraLength, " Extra length");
|
||||
builder.AppendLine(header.Dummy1, " Dummy 1");
|
||||
builder.AppendLine(header.FileDataSectionSize, " File data section size");
|
||||
builder.AppendLine(header.ArchiveMD5SectionSize, " Archive MD5 section size");
|
||||
builder.AppendLine(header.OtherMD5SectionSize, " Other MD5 section size");
|
||||
builder.AppendLine(header.SignatureSectionSize, " Signature section size");
|
||||
builder.AppendLine();
|
||||
}
|
||||
|
||||
|
||||
@@ -1,107 +0,0 @@
|
||||
using System.Text;
|
||||
using SabreTools.Models.WAD;
|
||||
using SabreTools.Serialization.Interfaces;
|
||||
|
||||
namespace SabreTools.Serialization.Printers
|
||||
{
|
||||
public class WAD : IPrinter<File>
|
||||
{
|
||||
/// <inheritdoc/>
|
||||
public void PrintInformation(StringBuilder builder, File model)
|
||||
=> Print(builder, model);
|
||||
|
||||
public static void Print(StringBuilder builder, File file)
|
||||
{
|
||||
builder.AppendLine("WAD Information:");
|
||||
builder.AppendLine("-------------------------");
|
||||
builder.AppendLine();
|
||||
|
||||
Print(builder, file.Header);
|
||||
Print(builder, file.Lumps);
|
||||
Print(builder, file.LumpInfos);
|
||||
}
|
||||
|
||||
private static void Print(StringBuilder builder, Header? header)
|
||||
{
|
||||
builder.AppendLine(" Header Information:");
|
||||
builder.AppendLine(" -------------------------");
|
||||
if (header == null)
|
||||
{
|
||||
builder.AppendLine(" No header");
|
||||
builder.AppendLine();
|
||||
return;
|
||||
}
|
||||
|
||||
builder.AppendLine(header.Signature, " Signature");
|
||||
builder.AppendLine(header.LumpCount, " Lump count");
|
||||
builder.AppendLine(header.LumpOffset, " Lump offset");
|
||||
builder.AppendLine();
|
||||
}
|
||||
|
||||
private static void Print(StringBuilder builder, Lump?[]? entries)
|
||||
{
|
||||
builder.AppendLine(" Lumps Information:");
|
||||
builder.AppendLine(" -------------------------");
|
||||
if (entries == null || entries.Length == 0)
|
||||
{
|
||||
builder.AppendLine(" No lumps");
|
||||
builder.AppendLine();
|
||||
return;
|
||||
}
|
||||
|
||||
for (int i = 0; i < entries.Length; i++)
|
||||
{
|
||||
var entry = entries[i];
|
||||
builder.AppendLine($" Lump {i}");
|
||||
if (entry == null)
|
||||
{
|
||||
builder.AppendLine(" [NULL]");
|
||||
continue;
|
||||
}
|
||||
|
||||
builder.AppendLine(entry.Offset, " Offset");
|
||||
builder.AppendLine(entry.DiskLength, " Disk length");
|
||||
builder.AppendLine(entry.Length, " Length");
|
||||
builder.AppendLine(entry.Type, " Type");
|
||||
builder.AppendLine(entry.Compression, " Compression");
|
||||
builder.AppendLine(entry.Padding0, " Padding 0");
|
||||
builder.AppendLine(entry.Padding1, " Padding 1");
|
||||
builder.AppendLine(entry.Name, " Name");
|
||||
}
|
||||
builder.AppendLine();
|
||||
}
|
||||
|
||||
private static void Print(StringBuilder builder, LumpInfo?[]? entries)
|
||||
{
|
||||
builder.AppendLine(" Lump Infos Information:");
|
||||
builder.AppendLine(" -------------------------");
|
||||
if (entries == null || entries.Length == 0)
|
||||
{
|
||||
builder.AppendLine(" No lump infos");
|
||||
builder.AppendLine();
|
||||
return;
|
||||
}
|
||||
|
||||
for (int i = 0; i < entries.Length; i++)
|
||||
{
|
||||
var entry = entries[i];
|
||||
builder.AppendLine($" Lump Info {i}");
|
||||
if (entry == null)
|
||||
{
|
||||
builder.AppendLine(" Lump is compressed");
|
||||
continue;
|
||||
}
|
||||
|
||||
builder.AppendLine(entry.Name, " Name");
|
||||
builder.AppendLine(entry.Width, " Width");
|
||||
builder.AppendLine(entry.Height, " Height");
|
||||
builder.AppendLine(entry.PixelOffset, " Pixel offset");
|
||||
// TODO: Print unknown data?
|
||||
// TODO: Print pixel data?
|
||||
builder.AppendLine(entry.PaletteSize, " Palette size");
|
||||
// TODO: Print palette data?
|
||||
}
|
||||
builder.AppendLine();
|
||||
}
|
||||
}
|
||||
}
|
||||
131
SabreTools.Serialization/Printers/WAD3.cs
Normal file
@@ -0,0 +1,131 @@
|
||||
using System.Text;
|
||||
using SabreTools.Models.WAD3;
|
||||
using SabreTools.Serialization.Interfaces;
|
||||
|
||||
namespace SabreTools.Serialization.Printers
|
||||
{
|
||||
public class WAD3 : IPrinter<File>
|
||||
{
|
||||
/// <inheritdoc/>
|
||||
public void PrintInformation(StringBuilder builder, File model)
|
||||
=> Print(builder, model);
|
||||
|
||||
public static void Print(StringBuilder builder, File file)
|
||||
{
|
||||
builder.AppendLine("WAD Information:");
|
||||
builder.AppendLine("-------------------------");
|
||||
builder.AppendLine();
|
||||
|
||||
Print(builder, file.Header);
|
||||
Print(builder, file.DirEntries);
|
||||
Print(builder, file.FileEntries);
|
||||
}
|
||||
|
||||
private static void Print(StringBuilder builder, Header? header)
|
||||
{
|
||||
builder.AppendLine(" Header Information:");
|
||||
builder.AppendLine(" -------------------------");
|
||||
if (header == null)
|
||||
{
|
||||
builder.AppendLine(" No header");
|
||||
builder.AppendLine();
|
||||
return;
|
||||
}
|
||||
|
||||
builder.AppendLine(header.Signature, " Signature");
|
||||
builder.AppendLine(header.NumDirs, " Number of directory entries");
|
||||
builder.AppendLine(header.DirOffset, " Offset to first directory entry");
|
||||
builder.AppendLine();
|
||||
}
|
||||
|
||||
private static void Print(StringBuilder builder, DirEntry?[]? entries)
|
||||
{
|
||||
builder.AppendLine(" Directory Entries Information:");
|
||||
builder.AppendLine(" -------------------------");
|
||||
if (entries == null || entries.Length == 0)
|
||||
{
|
||||
builder.AppendLine(" No directory entries");
|
||||
builder.AppendLine();
|
||||
return;
|
||||
}
|
||||
|
||||
for (int i = 0; i < entries.Length; i++)
|
||||
{
|
||||
var entry = entries[i];
|
||||
builder.AppendLine($" Directory Entry {i}");
|
||||
if (entry == null)
|
||||
{
|
||||
builder.AppendLine(" [NULL]");
|
||||
continue;
|
||||
}
|
||||
|
||||
builder.AppendLine(entry.Offset, " Offset");
|
||||
builder.AppendLine(entry.DiskLength, " Disk length");
|
||||
builder.AppendLine(entry.Length, " Length");
|
||||
builder.AppendLine($" File type: {entry.Type} (0x{entry.Type:X})");
|
||||
builder.AppendLine(entry.Compression, " Compression");
|
||||
builder.AppendLine(entry.Padding, " Padding");
|
||||
builder.AppendLine(entry.Name, " Name");
|
||||
}
|
||||
builder.AppendLine();
|
||||
}
|
||||
|
||||
private static void Print(StringBuilder builder, FileEntry?[]? entries)
|
||||
{
|
||||
builder.AppendLine(" File Entries Information:");
|
||||
builder.AppendLine(" -------------------------");
|
||||
if (entries == null || entries.Length == 0)
|
||||
{
|
||||
builder.AppendLine(" No file entries");
|
||||
builder.AppendLine();
|
||||
return;
|
||||
}
|
||||
|
||||
for (int i = 0; i < entries.Length; i++)
|
||||
{
|
||||
var entry = entries[i];
|
||||
builder.AppendLine($" File Entry {i}");
|
||||
if (entry == null)
|
||||
{
|
||||
builder.AppendLine(" [NULL]");
|
||||
continue;
|
||||
}
|
||||
|
||||
if (entry is MipTex mipTex)
|
||||
{
|
||||
builder.AppendLine(mipTex.Name, " Name");
|
||||
builder.AppendLine(mipTex.Width, " Width");
|
||||
builder.AppendLine(mipTex.Height, " Height");
|
||||
builder.AppendLine(mipTex.MipOffsets, " Mipmap Offsets");
|
||||
builder.AppendLine(" Mipmap Images skipped...");
|
||||
builder.AppendLine(mipTex.ColorsUsed, " Colors used");
|
||||
builder.AppendLine(" Palette skipped...");
|
||||
}
|
||||
else if (entry is QpicImage qpic)
|
||||
{
|
||||
builder.AppendLine(qpic.Width, " Width");
|
||||
builder.AppendLine(qpic.Height, " Height");
|
||||
builder.AppendLine(" Image data skipped...");
|
||||
builder.AppendLine(qpic.ColorsUsed, " Colors used");
|
||||
builder.AppendLine(" Palette skipped...");
|
||||
}
|
||||
else if (entry is Font font)
|
||||
{
|
||||
builder.AppendLine(font.Width, " Width");
|
||||
builder.AppendLine(font.Height, " Height");
|
||||
builder.AppendLine(font.RowCount, " Row count");
|
||||
builder.AppendLine(font.RowHeight, " Row height");
|
||||
builder.AppendLine(" Font info skipped...");
|
||||
builder.AppendLine(" Image data skipped...");
|
||||
builder.AppendLine(font.ColorsUsed, " Colors used");
|
||||
builder.AppendLine(" Palette skipped...");
|
||||
}
|
||||
else
|
||||
{
|
||||
builder.AppendLine(" Unrecognized entry type");
|
||||
}
|
||||
}
|
||||
builder.AppendLine();
|
||||
}
|
||||
}
|
||||
}
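The file-entry printing above dispatches on the concrete WAD3 entry type with `is` patterns. A minimal sketch of the same idea as a switch expression, assuming only the MipTex, QpicImage, and Font members shown in the diff (DescribeEntry is a hypothetical helper, not part of the change):

private static string DescribeEntry(FileEntry entry) => entry switch
{
    MipTex mipTex => $"miptex \"{mipTex.Name}\" ({mipTex.Width}x{mipTex.Height})",
    QpicImage qpic => $"qpic image ({qpic.Width}x{qpic.Height})",
    Font font => $"font ({font.Width}x{font.Height}, {font.RowCount} rows)",
    _ => "unrecognized entry type",
};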
|
||||
Some files were not shown because too many files have changed in this diff.