Compare commits

...

98 Commits
1.6.5 ... 1.7.5

Author SHA1 Message Date
Matt Nadareski
2776928946 Bump version 2024-11-15 22:38:35 -05:00
Matt Nadareski
8cc87c6540 Recombine WrapperBase files 2024-11-15 22:26:45 -05:00
Matt Nadareski
3c212022aa Use safe enumeration 2024-11-15 22:25:28 -05:00
Matt Nadareski
511c4d09e5 Update ASN1 to 1.4.1 and IO to 1.5.1 2024-11-15 22:22:22 -05:00
Matt Nadareski
d7eba27dc5 Framework only matters for executable 2024-11-15 21:10:27 -05:00
Matt Nadareski
09370618ca Reorder some methods 2024-11-14 20:51:25 -05:00
Matt Nadareski
2197167088 Add remaining easy sizes per partition 2024-11-14 20:48:42 -05:00
Matt Nadareski
b527635fe7 Add remaining easy offsets per partition 2024-11-14 20:46:56 -05:00
Matt Nadareski
695309bc32 Bump version 2024-11-14 13:32:19 -05:00
Matt Nadareski
97b2f68ec7 Use offsets instead of guessing... 2024-11-14 13:27:02 -05:00
Matt Nadareski
593044dbf3 Fix code binary check in N3DS 2024-11-14 12:49:16 -05:00
Matt Nadareski
1fcf44fb8d Bump version 2024-11-14 11:32:17 -05:00
Matt Nadareski
a2a472baf9 Fix top level printing issue 2024-11-14 11:24:08 -05:00
Matt Nadareski
b5b4a50d94 Fix deserialization of NCCH extended header
The actual fix to this is somewhere in the conversion code where an array of Enum values somehow just... fails? I'm not totally sure how that's happening but this is the easiest way around it until that auto stuff can be fixed.
2024-11-14 11:20:46 -05:00
Matt Nadareski
f1b5464052 Extend N3DS wrapper further 2024-11-14 03:17:11 -05:00
Matt Nadareski
2c0224db22 Fix byte order for N3DS IV 2024-11-14 00:12:35 -05:00
Matt Nadareski
1e78eecb40 Bump version 2024-11-13 23:13:55 -05:00
Matt Nadareski
3626faea60 Fix building N3DS cart image 2024-11-13 23:05:26 -05:00
Matt Nadareski
a0177f1174 Add bitmasks helper method 2024-11-13 21:29:43 -05:00
Matt Nadareski
db5fe4a2cd Add extension property for backup header 2024-11-13 21:25:46 -05:00
Matt Nadareski
5716143168 Bump version 2024-11-13 20:48:55 -05:00
Matt Nadareski
2a59b23149 Add more extensions to N3DS wrapper 2024-11-13 20:47:25 -05:00
Matt Nadareski
bdbec4ed02 Update Models to 1.5.1 2024-11-13 20:41:13 -05:00
Matt Nadareski
25193f1805 Start making fixes to N3DS 2024-11-13 20:21:32 -05:00
Matt Nadareski
4840c816a2 Bump version 2024-11-13 02:51:46 -05:00
Matt Nadareski
d0a8e3770b Fix serialization issue 2024-11-13 02:48:38 -05:00
Matt Nadareski
1cf3d50864 Add .NET 9 to target frameworks 2024-11-13 02:42:14 -05:00
Matt Nadareski
d1b98f7d6d HashSet does what I need 2024-11-12 19:41:04 -05:00
Matt Nadareski
4bc87ff812 Use list sorting instead of Linq sorting 2024-11-12 19:35:08 -05:00
Matt Nadareski
e1df11b360 Build cached strings differently 2024-11-12 19:30:28 -05:00
Matt Nadareski
34606a4f04 Easier to read archive count in VPK 2024-11-12 17:05:10 -05:00
Matt Nadareski
c4c5fc4bf6 Array.Find works differently than I thought 2024-11-12 16:58:21 -05:00
Matt Nadareski
cd87ce5373 Unrolling Linq is more efficient 2024-11-12 16:44:41 -05:00
Matt Nadareski
90fc16b888 Reduce more Linq steps 2024-11-12 16:29:47 -05:00
Matt Nadareski
c2d0b71d22 Generic types all have ToString 2024-11-12 16:18:56 -05:00
Matt Nadareski
e54473682c Use TrueForAll 2024-11-12 16:11:15 -05:00
Matt Nadareski
1c8d64d98c Clean up usings 2024-11-12 16:07:31 -05:00
Matt Nadareski
a19437f42f More efficient checks 2024-11-12 16:07:27 -05:00
Matt Nadareski
855e2f2c77 Another overload I never knew about 2024-11-12 16:02:21 -05:00
Matt Nadareski
bd3cf88123 As I said, they're fun 2024-11-12 16:00:05 -05:00
Matt Nadareski
e4578ad3fc Match collections are fun 2024-11-12 15:56:31 -05:00
Matt Nadareski
39e56ef864 Remove one more unnecessary cast 2024-11-12 15:47:49 -05:00
Matt Nadareski
51b77da760 Reduce use of Cast 2024-11-12 15:46:36 -05:00
Matt Nadareski
4b83219a9b Fix conversion 2024-11-12 15:33:48 -05:00
Matt Nadareski
3ed07dd299 Linq is good, but it can be better 2024-11-12 15:30:33 -05:00
Matt Nadareski
bb7daed7f6 Reduce Linq steps 2024-11-12 15:06:28 -05:00
Matt Nadareski
0c84c47752 Explicitly add Linq library 2024-11-12 15:03:28 -05:00
Matt Nadareski
c18a185474 Fix old .NET 2024-11-12 14:56:53 -05:00
Matt Nadareski
8ff66b04d8 Reduce Linq to better query 2024-11-12 14:49:42 -05:00
Matt Nadareski
94d6556e04 Ignore additional elements 2024-11-12 14:40:43 -05:00
Matt Nadareski
6d960265e4 Minor Linq reduction 2024-11-12 14:27:34 -05:00
Matt Nadareski
cf4ca76e10 Select 2024-11-12 14:10:00 -05:00
Matt Nadareski
c7760e9903 Any 2024-11-12 13:10:47 -05:00
Matt Nadareski
d51bedceb6 ToArray 2024-11-12 13:03:06 -05:00
Matt Nadareski
125dc021d5 Disallow CRC-32 variants 2024-11-12 12:37:42 -05:00
Matt Nadareski
5bce481648 Update Hashing to 1.3.0 2024-11-12 12:36:46 -05:00
Matt Nadareski
20153f62cf Update Hashing to 1.2.3 2024-11-06 21:56:44 -05:00
Matt Nadareski
e302dfccf1 Attempt to reduce nesting in GHA builds 2024-11-05 13:53:34 -05:00
Matt Nadareski
594b841490 Make GitHub action Debug-only 2024-11-04 15:09:10 -05:00
Matt Nadareski
40c354f79f Add releases links for convenience 2024-11-04 13:17:55 -05:00
Matt Nadareski
b77959f300 Rename test executable 2024-11-04 12:14:28 -05:00
Matt Nadareski
59d6026a2b Create helper method for string reading; add UTF-8 2024-11-02 20:23:20 -04:00
Matt Nadareski
14226d1270 Completely reset cached data 2024-11-02 19:53:21 -04:00
Matt Nadareski
955f4da708 Fix minor formatting issue 2024-11-02 19:42:32 -04:00
Matt Nadareski
700b0359ea Limit fully repeating strings 2024-11-02 19:40:02 -04:00
Matt Nadareski
fe95b894d7 Bump version 2024-10-31 15:23:59 -04:00
Matt Nadareski
38a2712a8f Fake readable compressor names 2024-10-31 13:51:29 -04:00
Matt Nadareski
d1ea091574 Remove "press enter" on failure 2024-10-31 13:49:08 -04:00
Matt Nadareski
6bc812fc2f Fix formatting for CHD printing 2024-10-31 13:38:42 -04:00
Matt Nadareski
61b89fbd72 Fix typo in N3DS 2024-10-31 12:10:53 -04:00
Matt Nadareski
a2c065bdf2 Add CHD to factory 2024-10-31 12:09:36 -04:00
Matt Nadareski
88479f674b Add CHD printer 2024-10-31 12:06:25 -04:00
Matt Nadareski
5edbacde74 Add CHD printer 2024-10-31 12:03:34 -04:00
Matt Nadareski
67fc51224b Fix lack of ValueTuple in switch 2024-10-31 11:51:14 -04:00
Matt Nadareski
101f3294b4 Add CHD wrapper 2024-10-31 11:47:58 -04:00
Matt Nadareski
6c5622f732 Add CHD header deserialization 2024-10-31 11:40:50 -04:00
Matt Nadareski
f2a6fe1445 Update Models to 1.4.11 2024-10-31 11:34:45 -04:00
Matt Nadareski
b0b593443f Update packages 2024-10-24 17:27:55 -04:00
Matt Nadareski
9b05185add Fix old .NET compatibility 2024-10-14 00:20:02 -04:00
Matt Nadareski
17316da536 Port numerous extensions from NDecrypt 2024-10-14 00:15:14 -04:00
Matt Nadareski
f3ca4dd989 Port logic from UnshieldSharp 2024-10-03 11:14:41 -04:00
Matt Nadareski
e2b7bdac8c Temporary fix for IS-CAB file group parsing 2024-10-03 02:51:37 -04:00
Matt Nadareski
f86f6dc438 Bump version 2024-10-01 14:08:18 -04:00
Matt Nadareski
2bac0ed505 Update packages 2024-10-01 14:06:53 -04:00
Matt Nadareski
ae4078bb7f Fix inconsistencies in build and publish 2024-08-08 20:17:42 -04:00
Matt Nadareski
afaffbd9a2 Fix 3DS serialization and printing 2024-08-08 19:46:05 -04:00
TheRogueArchivist
b878e59e2e Fix typo in PortableExecutable Printer (#11) 2024-07-12 11:08:50 -04:00
Matt Nadareski
4bb3f625dd Make PE debug table parsing safer 2024-06-20 11:23:28 -04:00
Matt Nadareski
b7978cafa5 Bump version 2024-06-13 11:12:40 -04:00
Matt Nadareski
17f376c76f Remove all instances of this. 2024-06-05 22:49:27 -04:00
Matt Nadareski
2774fdf158 Clean up enumerables and namespace use 2024-06-05 22:48:42 -04:00
Matt Nadareski
11081efcb0 Make PE header reading even saferer 2024-06-05 22:22:22 -04:00
TheRogueArchivist
1b412c3027 Add header length safeguards to PortableExecutable wrapper (#9) 2024-06-05 22:19:35 -04:00
Matt Nadareski
73ec66e627 Fix ISv3 deserialization 2024-06-03 11:55:12 -04:00
Matt Nadareski
4ae4cd80b1 Bump version 2024-05-30 21:07:04 -04:00
Matt Nadareski
6eb27c66fc Merge pull request #8 from TheRogueArchivist/streamdatalock
Add lock for reading data from stream
2024-05-30 12:30:33 -04:00
TheRogueArchivist
f96fd17fd3 Add lock for reading data from stream 2024-05-27 15:36:04 -06:00
Matt Nadareski
c255a2494d Fix IS-CAB file group name parsing 2024-05-18 21:27:09 -04:00
107 changed files with 4012 additions and 3389 deletions

View File

@@ -16,10 +16,16 @@ jobs:
- name: Setup .NET
uses: actions/setup-dotnet@v4
with:
dotnet-version: 8.0.x
dotnet-version: 9.0.x
- name: Restore dependencies
run: dotnet restore
- name: Build library
run: dotnet build
- name: Run tests
run: dotnet test
- name: Pack
run: dotnet pack

View File

@@ -1,4 +1,4 @@
name: Build Test
name: Build InfoPrint
on:
push:
@@ -10,10 +10,10 @@ jobs:
strategy:
matrix:
project: [Test]
runtime: [win-x86, win-x64, linux-x64, osx-x64] #[win-x86, win-x64, win-arm64, linux-x64, linux-arm64, osx-x64]
framework: [net8.0] #[net20, net35, net40, net452, net472, net48, netcoreapp3.1, net5.0, net6.0, net7.0, net8.0]
conf: [Release, Debug]
project: [InfoPrint]
runtime: [win-x86, win-x64, win-arm64, linux-x64, linux-arm64, osx-x64]
framework: [net9.0] #[net20, net35, net40, net452, net472, net48, netcoreapp3.1, net5.0, net6.0, net7.0, net8.0, net9.0]
conf: [Debug] #[Release, Debug]
steps:
- uses: actions/checkout@v4
@@ -23,16 +23,18 @@ jobs:
- name: Setup .NET
uses: actions/setup-dotnet@v4
with:
dotnet-version: 8.0.x
dotnet-version: 9.0.x
- name: Restore dependencies
run: dotnet restore
- name: Build
run: dotnet publish ${{ matrix.project }}/${{ matrix.project }}.csproj -f ${{ matrix.framework }} -r ${{ matrix.runtime }} -c ${{ matrix.conf == 'Release' && 'Release -p:DebugType=None -p:DebugSymbols=false' || 'Debug'}} --self-contained true --version-suffix ${{ github.sha }} ${{ (startsWith(matrix.framework, 'net5') || startsWith(matrix.framework, 'net6') || startsWith(matrix.framework, 'net7') || startsWith(matrix.framework, 'net8')) && '-p:PublishSingleFile=true' || ''}}
run: dotnet publish ${{ matrix.project }}/${{ matrix.project }}.csproj -f ${{ matrix.framework }} -r ${{ matrix.runtime }} -c ${{ matrix.conf == 'Release' && 'Release -p:DebugType=None -p:DebugSymbols=false' || 'Debug'}} --self-contained true --version-suffix ${{ github.sha }} ${{ (startsWith(matrix.framework, 'net5') || startsWith(matrix.framework, 'net6') || startsWith(matrix.framework, 'net7') || startsWith(matrix.framework, 'net8') || startsWith(matrix.framework, 'net9')) && '-p:PublishSingleFile=true' || ''}}
- name: Archive build
run: zip -r ${{ matrix.project }}_${{ matrix.framework }}_${{ matrix.runtime }}_${{ matrix.conf }}.zip ${{ matrix.project }}/bin/${{ matrix.conf }}/${{ matrix.framework }}/${{ matrix.runtime }}/publish/
run: |
cd ${{ matrix.project }}/bin/Debug/${{ matrix.framework }}/${{ matrix.runtime }}/publish/
zip -r ${{ github.workspace }}/${{ matrix.project }}_${{ matrix.framework }}_${{ matrix.runtime }}_${{ matrix.conf }}.zip ./
- name: Upload build
uses: actions/upload-artifact@v4

View File

@@ -11,7 +11,10 @@ jobs:
- name: Setup .NET
uses: actions/setup-dotnet@v4
with:
dotnet-version: 8.0.x
dotnet-version: 9.0.x
- name: Build
run: dotnet build
run: dotnet build
- name: Run tests
run: dotnet test

.gitignore (vendored, 328 changed lines)
View File

@@ -1,15 +1,7 @@
*.swp
*.*~
project.lock.json
.DS_Store
*.pyc
nupkg/
# Visual Studio Code
.vscode
# Rider
.idea
## Ignore Visual Studio temporary files, build results, and
## files generated by popular Visual Studio add-ons.
##
## Get latest from https://github.com/github/gitignore/blob/master/VisualStudio.gitignore
# User-specific files
*.suo
@@ -17,6 +9,9 @@ nupkg/
*.userosscache
*.sln.docstates
# User-specific files (MonoDevelop/Xamarin Studio)
*.userprefs
# Build results
[Dd]ebug/
[Dd]ebugPublic/
@@ -24,15 +19,312 @@ nupkg/
[Rr]eleases/
x64/
x86/
build/
bld/
[Bb]in/
[Oo]bj/
[Oo]ut/
msbuild.log
msbuild.err
msbuild.wrn
[Ll]og/
# Visual Studio 2015
# Visual Studio 2015/2017 cache/options directory
.vs/
# Uncomment if you have tasks that create the project's static files in wwwroot
#wwwroot/
# Visual Studio 2017 auto generated files
Generated\ Files/
# MSTest test Results
[Tt]est[Rr]esult*/
[Bb]uild[Ll]og.*
# NUNIT
*.VisualState.xml
TestResult.xml
# Build Results of an ATL Project
[Dd]ebugPS/
[Rr]eleasePS/
dlldata.c
# Benchmark Results
BenchmarkDotNet.Artifacts/
# .NET Core
project.lock.json
project.fragment.lock.json
artifacts/
**/Properties/launchSettings.json
# StyleCop
StyleCopReport.xml
# Files built by Visual Studio
*_i.c
*_p.c
*_i.h
*.ilk
*.meta
*.obj
*.iobj
*.pch
*.pdb
*.ipdb
*.pgc
*.pgd
*.rsp
*.sbr
*.tlb
*.tli
*.tlh
*.tmp
*.tmp_proj
*.log
*.vspscc
*.vssscc
.builds
*.pidb
*.svclog
*.scc
# Chutzpah Test files
_Chutzpah*
# Visual C++ cache files
ipch/
*.aps
*.ncb
*.opendb
*.opensdf
*.sdf
*.cachefile
*.VC.db
*.VC.VC.opendb
# Visual Studio profiler
*.psess
*.vsp
*.vspx
*.sap
# Visual Studio Trace Files
*.e2e
# TFS 2012 Local Workspace
$tf/
# Guidance Automation Toolkit
*.gpState
# ReSharper is a .NET coding add-in
_ReSharper*/
*.[Rr]e[Ss]harper
*.DotSettings.user
# JustCode is a .NET coding add-in
.JustCode
# TeamCity is a build add-in
_TeamCity*
# DotCover is a Code Coverage Tool
*.dotCover
# AxoCover is a Code Coverage Tool
.axoCover/*
!.axoCover/settings.json
# Visual Studio code coverage results
*.coverage
*.coveragexml
# NCrunch
_NCrunch_*
.*crunch*.local.xml
nCrunchTemp_*
# MightyMoose
*.mm.*
AutoTest.Net/
# Web workbench (sass)
.sass-cache/
# Installshield output folder
[Ee]xpress/
# DocProject is a documentation generator add-in
DocProject/buildhelp/
DocProject/Help/*.HxT
DocProject/Help/*.HxC
DocProject/Help/*.hhc
DocProject/Help/*.hhk
DocProject/Help/*.hhp
DocProject/Help/Html2
DocProject/Help/html
# Click-Once directory
publish/
# Publish Web Output
*.[Pp]ublish.xml
*.azurePubxml
# Note: Comment the next line if you want to checkin your web deploy settings,
# but database connection strings (with potential passwords) will be unencrypted
*.pubxml
*.publishproj
# Microsoft Azure Web App publish settings. Comment the next line if you want to
# checkin your Azure Web App publish settings, but sensitive information contained
# in these scripts will be unencrypted
PublishScripts/
# NuGet Packages
*.nupkg
# The packages folder can be ignored because of Package Restore
**/[Pp]ackages/*
# except build/, which is used as an MSBuild target.
!**/[Pp]ackages/build/
# Uncomment if necessary however generally it will be regenerated when needed
#!**/[Pp]ackages/repositories.config
# NuGet v3's project.json files produces more ignorable files
*.nuget.props
*.nuget.targets
# Microsoft Azure Build Output
csx/
*.build.csdef
# Microsoft Azure Emulator
ecf/
rcf/
# Windows Store app package directories and files
AppPackages/
BundleArtifacts/
Package.StoreAssociation.xml
_pkginfo.txt
*.appx
# Visual Studio cache files
# files ending in .cache can be ignored
*.[Cc]ache
# but keep track of directories ending in .cache
!*.[Cc]ache/
# Others
ClientBin/
~$*
*~
*.dbmdl
*.dbproj.schemaview
*.jfm
*.pfx
*.publishsettings
orleans.codegen.cs
# Including strong name files can present a security risk
# (https://github.com/github/gitignore/pull/2483#issue-259490424)
#*.snk
# Since there are multiple workflows, uncomment next line to ignore bower_components
# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622)
#bower_components/
# RIA/Silverlight projects
Generated_Code/
# Backup & report files from converting an old project file
# to a newer Visual Studio version. Backup files are not needed,
# because we have git ;-)
_UpgradeReport_Files/
Backup*/
UpgradeLog*.XML
UpgradeLog*.htm
ServiceFabricBackup/
*.rptproj.bak
# SQL Server files
*.mdf
*.ldf
*.ndf
# Business Intelligence projects
*.rdl.data
*.bim.layout
*.bim_*.settings
*.rptproj.rsuser
# Microsoft Fakes
FakesAssemblies/
# GhostDoc plugin setting file
*.GhostDoc.xml
# Node.js Tools for Visual Studio
.ntvs_analysis.dat
node_modules/
# Visual Studio 6 build log
*.plg
# Visual Studio 6 workspace options file
*.opt
# Visual Studio 6 auto-generated workspace file (contains which files were open etc.)
*.vbw
# Visual Studio LightSwitch build output
**/*.HTMLClient/GeneratedArtifacts
**/*.DesktopClient/GeneratedArtifacts
**/*.DesktopClient/ModelManifest.xml
**/*.Server/GeneratedArtifacts
**/*.Server/ModelManifest.xml
_Pvt_Extensions
# Paket dependency manager
.paket/paket.exe
paket-files/
# FAKE - F# Make
.fake/
# JetBrains Rider
.idea/
*.sln.iml
# CodeRush
.cr/
# Python Tools for Visual Studio (PTVS)
__pycache__/
*.pyc
# Cake - Uncomment if you are using it
# tools/**
# !tools/packages.config
# Tabs Studio
*.tss
# Telerik's JustMock configuration file
*.jmconfig
# BizTalk build output
*.btp.cs
*.btm.cs
*.odx.cs
*.xsd.cs
# OpenCover UI analysis results
OpenCover/
# Azure Stream Analytics local run output
ASALocalRun/
# MSBuild Binary and Structured Log
*.binlog
# NVidia Nsight GPU debugger configuration file
*.nvuser
# MFractors (Xamarin productivity tool) working folder
.mfractor/

.vscode/launch.json (vendored, new file, 28 lines)
View File

@@ -0,0 +1,28 @@
{
// Use IntelliSense to find out which attributes exist for C# debugging
// Use hover for the description of the existing attributes
// For further information visit https://github.com/OmniSharp/omnisharp-vscode/blob/master/debugger-launchjson.md
"version": "0.2.0",
"configurations": [
{
"name": ".NET Core Launch (InfoPrint)",
"type": "coreclr",
"request": "launch",
"preLaunchTask": "build",
// If you have changed target frameworks, make sure to update the program path.
"program": "${workspaceFolder}/InfoPrint/bin/Debug/net9.0/InfoPrint.dll",
"args": [],
"cwd": "${workspaceFolder}",
// For more information about the 'console' field, see https://aka.ms/VSCode-CS-LaunchJson-Console
"console": "internalConsole",
"stopAtEntry": false,
"justMyCode": false
},
{
"name": ".NET Core Attach",
"type": "coreclr",
"request": "attach",
"processId": "${command:pickProcess}"
}
]
}

.vscode/tasks.json (vendored, new file, 24 lines)
View File

@@ -0,0 +1,24 @@
{
// See https://go.microsoft.com/fwlink/?LinkId=733558
// for the documentation about the tasks.json format
"version": "2.0.0",
"tasks": [
{
"label": "build",
"command": "dotnet",
"type": "shell",
"args": [
"build",
// Ask dotnet build to generate full paths for file names.
"/property:GenerateFullPaths=true",
// Do not generate summary otherwise it leads to duplicate errors in Problems panel
"/consoleloggerparameters:NoSummary"
],
"group": "build",
"presentation": {
"reveal": "silent"
},
"problemMatcher": "$msCompile"
}
]
}

View File

@@ -0,0 +1,33 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFrameworks>net20;net35;net40;net452;net462;net472;net48;netcoreapp3.1;net5.0;net6.0;net7.0;net8.0;net9.0</TargetFrameworks>
<OutputType>Exe</OutputType>
<CheckEolTargetFramework>false</CheckEolTargetFramework>
<IncludeSourceRevisionInInformationalVersion>false</IncludeSourceRevisionInInformationalVersion>
<LangVersion>latest</LangVersion>
<Nullable>enable</Nullable>
<SuppressTfmSupportBuildWarnings>true</SuppressTfmSupportBuildWarnings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<Version>1.7.4</Version>
</PropertyGroup>
<!-- Support All Frameworks -->
<PropertyGroup Condition="$(TargetFramework.StartsWith(`net2`)) OR $(TargetFramework.StartsWith(`net3`)) OR $(TargetFramework.StartsWith(`net4`))">
<RuntimeIdentifiers>win-x86;win-x64</RuntimeIdentifiers>
</PropertyGroup>
<PropertyGroup Condition="$(TargetFramework.StartsWith(`netcoreapp`)) OR $(TargetFramework.StartsWith(`net5`))">
<RuntimeIdentifiers>win-x86;win-x64;win-arm64;linux-x64;linux-arm64;osx-x64</RuntimeIdentifiers>
</PropertyGroup>
<PropertyGroup Condition="$(TargetFramework.StartsWith(`net6`)) OR $(TargetFramework.StartsWith(`net7`)) OR $(TargetFramework.StartsWith(`net8`)) OR $(TargetFramework.StartsWith(`net9`))">
<RuntimeIdentifiers>win-x86;win-x64;win-arm64;linux-x64;linux-arm64;osx-x64;osx-arm64</RuntimeIdentifiers>
</PropertyGroup>
<PropertyGroup Condition="$(RuntimeIdentifier.StartsWith(`osx-arm`))">
<TargetFrameworks>net6.0;net7.0;net8.0;net9.0</TargetFrameworks>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\SabreTools.Serialization\SabreTools.Serialization.csproj" />
</ItemGroup>
</Project>

View File

@@ -1,7 +1,7 @@
using System;
using System.Collections.Generic;
namespace Test
namespace InfoPrint
{
/// <summary>
/// Set of options for the test executable
@@ -104,15 +104,15 @@ namespace Test
/// </summary>
public static void DisplayHelp()
{
Console.WriteLine("SabreTools.Serialization Test Program");
Console.WriteLine("Information Printing Program");
Console.WriteLine();
Console.WriteLine("test.exe <options> file|directory ...");
Console.WriteLine("infoprint.exe <options> file|directory ...");
Console.WriteLine();
Console.WriteLine("Options:");
Console.WriteLine("-?, -h, --help Display this help text and quit");
Console.WriteLine("-d, --debug Enable debug mode");
#if NET6_0_OR_GREATER
Console.WriteLine("-j, --json Print executable info as JSON");
Console.WriteLine("-j, --json Print info as JSON");
#endif
}
}

View File

@@ -4,7 +4,7 @@ using SabreTools.IO.Extensions;
using SabreTools.Serialization;
using SabreTools.Serialization.Wrappers;
namespace Test
namespace InfoPrint
{
public static class Program
{
@@ -17,8 +17,6 @@ namespace Test
if (options == null)
{
Options.DisplayHelp();
Console.WriteLine("Press enter to close the program...");
Console.ReadLine();
return;
}
@@ -50,11 +48,7 @@ namespace Test
}
else if (Directory.Exists(path))
{
#if NET20 || NET35
foreach (string file in Directory.GetFiles(path, "*", SearchOption.AllDirectories))
#else
foreach (string file in Directory.EnumerateFiles(path, "*", SearchOption.AllDirectories))
#endif
foreach (string file in IOExtensions.SafeEnumerateFiles(path, "*", SearchOption.AllDirectories))
{
PrintFileInfo(file, json, debug);
}
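
Note: the hunk above replaces the framework-conditional Directory.GetFiles / Directory.EnumerateFiles calls with IOExtensions.SafeEnumerateFiles from SabreTools.IO, matching the "Use safe enumeration" commit. As a rough sketch only, and not the actual SabreTools.IO implementation, an exception-tolerant enumerator of this kind typically skips unreadable directories instead of aborting the whole walk:

// Hypothetical sketch of an exception-tolerant file enumerator; names and behavior are assumptions,
// not the real SabreTools.IO.Extensions code.
using System;
using System.Collections.Generic;
using System.IO;

internal static class SafeEnumerationSketch
{
    public static IEnumerable<string> SafeEnumerateFiles(string root, string pattern, SearchOption option)
    {
        var pending = new Queue<string>();
        pending.Enqueue(root);

        while (pending.Count > 0)
        {
            string dir = pending.Dequeue();
            string[] files;
            try
            {
                files = Directory.GetFiles(dir, pattern);
                if (option == SearchOption.AllDirectories)
                {
                    // Queue subdirectories separately so one unreadable directory does not stop the walk
                    foreach (string sub in Directory.GetDirectories(dir))
                        pending.Enqueue(sub);
                }
            }
            catch (UnauthorizedAccessException) { continue; }
            catch (IOException) { continue; }

            foreach (string file in files)
                yield return file;
        }
    }
}

The call site in Program.cs stays a plain foreach, so the swap is transparent to PrintFileInfo.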

View File

@@ -4,6 +4,12 @@ This library comprises of serializers that both read and write from files and st
Find the link to the Nuget package [here](https://www.nuget.org/packages/SabreTools.Serialization).
## Releases
For the most recent stable build, download the latest release here: [Releases Page](https://github.com/SabreTools/SabreTools.Serialization/releases)
For the latest WIP build here: [Rolling Release](https://github.com/SabreTools/SabreTools.Serialization/releases/tag/rolling)
## Interfaces
Below is a table representing the various conversion interfaces that are implemented within this library.

View File

@@ -31,13 +31,9 @@ namespace SabreTools.Serialization.Test
Assert.Equal(count, dat.File.Length);
// Validate we're not missing any attributes or elements
Assert.Null(dat.ADDITIONAL_ATTRIBUTES);
Assert.Null(dat.ADDITIONAL_ELEMENTS);
foreach (var file in dat.File)
{
Assert.NotNull(file);
Assert.Null(file.ADDITIONAL_ATTRIBUTES);
Assert.Null(file.ADDITIONAL_ELEMENTS);
}
}
@@ -59,7 +55,6 @@ namespace SabreTools.Serialization.Test
foreach (var file in dat.Row)
{
Assert.NotNull(file);
Assert.Null(file.ADDITIONAL_ELEMENTS);
}
}
@@ -76,103 +71,12 @@ namespace SabreTools.Serialization.Test
// Validate the values
if (expectHeader)
{
Assert.NotNull(dat?.ClrMamePro);
Assert.Null(dat.ClrMamePro.ADDITIONAL_ELEMENTS);
}
else
{
Assert.Null(dat?.ClrMamePro);
}
Assert.NotNull(dat?.Game);
Assert.Equal(count, dat.Game.Length);
// Validate we're not missing any attributes or elements
Assert.NotNull(dat?.ADDITIONAL_ELEMENTS);
Assert.Empty(dat.ADDITIONAL_ELEMENTS);
foreach (var game in dat.Game)
{
Assert.NotNull(game?.ADDITIONAL_ELEMENTS);
Assert.Empty(game.ADDITIONAL_ELEMENTS);
foreach (var release in game.Release ?? Array.Empty<Models.ClrMamePro.Release>())
{
Assert.NotNull(release?.ADDITIONAL_ELEMENTS);
Assert.Empty(release.ADDITIONAL_ELEMENTS);
}
foreach (var biosset in game.BiosSet ?? Array.Empty<Models.ClrMamePro.BiosSet>())
{
Assert.NotNull(biosset?.ADDITIONAL_ELEMENTS);
Assert.Empty(biosset.ADDITIONAL_ELEMENTS);
}
foreach (var rom in game.Rom ?? Array.Empty<Models.ClrMamePro.Rom>())
{
Assert.NotNull(rom?.ADDITIONAL_ELEMENTS);
Assert.Empty(rom.ADDITIONAL_ELEMENTS);
}
foreach (var disk in game.Disk ?? Array.Empty<Models.ClrMamePro.Disk>())
{
Assert.NotNull(disk?.ADDITIONAL_ELEMENTS);
Assert.Empty(disk.ADDITIONAL_ELEMENTS);
}
foreach (var media in game.Media ?? Array.Empty<Models.ClrMamePro.Media>())
{
Assert.NotNull(media?.ADDITIONAL_ELEMENTS);
Assert.Empty(media.ADDITIONAL_ELEMENTS);
}
foreach (var sample in game.Sample ?? Array.Empty<Models.ClrMamePro.Sample>())
{
Assert.NotNull(sample?.ADDITIONAL_ELEMENTS);
Assert.Empty(sample.ADDITIONAL_ELEMENTS);
}
foreach (var archive in game.Archive ?? Array.Empty<Models.ClrMamePro.Archive>())
{
Assert.NotNull(archive?.ADDITIONAL_ELEMENTS);
Assert.Empty(archive.ADDITIONAL_ELEMENTS);
}
foreach (var chip in game.Chip ?? Array.Empty<Models.ClrMamePro.Chip>())
{
Assert.NotNull(chip?.ADDITIONAL_ELEMENTS);
Assert.Empty(chip.ADDITIONAL_ELEMENTS);
}
foreach (var video in game.Video ?? Array.Empty<Models.ClrMamePro.Video>())
{
Assert.NotNull(video?.ADDITIONAL_ELEMENTS);
Assert.Empty(video.ADDITIONAL_ELEMENTS);
}
if (game.Sound != null)
{
Assert.NotNull(game.Sound?.ADDITIONAL_ELEMENTS);
Assert.Empty(game.Sound.ADDITIONAL_ELEMENTS);
}
if (game.Input != null)
{
Assert.NotNull(game.Input?.ADDITIONAL_ELEMENTS);
Assert.Empty(game.Input.ADDITIONAL_ELEMENTS);
}
foreach (var dipswitch in game.DipSwitch ?? Array.Empty<Models.ClrMamePro.DipSwitch>())
{
Assert.NotNull(dipswitch?.ADDITIONAL_ELEMENTS);
Assert.Empty(dipswitch.ADDITIONAL_ELEMENTS);
}
if (game.Driver != null)
{
Assert.NotNull(game.Driver?.ADDITIONAL_ELEMENTS);
Assert.Empty(game.Driver.ADDITIONAL_ELEMENTS);
}
}
}
[Theory]
@@ -192,23 +96,9 @@ namespace SabreTools.Serialization.Test
Assert.NotNull(dat?.Game);
Assert.Equal(count, dat.Game.Length);
// Validate we're not missing any attributes or elements
Assert.NotNull(dat?.ADDITIONAL_ELEMENTS);
Assert.Empty(dat.ADDITIONAL_ELEMENTS);
Assert.NotNull(dat.DosCenter?.ADDITIONAL_ELEMENTS);
Assert.Empty(dat.DosCenter.ADDITIONAL_ELEMENTS);
foreach (var game in dat.Game)
{
Assert.NotNull(game?.ADDITIONAL_ELEMENTS);
Assert.Empty(game.ADDITIONAL_ELEMENTS);
Assert.NotNull(game.File);
foreach (var file in game.File)
{
Assert.NotNull(file?.ADDITIONAL_ELEMENTS);
Assert.Empty(file.ADDITIONAL_ELEMENTS);
}
}
}
@@ -225,20 +115,10 @@ namespace SabreTools.Serialization.Test
// Validate the values
Assert.NotNull(dat?.Row);
Assert.Equal(count, dat.Row.Length);
// Validate we're not missing any attributes or elements
foreach (var file in dat.Row)
{
Assert.Null(file.ADDITIONAL_ELEMENTS);
}
}
[Theory]
[InlineData("test-sfv-files.sfv", HashType.CRC32, 100)]
[InlineData("test-sfv-files.sfv", HashType.CRC32_ISO, 100)]
[InlineData("test-sfv-files.sfv", HashType.CRC32_Naive, 100)]
[InlineData("test-sfv-files.sfv", HashType.CRC32_Optimized, 100)]
[InlineData("test-sfv-files.sfv", HashType.CRC32_Parallel, 100)]
[InlineData("test-md5-files.md5", HashType.MD5, 100)]
[InlineData("test-sha1-files.sha1", HashType.SHA1, 100)]
[InlineData("test-sha256.sha256", HashType.SHA256, 1)]
@@ -259,10 +139,6 @@ namespace SabreTools.Serialization.Test
switch (hash)
{
case HashType.CRC32:
case HashType.CRC32_ISO:
case HashType.CRC32_Naive:
case HashType.CRC32_Optimized:
case HashType.CRC32_Parallel:
Assert.NotNull(dat.SFV);
Assert.Equal(count, dat.SFV.Length);
break;
@@ -308,10 +184,6 @@ namespace SabreTools.Serialization.Test
// Validate the values
Assert.NotNull(dat?.Set);
Assert.Equal(count, dat.Set.Length);
// Validate we're not missing any attributes or elements
Assert.NotNull(dat.ADDITIONAL_ELEMENTS);
Assert.Empty(dat.ADDITIONAL_ELEMENTS);
}
[Theory]
@@ -328,219 +200,6 @@ namespace SabreTools.Serialization.Test
// Validate the values
Assert.NotNull(dat?.Game);
Assert.Equal(count, dat.Game.Length);
// Validate we're not missing any attributes or elements
Assert.Null(dat.ADDITIONAL_ATTRIBUTES);
Assert.Null(dat.ADDITIONAL_ELEMENTS);
foreach (var game in dat.Game)
{
Assert.Null(game.ADDITIONAL_ATTRIBUTES);
Assert.Null(game.ADDITIONAL_ELEMENTS);
foreach (var biosset in game.BiosSet ?? Array.Empty<Models.Listxml.BiosSet>())
{
Assert.Null(biosset.ADDITIONAL_ATTRIBUTES);
Assert.Null(biosset.ADDITIONAL_ELEMENTS);
}
foreach (var rom in game.Rom ?? Array.Empty<Models.Listxml.Rom>())
{
Assert.Null(rom.ADDITIONAL_ATTRIBUTES);
Assert.Null(rom.ADDITIONAL_ELEMENTS);
}
foreach (var disk in game.Disk ?? Array.Empty<Models.Listxml.Disk>())
{
Assert.Null(disk.ADDITIONAL_ATTRIBUTES);
Assert.Null(disk.ADDITIONAL_ELEMENTS);
}
foreach (var deviceRef in game.DeviceRef ?? Array.Empty<Models.Listxml.DeviceRef>())
{
Assert.Null(deviceRef.ADDITIONAL_ATTRIBUTES);
Assert.Null(deviceRef.ADDITIONAL_ELEMENTS);
}
foreach (var sample in game.Sample ?? Array.Empty<Models.Listxml.Sample>())
{
Assert.Null(sample.ADDITIONAL_ATTRIBUTES);
Assert.Null(sample.ADDITIONAL_ELEMENTS);
}
foreach (var chip in game.Chip ?? Array.Empty<Models.Listxml.Chip>())
{
Assert.Null(chip.ADDITIONAL_ATTRIBUTES);
Assert.Null(chip.ADDITIONAL_ELEMENTS);
}
foreach (var display in game.Display ?? Array.Empty<Models.Listxml.Display>())
{
Assert.Null(display.ADDITIONAL_ATTRIBUTES);
Assert.Null(display.ADDITIONAL_ELEMENTS);
}
foreach (var video in game.Video ?? Array.Empty<Models.Listxml.Video>())
{
Assert.Null(video.ADDITIONAL_ATTRIBUTES);
Assert.Null(video.ADDITIONAL_ELEMENTS);
}
if (game.Sound != null)
{
Assert.Null(game.Sound.ADDITIONAL_ATTRIBUTES);
Assert.Null(game.Sound.ADDITIONAL_ELEMENTS);
}
if (game.Input != null)
{
Assert.Null(game.Input.ADDITIONAL_ATTRIBUTES);
Assert.Null(game.Input.ADDITIONAL_ELEMENTS);
foreach (var control in game.Input.Control ?? Array.Empty<Models.Listxml.Control>())
{
Assert.Null(control.ADDITIONAL_ATTRIBUTES);
Assert.Null(control.ADDITIONAL_ELEMENTS);
}
}
foreach (var dipswitch in game.DipSwitch ?? Array.Empty<Models.Listxml.DipSwitch>())
{
Assert.Null(dipswitch.ADDITIONAL_ATTRIBUTES);
Assert.Null(dipswitch.ADDITIONAL_ELEMENTS);
if (dipswitch.Condition != null)
{
Assert.Null(dipswitch.Condition.ADDITIONAL_ATTRIBUTES);
Assert.Null(dipswitch.Condition.ADDITIONAL_ELEMENTS);
}
foreach (var diplocation in dipswitch.DipLocation ?? Array.Empty<Models.Listxml.DipLocation>())
{
Assert.Null(diplocation.ADDITIONAL_ATTRIBUTES);
Assert.Null(diplocation.ADDITIONAL_ELEMENTS);
}
foreach (var dipvalue in dipswitch.DipValue ?? Array.Empty<Models.Listxml.DipValue>())
{
Assert.Null(dipvalue.ADDITIONAL_ATTRIBUTES);
Assert.Null(dipvalue.ADDITIONAL_ELEMENTS);
if (dipvalue.Condition != null)
{
Assert.Null(dipvalue.Condition.ADDITIONAL_ATTRIBUTES);
Assert.Null(dipvalue.Condition.ADDITIONAL_ELEMENTS);
}
}
}
foreach (var configuration in game.Configuration ?? Array.Empty<Models.Listxml.Configuration>())
{
Assert.Null(configuration.ADDITIONAL_ATTRIBUTES);
Assert.Null(configuration.ADDITIONAL_ELEMENTS);
if (configuration.Condition != null)
{
Assert.Null(configuration.Condition.ADDITIONAL_ATTRIBUTES);
Assert.Null(configuration.Condition.ADDITIONAL_ELEMENTS);
}
foreach (var conflocation in configuration.ConfLocation ?? Array.Empty<Models.Listxml.ConfLocation>())
{
Assert.Null(conflocation.ADDITIONAL_ATTRIBUTES);
Assert.Null(conflocation.ADDITIONAL_ELEMENTS);
}
foreach (var confsetting in configuration.ConfSetting ?? Array.Empty<Models.Listxml.ConfSetting>())
{
Assert.Null(confsetting.ADDITIONAL_ATTRIBUTES);
Assert.Null(confsetting.ADDITIONAL_ELEMENTS);
if (confsetting.Condition != null)
{
Assert.Null(confsetting.Condition.ADDITIONAL_ATTRIBUTES);
Assert.Null(confsetting.Condition.ADDITIONAL_ELEMENTS);
}
}
}
foreach (var port in game.Port ?? Array.Empty<Models.Listxml.Port>())
{
Assert.Null(port.ADDITIONAL_ATTRIBUTES);
Assert.Null(port.ADDITIONAL_ELEMENTS);
foreach (var analog in port.Analog ?? Array.Empty<Models.Listxml.Analog>())
{
Assert.Null(analog.ADDITIONAL_ATTRIBUTES);
Assert.Null(analog.ADDITIONAL_ELEMENTS);
}
}
foreach (var adjuster in game.Adjuster ?? Array.Empty<Models.Listxml.Adjuster>())
{
Assert.Null(adjuster.ADDITIONAL_ATTRIBUTES);
Assert.Null(adjuster.ADDITIONAL_ELEMENTS);
if (adjuster.Condition != null)
{
Assert.Null(adjuster.Condition.ADDITIONAL_ATTRIBUTES);
Assert.Null(adjuster.Condition.ADDITIONAL_ELEMENTS);
}
}
if (game.Driver != null)
{
Assert.Null(game.Driver.ADDITIONAL_ATTRIBUTES);
Assert.Null(game.Driver.ADDITIONAL_ELEMENTS);
}
foreach (var feature in game.Feature ?? Array.Empty<Models.Listxml.Feature>())
{
Assert.Null(feature.ADDITIONAL_ATTRIBUTES);
Assert.Null(feature.ADDITIONAL_ELEMENTS);
}
foreach (var device in game.Device ?? Array.Empty<Models.Listxml.Device>())
{
Assert.Null(device.ADDITIONAL_ATTRIBUTES);
Assert.Null(device.ADDITIONAL_ELEMENTS);
if (device.Instance != null)
{
Assert.Null(device.Instance.ADDITIONAL_ATTRIBUTES);
Assert.Null(device.Instance.ADDITIONAL_ELEMENTS);
}
foreach (var extension in device.Extension ?? Array.Empty<Models.Listxml.Extension>())
{
Assert.Null(extension.ADDITIONAL_ATTRIBUTES);
Assert.Null(extension.ADDITIONAL_ELEMENTS);
}
}
foreach (var slot in game.Slot ?? Array.Empty<Models.Listxml.Slot>())
{
Assert.Null(slot.ADDITIONAL_ATTRIBUTES);
Assert.Null(slot.ADDITIONAL_ELEMENTS);
foreach (var slotoption in slot.SlotOption ?? Array.Empty<Models.Listxml.SlotOption>())
{
Assert.Null(slotoption.ADDITIONAL_ATTRIBUTES);
Assert.Null(slotoption.ADDITIONAL_ELEMENTS);
}
}
foreach (var softwarelist in game.SoftwareList ?? Array.Empty<Models.Listxml.SoftwareList>())
{
Assert.Null(softwarelist.ADDITIONAL_ATTRIBUTES);
Assert.Null(softwarelist.ADDITIONAL_ELEMENTS);
}
foreach (var ramoption in game.RamOption ?? Array.Empty<Models.Listxml.RamOption>())
{
Assert.Null(ramoption.ADDITIONAL_ATTRIBUTES);
Assert.Null(ramoption.ADDITIONAL_ELEMENTS);
}
}
}
[Theory]
@@ -557,180 +216,6 @@ namespace SabreTools.Serialization.Test
// Validate the values
Assert.NotNull(dat?.Game);
Assert.Equal(count, dat.Game.Length);
// Validate we're not missing any attributes or elements
Assert.Null(dat.ADDITIONAL_ATTRIBUTES);
Assert.Null(dat.ADDITIONAL_ELEMENTS);
if (dat.Header != null)
{
var header = dat.Header;
Assert.Null(header.ADDITIONAL_ATTRIBUTES);
Assert.Null(header.ADDITIONAL_ELEMENTS);
if (header.ClrMamePro != null)
{
var cmp = header.ClrMamePro;
Assert.Null(cmp.ADDITIONAL_ATTRIBUTES);
Assert.Null(cmp.ADDITIONAL_ELEMENTS);
}
if (header.RomCenter != null)
{
var rc = header.RomCenter;
Assert.Null(rc.ADDITIONAL_ATTRIBUTES);
Assert.Null(rc.ADDITIONAL_ELEMENTS);
}
}
foreach (var game in dat.Game)
{
Assert.Null(game.ADDITIONAL_ATTRIBUTES);
Assert.Null(game.ADDITIONAL_ELEMENTS);
foreach (var item in game.Release ?? Array.Empty<Models.Logiqx.Release>())
{
Assert.Null(item.ADDITIONAL_ATTRIBUTES);
Assert.Null(item.ADDITIONAL_ELEMENTS);
}
foreach (var item in game.BiosSet ?? Array.Empty<Models.Logiqx.BiosSet>())
{
Assert.Null(item.ADDITIONAL_ATTRIBUTES);
Assert.Null(item.ADDITIONAL_ELEMENTS);
}
foreach (var item in game.Rom ?? Array.Empty<Models.Logiqx.Rom>())
{
Assert.Null(item.ADDITIONAL_ATTRIBUTES);
Assert.Null(item.ADDITIONAL_ELEMENTS);
}
foreach (var item in game.Disk ?? Array.Empty<Models.Logiqx.Disk>())
{
Assert.Null(item.ADDITIONAL_ATTRIBUTES);
Assert.Null(item.ADDITIONAL_ELEMENTS);
}
foreach (var item in game.Media ?? Array.Empty<Models.Logiqx.Media>())
{
Assert.Null(item.ADDITIONAL_ATTRIBUTES);
Assert.Null(item.ADDITIONAL_ELEMENTS);
}
foreach (var item in game.DeviceRef ?? Array.Empty<Models.Logiqx.DeviceRef>())
{
Assert.Null(item.ADDITIONAL_ATTRIBUTES);
Assert.Null(item.ADDITIONAL_ELEMENTS);
}
foreach (var item in game.Sample ?? Array.Empty<Models.Logiqx.Sample>())
{
Assert.Null(item.ADDITIONAL_ATTRIBUTES);
Assert.Null(item.ADDITIONAL_ELEMENTS);
}
foreach (var item in game.Archive ?? Array.Empty<Models.Logiqx.Archive>())
{
Assert.Null(item.ADDITIONAL_ATTRIBUTES);
Assert.Null(item.ADDITIONAL_ELEMENTS);
}
if (game.Driver != null)
{
Assert.Null(game.Driver.ADDITIONAL_ATTRIBUTES);
Assert.Null(game.Driver.ADDITIONAL_ELEMENTS);
}
foreach (var item in game.SoftwareList ?? Array.Empty<Models.Logiqx.SoftwareList>())
{
Assert.Null(item.ADDITIONAL_ATTRIBUTES);
Assert.Null(item.ADDITIONAL_ELEMENTS);
}
if (game.Trurip != null)
{
var trurip = game.Trurip;
Assert.Null(trurip.ADDITIONAL_ATTRIBUTES);
Assert.Null(trurip.ADDITIONAL_ELEMENTS);
}
}
foreach (var dir in dat.Dir ?? Array.Empty<Models.Logiqx.Dir>())
{
Assert.NotNull(dir.Game);
foreach (var game in dir.Game)
{
Assert.Null(game.ADDITIONAL_ATTRIBUTES);
Assert.Null(game.ADDITIONAL_ELEMENTS);
foreach (var item in game.Release ?? Array.Empty<Models.Logiqx.Release>())
{
Assert.Null(item.ADDITIONAL_ATTRIBUTES);
Assert.Null(item.ADDITIONAL_ELEMENTS);
}
foreach (var item in game.BiosSet ?? Array.Empty<Models.Logiqx.BiosSet>())
{
Assert.Null(item.ADDITIONAL_ATTRIBUTES);
Assert.Null(item.ADDITIONAL_ELEMENTS);
}
foreach (var item in game.Rom ?? Array.Empty<Models.Logiqx.Rom>())
{
Assert.Null(item.ADDITIONAL_ATTRIBUTES);
Assert.Null(item.ADDITIONAL_ELEMENTS);
}
foreach (var item in game.Disk ?? Array.Empty<Models.Logiqx.Disk>())
{
Assert.Null(item.ADDITIONAL_ATTRIBUTES);
Assert.Null(item.ADDITIONAL_ELEMENTS);
}
foreach (var item in game.Media ?? Array.Empty<Models.Logiqx.Media>())
{
Assert.Null(item.ADDITIONAL_ATTRIBUTES);
Assert.Null(item.ADDITIONAL_ELEMENTS);
}
foreach (var item in game.DeviceRef ?? Array.Empty<Models.Logiqx.DeviceRef>())
{
Assert.Null(item.ADDITIONAL_ATTRIBUTES);
Assert.Null(item.ADDITIONAL_ELEMENTS);
}
foreach (var item in game.Sample ?? Array.Empty<Models.Logiqx.Sample>())
{
Assert.Null(item.ADDITIONAL_ATTRIBUTES);
Assert.Null(item.ADDITIONAL_ELEMENTS);
}
foreach (var item in game.Archive ?? Array.Empty<Models.Logiqx.Archive>())
{
Assert.Null(item.ADDITIONAL_ATTRIBUTES);
Assert.Null(item.ADDITIONAL_ELEMENTS);
}
if (game.Driver != null)
{
Assert.Null(game.Driver.ADDITIONAL_ATTRIBUTES);
Assert.Null(game.Driver.ADDITIONAL_ELEMENTS);
}
foreach (var item in game.SoftwareList ?? Array.Empty<Models.Logiqx.SoftwareList>())
{
Assert.Null(item.ADDITIONAL_ATTRIBUTES);
Assert.Null(item.ADDITIONAL_ELEMENTS);
}
if (game.Trurip != null)
{
var trurip = game.Trurip;
Assert.Null(trurip.ADDITIONAL_ATTRIBUTES);
Assert.Null(trurip.ADDITIONAL_ELEMENTS);
}
}
}
}
[Theory]
@@ -746,196 +231,6 @@ namespace SabreTools.Serialization.Test
// Validate the values
Assert.NotNull(dat?.Games?.Game);
Assert.Equal(count, dat.Games.Game.Length);
// Validate we're not missing any attributes or elements
Assert.Null(dat.ADDITIONAL_ATTRIBUTES);
Assert.Null(dat.ADDITIONAL_ELEMENTS);
if (dat.Configuration != null)
{
var configuration = dat.Configuration;
Assert.Null(configuration.ADDITIONAL_ATTRIBUTES);
Assert.Null(configuration.ADDITIONAL_ELEMENTS);
if (configuration.Infos != null)
{
var infos = configuration.Infos;
Assert.Null(infos.ADDITIONAL_ATTRIBUTES);
Assert.Null(infos.ADDITIONAL_ELEMENTS);
if (infos.Title != null)
{
var title = infos.Title;
Assert.Null(title.ADDITIONAL_ATTRIBUTES);
Assert.Null(title.ADDITIONAL_ELEMENTS);
}
if (infos.Location != null)
{
var location = infos.Location;
Assert.Null(location.ADDITIONAL_ATTRIBUTES);
Assert.Null(location.ADDITIONAL_ELEMENTS);
}
if (infos.Publisher != null)
{
var publisher = infos.Publisher;
Assert.Null(publisher.ADDITIONAL_ATTRIBUTES);
Assert.Null(publisher.ADDITIONAL_ELEMENTS);
}
if (infos.SourceRom != null)
{
var sourceRom = infos.SourceRom;
Assert.Null(sourceRom.ADDITIONAL_ATTRIBUTES);
Assert.Null(sourceRom.ADDITIONAL_ELEMENTS);
}
if (infos.SaveType != null)
{
var saveType = infos.SaveType;
Assert.Null(saveType.ADDITIONAL_ATTRIBUTES);
Assert.Null(saveType.ADDITIONAL_ELEMENTS);
}
if (infos.RomSize != null)
{
var romSize = infos.RomSize;
Assert.Null(romSize.ADDITIONAL_ATTRIBUTES);
Assert.Null(romSize.ADDITIONAL_ELEMENTS);
}
if (infos.ReleaseNumber != null)
{
var releaseNumber = infos.ReleaseNumber;
Assert.Null(releaseNumber.ADDITIONAL_ATTRIBUTES);
Assert.Null(releaseNumber.ADDITIONAL_ELEMENTS);
}
if (infos.LanguageNumber != null)
{
var languageNumber = infos.LanguageNumber;
Assert.Null(languageNumber.ADDITIONAL_ATTRIBUTES);
Assert.Null(languageNumber.ADDITIONAL_ELEMENTS);
}
if (infos.Comment != null)
{
var comment = infos.Comment;
Assert.Null(comment.ADDITIONAL_ATTRIBUTES);
Assert.Null(comment.ADDITIONAL_ELEMENTS);
}
if (infos.RomCRC != null)
{
var romCRC = infos.RomCRC;
Assert.Null(romCRC.ADDITIONAL_ATTRIBUTES);
Assert.Null(romCRC.ADDITIONAL_ELEMENTS);
}
if (infos.Im1CRC != null)
{
var im1CRC = infos.Im1CRC;
Assert.Null(im1CRC.ADDITIONAL_ATTRIBUTES);
Assert.Null(im1CRC.ADDITIONAL_ELEMENTS);
}
if (infos.Im2CRC != null)
{
var im2CRC = infos.Im2CRC;
Assert.Null(im2CRC.ADDITIONAL_ATTRIBUTES);
Assert.Null(im2CRC.ADDITIONAL_ELEMENTS);
}
if (infos.Languages != null)
{
var languages = infos.Languages;
Assert.Null(languages.ADDITIONAL_ATTRIBUTES);
Assert.Null(languages.ADDITIONAL_ELEMENTS);
}
}
if (configuration.CanOpen != null)
{
var canOpen = configuration.CanOpen;
Assert.Null(canOpen.ADDITIONAL_ATTRIBUTES);
Assert.Null(canOpen.ADDITIONAL_ELEMENTS);
}
if (configuration.NewDat != null)
{
var newDat = configuration.NewDat;
Assert.Null(newDat.ADDITIONAL_ATTRIBUTES);
Assert.Null(newDat.ADDITIONAL_ELEMENTS);
if (newDat.DatUrl != null)
{
var datURL = newDat.DatUrl;
Assert.Null(datURL.ADDITIONAL_ATTRIBUTES);
Assert.Null(datURL.ADDITIONAL_ELEMENTS);
}
}
if (configuration.Search != null)
{
var search = configuration.Search;
Assert.Null(search.ADDITIONAL_ATTRIBUTES);
Assert.Null(search.ADDITIONAL_ELEMENTS);
foreach (var to in search.To ?? Array.Empty<Models.OfflineList.To>())
{
Assert.Null(to.ADDITIONAL_ATTRIBUTES);
Assert.Null(to.ADDITIONAL_ELEMENTS);
foreach (var find in to.Find ?? Array.Empty<Models.OfflineList.Find>())
{
Assert.Null(find.ADDITIONAL_ATTRIBUTES);
Assert.Null(find.ADDITIONAL_ELEMENTS);
}
}
}
}
Assert.Null(dat.Games.ADDITIONAL_ATTRIBUTES);
Assert.Null(dat.Games.ADDITIONAL_ELEMENTS);
foreach (var game in dat.Games.Game)
{
Assert.Null(game.ADDITIONAL_ATTRIBUTES);
//Assert.Null(game.ADDITIONAL_ELEMENTS); // TODO: Re-enable line when Models is fixed again
if (game.Files != null)
{
var files = game.Files;
Assert.Null(files.ADDITIONAL_ATTRIBUTES);
Assert.Null(files.ADDITIONAL_ELEMENTS);
foreach (var romCRC in files.RomCRC ?? Array.Empty<Models.OfflineList.FileRomCRC>())
{
Assert.Null(romCRC.ADDITIONAL_ATTRIBUTES);
Assert.Null(romCRC.ADDITIONAL_ELEMENTS);
}
}
}
if (dat.GUI != null)
{
var gui = dat.GUI;
Assert.Null(gui.ADDITIONAL_ATTRIBUTES);
Assert.Null(gui.ADDITIONAL_ELEMENTS);
if (gui.Images != null)
{
var images = gui.Images;
Assert.Null(images.ADDITIONAL_ATTRIBUTES);
Assert.Null(images.ADDITIONAL_ELEMENTS);
foreach (var image in images.Image ?? Array.Empty<Models.OfflineList.Image>())
{
Assert.Null(image.ADDITIONAL_ATTRIBUTES);
Assert.Null(image.ADDITIONAL_ELEMENTS);
}
}
}
}
[Theory]
@@ -952,33 +247,6 @@ namespace SabreTools.Serialization.Test
Assert.NotNull(dat);
Assert.NotNull(dat.Software);
Assert.Equal(count, dat.Software.Length);
// Validate we're not missing any attributes or elements
Assert.Null(dat.ADDITIONAL_ATTRIBUTES);
Assert.Null(dat.ADDITIONAL_ELEMENTS);
foreach (var software in dat.Software)
{
Assert.Null(software.ADDITIONAL_ATTRIBUTES);
Assert.Null(software.ADDITIONAL_ELEMENTS);
foreach (var dump in software.Dump ?? Array.Empty<Models.OpenMSX.Dump>())
{
Assert.Null(dump.ADDITIONAL_ATTRIBUTES);
Assert.Null(dump.ADDITIONAL_ELEMENTS);
if (dump.Original != null)
{
Assert.Null(dump.Original.ADDITIONAL_ATTRIBUTES);
Assert.Null(dump.Original.ADDITIONAL_ELEMENTS);
}
if (dump.Rom != null)
{
Assert.Null(dump.Rom.ADDITIONAL_ATTRIBUTES);
Assert.Null(dump.Rom.ADDITIONAL_ELEMENTS);
}
}
}
}
[Theory]
@@ -994,37 +262,6 @@ namespace SabreTools.Serialization.Test
// Validate the values
Assert.NotNull(dat?.Games?.Rom);
Assert.Equal(count, dat.Games.Rom.Length);
// Validate we're not missing any attributes or elements
Assert.NotNull(dat.ADDITIONAL_ELEMENTS);
Assert.Empty(dat.ADDITIONAL_ELEMENTS);
if (dat.Credits != null)
{
Assert.NotNull(dat.Credits.ADDITIONAL_ELEMENTS);
Assert.Empty(dat.Credits.ADDITIONAL_ELEMENTS);
}
if (dat.Dat != null)
{
Assert.NotNull(dat.Dat.ADDITIONAL_ELEMENTS);
Assert.Empty(dat.Dat.ADDITIONAL_ELEMENTS);
}
if (dat.Emulator != null)
{
Assert.NotNull(dat.Emulator.ADDITIONAL_ELEMENTS);
Assert.Empty(dat.Emulator.ADDITIONAL_ELEMENTS);
}
if (dat.Games != null)
{
Assert.NotNull(dat.Games.ADDITIONAL_ELEMENTS);
Assert.Empty(dat.Games.ADDITIONAL_ELEMENTS);
foreach (var rom in dat.Games.Rom ?? Array.Empty<Models.RomCenter.Rom>())
{
Assert.Null(rom.ADDITIONAL_ELEMENTS);
}
}
}
[Theory]
@@ -1045,12 +282,6 @@ namespace SabreTools.Serialization.Test
// Validate the values
Assert.NotNull(dat?.Row);
Assert.Equal(count, dat.Row.Length);
// Validate we're not missing any attributes or elements
foreach (var rom in dat.Row ?? Array.Empty<Models.SeparatedValue.Row>())
{
Assert.Null(rom.ADDITIONAL_ELEMENTS);
}
}
[Theory]
@@ -1069,77 +300,8 @@ namespace SabreTools.Serialization.Test
Assert.NotNull(dat);
Assert.NotNull(dat.Software);
Assert.Equal(count, dat.Software.Length);
// Validate we're not missing any attributes or elements
Assert.Null(dat.ADDITIONAL_ATTRIBUTES);
Assert.Null(dat.ADDITIONAL_ELEMENTS);
foreach (var software in dat.Software)
{
Assert.Null(software.ADDITIONAL_ATTRIBUTES);
Assert.Null(software.ADDITIONAL_ELEMENTS);
foreach (var info in software.Info ?? Array.Empty<Models.SoftwareList.Info>())
{
Assert.Null(info.ADDITIONAL_ATTRIBUTES);
Assert.Null(info.ADDITIONAL_ELEMENTS);
}
foreach (var sharedfeat in software.SharedFeat ?? Array.Empty<Models.SoftwareList.SharedFeat>())
{
Assert.Null(sharedfeat.ADDITIONAL_ATTRIBUTES);
Assert.Null(sharedfeat.ADDITIONAL_ELEMENTS);
}
foreach (var part in software.Part ?? Array.Empty<Models.SoftwareList.Part>())
{
Assert.Null(part.ADDITIONAL_ATTRIBUTES);
Assert.Null(part.ADDITIONAL_ELEMENTS);
foreach (var feature in part.Feature ?? Array.Empty<Models.SoftwareList.Feature>())
{
Assert.Null(feature.ADDITIONAL_ATTRIBUTES);
Assert.Null(feature.ADDITIONAL_ELEMENTS);
}
foreach (var dataarea in part.DataArea ?? Array.Empty<Models.SoftwareList.DataArea>())
{
Assert.Null(dataarea.ADDITIONAL_ATTRIBUTES);
Assert.Null(dataarea.ADDITIONAL_ELEMENTS);
foreach (var rom in dataarea.Rom ?? Array.Empty<Models.SoftwareList.Rom>())
{
Assert.Null(rom.ADDITIONAL_ATTRIBUTES);
Assert.Null(rom.ADDITIONAL_ELEMENTS);
}
}
foreach (var diskarea in part.DiskArea ?? Array.Empty<Models.SoftwareList.DiskArea>())
{
Assert.Null(diskarea.ADDITIONAL_ATTRIBUTES);
Assert.Null(diskarea.ADDITIONAL_ELEMENTS);
foreach (var disk in diskarea.Disk ?? Array.Empty<Models.SoftwareList.Disk>())
{
Assert.Null(disk.ADDITIONAL_ATTRIBUTES);
Assert.Null(disk.ADDITIONAL_ELEMENTS);
}
}
foreach (var dipswitch in part.DipSwitch ?? Array.Empty<Models.SoftwareList.DipSwitch>())
{
Assert.Null(dipswitch.ADDITIONAL_ATTRIBUTES);
Assert.Null(dipswitch.ADDITIONAL_ELEMENTS);
foreach (var dipvalue in dipswitch.DipValue ?? Array.Empty<Models.SoftwareList.DipValue>())
{
Assert.Null(dipvalue.ADDITIONAL_ATTRIBUTES);
Assert.Null(dipvalue.ADDITIONAL_ELEMENTS);
}
}
}
}
}
/// <summary>
/// Get the path to the test file
/// </summary>

View File

@@ -63,9 +63,9 @@ namespace SabreTools.Serialization.Test
var dump = new Models.OpenMSX.Dump[]
{
new Models.OpenMSX.Dump { Original = original, Rom = rom },
new Models.OpenMSX.Dump { Rom = megaRom },
new Models.OpenMSX.Dump { Rom = sccPlusCart },
new() { Original = original, Rom = rom },
new() { Rom = megaRom },
new() { Rom = sccPlusCart },
};
var software = new Models.OpenMSX.Software
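
The hunk above swaps the spelled-out Models.OpenMSX.Dump constructor calls for target-typed new() expressions (C# 9 and later), which infer the element type from the array being built. A minimal stand-alone illustration of the same pattern; the Dump type below is a placeholder, not the library's model class:

// Illustrative only: target-typed new() inside an array initializer.
public sealed class Dump
{
    public string Original { get; set; } = "";
    public string Rom { get; set; } = "";
}

public static class TargetTypedNewExample
{
    public static Dump[] Build()
    {
        // The element type (Dump) comes from the array type, so new() needs no type name.
        return new Dump[]
        {
            new() { Original = "original", Rom = "rom" },
            new() { Rom = "megaRom" },
        };
    }
}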

View File

@@ -1,38 +1,38 @@
<Project Sdk="Microsoft.NET.Sdk">
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFrameworks>net6.0;net8.0</TargetFrameworks>
<IsPackable>false</IsPackable>
<LangVersion>latest</LangVersion>
<Nullable>enable</Nullable>
</PropertyGroup>
<PropertyGroup>
<TargetFrameworks>net6.0;net8.0;net9.0</TargetFrameworks>
<IsPackable>false</IsPackable>
<LangVersion>latest</LangVersion>
<Nullable>enable</Nullable>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\SabreTools.Serialization\SabreTools.Serialization.csproj" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\SabreTools.Serialization\SabreTools.Serialization.csproj" />
</ItemGroup>
<ItemGroup>
<None Remove="TestData\*" />
</ItemGroup>
<ItemGroup>
<None Remove="TestData\*" />
</ItemGroup>
<ItemGroup>
<Content Include="TestData\*">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</Content>
</ItemGroup>
<ItemGroup>
<Content Include="TestData\*">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</Content>
</ItemGroup>
<ItemGroup>
<PackageReference Include="coverlet.collector" Version="6.0.2">
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PrivateAssets>all</PrivateAssets>
</PackageReference>
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.9.0" />
<PackageReference Include="SabreTools.Models" Version="1.4.8" />
<PackageReference Include="xunit" Version="2.8.0" />
<PackageReference Include="xunit.runner.visualstudio" Version="2.8.0">
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PrivateAssets>all</PrivateAssets>
</PackageReference>
</ItemGroup>
<ItemGroup>
<PackageReference Include="coverlet.collector" Version="6.0.2">
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PrivateAssets>all</PrivateAssets>
</PackageReference>
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.11.1" />
<PackageReference Include="SabreTools.Models" Version="1.5.1" />
<PackageReference Include="xunit" Version="2.9.2" />
<PackageReference Include="xunit.runner.visualstudio" Version="2.8.2">
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PrivateAssets>all</PrivateAssets>
</PackageReference>
</ItemGroup>
</Project>
</Project>

View File

@@ -5,7 +5,7 @@ VisualStudioVersion = 17.0.31903.59
MinimumVisualStudioVersion = 10.0.40219.1
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "SabreTools.Serialization", "SabreTools.Serialization\SabreTools.Serialization.csproj", "{5B688801-5F36-483E-B2E8-F219BA5923A2}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Test", "Test\Test.csproj", "{F3DEE31A-4726-464C-A90C-C19D78F51898}"
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "InfoPrint", "InfoPrint\InfoPrint.csproj", "{F3DEE31A-4726-464C-A90C-C19D78F51898}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "SabreTools.Serialization.Test", "SabreTools.Serialization.Test\SabreTools.Serialization.Test.csproj", "{B8A04C5E-A14F-4842-9035-2F6871A1DA10}"
EndProject

View File

@@ -1,6 +1,5 @@
using System;
using System.Linq;
using SabreTools.Models.ArchiveDotOrg;
using System.Collections.Generic;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.CrossModel
@@ -16,14 +15,13 @@ namespace SabreTools.Serialization.CrossModel
var files = new Models.ArchiveDotOrg.Files();
var machines = obj.Read<Models.Metadata.Machine[]>(Models.Metadata.MetadataFile.MachineKey);
if (machines != null && machines.Any())
var items = new List<Models.ArchiveDotOrg.File>();
foreach (var machine in machines ?? [])
{
files.File = machines
.Where(m => m != null)
.SelectMany(ConvertFromInternalModel)
.ToArray();
items.AddRange(ConvertFromInternalModel(machine));
}
files.File = [.. items];
return files;
}
@@ -36,9 +34,7 @@ namespace SabreTools.Serialization.CrossModel
if (roms == null)
return [];
return roms
.Where(r => r != null)
.Select(ConvertFromInternalModel).ToArray();
return Array.ConvertAll(roms, ConvertFromInternalModel);
}
/// <summary>
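
This hunk is typical of the "Reduce Linq steps" and "Unrolling Linq is more efficient" commits: Where/Select/ToArray chains over arrays become Array.ConvertAll or an explicit loop, and SelectMany becomes a List<T> filled in a foreach. The same shape recurs in the AttractMode and ClrMamePro cross-model files below. A small stand-alone sketch of the pattern, with placeholder Rom/RomInfo types rather than the library's models:

using System;
using System.Collections.Generic;
using System.Linq;

public record Rom(string Name);
public record RomInfo(string Name);

public static class LinqReductionSketch
{
    // Before: multiple LINQ stages, each with its own iterator, plus a final ToArray copy.
    public static RomInfo[] ConvertWithLinq(Rom[] roms)
        => roms.Select(r => new RomInfo(r.Name)).ToArray();

    // After: Array.ConvertAll does one pass and allocates the result array directly.
    public static RomInfo[] ConvertWithConvertAll(Rom[] roms)
        => Array.ConvertAll(roms, r => new RomInfo(r.Name));

    // The SelectMany case maps to a List<T> plus AddRange, with one copy out at the end.
    public static RomInfo[] FlattenMachines(Rom[][] machines)
    {
        var items = new List<RomInfo>();
        foreach (var machine in machines)
            items.AddRange(Array.ConvertAll(machine, r => new RomInfo(r.Name)));
        return items.ToArray();
    }
}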

View File

@@ -1,4 +1,4 @@
using System.Linq;
using System;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.CrossModel
@@ -16,13 +16,10 @@ namespace SabreTools.Serialization.CrossModel
[Models.Metadata.MetadataFile.HeaderKey] = ConvertHeaderToInternalModel(item),
};
if (item?.File != null && item.File.Any())
if (item?.File != null && item.File.Length > 0)
{
metadataFile[Models.Metadata.MetadataFile.MachineKey] = item.File
.Where(f => f != null)
.Select(ConvertMachineToInternalModel)
.Where(m => m != null)
.ToArray();
metadataFile[Models.Metadata.MetadataFile.MachineKey]
= Array.ConvertAll(item.File, ConvertMachineToInternalModel);
}
return metadataFile;

View File

@@ -1,5 +1,5 @@
using System;
using System.Linq;
using System.Collections.Generic;
using SabreTools.Models.AttractMode;
using SabreTools.Serialization.Interfaces;
@@ -17,14 +17,13 @@ namespace SabreTools.Serialization.CrossModel
var metadataFile = header != null ? ConvertHeaderFromInternalModel(header) : new MetadataFile();
var machines = obj.Read<Models.Metadata.Machine[]>(Models.Metadata.MetadataFile.MachineKey);
if (machines != null && machines.Any())
var items = new List<Row>();
foreach (var machine in machines ?? [])
{
metadataFile.Row = machines
.Where(m => m != null)
.SelectMany(ConvertMachineFromInternalModel)
.ToArray();
items.AddRange(ConvertMachineFromInternalModel(machine));
}
metadataFile.Row = [.. items];
return metadataFile;
}
@@ -46,13 +45,10 @@ namespace SabreTools.Serialization.CrossModel
private static Row[] ConvertMachineFromInternalModel(Models.Metadata.Machine item)
{
var roms = item.Read<Models.Metadata.Rom[]>(Models.Metadata.Machine.RomKey);
if (roms == null || !roms.Any())
if (roms == null || roms.Length == 0)
return [];
return roms
.Where(r => r != null)
.Select(rom => ConvertFromInternalModel(rom, item))
.ToArray();
return Array.ConvertAll(roms, r => ConvertFromInternalModel(r, item));
}
/// <summary>

View File

@@ -1,4 +1,4 @@
using System.Linq;
using System;
using SabreTools.Models.AttractMode;
using SabreTools.Serialization.Interfaces;
@@ -17,13 +17,10 @@ namespace SabreTools.Serialization.CrossModel
[Models.Metadata.MetadataFile.HeaderKey] = ConvertHeaderToInternalModel(obj),
};
if (obj?.Row != null && obj.Row.Any())
if (obj?.Row != null && obj.Row.Length > 0)
{
metadataFile[Models.Metadata.MetadataFile.MachineKey] = obj.Row
.Where(r => r != null)
.Select(ConvertMachineToInternalModel)
.Where(m => m != null)
.ToArray();
metadataFile[Models.Metadata.MetadataFile.MachineKey]
= Array.ConvertAll(obj.Row, ConvertMachineToInternalModel);
}
return metadataFile;
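
Several of the hunks in this range also replace Enumerable.Any() on arrays with a plain Length > 0 check. A tiny illustration of why, using placeholder names:

using System.Linq;

public static class AnyVsLengthSketch
{
    // On older frameworks Enumerable.Any() builds an enumerator just to test for a first element;
    // newer runtimes special-case collections, but the call still goes through interface dispatch.
    public static bool HasRowsLinq(string[] rows)
        => rows != null && rows.Any();

    // Length > 0 reads the array's length field directly and drops the System.Linq dependency.
    public static bool HasRowsLength(string[] rows)
        => rows != null && rows.Length > 0;
}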

View File

@@ -1,4 +1,4 @@
using System.Linq;
using System;
using SabreTools.Models.ClrMamePro;
using SabreTools.Serialization.Interfaces;
@@ -22,12 +22,10 @@ namespace SabreTools.Serialization.CrossModel
metadataFile.ClrMamePro = ConvertHeaderFromInternalModel(header);
var machines = obj.Read<Models.Metadata.Machine[]>(Models.Metadata.MetadataFile.MachineKey);
if (machines != null && machines.Any())
if (machines != null && machines.Length > 0)
{
metadataFile.Game = machines
.Where(m => m != null)
.Select(machine => ConvertMachineFromInternalModel(machine, game))
.ToArray();
metadataFile.Game
= Array.ConvertAll(machines, m => ConvertMachineFromInternalModel(m));
}
return metadataFile;
@@ -76,85 +74,40 @@ namespace SabreTools.Serialization.CrossModel
gameBase.SampleOf = item.ReadString(Models.Metadata.Machine.SampleOfKey);
var releases = item.Read<Models.Metadata.Release[]>(Models.Metadata.Machine.ReleaseKey);
if (releases != null && releases.Any())
{
gameBase.Release = releases
.Where(r => r != null)
.Select(ConvertFromInternalModel)
.ToArray();
}
if (releases != null && releases.Length > 0)
gameBase.Release = Array.ConvertAll(releases, ConvertFromInternalModel);
var biosSets = item.Read<Models.Metadata.BiosSet[]>(Models.Metadata.Machine.BiosSetKey);
if (biosSets != null && biosSets.Any())
{
gameBase.BiosSet = biosSets
.Where(r => r != null)
.Select(ConvertFromInternalModel)
.ToArray();
}
if (biosSets != null && biosSets.Length > 0)
gameBase.BiosSet = Array.ConvertAll(biosSets, ConvertFromInternalModel);
var roms = item.Read<Models.Metadata.Rom[]>(Models.Metadata.Machine.RomKey);
if (roms != null && roms.Any())
{
gameBase.Rom = roms
.Where(r => r != null)
.Select(ConvertFromInternalModel)
.ToArray();
}
if (roms != null && roms.Length > 0)
gameBase.Rom = Array.ConvertAll(roms, ConvertFromInternalModel);
var disks = item.Read<Models.Metadata.Disk[]>(Models.Metadata.Machine.DiskKey);
if (disks != null && disks.Any())
{
gameBase.Disk = disks
.Where(d => d != null)
.Select(ConvertFromInternalModel)
.ToArray();
}
if (disks != null && disks.Length > 0)
gameBase.Disk = Array.ConvertAll(disks, ConvertFromInternalModel);
var medias = item.Read<Models.Metadata.Media[]>(Models.Metadata.Machine.MediaKey);
if (medias != null && medias.Any())
{
gameBase.Media = medias
.Where(m => m != null)
.Select(ConvertFromInternalModel)
.ToArray();
}
if (medias != null && medias.Length > 0)
gameBase.Media = Array.ConvertAll(medias, ConvertFromInternalModel);
var samples = item.Read<Models.Metadata.Sample[]>(Models.Metadata.Machine.SampleKey);
if (samples != null && samples.Any())
{
gameBase.Sample = samples
.Where(m => m != null)
.Select(ConvertFromInternalModel)
.ToArray();
}
if (samples != null && samples.Length > 0)
gameBase.Sample = Array.ConvertAll(samples, ConvertFromInternalModel);
var archives = item.Read<Models.Metadata.Archive[]>(Models.Metadata.Machine.ArchiveKey);
if (archives != null && archives.Any())
{
gameBase.Archive = archives
.Where(m => m != null)
.Select(ConvertFromInternalModel)
.ToArray();
}
if (archives != null && archives.Length > 0)
gameBase.Archive = Array.ConvertAll(archives, ConvertFromInternalModel);
var chips = item.Read<Models.Metadata.Chip[]>(Models.Metadata.Machine.ChipKey);
if (chips != null && chips.Any())
{
gameBase.Chip = chips
.Where(m => m != null)
.Select(ConvertFromInternalModel)
.ToArray();
}
if (chips != null && chips.Length > 0)
gameBase.Chip = Array.ConvertAll(chips, ConvertFromInternalModel);
var videos = item.Read<Models.Metadata.Video[]>(Models.Metadata.Machine.VideoKey);
if (videos != null && videos.Any())
{
gameBase.Video = videos
.Where(m => m != null)
.Select(ConvertFromInternalModel)
.ToArray();
}
if (videos != null && videos.Length > 0)
gameBase.Video = Array.ConvertAll(videos, ConvertFromInternalModel);
var sound = item.Read<Models.Metadata.Sound>(Models.Metadata.Machine.SoundKey);
if (sound != null)
@@ -165,13 +118,8 @@ namespace SabreTools.Serialization.CrossModel
gameBase.Input = ConvertFromInternalModel(input);
var dipSwitches = item.Read<Models.Metadata.DipSwitch[]>(Models.Metadata.Machine.DipSwitchKey);
if (dipSwitches != null && dipSwitches.Any())
{
gameBase.DipSwitch = dipSwitches
.Where(m => m != null)
.Select(ConvertFromInternalModel)
.ToArray();
}
if (dipSwitches != null && dipSwitches.Length > 0)
gameBase.DipSwitch = Array.ConvertAll(dipSwitches, ConvertFromInternalModel);
var driver = item.Read<Models.Metadata.Driver>(Models.Metadata.Machine.DriverKey);
if (driver != null)

View File

@@ -1,4 +1,4 @@
using System.Linq;
using System;
using SabreTools.Models.ClrMamePro;
using SabreTools.Serialization.Interfaces;
@@ -17,13 +17,10 @@ namespace SabreTools.Serialization.CrossModel
if (obj?.ClrMamePro != null)
metadataFile[Models.Metadata.MetadataFile.HeaderKey] = ConvertHeaderToInternalModel(obj.ClrMamePro);
if (obj?.Game != null && obj.Game.Any())
if (obj?.Game != null && obj.Game.Length > 0)
{
metadataFile[Models.Metadata.MetadataFile.MachineKey] = obj.Game
.Where(g => g != null)
.Select(ConvertMachineToInternalModel)
.Where(m => m != null)
.ToArray();
metadataFile[Models.Metadata.MetadataFile.MachineKey]
= Array.ConvertAll(obj.Game, ConvertMachineToInternalModel);
}
return metadataFile;
@@ -75,76 +72,58 @@ namespace SabreTools.Serialization.CrossModel
[Models.Metadata.Machine.SampleOfKey] = item.SampleOf,
};
if (item.Release != null && item.Release.Any())
if (item.Release != null && item.Release.Length > 0)
{
machine[Models.Metadata.Machine.ReleaseKey] = item.Release
.Where(r => r != null)
.Select(ConvertToInternalModel)
.ToArray();
machine[Models.Metadata.Machine.ReleaseKey]
= Array.ConvertAll(item.Release, ConvertToInternalModel);
}
if (item.BiosSet != null && item.BiosSet.Any())
if (item.BiosSet != null && item.BiosSet.Length > 0)
{
machine[Models.Metadata.Machine.BiosSetKey] = item.BiosSet
.Where(b => b != null)
.Select(ConvertToInternalModel)
.ToArray();
machine[Models.Metadata.Machine.BiosSetKey]
= Array.ConvertAll(item.BiosSet, ConvertToInternalModel);
}
if (item.Rom != null && item.Rom.Any())
if (item.Rom != null && item.Rom.Length > 0)
{
machine[Models.Metadata.Machine.RomKey] = item.Rom
.Where(r => r != null)
.Select(ConvertToInternalModel)
.ToArray();
machine[Models.Metadata.Machine.RomKey]
= Array.ConvertAll(item.Rom, ConvertToInternalModel);
}
if (item.Disk != null && item.Disk.Any())
if (item.Disk != null && item.Disk.Length > 0)
{
machine[Models.Metadata.Machine.DiskKey] = item.Disk
.Where(d => d != null)
.Select(ConvertToInternalModel)
.ToArray();
machine[Models.Metadata.Machine.DiskKey]
= Array.ConvertAll(item.Disk, ConvertToInternalModel);
}
if (item.Media != null && item.Media.Any())
if (item.Media != null && item.Media.Length > 0)
{
machine[Models.Metadata.Machine.MediaKey] = item.Media
.Where(m => m != null)
.Select(ConvertToInternalModel)
.ToArray();
machine[Models.Metadata.Machine.MediaKey]
= Array.ConvertAll(item.Media, ConvertToInternalModel);
}
if (item.Sample != null && item.Sample.Any())
if (item.Sample != null && item.Sample.Length > 0)
{
machine[Models.Metadata.Machine.SampleKey] = item.Sample
.Where(s => s != null)
.Select(ConvertToInternalModel)
.ToArray();
machine[Models.Metadata.Machine.SampleKey]
= Array.ConvertAll(item.Sample, ConvertToInternalModel);
}
if (item.Archive != null && item.Archive.Any())
if (item.Archive != null && item.Archive.Length > 0)
{
machine[Models.Metadata.Machine.ArchiveKey] = item.Archive
.Where(a => a != null)
.Select(ConvertToInternalModel)
.ToArray();
machine[Models.Metadata.Machine.ArchiveKey]
= Array.ConvertAll(item.Archive, ConvertToInternalModel);
}
if (item.Chip != null && item.Chip.Any())
if (item.Chip != null && item.Chip.Length > 0)
{
machine[Models.Metadata.Machine.ChipKey] = item.Chip
.Where(c => c != null)
.Select(ConvertToInternalModel)
.ToArray();
machine[Models.Metadata.Machine.ChipKey]
= Array.ConvertAll(item.Chip, ConvertToInternalModel);
}
if (item.Video != null)
{
machine[Models.Metadata.Machine.VideoKey] = item.Video
.Where(v => v != null)
.Select(ConvertToInternalModel)
.ToArray();
machine[Models.Metadata.Machine.VideoKey]
= Array.ConvertAll(item.Video, ConvertToInternalModel);
}
if (item.Sound != null)
@@ -153,12 +132,10 @@ namespace SabreTools.Serialization.CrossModel
if (item.Input != null)
machine[Models.Metadata.Machine.InputKey] = ConvertToInternalModel(item.Input);
if (item.DipSwitch != null && item.DipSwitch.Any())
if (item.DipSwitch != null && item.DipSwitch.Length > 0)
{
machine[Models.Metadata.Machine.DipSwitchKey] = item.DipSwitch
.Where(d => d != null)
.Select(ConvertToInternalModel)
.ToArray();
machine[Models.Metadata.Machine.DipSwitchKey]
= Array.ConvertAll(item.DipSwitch, ConvertToInternalModel);
}
if (item.Driver != null)

View File

@@ -1,4 +1,4 @@
using System.Linq;
using System;
using SabreTools.Models.DosCenter;
using SabreTools.Serialization.Interfaces;
@@ -19,13 +19,8 @@ namespace SabreTools.Serialization.CrossModel
metadataFile.DosCenter = ConvertHeaderFromInternalModel(header);
var machines = obj.Read<Models.Metadata.Machine[]>(Models.Metadata.MetadataFile.MachineKey);
if (machines != null && machines.Any())
{
metadataFile.Game = machines
.Where(m => m != null)
.Select(ConvertMachineFromInternalModel)
.ToArray();
}
if (machines != null && machines.Length > 0)
metadataFile.Game = Array.ConvertAll(machines, ConvertMachineFromInternalModel);
return metadataFile;
}
@@ -59,13 +54,8 @@ namespace SabreTools.Serialization.CrossModel
};
var roms = item.Read<Models.Metadata.Rom[]>(Models.Metadata.Machine.RomKey);
if (roms != null && roms.Any())
{
game.File = roms
.Where(r => r != null)
.Select(ConvertFromInternalModel)
.ToArray();
}
if (roms != null && roms.Length > 0)
game.File = Array.ConvertAll(roms, ConvertFromInternalModel);
return game;
}

View File

@@ -1,4 +1,4 @@
using System.Linq;
using System;
using SabreTools.Models.DosCenter;
using SabreTools.Serialization.Interfaces;
@@ -17,12 +17,10 @@ namespace SabreTools.Serialization.CrossModel
if (obj?.DosCenter != null)
metadataFile[Models.Metadata.MetadataFile.HeaderKey] = ConvertHeaderToInternalModel(obj.DosCenter);
if (obj?.Game != null && obj.Game.Any())
if (obj?.Game != null && obj.Game.Length > 0)
{
metadataFile[Models.Metadata.MetadataFile.MachineKey] = obj.Game
.Where(g => g != null)
.Select(ConvertMachineToInternalModel)
.ToArray();
metadataFile[Models.Metadata.MetadataFile.MachineKey]
= Array.ConvertAll(obj.Game, ConvertMachineToInternalModel);
}
return metadataFile;
@@ -56,12 +54,10 @@ namespace SabreTools.Serialization.CrossModel
[Models.Metadata.Machine.NameKey] = item.Name,
};
if (item.File != null && item.File.Any())
if (item.File != null && item.File.Length > 0)
{
machine[Models.Metadata.Machine.RomKey] = item.File
.Where(f => f != null)
.Select(ConvertToInternalModel)
.ToArray();
machine[Models.Metadata.Machine.RomKey]
= Array.ConvertAll(item.File, ConvertToInternalModel);
}
return machine;

View File

@@ -1,5 +1,5 @@
using System;
using System.Linq;
using System.Collections.Generic;
using SabreTools.Models.EverdriveSMDB;
using SabreTools.Serialization.Interfaces;
@@ -16,13 +16,13 @@ namespace SabreTools.Serialization.CrossModel
var metadataFile = new MetadataFile();
var machines = obj.Read<Models.Metadata.Machine[]>(Models.Metadata.MetadataFile.MachineKey);
if (machines != null && machines.Any())
{
metadataFile.Row = machines
.Where(m => m != null)
.SelectMany(ConvertMachineFromInternalModel)
.ToArray();
}
var items = new List<Row>();
foreach (var machine in machines ?? [])
{
items.AddRange(ConvertMachineFromInternalModel(machine));
}
metadataFile.Row = [.. items];
return metadataFile;
}
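Where the old EverdriveSMDB code flattened nested results with SelectMany, the new code accumulates rows in a List<Row> and materializes it with a collection expression. Here is a sketch of that shape, assuming hypothetical Machine/Row types and a converter that returns zero or more rows per machine; both `?? []` and `[.. items]` rely on C# 12 collection expressions.

using System.Collections.Generic;

class Machine { }
class Row { }

static class FlattenSketch
{
    static Row[] ConvertMachineFromInternalModel(Machine m) => new[] { new Row() };

    static Row[] Flatten(Machine[] machines)
    {
        var items = new List<Row>();

        // "machines ?? []" substitutes an empty array when the source is null,
        // so the old null/empty guard disappears.
        foreach (var machine in machines ?? [])
        {
            items.AddRange(ConvertMachineFromInternalModel(machine));
        }

        // "[.. items]" spreads the list into a fresh array, same result as items.ToArray().
        return [.. items];
    }
}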
@@ -33,13 +33,10 @@ namespace SabreTools.Serialization.CrossModel
private static Row[] ConvertMachineFromInternalModel(Models.Metadata.Machine item)
{
var roms = item.Read<Models.Metadata.Rom[]>(Models.Metadata.Machine.RomKey);
if (roms == null || !roms.Any())
if (roms == null || roms.Length == 0)
return [];
return roms
.Where(r => r != null)
.Select(ConvertFromInternalModel)
.ToArray();
return Array.ConvertAll(roms, ConvertFromInternalModel);
}
/// <summary>

View File

@@ -1,4 +1,4 @@
using System.Linq;
using System;
using SabreTools.Models.EverdriveSMDB;
using SabreTools.Serialization.Interfaces;
@@ -17,12 +17,10 @@ namespace SabreTools.Serialization.CrossModel
[Models.Metadata.MetadataFile.HeaderKey] = ConvertHeaderToInternalModel(),
};
if (obj?.Row != null && obj.Row.Any())
if (obj?.Row != null && obj.Row.Length > 0)
{
metadataFile[Models.Metadata.MetadataFile.MachineKey] = obj.Row
.Where(r => r != null)
.Select(ConvertMachineToInternalModel)
.ToArray();
metadataFile[Models.Metadata.MetadataFile.MachineKey]
= Array.ConvertAll(obj.Row, ConvertMachineToInternalModel);
}
return metadataFile;

View File

@@ -1,5 +1,5 @@
using System;
using System.Collections.Generic;
using System.Linq;
using SabreTools.Hashing;
using SabreTools.Models.Hashfile;
using SabreTools.Serialization.Interfaces;
@@ -18,12 +18,10 @@ namespace SabreTools.Serialization.CrossModel
return null;
var machines = obj.Read<Models.Metadata.Machine[]>(Models.Metadata.MetadataFile.MachineKey);
if (machines == null || !machines.Any())
if (machines == null || machines.Length == 0)
return null;
var hashfiles = machines
.Where(m => m != null)
.Select(machine => ConvertMachineFromInternalModel(machine, hash));
var hashfiles = Array.ConvertAll(machines, m => ConvertMachineFromInternalModel(m, hash));
var sfvs = new List<SFV>();
var md5s = new List<MD5>();
@@ -35,38 +33,38 @@ namespace SabreTools.Serialization.CrossModel
foreach (var hashfile in hashfiles)
{
if (hashfile.SFV != null && hashfile.SFV.Any())
if (hashfile.SFV != null && hashfile.SFV.Length > 0)
sfvs.AddRange(hashfile.SFV);
if (hashfile.MD5 != null && hashfile.MD5.Any())
if (hashfile.MD5 != null && hashfile.MD5.Length > 0)
md5s.AddRange(hashfile.MD5);
if (hashfile.SHA1 != null && hashfile.SHA1.Any())
if (hashfile.SHA1 != null && hashfile.SHA1.Length > 0)
sha1s.AddRange(hashfile.SHA1);
if (hashfile.SHA256 != null && hashfile.SHA256.Any())
if (hashfile.SHA256 != null && hashfile.SHA256.Length > 0)
sha256s.AddRange(hashfile.SHA256);
if (hashfile.SHA384 != null && hashfile.SHA384.Any())
if (hashfile.SHA384 != null && hashfile.SHA384.Length > 0)
sha384s.AddRange(hashfile.SHA384);
if (hashfile.SHA512 != null && hashfile.SHA512.Any())
if (hashfile.SHA512 != null && hashfile.SHA512.Length > 0)
sha512s.AddRange(hashfile.SHA512);
if (hashfile.SpamSum != null && hashfile.SpamSum.Any())
if (hashfile.SpamSum != null && hashfile.SpamSum.Length > 0)
spamsums.AddRange(hashfile.SpamSum);
}
var hashfileItem = new Models.Hashfile.Hashfile();
if (sfvs.Any())
hashfileItem.SFV = sfvs.ToArray();
if (md5s.Any())
hashfileItem.MD5 = md5s.ToArray();
if (sha1s.Any())
hashfileItem.SHA1 = sha1s.ToArray();
if (sha256s.Any())
hashfileItem.SHA256 = sha256s.ToArray();
if (sha384s.Any())
hashfileItem.SHA384 = sha384s.ToArray();
if (sha512s.Any())
hashfileItem.SHA512 = sha512s.ToArray();
if (spamsums.Any())
hashfileItem.SpamSum = spamsums.ToArray();
if (sfvs.Count > 0)
hashfileItem.SFV = [.. sfvs];
if (md5s.Count > 0)
hashfileItem.MD5 = [.. md5s];
if (sha1s.Count > 0)
hashfileItem.SHA1 = [.. sha1s];
if (sha256s.Count > 0)
hashfileItem.SHA256 = [.. sha256s];
if (sha384s.Count > 0)
hashfileItem.SHA384 = [.. sha384s];
if (sha512s.Count > 0)
hashfileItem.SHA512 = [.. sha512s];
if (spamsums.Count > 0)
hashfileItem.SpamSum = [.. spamsums];
return hashfileItem;
}
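The aggregation above also trades Enumerable.Any() for direct Count checks and ToArray() for spreads. A rough sketch of the difference follows, with the caveat that allocation behavior depends on the target framework: on older targets, Any() on a List<T> boxes the struct enumerator, while Count is a plain field read (newer runtimes special-case collections inside Any()).

using System.Collections.Generic;
using System.Linq;

static class EmptinessChecks
{
    static bool HasItemsLinq(List<int> values)
    {
        // On older framework targets this boxes the List<int> enumerator and calls MoveNext().
        return values.Any();
    }

    static bool HasItemsCount(List<int> values)
    {
        // Direct field read: no allocation, no interface dispatch.
        return values.Count > 0;
    }

    static int[] Materialize(List<int> values)
    {
        // "[.. values]" copies the list into a new array, equivalent to values.ToArray().
        return [.. values];
    }
}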
@@ -80,13 +78,8 @@ namespace SabreTools.Serialization.CrossModel
return null;
var machines = item.Read<Models.Metadata.Machine[]>(Models.Metadata.MetadataFile.MachineKey);
if (machines != null && machines.Any())
{
return machines
.Where(m => m != null)
.Select(machine => ConvertMachineFromInternalModel(machine, hash))
.ToArray();
}
if (machines != null && machines.Length > 0)
return Array.ConvertAll(machines, m => ConvertMachineFromInternalModel(m, hash));
return null;
}
@@ -102,47 +95,26 @@ namespace SabreTools.Serialization.CrossModel
return new Models.Hashfile.Hashfile
{
SFV = hash == HashType.CRC32 || hash == HashType.CRC32_ISO || hash == HashType.CRC32_Naive || hash == HashType.CRC32_Optimized || hash == HashType.CRC32_Parallel
? roms
.Where(r => r != null)
.Select(ConvertToSFV)
.ToArray()
SFV = hash == HashType.CRC32
? Array.ConvertAll(roms, ConvertToSFV)
: null,
MD5 = hash == HashType.MD5
? roms
.Where(r => r != null)
.Select(ConvertToMD5)
.ToArray()
? Array.ConvertAll(roms, ConvertToMD5)
: null,
SHA1 = hash == HashType.SHA1
? roms
.Where(r => r != null)
.Select(ConvertToSHA1)
.ToArray()
? Array.ConvertAll(roms, ConvertToSHA1)
: null,
SHA256 = hash == HashType.SHA256
? roms
.Where(r => r != null)
.Select(ConvertToSHA256)
.ToArray()
? Array.ConvertAll(roms, ConvertToSHA256)
: null,
SHA384 = hash == HashType.SHA384
? roms
.Where(r => r != null)
.Select(ConvertToSHA384)
.ToArray()
? Array.ConvertAll(roms, ConvertToSHA384)
: null,
SHA512 = hash == HashType.SHA512
? roms
.Where(r => r != null)
.Select(ConvertToSHA512)
.ToArray()
? Array.ConvertAll(roms, ConvertToSHA512)
: null,
SpamSum = hash == HashType.SpamSum
? roms
.Where(r => r != null)
.Select(ConvertToSpamSum)
.ToArray()
? Array.ConvertAll(roms, ConvertToSpamSum)
: null,
};
}
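The converter above still assigns each hash list through its own `hash == X ? Array.ConvertAll(...) : null` ternary. Purely as an illustrative alternative, and not what the commit does, the same dispatch can be collapsed into one switch expression that picks a converter; the types below are hypothetical stand-ins for the Hashfile models.

using System;

enum HashType { CRC32, MD5, SHA1, SHA256, SHA384, SHA512, SpamSum }

class Rom { }
class SFV { }
class MD5 { }

static class HashDispatchSketch
{
    static SFV ConvertToSFV(Rom r) => new SFV();
    static MD5 ConvertToMD5(Rom r) => new MD5();

    // Illustrative alternative only: choose the converter once, run a single ConvertAll,
    // instead of one "hash == X ? Array.ConvertAll(...) : null" ternary per property.
    static object[] ConvertRoms(Rom[] roms, HashType hash) => hash switch
    {
        HashType.CRC32 => Array.ConvertAll(roms, r => (object)ConvertToSFV(r)),
        HashType.MD5 => Array.ConvertAll(roms, r => (object)ConvertToMD5(r)),
        _ => Array.Empty<object>(),
    };
}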

View File

@@ -1,4 +1,4 @@
using System.Linq;
using System;
using SabreTools.Models.Hashfile;
using SabreTools.Serialization.Interfaces;
@@ -42,20 +42,20 @@ namespace SabreTools.Serialization.CrossModel
{
var machine = new Models.Metadata.Machine();
if (item.SFV != null && item.SFV.Any())
machine[Models.Metadata.Machine.RomKey] = item.SFV.Select(ConvertToInternalModel).ToArray();
else if (item.MD5 != null && item.MD5.Any())
machine[Models.Metadata.Machine.RomKey] = item.MD5.Select(ConvertToInternalModel).ToArray();
else if (item.SHA1 != null && item.SHA1.Any())
machine[Models.Metadata.Machine.RomKey] = item.SHA1.Select(ConvertToInternalModel).ToArray();
else if (item.SHA256 != null && item.SHA256.Any())
machine[Models.Metadata.Machine.RomKey] = item.SHA256.Select(ConvertToInternalModel).ToArray();
else if (item.SHA384 != null && item.SHA384.Any())
machine[Models.Metadata.Machine.RomKey] = item.SHA384.Select(ConvertToInternalModel).ToArray();
else if (item.SHA512 != null && item.SHA512.Any())
machine[Models.Metadata.Machine.RomKey] = item.SHA512.Select(ConvertToInternalModel).ToArray();
else if (item.SpamSum != null && item.SpamSum.Any())
machine[Models.Metadata.Machine.RomKey] = item.SpamSum.Select(ConvertToInternalModel).ToArray();
if (item.SFV != null && item.SFV.Length > 0)
machine[Models.Metadata.Machine.RomKey] = Array.ConvertAll(item.SFV, ConvertToInternalModel);
else if (item.MD5 != null && item.MD5.Length > 0)
machine[Models.Metadata.Machine.RomKey] = Array.ConvertAll(item.MD5, ConvertToInternalModel);
else if (item.SHA1 != null && item.SHA1.Length > 0)
machine[Models.Metadata.Machine.RomKey] = Array.ConvertAll(item.SHA1, ConvertToInternalModel);
else if (item.SHA256 != null && item.SHA256.Length > 0)
machine[Models.Metadata.Machine.RomKey] = Array.ConvertAll(item.SHA256, ConvertToInternalModel);
else if (item.SHA384 != null && item.SHA384.Length > 0)
machine[Models.Metadata.Machine.RomKey] = Array.ConvertAll(item.SHA384, ConvertToInternalModel);
else if (item.SHA512 != null && item.SHA512.Length > 0)
machine[Models.Metadata.Machine.RomKey] = Array.ConvertAll(item.SHA512, ConvertToInternalModel);
else if (item.SpamSum != null && item.SpamSum.Length > 0)
machine[Models.Metadata.Machine.RomKey] = Array.ConvertAll(item.SpamSum, ConvertToInternalModel);
return machine;
}

View File

@@ -1,5 +1,5 @@
using System;
using System.Collections.Generic;
using System.Linq;
using SabreTools.Models.Listrom;
using SabreTools.Serialization.Interfaces;
@@ -16,13 +16,8 @@ namespace SabreTools.Serialization.CrossModel
var metadataFile = new MetadataFile();
var machines = obj.Read<Models.Metadata.Machine[]>(Models.Metadata.MetadataFile.MachineKey);
if (machines != null && machines.Any())
{
metadataFile.Set = machines
.Where(m => m != null)
.Select(ConvertMachineFromInternalModel)
.ToArray();
}
if (machines != null && machines.Length > 0)
metadataFile.Set = Array.ConvertAll(machines, ConvertMachineFromInternalModel);
return metadataFile;
}
@@ -43,14 +38,14 @@ namespace SabreTools.Serialization.CrossModel
var roms = item.Read<Models.Metadata.Rom[]>(Models.Metadata.Machine.RomKey);
if (roms != null)
{
rowItems.AddRange(roms.Where(r => r != null).Select(ConvertFromInternalModel));
rowItems.AddRange(Array.ConvertAll(roms, ConvertFromInternalModel));
}
var disks = item.Read<Models.Metadata.Disk[]>(Models.Metadata.Machine.DiskKey);
if (disks != null)
rowItems.AddRange(disks.Where(d => d != null).Select(ConvertFromInternalModel));
rowItems.AddRange(Array.ConvertAll(disks, ConvertFromInternalModel));
set.Row = rowItems.ToArray();
set.Row = [.. rowItems];
return set;
}

View File

@@ -1,5 +1,5 @@
using System;
using System.Collections.Generic;
using System.Linq;
using SabreTools.Models.Listrom;
using SabreTools.Serialization.Interfaces;
@@ -18,12 +18,10 @@ namespace SabreTools.Serialization.CrossModel
[Models.Metadata.MetadataFile.HeaderKey] = ConvertHeaderToInternalModel(),
};
if (obj?.Set != null && obj.Set.Any())
if (obj?.Set != null && obj.Set.Length > 0)
{
metadataFile[Models.Metadata.MetadataFile.MachineKey] = obj.Set
.Where(s => s != null)
.Select(ConvertMachineToInternalModel)
.ToArray();
metadataFile[Models.Metadata.MetadataFile.MachineKey]
= Array.ConvertAll(obj.Set, ConvertMachineToInternalModel);
}
return metadataFile;
@@ -57,16 +55,21 @@ namespace SabreTools.Serialization.CrossModel
machine[Models.Metadata.Machine.NameKey] = item.Driver;
}
if (item.Row != null && item.Row.Any())
if (item.Row != null && item.Row.Length > 0)
{
var datItems = new List<Models.Metadata.DatItem>();
var disks = new List<Models.Metadata.Disk>();
var roms = new List<Models.Metadata.Rom>();
foreach (var file in item.Row)
{
datItems.Add(ConvertToInternalModel(file));
var datItem = ConvertToInternalModel(file);
if (datItem is Models.Metadata.Disk disk)
disks.Add(disk);
else if (datItem is Models.Metadata.Rom rom)
roms.Add(rom);
}
machine[Models.Metadata.Machine.DiskKey] = datItems.Where(i => i.ReadString(Models.Metadata.DatItem.TypeKey) == "disk").Select(d => d as Models.Metadata.Disk).ToArray();
machine[Models.Metadata.Machine.RomKey] = datItems.Where(i => i.ReadString(Models.Metadata.DatItem.TypeKey) == "rom").Select(d => d as Models.Metadata.Rom).ToArray();
machine[Models.Metadata.Machine.DiskKey] = disks.ToArray();
machine[Models.Metadata.Machine.RomKey] = roms.ToArray();
}
return machine;
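The Listrom serializer hunk replaces a string-keyed LINQ filter on TypeKey with a single pass that buckets each converted item by its runtime type. A standalone sketch with hypothetical DatItem, Disk, and Rom stand-ins:

using System.Collections.Generic;

class DatItem { }
class Disk : DatItem { }
class Rom : DatItem { }
class Row { public bool IsDisk; }

static class PartitionSketch
{
    static DatItem ConvertToInternalModel(Row row) => row.IsDisk ? (DatItem)new Disk() : new Rom();

    static (Disk[] Disks, Rom[] Roms) Partition(Row[] rows)
    {
        var disks = new List<Disk>();
        var roms = new List<Rom>();

        // One conversion and one type test per row: no second pass,
        // no string comparison against a TypeKey, no "as" casts that can yield null.
        foreach (var row in rows)
        {
            var item = ConvertToInternalModel(row);
            if (item is Disk disk)
                disks.Add(disk);
            else if (item is Rom rom)
                roms.Add(rom);
        }

        return (disks.ToArray(), roms.ToArray());
    }
}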

View File

@@ -1,4 +1,4 @@
using System.Linq;
using System;
using SabreTools.Models.Listxml;
using SabreTools.Serialization.Interfaces;
@@ -16,13 +16,8 @@ namespace SabreTools.Serialization.CrossModel
var mame = header != null ? ConvertMameFromInternalModel(header) : new Mame();
var machines = obj.Read<Models.Metadata.Machine[]>(Models.Metadata.MetadataFile.MachineKey);
if (machines != null && machines.Any())
{
mame.Game = machines
.Where(m => m != null)
.Select(ConvertMachineFromInternalModel)
.ToArray();
}
if (machines != null && machines.Length > 0)
mame.Game = Array.ConvertAll(machines, ConvertMachineFromInternalModel);
return mame;
}
@@ -39,13 +34,8 @@ namespace SabreTools.Serialization.CrossModel
var mame = header != null ? ConvertMameFromInternalModel(header) : new Mame();
var machines = item.Read<Models.Metadata.Machine[]>(Models.Metadata.MetadataFile.MachineKey);
if (machines != null && machines.Any())
{
mame.Game = machines
.Where(m => m != null)
.Select(ConvertMachineFromInternalModel)
.ToArray();
}
if (machines != null && machines.Length > 0)
mame.Game = Array.ConvertAll(machines, ConvertMachineFromInternalModel);
return mame;
}
@@ -88,76 +78,36 @@ namespace SabreTools.Serialization.CrossModel
};
var biosSets = item.Read<Models.Metadata.BiosSet[]>(Models.Metadata.Machine.BiosSetKey);
if (biosSets != null && biosSets.Any())
{
machine.BiosSet = biosSets
.Where(b => b != null)
.Select(ConvertFromInternalModel)
.ToArray();
}
if (biosSets != null && biosSets.Length > 0)
machine.BiosSet = Array.ConvertAll(biosSets, ConvertFromInternalModel);
var roms = item.Read<Models.Metadata.Rom[]>(Models.Metadata.Machine.RomKey);
if (roms != null && roms.Any())
{
machine.Rom = roms
.Where(r => r != null)
.Select(ConvertFromInternalModel)
.ToArray();
}
if (roms != null && roms.Length > 0)
machine.Rom = Array.ConvertAll(roms, ConvertFromInternalModel);
var disks = item.Read<Models.Metadata.Disk[]>(Models.Metadata.Machine.DiskKey);
if (disks != null && disks.Any())
{
machine.Disk = disks
.Where(d => d != null)
.Select(ConvertFromInternalModel)
.ToArray();
}
if (disks != null && disks.Length > 0)
machine.Disk = Array.ConvertAll(disks, ConvertFromInternalModel);
var deviceRefs = item.Read<Models.Metadata.DeviceRef[]>(Models.Metadata.Machine.DeviceRefKey);
if (deviceRefs != null && deviceRefs.Any())
{
machine.DeviceRef = deviceRefs
.Where(d => d != null)
.Select(ConvertFromInternalModel)
.ToArray();
}
if (deviceRefs != null && deviceRefs.Length > 0)
machine.DeviceRef = Array.ConvertAll(deviceRefs, ConvertFromInternalModel);
var samples = item.Read<Models.Metadata.Sample[]>(Models.Metadata.Machine.SampleKey);
if (samples != null && samples.Any())
{
machine.Sample = samples
.Where(s => s != null)
.Select(ConvertFromInternalModel)
.ToArray();
}
if (samples != null && samples.Length > 0)
machine.Sample = Array.ConvertAll(samples, ConvertFromInternalModel);
var chips = item.Read<Models.Metadata.Chip[]>(Models.Metadata.Machine.ChipKey);
if (chips != null && chips.Any())
{
machine.Chip = chips
.Where(c => c != null)
.Select(ConvertFromInternalModel)
.ToArray();
}
if (chips != null && chips.Length > 0)
machine.Chip = Array.ConvertAll(chips, ConvertFromInternalModel);
var displays = item.Read<Models.Metadata.Display[]>(Models.Metadata.Machine.DisplayKey);
if (displays != null && displays.Any())
{
machine.Display = displays
.Where(d => d != null)
.Select(ConvertFromInternalModel)
.ToArray();
}
if (displays != null && displays.Length > 0)
machine.Display = Array.ConvertAll(displays, ConvertFromInternalModel);
var videos = item.Read<Models.Metadata.Video[]>(Models.Metadata.Machine.VideoKey);
if (videos != null && videos.Any())
{
machine.Video = videos
.Where(d => d != null)
.Select(ConvertFromInternalModel)
.ToArray();
}
if (videos != null && videos.Length > 0)
machine.Video = Array.ConvertAll(videos, ConvertFromInternalModel);
var sound = item.Read<Models.Metadata.Sound>(Models.Metadata.Machine.SoundKey);
if (sound != null)
@@ -168,89 +118,44 @@ namespace SabreTools.Serialization.CrossModel
machine.Input = ConvertFromInternalModel(input);
var dipSwitches = item.Read<Models.Metadata.DipSwitch[]>(Models.Metadata.Machine.DipSwitchKey);
if (dipSwitches != null && dipSwitches.Any())
{
machine.DipSwitch = dipSwitches
.Where(d => d != null)
.Select(ConvertFromInternalModel)
.ToArray();
}
if (dipSwitches != null && dipSwitches.Length > 0)
machine.DipSwitch = Array.ConvertAll(dipSwitches, ConvertFromInternalModel);
var configurations = item.Read<Models.Metadata.Configuration[]>(Models.Metadata.Machine.ConfigurationKey);
if (configurations != null && configurations.Any())
{
machine.Configuration = configurations
.Where(d => d != null)
.Select(ConvertFromInternalModel)
.ToArray();
}
if (configurations != null && configurations.Length > 0)
machine.Configuration = Array.ConvertAll(configurations, ConvertFromInternalModel);
var ports = item.Read<Models.Metadata.Port[]>(Models.Metadata.Machine.PortKey);
if (ports != null && ports.Any())
{
machine.Port = ports
.Where(d => d != null)
.Select(ConvertFromInternalModel)
.ToArray();
}
if (ports != null && ports.Length > 0)
machine.Port = Array.ConvertAll(ports, ConvertFromInternalModel);
var adjusters = item.Read<Models.Metadata.Adjuster[]>(Models.Metadata.Machine.AdjusterKey);
if (adjusters != null && adjusters.Any())
{
machine.Adjuster = adjusters
.Where(d => d != null)
.Select(ConvertFromInternalModel)
.ToArray();
}
if (adjusters != null && adjusters.Length > 0)
machine.Adjuster = Array.ConvertAll(adjusters, ConvertFromInternalModel);
var driver = item.Read<Models.Metadata.Driver>(Models.Metadata.Machine.DriverKey);
if (driver != null)
machine.Driver = ConvertFromInternalModel(driver);
var features = item.Read<Models.Metadata.Feature[]>(Models.Metadata.Machine.FeatureKey);
if (features != null && features.Any())
{
machine.Feature = features
.Where(d => d != null)
.Select(ConvertFromInternalModel)
.ToArray();
}
if (features != null && features.Length > 0)
machine.Feature = Array.ConvertAll(features, ConvertFromInternalModel);
var devices = item.Read<Models.Metadata.Device[]>(Models.Metadata.Machine.DeviceKey);
if (devices != null && devices.Any())
{
machine.Device = devices
.Where(d => d != null)
.Select(ConvertFromInternalModel)
.ToArray();
}
if (devices != null && devices.Length > 0)
machine.Device = Array.ConvertAll(devices, ConvertFromInternalModel);
var slots = item.Read<Models.Metadata.Slot[]>(Models.Metadata.Machine.SlotKey);
if (slots != null && slots.Any())
{
machine.Slot = slots
.Where(d => d != null)
.Select(ConvertFromInternalModel)
.ToArray();
}
if (slots != null && slots.Length > 0)
machine.Slot = Array.ConvertAll(slots, ConvertFromInternalModel);
var softwareLists = item.Read<Models.Metadata.SoftwareList[]>(Models.Metadata.Machine.SoftwareListKey);
if (softwareLists != null && softwareLists.Any())
{
machine.SoftwareList = softwareLists
.Where(d => d != null)
.Select(ConvertFromInternalModel)
.ToArray();
}
if (softwareLists != null && softwareLists.Length > 0)
machine.SoftwareList = Array.ConvertAll(softwareLists, ConvertFromInternalModel);
var ramOptions = item.Read<Models.Metadata.RamOption[]>(Models.Metadata.Machine.RamOptionKey);
if (ramOptions != null && ramOptions.Any())
{
machine.RamOption = ramOptions
.Where(d => d != null)
.Select(ConvertFromInternalModel)
.ToArray();
}
if (ramOptions != null && ramOptions.Length > 0)
machine.RamOption = Array.ConvertAll(ramOptions, ConvertFromInternalModel);
return machine;
}
@@ -347,22 +252,12 @@ namespace SabreTools.Serialization.CrossModel
configuration.Condition = ConvertFromInternalModel(condition);
var confLocations = item.Read<Models.Metadata.ConfLocation[]>(Models.Metadata.Configuration.ConfLocationKey);
if (confLocations != null && confLocations.Any())
{
configuration.ConfLocation = confLocations
.Where(c => c != null)
.Select(ConvertFromInternalModel)
.ToArray();
}
if (confLocations != null && confLocations.Length > 0)
configuration.ConfLocation = Array.ConvertAll(confLocations, ConvertFromInternalModel);
var confSettings = item.Read<Models.Metadata.ConfSetting[]>(Models.Metadata.Configuration.ConfSettingKey);
if (confSettings != null && confSettings.Any())
{
configuration.ConfSetting = confSettings
.Where(c => c != null)
.Select(ConvertFromInternalModel)
.ToArray();
}
if (confSettings != null && confSettings.Length > 0)
configuration.ConfSetting = Array.ConvertAll(confSettings, ConvertFromInternalModel);
return configuration;
}
@@ -442,13 +337,8 @@ namespace SabreTools.Serialization.CrossModel
device.Instance = ConvertFromInternalModel(instance);
var extensions = item.Read<Models.Metadata.Extension[]>(Models.Metadata.Device.ExtensionKey);
if (extensions != null && extensions.Any())
{
device.Extension = extensions
.Where(e => e != null)
.Select(ConvertFromInternalModel)
.ToArray();
}
if (extensions != null && extensions.Length > 0)
device.Extension = Array.ConvertAll(extensions, ConvertFromInternalModel);
return device;
}
@@ -496,22 +386,12 @@ namespace SabreTools.Serialization.CrossModel
dipSwitch.Condition = ConvertFromInternalModel(condition);
var dipLocations = item.Read<Models.Metadata.DipLocation[]>(Models.Metadata.DipSwitch.DipLocationKey);
if (dipLocations != null && dipLocations.Any())
{
dipSwitch.DipLocation = dipLocations
.Where(d => d != null)
.Select(ConvertFromInternalModel)
.ToArray();
}
if (dipLocations != null && dipLocations.Length > 0)
dipSwitch.DipLocation = Array.ConvertAll(dipLocations, ConvertFromInternalModel);
var dipValues = item.Read<Models.Metadata.DipValue[]>(Models.Metadata.DipSwitch.DipValueKey);
if (dipValues != null && dipValues.Any())
{
dipSwitch.DipValue = dipValues
.Where(d => d != null)
.Select(ConvertFromInternalModel)
.ToArray();
}
if (dipValues != null && dipValues.Length > 0)
dipSwitch.DipValue = Array.ConvertAll(dipValues, ConvertFromInternalModel);
return dipSwitch;
}
@@ -644,13 +524,8 @@ namespace SabreTools.Serialization.CrossModel
};
var controls = item.Read<Models.Metadata.Control[]>(Models.Metadata.Input.ControlKey);
if (controls != null && controls.Any())
{
input.Control = controls
.Where(c => c != null)
.Select(ConvertFromInternalModel)
.ToArray();
}
if (controls != null && controls.Length > 0)
input.Control = Array.ConvertAll(controls, ConvertFromInternalModel);
return input;
}
@@ -679,13 +554,8 @@ namespace SabreTools.Serialization.CrossModel
};
var analogs = item.Read<Models.Metadata.Analog[]>(Models.Metadata.Port.AnalogKey);
if (analogs != null && analogs.Any())
{
port.Analog = analogs
.Where(a => a != null)
.Select(ConvertFromInternalModel)
.ToArray();
}
if (analogs != null && analogs.Length > 0)
port.Analog = Array.ConvertAll(analogs, ConvertFromInternalModel);
return port;
}
@@ -750,13 +620,8 @@ namespace SabreTools.Serialization.CrossModel
};
var slotOptions = item.Read<Models.Metadata.SlotOption[]>(Models.Metadata.Slot.SlotOptionKey);
if (slotOptions != null && slotOptions.Any())
{
slot.SlotOption = slotOptions
.Where(s => s != null)
.Select(ConvertFromInternalModel)
.ToArray();
}
if (slotOptions != null && slotOptions.Length > 0)
slot.SlotOption = Array.ConvertAll(slotOptions, ConvertFromInternalModel);
return slot;
}

View File

@@ -1,4 +1,4 @@
using System.Linq;
using System;
using SabreTools.Models.Listxml;
using SabreTools.Serialization.Interfaces;
@@ -17,12 +17,10 @@ namespace SabreTools.Serialization.CrossModel
[Models.Metadata.MetadataFile.HeaderKey] = ConvertHeaderToInternalModel(item),
};
if (item?.Game != null && item.Game.Any())
if (item?.Game != null && item.Game.Length > 0)
{
metadataFile[Models.Metadata.MetadataFile.MachineKey] = item.Game
.Where(g => g != null)
.Select(ConvertMachineToInternalModel)
.ToArray();
metadataFile[Models.Metadata.MetadataFile.MachineKey]
= Array.ConvertAll(item.Game, ConvertMachineToInternalModel);
}
return metadataFile;
@@ -64,68 +62,52 @@ namespace SabreTools.Serialization.CrossModel
[Models.Metadata.Machine.HistoryKey] = item.History,
};
if (item.BiosSet != null && item.BiosSet.Any())
if (item.BiosSet != null && item.BiosSet.Length > 0)
{
machine[Models.Metadata.Machine.BiosSetKey] = item.BiosSet
.Where(b => b != null)
.Select(ConvertToInternalModel)
.ToArray();
machine[Models.Metadata.Machine.BiosSetKey]
= Array.ConvertAll(item.BiosSet, ConvertToInternalModel);
}
if (item.Rom != null && item.Rom.Any())
if (item.Rom != null && item.Rom.Length > 0)
{
machine[Models.Metadata.Machine.RomKey] = item.Rom
.Where(r => r != null)
.Select(ConvertToInternalModel)
.ToArray();
machine[Models.Metadata.Machine.RomKey]
= Array.ConvertAll(item.Rom, ConvertToInternalModel);
}
if (item.Disk != null && item.Disk.Any())
if (item.Disk != null && item.Disk.Length > 0)
{
machine[Models.Metadata.Machine.DiskKey] = item.Disk
.Where(d => d != null)
.Select(ConvertToInternalModel)
.ToArray();
machine[Models.Metadata.Machine.DiskKey]
= Array.ConvertAll(item.Disk, ConvertToInternalModel);
}
if (item.DeviceRef != null && item.DeviceRef.Any())
if (item.DeviceRef != null && item.DeviceRef.Length > 0)
{
machine[Models.Metadata.Machine.DeviceRefKey] = item.DeviceRef
.Where(d => d != null)
.Select(ConvertToInternalModel)
.ToArray();
machine[Models.Metadata.Machine.DeviceRefKey]
= Array.ConvertAll(item.DeviceRef, ConvertToInternalModel);
}
if (item.Sample != null && item.Sample.Any())
if (item.Sample != null && item.Sample.Length > 0)
{
machine[Models.Metadata.Machine.SampleKey] = item.Sample
.Where(s => s != null)
.Select(ConvertToInternalModel)
.ToArray();
machine[Models.Metadata.Machine.SampleKey]
= Array.ConvertAll(item.Sample, ConvertToInternalModel);
}
if (item.Chip != null && item.Chip.Any())
if (item.Chip != null && item.Chip.Length > 0)
{
machine[Models.Metadata.Machine.ChipKey] = item.Chip
.Where(c => c != null)
.Select(ConvertToInternalModel)
.ToArray();
machine[Models.Metadata.Machine.ChipKey]
= Array.ConvertAll(item.Chip, ConvertToInternalModel);
}
if (item.Display != null && item.Display.Any())
if (item.Display != null && item.Display.Length > 0)
{
machine[Models.Metadata.Machine.DisplayKey] = item.Display
.Where(d => d != null)
.Select(ConvertToInternalModel)
.ToArray();
machine[Models.Metadata.Machine.DisplayKey]
= Array.ConvertAll(item.Display, ConvertToInternalModel);
}
if (item.Video != null && item.Video.Any())
if (item.Video != null && item.Video.Length > 0)
{
machine[Models.Metadata.Machine.VideoKey] = item.Video
.Where(v => v != null)
.Select(ConvertToInternalModel)
.ToArray();
machine[Models.Metadata.Machine.VideoKey]
= Array.ConvertAll(item.Video, ConvertToInternalModel);
}
if (item.Sound != null)
@@ -134,79 +116,61 @@ namespace SabreTools.Serialization.CrossModel
if (item.Input != null)
machine[Models.Metadata.Machine.InputKey] = ConvertToInternalModel(item.Input);
if (item.DipSwitch != null && item.DipSwitch.Any())
if (item.DipSwitch != null && item.DipSwitch.Length > 0)
{
machine[Models.Metadata.Machine.DipSwitchKey] = item.DipSwitch
.Where(d => d != null)
.Select(ConvertToInternalModel)
.ToArray();
machine[Models.Metadata.Machine.DipSwitchKey]
= Array.ConvertAll(item.DipSwitch, ConvertToInternalModel);
}
if (item.Configuration != null && item.Configuration.Any())
if (item.Configuration != null && item.Configuration.Length > 0)
{
machine[Models.Metadata.Machine.ConfigurationKey] = item.Configuration
.Where(c => c != null)
.Select(ConvertToInternalModel)
.ToArray();
machine[Models.Metadata.Machine.ConfigurationKey]
= Array.ConvertAll(item.Configuration, ConvertToInternalModel);
}
if (item.Port != null && item.Port.Any())
if (item.Port != null && item.Port.Length > 0)
{
machine[Models.Metadata.Machine.PortKey] = item.Port
.Where(p => p != null)
.Select(ConvertToInternalModel)
.ToArray();
machine[Models.Metadata.Machine.PortKey]
= Array.ConvertAll(item.Port, ConvertToInternalModel);
}
if (item.Adjuster != null && item.Adjuster.Any())
if (item.Adjuster != null && item.Adjuster.Length > 0)
{
machine[Models.Metadata.Machine.AdjusterKey] = item.Adjuster
.Where(a => a != null)
.Select(ConvertToInternalModel)
.ToArray();
machine[Models.Metadata.Machine.AdjusterKey]
= Array.ConvertAll(item.Adjuster, ConvertToInternalModel);
}
if (item.Driver != null)
machine[Models.Metadata.Machine.DriverKey] = ConvertToInternalModel(item.Driver);
if (item.Feature != null && item.Feature.Any())
if (item.Feature != null && item.Feature.Length > 0)
{
machine[Models.Metadata.Machine.FeatureKey] = item.Feature
.Where(f => f != null)
.Select(ConvertToInternalModel)
.ToArray();
machine[Models.Metadata.Machine.FeatureKey]
= Array.ConvertAll(item.Feature, ConvertToInternalModel);
}
if (item.Device != null && item.Device.Any())
if (item.Device != null && item.Device.Length > 0)
{
machine[Models.Metadata.Machine.DeviceKey] = item.Device
.Where(d => d != null)
.Select(ConvertToInternalModel)
.ToArray();
machine[Models.Metadata.Machine.DeviceKey]
= Array.ConvertAll(item.Device, ConvertToInternalModel);
}
if (item.Slot != null && item.Slot.Any())
if (item.Slot != null && item.Slot.Length > 0)
{
machine[Models.Metadata.Machine.SlotKey] = item.Slot
.Where(s => s != null)
.Select(ConvertToInternalModel)
.ToArray();
machine[Models.Metadata.Machine.SlotKey]
= Array.ConvertAll(item.Slot, ConvertToInternalModel);
}
if (item.SoftwareList != null && item.SoftwareList.Any())
if (item.SoftwareList != null && item.SoftwareList.Length > 0)
{
machine[Models.Metadata.Machine.SoftwareListKey] = item.SoftwareList
.Where(s => s != null)
.Select(ConvertToInternalModel)
.ToArray();
machine[Models.Metadata.Machine.SoftwareListKey]
= Array.ConvertAll(item.SoftwareList, ConvertToInternalModel);
}
if (item.RamOption != null && item.RamOption.Any())
if (item.RamOption != null && item.RamOption.Length > 0)
{
machine[Models.Metadata.Machine.RamOptionKey] = item.RamOption
.Where(r => r != null)
.Select(ConvertToInternalModel)
.ToArray();
machine[Models.Metadata.Machine.RamOptionKey]
= Array.ConvertAll(item.RamOption, ConvertToInternalModel);
}
return machine;
@@ -301,20 +265,16 @@ namespace SabreTools.Serialization.CrossModel
if (item.Condition != null)
configuration[Models.Metadata.Configuration.ConditionKey] = ConvertToInternalModel(item.Condition);
if (item.ConfLocation != null && item.ConfLocation.Any())
if (item.ConfLocation != null && item.ConfLocation.Length > 0)
{
configuration[Models.Metadata.Configuration.ConfLocationKey] = item.ConfLocation
.Where(c => c != null)
.Select(ConvertToInternalModel)
.ToArray();
configuration[Models.Metadata.Configuration.ConfLocationKey]
= Array.ConvertAll(item.ConfLocation, ConvertToInternalModel);
}
if (item.ConfSetting != null && item.ConfSetting.Any())
if (item.ConfSetting != null && item.ConfSetting.Length > 0)
{
configuration[Models.Metadata.Configuration.ConfSettingKey] = item.ConfSetting
.Where(c => c != null)
.Select(ConvertToInternalModel)
.ToArray();
configuration[Models.Metadata.Configuration.ConfSettingKey]
= Array.ConvertAll(item.ConfSetting, ConvertToInternalModel);
}
return configuration;
@@ -392,12 +352,10 @@ namespace SabreTools.Serialization.CrossModel
if (item.Instance != null)
device[Models.Metadata.Device.InstanceKey] = ConvertToInternalModel(item.Instance);
if (item.Extension != null && item.Extension.Any())
if (item.Extension != null && item.Extension.Length > 0)
{
device[Models.Metadata.Device.ExtensionKey] = item.Extension
.Where(e => e != null)
.Select(ConvertToInternalModel)
.ToArray();
device[Models.Metadata.Device.ExtensionKey]
= Array.ConvertAll(item.Extension, ConvertToInternalModel);
}
return device;
@@ -444,20 +402,16 @@ namespace SabreTools.Serialization.CrossModel
if (item.Condition != null)
dipSwitch[Models.Metadata.DipSwitch.ConditionKey] = ConvertToInternalModel(item.Condition);
if (item.DipLocation != null && item.DipLocation.Any())
if (item.DipLocation != null && item.DipLocation.Length > 0)
{
dipSwitch[Models.Metadata.DipSwitch.DipLocationKey] = item.DipLocation
.Where(d => d != null)
.Select(ConvertToInternalModel)
.ToArray();
dipSwitch[Models.Metadata.DipSwitch.DipLocationKey]
= Array.ConvertAll(item.DipLocation, ConvertToInternalModel);
}
if (item.DipValue != null && item.DipValue.Any())
if (item.DipValue != null && item.DipValue.Length > 0)
{
dipSwitch[Models.Metadata.DipSwitch.DipValueKey] = item.DipValue
.Where(d => d != null)
.Select(ConvertToInternalModel)
.ToArray();
dipSwitch[Models.Metadata.DipSwitch.DipValueKey]
= Array.ConvertAll(item.DipValue, ConvertToInternalModel);
}
return dipSwitch;
@@ -589,12 +543,10 @@ namespace SabreTools.Serialization.CrossModel
[Models.Metadata.Input.CoinsKey] = item.Coins,
};
if (item.Control != null && item.Control.Any())
if (item.Control != null && item.Control.Length > 0)
{
input[Models.Metadata.Input.ControlKey] = item.Control
.Where(c => c != null)
.Select(ConvertToInternalModel)
.ToArray();
input[Models.Metadata.Input.ControlKey]
= Array.ConvertAll(item.Control, ConvertToInternalModel);
}
return input;
@@ -623,12 +575,10 @@ namespace SabreTools.Serialization.CrossModel
[Models.Metadata.Port.TagKey] = item.Tag,
};
if (item.Analog != null && item.Analog.Any())
if (item.Analog != null && item.Analog.Length > 0)
{
port[Models.Metadata.Port.AnalogKey] = item.Analog
.Where(a => a != null)
.Select(ConvertToInternalModel)
.ToArray();
port[Models.Metadata.Port.AnalogKey]
= Array.ConvertAll(item.Analog, ConvertToInternalModel);
}
return port;
@@ -693,12 +643,10 @@ namespace SabreTools.Serialization.CrossModel
[Models.Metadata.Slot.NameKey] = item.Name,
};
if (item.SlotOption != null && item.SlotOption.Any())
if (item.SlotOption != null && item.SlotOption.Length > 0)
{
slot[Models.Metadata.Slot.SlotOptionKey] = item.SlotOption
.Where(s => s != null)
.Select(ConvertToInternalModel)
.ToArray();
slot[Models.Metadata.Slot.SlotOptionKey]
= Array.ConvertAll(item.SlotOption, ConvertToInternalModel);
}
return slot;

View File

@@ -1,4 +1,4 @@
using System.Linq;
using System;
using SabreTools.Models.Logiqx;
using SabreTools.Serialization.Interfaces;
@@ -28,13 +28,8 @@ namespace SabreTools.Serialization.CrossModel
// TODO: Handle Dir items - Currently need to be generated from the machines
var machines = obj.Read<Models.Metadata.Machine[]>(Models.Metadata.MetadataFile.MachineKey);
if (machines != null && machines.Any())
{
datafile.Game = machines
.Where(m => m != null)
.Select(machine => ConvertMachineFromInternalModel(machine, game))
.ToArray();
}
if (machines != null && machines.Length > 0)
datafile.Game = Array.ConvertAll(machines, m => ConvertMachineFromInternalModel(m, game));
return datafile;
}
@@ -130,89 +125,44 @@ namespace SabreTools.Serialization.CrossModel
gameBase.Trurip = trurip;
var releases = item.Read<Models.Metadata.Release[]>(Models.Metadata.Machine.ReleaseKey);
if (releases != null && releases.Any())
{
gameBase.Release = releases
.Where(r => r != null)
.Select(ConvertFromInternalModel)
.ToArray();
}
if (releases != null && releases.Length > 0)
gameBase.Release = Array.ConvertAll(releases, ConvertFromInternalModel);
var biosSets = item.Read<Models.Metadata.BiosSet[]>(Models.Metadata.Machine.BiosSetKey);
if (biosSets != null && biosSets.Any())
{
gameBase.BiosSet = biosSets
.Where(b => b != null)
.Select(ConvertFromInternalModel)
.ToArray();
}
if (biosSets != null && biosSets.Length > 0)
gameBase.BiosSet = Array.ConvertAll(biosSets, ConvertFromInternalModel);
var roms = item.Read<Models.Metadata.Rom[]>(Models.Metadata.Machine.RomKey);
if (roms != null && roms.Any())
{
gameBase.Rom = roms
.Where(r => r != null)
.Select(ConvertFromInternalModel)
.ToArray();
}
if (roms != null && roms.Length > 0)
gameBase.Rom = Array.ConvertAll(roms, ConvertFromInternalModel);
var disks = item.Read<Models.Metadata.Disk[]>(Models.Metadata.Machine.DiskKey);
if (disks != null && disks.Any())
{
gameBase.Disk = disks
.Where(d => d != null)
.Select(ConvertFromInternalModel)
.ToArray();
}
if (disks != null && disks.Length > 0)
gameBase.Disk = Array.ConvertAll(disks, ConvertFromInternalModel);
var medias = item.Read<Models.Metadata.Media[]>(Models.Metadata.Machine.MediaKey);
if (medias != null && medias.Any())
{
gameBase.Media = medias
.Where(m => m != null)
.Select(ConvertFromInternalModel)
.ToArray();
}
if (medias != null && medias.Length > 0)
gameBase.Media = Array.ConvertAll(medias, ConvertFromInternalModel);
var deviceRefs = item.Read<Models.Metadata.DeviceRef[]>(Models.Metadata.Machine.DeviceRefKey);
if (deviceRefs != null && deviceRefs.Any())
{
gameBase.DeviceRef = deviceRefs
.Where(m => m != null)
.Select(ConvertFromInternalModel)
.ToArray();
}
if (deviceRefs != null && deviceRefs.Length > 0)
gameBase.DeviceRef = Array.ConvertAll(deviceRefs, ConvertFromInternalModel);
var samples = item.Read<Models.Metadata.Sample[]>(Models.Metadata.Machine.SampleKey);
if (samples != null && samples.Any())
{
gameBase.Sample = samples
.Where(m => m != null)
.Select(ConvertFromInternalModel)
.ToArray();
}
if (samples != null && samples.Length > 0)
gameBase.Sample = Array.ConvertAll(samples, ConvertFromInternalModel);
var archives = item.Read<Models.Metadata.Archive[]>(Models.Metadata.Machine.ArchiveKey);
if (archives != null && archives.Any())
{
gameBase.Archive = archives
.Where(m => m != null)
.Select(ConvertFromInternalModel)
.ToArray();
}
if (archives != null && archives.Length > 0)
gameBase.Archive = Array.ConvertAll(archives, ConvertFromInternalModel);
var driver = item.Read<Models.Metadata.Driver>(Models.Metadata.Machine.DriverKey);
if (driver != null)
gameBase.Driver = ConvertFromInternalModel(driver);
var softwareLists = item.Read<Models.Metadata.SoftwareList[]>(Models.Metadata.Machine.SoftwareListKey);
if (softwareLists != null && softwareLists.Any())
{
gameBase.SoftwareList = softwareLists
.Where(m => m != null)
.Select(ConvertFromInternalModel)
.ToArray();
}
if (softwareLists != null && softwareLists.Length > 0)
gameBase.SoftwareList = Array.ConvertAll(softwareLists, ConvertFromInternalModel);
return gameBase;
}

View File

@@ -1,6 +1,5 @@
using System;
using System.Collections.Generic;
using System.Linq;
using SabreTools.Models.Logiqx;
using SabreTools.Serialization.Interfaces;
@@ -20,21 +19,15 @@ namespace SabreTools.Serialization.CrossModel
var machines = new List<Models.Metadata.Machine>();
if (item.Game != null && item.Game.Any())
if (item.Game != null && item.Game.Length > 0)
machines.AddRange(Array.ConvertAll(item.Game, ConvertMachineToInternalModel));
foreach (var dir in item.Dir ?? [])
{
machines.AddRange(item.Game
.Where(g => g != null)
.Select(ConvertMachineToInternalModel));
machines.AddRange(ConvertDirToInternalModel(dir));
}
if (item.Dir != null && item.Dir.Any())
{
machines.AddRange(item.Dir
.Where(d => d != null)
.SelectMany(ConvertDirToInternalModel));
}
if (machines.Any())
if (machines.Count > 0)
metadataFile[Models.Metadata.MetadataFile.MachineKey] = machines.ToArray();
return metadataFile;
@@ -103,18 +96,15 @@ namespace SabreTools.Serialization.CrossModel
/// </summary>
private static Models.Metadata.Machine[] ConvertDirToInternalModel(Dir item)
{
if (item.Game == null || !item.Game.Any())
if (item.Game == null || item.Game.Length == 0)
return [];
return item.Game
.Where(g => g != null)
.Select(game =>
return Array.ConvertAll(item.Game, g =>
{
var machine = ConvertMachineToInternalModel(game);
var machine = ConvertMachineToInternalModel(g);
machine[Models.Metadata.Machine.DirNameKey] = item.Name;
return machine;
})
.ToArray();
});
}
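In the Logiqx Dir conversion, Array.ConvertAll takes a statement lambda so every converted machine can be stamped with the enclosing Dir's name before it is returned. A sketch with hypothetical types follows; a plain DirName property stands in for the dictionary-backed Machine.DirNameKey entry.

using System;

class Game { public string Name; }
class Machine { public string Name; public string DirName; }
class Dir { public string Name; public Game[] Game; }

static class DirSketch
{
    static Machine ConvertMachineToInternalModel(Game g) => new Machine { Name = g.Name };

    static Machine[] ConvertDirToInternalModel(Dir item)
    {
        if (item.Game == null || item.Game.Length == 0)
            return Array.Empty<Machine>();

        // The lambda body can run several statements per element, so per-item
        // post-processing (here: recording the containing Dir) fits inside ConvertAll.
        return Array.ConvertAll(item.Game, g =>
        {
            var machine = ConvertMachineToInternalModel(g);
            machine.DirName = item.Name;
            return machine;
        });
    }
}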
/// <summary>
@@ -146,35 +136,35 @@ namespace SabreTools.Serialization.CrossModel
[Models.Metadata.Machine.TruripKey] = item.Trurip,
};
if (item.Release != null && item.Release.Any())
machine[Models.Metadata.Machine.ReleaseKey] = item.Release.Select(ConvertToInternalModel).ToArray();
if (item.Release != null && item.Release.Length > 0)
machine[Models.Metadata.Machine.ReleaseKey] = Array.ConvertAll(item.Release, ConvertToInternalModel);
if (item.BiosSet != null && item.BiosSet.Any())
machine[Models.Metadata.Machine.BiosSetKey] = item.BiosSet.Select(ConvertToInternalModel).ToArray();
if (item.BiosSet != null && item.BiosSet.Length > 0)
machine[Models.Metadata.Machine.BiosSetKey] = Array.ConvertAll(item.BiosSet, ConvertToInternalModel);
if (item.Rom != null && item.Rom.Any())
machine[Models.Metadata.Machine.RomKey] = item.Rom.Select(ConvertToInternalModel).ToArray();
if (item.Rom != null && item.Rom.Length > 0)
machine[Models.Metadata.Machine.RomKey] = Array.ConvertAll(item.Rom, ConvertToInternalModel);
if (item.Disk != null && item.Disk.Any())
machine[Models.Metadata.Machine.DiskKey] = item.Disk.Select(ConvertToInternalModel).ToArray();
if (item.Disk != null && item.Disk.Length > 0)
machine[Models.Metadata.Machine.DiskKey] = Array.ConvertAll(item.Disk, ConvertToInternalModel);
if (item.Media != null && item.Media.Any())
machine[Models.Metadata.Machine.MediaKey] = item.Media.Select(ConvertToInternalModel).ToArray();
if (item.Media != null && item.Media.Length > 0)
machine[Models.Metadata.Machine.MediaKey] = Array.ConvertAll(item.Media, ConvertToInternalModel);
if (item.DeviceRef != null && item.DeviceRef.Any())
machine[Models.Metadata.Machine.DeviceRefKey] = item.DeviceRef.Select(ConvertToInternalModel).ToArray();
if (item.DeviceRef != null && item.DeviceRef.Length > 0)
machine[Models.Metadata.Machine.DeviceRefKey] = Array.ConvertAll(item.DeviceRef, ConvertToInternalModel);
if (item.Sample != null && item.Sample.Any())
machine[Models.Metadata.Machine.SampleKey] = item.Sample.Select(ConvertToInternalModel).ToArray();
if (item.Sample != null && item.Sample.Length > 0)
machine[Models.Metadata.Machine.SampleKey] = Array.ConvertAll(item.Sample, ConvertToInternalModel);
if (item.Archive != null && item.Archive.Any())
machine[Models.Metadata.Machine.ArchiveKey] = item.Archive.Select(ConvertToInternalModel).ToArray();
if (item.Archive != null && item.Archive.Length > 0)
machine[Models.Metadata.Machine.ArchiveKey] = Array.ConvertAll(item.Archive, ConvertToInternalModel);
if (item.Driver != null)
machine[Models.Metadata.Machine.DriverKey] = ConvertToInternalModel(item.Driver);
if (item.SoftwareList != null && item.SoftwareList.Any())
machine[Models.Metadata.Machine.SoftwareListKey] = item.SoftwareList.Select(ConvertToInternalModel).ToArray();
if (item.SoftwareList != null && item.SoftwareList.Length > 0)
machine[Models.Metadata.Machine.SoftwareListKey] = Array.ConvertAll(item.SoftwareList, ConvertToInternalModel);
return machine;
}

View File

@@ -1,4 +1,4 @@
using System.Linq;
using System;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.CrossModel
@@ -15,13 +15,8 @@ namespace SabreTools.Serialization.CrossModel
var m1 = header != null ? ConvertM1FromInternalModel(header) : new Models.Listxml.M1();
var machines = obj.Read<Models.Metadata.Machine[]>(Models.Metadata.MetadataFile.MachineKey);
if (machines != null && machines.Any())
{
m1.Game = machines
.Where(m => m != null)
.Select(Listxml.ConvertMachineFromInternalModel)
.ToArray();
}
if (machines != null && machines.Length > 0)
m1.Game = Array.ConvertAll(machines, Listxml.ConvertMachineFromInternalModel);
return m1;
}

View File

@@ -1,4 +1,4 @@
using System.Linq;
using System;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.CrossModel
@@ -16,12 +16,10 @@ namespace SabreTools.Serialization.CrossModel
[Models.Metadata.MetadataFile.HeaderKey] = ConvertHeaderToInternalModel(item),
};
if (item?.Game != null && item.Game.Any())
if (item?.Game != null && item.Game.Length > 0)
{
metadataFile[Models.Metadata.MetadataFile.MachineKey] = item.Game
.Where(g => g != null)
.Select(Listxml.ConvertMachineToInternalModel)
.ToArray();
metadataFile[Models.Metadata.MetadataFile.MachineKey]
= Array.ConvertAll(item.Game, Listxml.ConvertMachineToInternalModel);
}
return metadataFile;

View File

@@ -1,4 +1,4 @@
using System.Linq;
using System;
using SabreTools.Models.OfflineList;
using SabreTools.Serialization.Interfaces;
@@ -16,14 +16,11 @@ namespace SabreTools.Serialization.CrossModel
var dat = header != null ? ConvertHeaderFromInternalModel(header) : new Dat();
var machines = obj.Read<Models.Metadata.Machine[]>(Models.Metadata.MetadataFile.MachineKey);
if (machines != null && machines.Any())
if (machines != null && machines.Length > 0)
{
dat.Games = new Games
{
Game = machines
.Where(m => m != null)
.Select(ConvertMachineFromInternalModel)
.ToArray()
Game = Array.ConvertAll(machines, ConvertMachineFromInternalModel),
};
}
@@ -101,17 +98,12 @@ namespace SabreTools.Serialization.CrossModel
};
var roms = item.Read<Models.Metadata.Rom[]>(Models.Metadata.Machine.RomKey);
if (roms != null && roms.Any())
if (roms != null && roms.Length > 0)
{
game.RomSize = roms
.Select(rom => rom.ReadString(Models.Metadata.Rom.SizeKey))
.FirstOrDefault(s => s != null);
var romCRCs = roms
.Where(r => r != null)
.Select(ConvertFromInternalModel)
.ToArray();
var romSizes = Array.ConvertAll(roms, r => r.ReadLong(Models.Metadata.Rom.SizeKey) ?? -1);
game.RomSize = Array.Find(romSizes, s => s > -1).ToString();
var romCRCs = Array.ConvertAll(roms, ConvertFromInternalModel);;
game.Files = new Models.OfflineList.Files { RomCRC = romCRCs };
}

View File

@@ -1,4 +1,4 @@
using System.Linq;
using System;
using SabreTools.Models.OfflineList;
using SabreTools.Serialization.Interfaces;
@@ -17,12 +17,10 @@ namespace SabreTools.Serialization.CrossModel
[Models.Metadata.MetadataFile.HeaderKey] = ConvertHeaderToInternalModel(item),
};
if (item?.Games?.Game != null && item.Games.Game.Any())
if (item?.Games?.Game != null && item.Games.Game.Length > 0)
{
metadataFile[Models.Metadata.MetadataFile.MachineKey] = item.Games.Game
.Where(g => g != null)
.Select(ConvertMachineToInternalModel)
.ToArray();
metadataFile[Models.Metadata.MetadataFile.MachineKey]
= Array.ConvertAll(item.Games.Game, ConvertMachineToInternalModel);
}
return metadataFile;
@@ -82,17 +80,15 @@ namespace SabreTools.Serialization.CrossModel
[Models.Metadata.Machine.DuplicateIDKey] = item.DuplicateID,
};
if (item.Files?.RomCRC != null && item.Files.RomCRC.Any())
if (item.Files?.RomCRC != null && item.Files.RomCRC.Length > 0)
{
machine[Models.Metadata.Machine.RomKey] = item.Files.RomCRC
.Where(r => r != null)
.Select(romCRC =>
{
var rom = ConvertToInternalModel(romCRC);
rom[Models.Metadata.Rom.SizeKey] = item.RomSize;
return rom;
})
.ToArray();
machine[Models.Metadata.Machine.RomKey]
= Array.ConvertAll(item.Files.RomCRC, romCRC =>
{
var rom = ConvertToInternalModel(romCRC);
rom[Models.Metadata.Rom.SizeKey] = item.RomSize;
return rom;
});
}
return machine;

View File

@@ -1,4 +1,4 @@
using System.Linq;
using System;
using SabreTools.Models.OpenMSX;
using SabreTools.Serialization.Interfaces;
@@ -16,13 +16,8 @@ namespace SabreTools.Serialization.CrossModel
var softwareDb = header != null ? ConvertHeaderFromInternalModel(header) : new SoftwareDb();
var machines = obj.Read<Models.Metadata.Machine[]>(Models.Metadata.MetadataFile.MachineKey);
if (machines != null && machines.Any())
{
softwareDb.Software = machines
.Where(m => m != null)
.Select(ConvertMachineFromInternalModel)
.ToArray();
}
if (machines != null && machines.Length > 0)
softwareDb.Software = Array.ConvertAll(machines, ConvertMachineFromInternalModel);
return softwareDb;
}
@@ -55,13 +50,8 @@ namespace SabreTools.Serialization.CrossModel
};
var dumps = item.Read<Models.Metadata.Dump[]>(Models.Metadata.Machine.DumpKey);
if (dumps != null && dumps.Any())
{
game.Dump = dumps
.Where(d => d != null)
.Select(ConvertFromInternalModel)
.ToArray();
}
if (dumps != null && dumps.Length > 0)
game.Dump = Array.ConvertAll(dumps, ConvertFromInternalModel);
return game;
}

View File

@@ -1,4 +1,4 @@
using System.Linq;
using System;
using SabreTools.Models.OpenMSX;
using SabreTools.Serialization.Interfaces;
@@ -17,12 +17,10 @@ namespace SabreTools.Serialization.CrossModel
[Models.Metadata.MetadataFile.HeaderKey] = ConvertHeaderToInternalModel(item),
};
if (item?.Software != null && item.Software.Any())
if (item?.Software != null && item.Software.Length > 0)
{
metadataFile[Models.Metadata.MetadataFile.MachineKey] = item.Software
.Where(s => s != null)
.Select(ConvertMachineToInternalModel)
.ToArray();
metadataFile[Models.Metadata.MetadataFile.MachineKey]
= Array.ConvertAll(item.Software, ConvertMachineToInternalModel);
}
return metadataFile;
@@ -55,12 +53,10 @@ namespace SabreTools.Serialization.CrossModel
[Models.Metadata.Machine.CountryKey] = item.Country,
};
if (item.Dump != null && item.Dump.Any())
if (item.Dump != null && item.Dump.Length > 0)
{
machine[Models.Metadata.Machine.DumpKey] = item.Dump
.Where(d => d != null)
.Select(ConvertToInternalModel)
.ToArray();
machine[Models.Metadata.Machine.DumpKey]
= Array.ConvertAll(item.Dump, ConvertToInternalModel);
}
return machine;

View File

@@ -1,5 +1,5 @@
using System;
using System.Linq;
using System.Collections.Generic;
using SabreTools.Models.RomCenter;
using SabreTools.Serialization.Interfaces;
@@ -17,17 +17,13 @@ namespace SabreTools.Serialization.CrossModel
var metadataFile = header != null ? ConvertHeaderFromInternalModel(header) : new MetadataFile();
var machines = obj.Read<Models.Metadata.Machine[]>(Models.Metadata.MetadataFile.MachineKey);
if (machines != null && machines.Any())
var items = new List<Rom>();
foreach (var machine in machines ?? [])
{
metadataFile.Games = new Games
{
Rom = machines
.Where(m => m != null)
.SelectMany(ConvertMachineFromInternalModel)
.ToArray()
};
items.AddRange(ConvertMachineFromInternalModel(machine));
}
metadataFile.Games = new Games { Rom = [.. items] };
return metadataFile;
}
@@ -93,10 +89,7 @@ namespace SabreTools.Serialization.CrossModel
if (roms == null)
return [];
return roms
.Where(r => r != null)
.Select(rom => ConvertFromInternalModel(rom, item))
.ToArray();
return Array.ConvertAll(roms, r => ConvertFromInternalModel(r, item));
}
/// <summary>

View File

@@ -1,4 +1,4 @@
using System.Linq;
using System;
using SabreTools.Models.RomCenter;
using SabreTools.Serialization.Interfaces;
@@ -17,11 +17,10 @@ namespace SabreTools.Serialization.CrossModel
[Models.Metadata.MetadataFile.HeaderKey] = ConvertHeaderToInternalModel(obj),
};
if (obj?.Games?.Rom != null && obj.Games.Rom.Any())
if (obj?.Games?.Rom != null && obj.Games.Rom.Length > 0)
{
metadataFile[Models.Metadata.MetadataFile.MachineKey] = obj.Games.Rom
.Where(r => r != null)
.Select(ConvertMachineToInternalModel).ToArray();
metadataFile[Models.Metadata.MetadataFile.MachineKey]
= Array.ConvertAll(obj.Games.Rom, ConvertMachineToInternalModel);
}
return metadataFile;

View File

@@ -1,5 +1,5 @@
using System;
using System.Collections.Generic;
using System.Linq;
using SabreTools.Models.SeparatedValue;
using SabreTools.Serialization.Interfaces;
@@ -17,14 +17,13 @@ namespace SabreTools.Serialization.CrossModel
var metadataFile = header != null ? ConvertHeaderFromInternalModel(header) : new MetadataFile();
var machines = obj.Read<Models.Metadata.Machine[]>(Models.Metadata.MetadataFile.MachineKey);
if (machines != null && machines.Any())
var items = new List<Row>();
foreach (var machine in machines ?? [])
{
metadataFile.Row = machines
.Where(m => m != null)
.SelectMany(m => ConvertMachineFromInternalModel(m, header))
.ToArray();
items.AddRange(ConvertMachineFromInternalModel(machine, header));
}
metadataFile.Row = [.. items];
return metadataFile;
}
@@ -48,27 +47,24 @@ namespace SabreTools.Serialization.CrossModel
var rowItems = new List<Row>();
var roms = item.Read<Models.Metadata.Rom[]>(Models.Metadata.Machine.RomKey);
if (roms != null && roms.Any())
if (roms != null && roms.Length > 0)
{
rowItems.AddRange(roms
.Where(r => r != null)
.Select(rom => ConvertFromInternalModel(rom, item, header)));
rowItems.AddRange(
Array.ConvertAll(roms, r => ConvertFromInternalModel(r, item, header)));
}
var disks = item.Read<Models.Metadata.Disk[]>(Models.Metadata.Machine.DiskKey);
if (disks != null && disks.Any())
if (disks != null && disks.Length > 0)
{
rowItems.AddRange(disks
.Where(d => d != null)
.Select(disk => ConvertFromInternalModel(disk, item, header)));
rowItems.AddRange(
Array.ConvertAll(disks, d => ConvertFromInternalModel(d, item, header)));
}
var media = item.Read<Models.Metadata.Media[]>(Models.Metadata.Machine.MediaKey);
if (media != null && media.Any())
if (media != null && media.Length > 0)
{
rowItems.AddRange(media
.Where(m => m != null)
.Select(medium => ConvertFromInternalModel(medium, item, header)));
rowItems.AddRange(
Array.ConvertAll(media, m => ConvertFromInternalModel(m, item, header)));
}
return rowItems.ToArray();

View File

@@ -1,4 +1,4 @@
using System.Linq;
using System;
using SabreTools.Models.SeparatedValue;
using SabreTools.Serialization.Interfaces;
@@ -17,8 +17,11 @@ namespace SabreTools.Serialization.CrossModel
[Models.Metadata.MetadataFile.HeaderKey] = ConvertHeaderToInternalModel(obj),
};
if (obj?.Row != null && obj.Row.Any())
metadataFile[Models.Metadata.MetadataFile.MachineKey] = obj.Row.Select(ConvertMachineToInternalModel).ToArray();
if (obj?.Row != null && obj.Row.Length > 0)
{
metadataFile[Models.Metadata.MetadataFile.MachineKey]
= Array.ConvertAll(obj.Row, ConvertMachineToInternalModel);
}
return metadataFile;
}
@@ -33,7 +36,7 @@ namespace SabreTools.Serialization.CrossModel
[Models.Metadata.Header.HeaderKey] = item.Header,
};
if (item.Row != null && item.Row.Any())
if (item.Row != null && item.Row.Length > 0)
{
var first = item.Row[0];
//header[Models.Metadata.Header.FileNameKey] = first.FileName; // Not possible to map

View File

@@ -1,4 +1,4 @@
using System.Linq;
using System;
using SabreTools.Models.SoftwareList;
using SabreTools.Serialization.Interfaces;
@@ -16,13 +16,8 @@ namespace SabreTools.Serialization.CrossModel
var metadataFile = header != null ? ConvertHeaderFromInternalModel(header) : new Models.SoftwareList.SoftwareList();
var machines = obj.Read<Models.Metadata.Machine[]>(Models.Metadata.MetadataFile.MachineKey);
if (machines != null && machines.Any())
{
metadataFile.Software = machines
.Where(m => m != null)
.Select(ConvertMachineFromInternalModel)
.ToArray();
}
if (machines != null && machines.Length > 0)
metadataFile.Software = Array.ConvertAll(machines, ConvertMachineFromInternalModel);
return metadataFile;
}
@@ -58,31 +53,16 @@ namespace SabreTools.Serialization.CrossModel
};
var infos = item.Read<Models.Metadata.Info[]>(Models.Metadata.Machine.InfoKey);
if (infos != null && infos.Any())
{
software.Info = infos
.Where(i => i != null)
.Select(ConvertFromInternalModel)
.ToArray();
}
if (infos != null && infos.Length > 0)
software.Info = Array.ConvertAll(infos, ConvertFromInternalModel);
var sharedFeats = item.Read<Models.Metadata.SharedFeat[]>(Models.Metadata.Machine.SharedFeatKey);
if (sharedFeats != null && sharedFeats.Any())
{
software.SharedFeat = sharedFeats
.Where(s => s != null)
.Select(ConvertFromInternalModel)
.ToArray();
}
if (sharedFeats != null && sharedFeats.Length > 0)
software.SharedFeat = Array.ConvertAll(sharedFeats, ConvertFromInternalModel);
var parts = item.Read<Models.Metadata.Part[]>(Models.Metadata.Machine.PartKey);
if (parts != null && parts.Any())
{
software.Part = parts
.Where(p => p != null)
.Select(ConvertFromInternalModel)
.ToArray();
}
if (parts != null && parts.Length > 0)
software.Part = Array.ConvertAll(parts, ConvertFromInternalModel);
return software;
}
@@ -101,13 +81,8 @@ namespace SabreTools.Serialization.CrossModel
};
var roms = item.Read<Models.Metadata.Rom[]>(Models.Metadata.DataArea.RomKey);
if (roms != null && roms.Any())
{
dataArea.Rom = roms
.Where(r => r != null)
.Select(ConvertFromInternalModel)
.ToArray();
}
if (roms != null && roms.Length > 0)
dataArea.Rom = Array.ConvertAll(roms, ConvertFromInternalModel);
return dataArea;
}
@@ -125,13 +100,8 @@ namespace SabreTools.Serialization.CrossModel
};
var dipValues = item.Read<Models.Metadata.DipValue[]>(Models.Metadata.DipSwitch.DipValueKey);
if (dipValues != null && dipValues.Any())
{
dipSwitch.DipValue = dipValues
.Where(d => d != null)
.Select(ConvertFromInternalModel)
.ToArray();
}
if (dipValues != null && dipValues.Length > 0)
dipSwitch.DipValue = Array.ConvertAll(dipValues, ConvertFromInternalModel);
return dipSwitch;
}
@@ -177,13 +147,8 @@ namespace SabreTools.Serialization.CrossModel
};
var disks = item.Read<Models.Metadata.Disk[]>(Models.Metadata.DiskArea.DiskKey);
if (disks != null && disks.Any())
{
diskArea.Disk = disks
.Where(d => d != null)
.Select(ConvertFromInternalModel)
.ToArray();
}
if (disks != null && disks.Length > 0)
diskArea.Disk = Array.ConvertAll(disks, ConvertFromInternalModel);
return diskArea;
}
@@ -226,40 +191,20 @@ namespace SabreTools.Serialization.CrossModel
};
var features = item.Read<Models.Metadata.Feature[]>(Models.Metadata.Part.FeatureKey);
if (features != null && features.Any())
{
part.Feature = features
.Where(f => f != null)
.Select(ConvertFromInternalModel)
.ToArray();
}
if (features != null && features.Length > 0)
part.Feature = Array.ConvertAll(features, ConvertFromInternalModel);
var dataAreas = item.Read<Models.Metadata.DataArea[]>(Models.Metadata.Part.DataAreaKey);
if (dataAreas != null && dataAreas.Any())
{
part.DataArea = dataAreas
.Where(d => d != null)
.Select(ConvertFromInternalModel)
.ToArray();
}
if (dataAreas != null && dataAreas.Length > 0)
part.DataArea = Array.ConvertAll(dataAreas, ConvertFromInternalModel);
var diskAreas = item.Read<Models.Metadata.DiskArea[]>(Models.Metadata.Part.DiskAreaKey);
if (diskAreas != null && diskAreas.Any())
{
part.DiskArea = diskAreas
.Where(d => d != null)
.Select(ConvertFromInternalModel)
.ToArray();
}
if (diskAreas != null && diskAreas.Length > 0)
part.DiskArea = Array.ConvertAll(diskAreas, ConvertFromInternalModel);
var dipSwitches = item.Read<Models.Metadata.DipSwitch[]>(Models.Metadata.Part.DipSwitchKey);
if (dipSwitches != null && dipSwitches.Any())
{
part.DipSwitch = dipSwitches
.Where(d => d != null)
.Select(ConvertFromInternalModel)
.ToArray();
}
if (dipSwitches != null && dipSwitches.Length > 0)
part.DipSwitch = Array.ConvertAll(dipSwitches, ConvertFromInternalModel);
return part;
}

View File

@@ -1,4 +1,4 @@
using System.Linq;
using System;
using SabreTools.Models.SoftwareList;
using SabreTools.Serialization.Interfaces;
@@ -17,12 +17,10 @@ namespace SabreTools.Serialization.CrossModel
[Models.Metadata.MetadataFile.HeaderKey] = ConvertHeaderToInternalModel(item),
};
if (item?.Software != null && item.Software.Any())
if (item?.Software != null && item.Software.Length > 0)
{
metadataFile[Models.Metadata.MetadataFile.MachineKey] = item.Software
.Where(s => s != null)
.Select(ConvertMachineToInternalModel)
.ToArray();
metadataFile[Models.Metadata.MetadataFile.MachineKey]
= Array.ConvertAll(item.Software, ConvertMachineToInternalModel);
}
return metadataFile;
@@ -58,14 +56,14 @@ namespace SabreTools.Serialization.CrossModel
[Models.Metadata.Machine.NotesKey] = item.Notes,
};
if (item.Info != null && item.Info.Any())
machine[Models.Metadata.Machine.InfoKey] = item.Info.Select(ConvertToInternalModel).ToArray();
if (item.Info != null && item.Info.Length > 0)
machine[Models.Metadata.Machine.InfoKey] = Array.ConvertAll(item.Info, ConvertToInternalModel);
if (item.SharedFeat != null && item.SharedFeat.Any())
machine[Models.Metadata.Machine.SharedFeatKey] = item.SharedFeat.Select(ConvertToInternalModel).ToArray();
if (item.SharedFeat != null && item.SharedFeat.Length > 0)
machine[Models.Metadata.Machine.SharedFeatKey] = Array.ConvertAll(item.SharedFeat, ConvertToInternalModel);
if (item.Part != null && item.Part.Any())
machine[Models.Metadata.Machine.PartKey] = item.Part.Select(ConvertToInternalModel).ToArray();
if (item.Part != null && item.Part.Length > 0)
machine[Models.Metadata.Machine.PartKey] = Array.ConvertAll(item.Part, ConvertToInternalModel);
return machine;
}
@@ -83,8 +81,8 @@ namespace SabreTools.Serialization.CrossModel
[Models.Metadata.DataArea.EndiannessKey] = item.Endianness,
};
if (item.Rom != null && item.Rom.Any())
dataArea[Models.Metadata.DataArea.RomKey] = item.Rom.Select(ConvertToInternalModel).ToArray();
if (item.Rom != null && item.Rom.Length > 0)
dataArea[Models.Metadata.DataArea.RomKey] = Array.ConvertAll(item.Rom, ConvertToInternalModel);
return dataArea;
}
@@ -101,8 +99,8 @@ namespace SabreTools.Serialization.CrossModel
[Models.Metadata.DipSwitch.MaskKey] = item.Mask,
};
if (item.DipValue != null && item.DipValue.Any())
dipSwitch[Models.Metadata.DipSwitch.DipValueKey] = item.DipValue.Select(ConvertToInternalModel).ToArray();
if (item.DipValue != null && item.DipValue.Length > 0)
dipSwitch[Models.Metadata.DipSwitch.DipValueKey] = Array.ConvertAll(item.DipValue, ConvertToInternalModel);
return dipSwitch;
}
@@ -147,8 +145,8 @@ namespace SabreTools.Serialization.CrossModel
[Models.Metadata.DiskArea.NameKey] = item.Name,
};
if (item.Disk != null && item.Disk.Any())
diskArea[Models.Metadata.DiskArea.DiskKey] = item.Disk.Select(ConvertToInternalModel).ToArray();
if (item.Disk != null && item.Disk.Length > 0)
diskArea[Models.Metadata.DiskArea.DiskKey] = Array.ConvertAll(item.Disk, ConvertToInternalModel);
return diskArea;
}
@@ -190,17 +188,17 @@ namespace SabreTools.Serialization.CrossModel
[Models.Metadata.Part.InterfaceKey] = item.Interface,
};
if (item.Feature != null && item.Feature.Any())
part[Models.Metadata.Part.FeatureKey] = item.Feature.Select(ConvertToInternalModel).ToArray();
if (item.Feature != null && item.Feature.Length > 0)
part[Models.Metadata.Part.FeatureKey] = Array.ConvertAll(item.Feature, ConvertToInternalModel);
if (item.DataArea != null && item.DataArea.Any())
part[Models.Metadata.Part.DataAreaKey] = item.DataArea.Select(ConvertToInternalModel).ToArray();
if (item.DataArea != null && item.DataArea.Length > 0)
part[Models.Metadata.Part.DataAreaKey] = Array.ConvertAll(item.DataArea, ConvertToInternalModel);
if (item.DiskArea != null && item.DiskArea.Any())
part[Models.Metadata.Part.DiskAreaKey] = item.DiskArea.Select(ConvertToInternalModel).ToArray();
if (item.DiskArea != null && item.DiskArea.Length > 0)
part[Models.Metadata.Part.DiskAreaKey] = Array.ConvertAll(item.DiskArea, ConvertToInternalModel);
if (item.DipSwitch != null && item.DipSwitch.Any())
part[Models.Metadata.Part.DipSwitchKey] = item.DipSwitch.Select(ConvertToInternalModel).ToArray();
if (item.DipSwitch != null && item.DipSwitch.Length > 0)
part[Models.Metadata.Part.DipSwitchKey] = Array.ConvertAll(item.DipSwitch, ConvertToInternalModel);
return part;
}

View File

@@ -59,7 +59,7 @@ namespace SabreTools.Serialization.Deserializers
}
// Set the records
mediaKeyBlock.Records = records.ToArray();
mediaKeyBlock.Records = [.. records];
#endregion
@@ -164,7 +164,7 @@ namespace SabreTools.Serialization.Deserializers
}
// Set the subset differences
record.SubsetDifferences = subsetDifferences.ToArray();
record.SubsetDifferences = [.. subsetDifferences];
// If there's any data left, discard it
if (data.Position < initialOffset + length)
@@ -205,7 +205,7 @@ namespace SabreTools.Serialization.Deserializers
}
// Set the media keys
record.MediaKeyData = mediaKeys.ToArray();
record.MediaKeyData = [.. mediaKeys];
return record;
}
@@ -243,7 +243,7 @@ namespace SabreTools.Serialization.Deserializers
}
// Set the offsets
record.Offsets = offsets.ToArray();
record.Offsets = [.. offsets];
return record;
}
@@ -322,7 +322,7 @@ namespace SabreTools.Serialization.Deserializers
}
// Set the signature blocks
record.SignatureBlocks = blocks.ToArray();
record.SignatureBlocks = [.. blocks];
// If there's any data left, discard it
if (data.Position < initialOffset + length)
@@ -383,7 +383,7 @@ namespace SabreTools.Serialization.Deserializers
}
// Set the signature blocks
record.SignatureBlocks = blocks.ToArray();
record.SignatureBlocks = [.. blocks];
// If there's any data left, discard it
if (data.Position < initialOffset + length)
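A small sketch (hypothetical names, not from this repository) of the collection-expression spread used above: "[.. list]" copies the list's elements into the target array type, with the same contents as calling ToArray() on the list.

using System.Collections.Generic;

static class SpreadSketch
{
    static uint[] Collect()
    {
        var offsets = new List<uint> { 1, 2, 3 };
        uint[] asArray = [.. offsets]; // same contents as offsets.ToArray()
        return asArray;
    }
}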

View File

@@ -1,6 +1,5 @@
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using SabreTools.IO.Readers;
using SabreTools.Models.AttractMode;
@@ -38,7 +37,7 @@ namespace SabreTools.Serialization.Deserializers
if (!reader.ReadHeader() || reader.HeaderValues == null)
return null;
dat.Header = reader.HeaderValues.ToArray();
dat.Header = [.. reader.HeaderValues];
// Loop through the rows and parse out values
var rows = new List<Row>();
@@ -72,10 +71,6 @@ namespace SabreTools.Serialization.Deserializers
Extra = reader.Line[15],
Buttons = reader.Line[16],
};
// If we have additional fields
if (reader.Line.Count > HeaderWithoutRomnameCount)
row.ADDITIONAL_ELEMENTS = reader.Line.Skip(HeaderWithoutRomnameCount).ToArray();
}
else
{
@@ -99,17 +94,13 @@ namespace SabreTools.Serialization.Deserializers
Extra = reader.Line[15],
Buttons = reader.Line[16],
};
// If we have additional fields
if (reader.Line.Count > HeaderWithRomnameCount)
row.ADDITIONAL_ELEMENTS = reader.Line.Skip(HeaderWithRomnameCount).ToArray();
}
rows.Add(row);
}
// Assign the rows to the Dat and return
dat.Row = rows.ToArray();
dat.Row = [.. rows];
return dat;
}

View File

@@ -1,5 +1,4 @@
using System.IO;
using System.Linq;
using System.Text;
using SabreTools.IO.Extensions;
using SabreTools.Models.BSP;
@@ -165,9 +164,9 @@ namespace SabreTools.Serialization.Deserializers
// TODO: Use marshalling here instead of building
var texture = new Texture();
byte[]? name = data.ReadBytes(16)?.TakeWhile(c => c != '\0')?.ToArray();
byte[]? name = data.ReadBytes(16);
if (name != null)
texture.Name = Encoding.ASCII.GetString(name);
texture.Name = Encoding.ASCII.GetString(name).TrimEnd('\0');
texture.Width = data.ReadUInt32();
texture.Height = data.ReadUInt32();
texture.Offsets = new uint[4];
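A tiny sketch (hypothetical byte data) of the two null-handling approaches shown above: TakeWhile stops at the first NUL anywhere in the buffer, while TrimEnd only strips trailing NULs, so the results agree for the usual zero-padded 16-byte texture names.

using System.Linq;
using System.Text;

static class TextureNameSketch
{
    static (string ViaTakeWhile, string ViaTrimEnd) Demo()
    {
        byte[] raw = { (byte)'A', (byte)'B', 0, 0 };
        // Both yield "AB" here; they differ only if a NUL is embedded before non-NUL bytes.
        return (Encoding.ASCII.GetString(raw.TakeWhile(b => b != 0).ToArray()),
                Encoding.ASCII.GetString(raw).TrimEnd('\0'));
    }
}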

View File

@@ -1,7 +1,5 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Reflection;
using SabreTools.Serialization.Interfaces;
@@ -108,12 +106,13 @@ namespace SabreTools.Serialization.Deserializers
if (deserializerName == null)
return default;
// If the deserializer has no model type
Type? modelType = typeof(TDeserializer).GetGenericArguments()?.FirstOrDefault();
if (modelType == null)
// If the deserializer has no generic arguments
var genericArgs = typeof(TDeserializer).GetGenericArguments();
if (genericArgs.Length == 0)
return default;
// Loop through all loaded assemblies
Type modelType = genericArgs[0];
foreach (var assembly in AppDomain.CurrentDomain.GetAssemblies())
{
// If the assembly is invalid
@@ -121,19 +120,23 @@ namespace SabreTools.Serialization.Deserializers
return default;
// If not all types can be loaded, use the ones that could be
List<Type> assemblyTypes = [];
Type?[] assemblyTypes = [];
try
{
assemblyTypes = assembly.GetTypes().ToList<Type>();
assemblyTypes = assembly.GetTypes();
}
catch (ReflectionTypeLoadException rtle)
{
assemblyTypes = rtle.Types.Where(t => t != null)!.ToList<Type>();
assemblyTypes = rtle.Types ?? [];
}
// Loop through all types
foreach (Type type in assemblyTypes)
foreach (Type? type in assemblyTypes)
{
// If the type is invalid
if (type == null)
continue;
// If the type isn't a class
if (!type.IsClass)
continue;

View File

@@ -77,7 +77,7 @@ namespace SabreTools.Serialization.Deserializers
}
// Assign the DIFAT sectors table
binary.DIFATSectorNumbers = difatSectors.ToArray();
binary.DIFATSectorNumbers = [.. difatSectors];
#endregion
@@ -115,7 +115,7 @@ namespace SabreTools.Serialization.Deserializers
}
// Assign the FAT sectors table
binary.FATSectorNumbers = fatSectors.ToArray();
binary.FATSectorNumbers = [.. fatSectors];
#endregion
@@ -153,7 +153,7 @@ namespace SabreTools.Serialization.Deserializers
}
// Assign the mini FAT sectors table
binary.MiniFATSectorNumbers = miniFatSectors.ToArray();
binary.MiniFATSectorNumbers = [.. miniFatSectors];
#endregion
@@ -219,7 +219,7 @@ namespace SabreTools.Serialization.Deserializers
}
// Assign the Directory sectors table
binary.DirectoryEntries = directorySectors.ToArray();
binary.DirectoryEntries = [.. directorySectors];
#endregion

View File

@@ -0,0 +1,253 @@
using System;
using System.IO;
using System.Text;
using SabreTools.IO.Extensions;
using SabreTools.Models.CHD;
namespace SabreTools.Serialization.Deserializers
{
// TODO: Expand this to full CHD files eventually
public class CHD : BaseBinaryDeserializer<Header>
{
/// <inheritdoc/>
public override Header? Deserialize(Stream? data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
return null;
// If the offset is out of bounds
if (data.Position < 0 || data.Position >= data.Length)
return null;
// Cache the current offset
int initialOffset = (int)data.Position;
// Determine the header version
uint version = GetVersion(data);
// Read and return the current CHD
return version switch
{
1 => ParseHeaderV1(data),
2 => ParseHeaderV2(data),
3 => ParseHeaderV3(data),
4 => ParseHeaderV4(data),
5 => ParseHeaderV5(data),
_ => null,
};
}
/// <summary>
/// Get the matching CHD version, if possible
/// </summary>
/// <returns>Matching version, 0 if none</returns>
private static uint GetVersion(Stream data)
{
// Read the header values
byte[] tagBytes = data.ReadBytes(8);
string tag = Encoding.ASCII.GetString(tagBytes);
uint length = data.ReadUInt32BigEndian();
uint version = data.ReadUInt32BigEndian();
// Seek back to start
data.SeekIfPossible();
// Check the signature
if (!string.Equals(tag, Constants.SignatureString, StringComparison.Ordinal))
return 0;
// Match the version to header length
#if NET472_OR_GREATER || NETCOREAPP
return (version, length) switch
{
(1, Constants.HeaderV1Size) => version,
(2, Constants.HeaderV2Size) => version,
(3, Constants.HeaderV3Size) => version,
(4, Constants.HeaderV4Size) => version,
(5, Constants.HeaderV5Size) => version,
_ => 0,
};
#else
return version switch
{
1 => length == Constants.HeaderV1Size ? version : 0,
2 => length == Constants.HeaderV2Size ? version : 0,
3 => length == Constants.HeaderV3Size ? version : 0,
4 => length == Constants.HeaderV4Size ? version : 0,
5 => length == Constants.HeaderV5Size ? version : 0,
_ => 0,
};
#endif
}
/// <summary>
/// Parse a Stream into a V1 header
/// </summary>
private static HeaderV1? ParseHeaderV1(Stream data)
{
var header = new HeaderV1();
byte[] tagBytes = data.ReadBytes(8);
header.Tag = Encoding.ASCII.GetString(tagBytes);
if (header.Tag != Constants.SignatureString)
return null;
header.Length = data.ReadUInt32BigEndian();
if (header.Length != Constants.HeaderV1Size)
return null;
header.Version = data.ReadUInt32BigEndian();
header.Flags = (Flags)data.ReadUInt32BigEndian();
header.Compression = (CompressionType)data.ReadUInt32BigEndian();
if (header.Compression > CompressionType.CHDCOMPRESSION_ZLIB)
return null;
header.HunkSize = data.ReadUInt32BigEndian();
header.TotalHunks = data.ReadUInt32BigEndian();
header.Cylinders = data.ReadUInt32BigEndian();
header.Heads = data.ReadUInt32BigEndian();
header.Sectors = data.ReadUInt32BigEndian();
header.MD5 = data.ReadBytes(16);
header.ParentMD5 = data.ReadBytes(16);
return header;
}
/// <summary>
/// Parse a Stream into a V2 header
/// </summary>
private static HeaderV2? ParseHeaderV2(Stream data)
{
var header = new HeaderV2();
byte[] tagBytes = data.ReadBytes(8);
header.Tag = Encoding.ASCII.GetString(tagBytes);
if (header.Tag != Constants.SignatureString)
return null;
header.Length = data.ReadUInt32BigEndian();
if (header.Length != Constants.HeaderV2Size)
return null;
header.Version = data.ReadUInt32BigEndian();
header.Flags = (Flags)data.ReadUInt32BigEndian();
header.Compression = (CompressionType)data.ReadUInt32BigEndian();
if (header.Compression > CompressionType.CHDCOMPRESSION_ZLIB)
return null;
header.HunkSize = data.ReadUInt32BigEndian();
header.TotalHunks = data.ReadUInt32BigEndian();
header.Cylinders = data.ReadUInt32BigEndian();
header.Heads = data.ReadUInt32BigEndian();
header.Sectors = data.ReadUInt32BigEndian();
header.MD5 = data.ReadBytes(16);
header.ParentMD5 = data.ReadBytes(16);
header.BytesPerSector = data.ReadUInt32BigEndian();
return header;
}
/// <summary>
/// Parse a Stream into a V3 header
/// </summary>
private static HeaderV3? ParseHeaderV3(Stream data)
{
var header = new HeaderV3();
byte[] tagBytes = data.ReadBytes(8);
header.Tag = Encoding.ASCII.GetString(tagBytes);
if (header.Tag != Constants.SignatureString)
return null;
header.Length = data.ReadUInt32BigEndian();
if (header.Length != Constants.HeaderV3Size)
return null;
header.Version = data.ReadUInt32BigEndian();
header.Flags = (Flags)data.ReadUInt32BigEndian();
header.Compression = (CompressionType)data.ReadUInt32BigEndian();
if (header.Compression > CompressionType.CHDCOMPRESSION_ZLIB_PLUS)
return null;
header.TotalHunks = data.ReadUInt32BigEndian();
header.LogicalBytes = data.ReadUInt64BigEndian();
header.MetaOffset = data.ReadUInt64BigEndian();
header.MD5 = data.ReadBytes(16);
header.ParentMD5 = data.ReadBytes(16);
header.HunkBytes = data.ReadUInt32BigEndian();
header.SHA1 = data.ReadBytes(20);
header.ParentSHA1 = data.ReadBytes(20);
return header;
}
/// <summary>
/// Parse a Stream into a V4 header
/// </summary>
private static HeaderV4? ParseHeaderV4(Stream data)
{
var header = new HeaderV4();
byte[] tagBytes = data.ReadBytes(8);
header.Tag = Encoding.ASCII.GetString(tagBytes);
if (header.Tag != Constants.SignatureString)
return null;
header.Length = data.ReadUInt32BigEndian();
if (header.Length != Constants.HeaderV4Size)
return null;
header.Version = data.ReadUInt32BigEndian();
header.Flags = (Flags)data.ReadUInt32BigEndian();
header.Compression = (CompressionType)data.ReadUInt32BigEndian();
if (header.Compression > CompressionType.CHDCOMPRESSION_AV)
return null;
header.TotalHunks = data.ReadUInt32BigEndian();
header.LogicalBytes = data.ReadUInt64BigEndian();
header.MetaOffset = data.ReadUInt64BigEndian();
header.HunkBytes = data.ReadUInt32BigEndian();
header.SHA1 = data.ReadBytes(20);
header.ParentSHA1 = data.ReadBytes(20);
header.RawSHA1 = data.ReadBytes(20);
return header;
}
/// <summary>
/// Parse a Stream into a V5 header
/// </summary>
private static HeaderV5? ParseHeaderV5(Stream data)
{
var header = new HeaderV5();
byte[] tagBytes = data.ReadBytes(8);
header.Tag = Encoding.ASCII.GetString(tagBytes);
if (header.Tag != Constants.SignatureString)
return null;
header.Length = data.ReadUInt32BigEndian();
if (header.Length != Constants.HeaderV5Size)
return null;
header.Version = data.ReadUInt32BigEndian();
header.Compressors = new CodecType[4];
for (int i = 0; i < header.Compressors.Length; i++)
{
header.Compressors[i] = (CodecType)data.ReadUInt32BigEndian();
}
header.LogicalBytes = data.ReadUInt64BigEndian();
header.MapOffset = data.ReadUInt64BigEndian();
header.MetaOffset = data.ReadUInt64BigEndian();
header.HunkBytes = data.ReadUInt32BigEndian();
header.UnitBytes = data.ReadUInt32BigEndian();
header.RawSHA1 = data.ReadBytes(20);
header.SHA1 = data.ReadBytes(20);
header.ParentSHA1 = data.ReadBytes(20);
return header;
}
}
}
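A hedged usage sketch for the CHD header deserializer added above, mirroring the instantiate-then-Deserialize pattern the other deserializers in this diff use; Header and the CHD class come from the code shown here, and the file path is hypothetical.

using System.IO;
using SabreTools.Models.CHD;
using SabreTools.Serialization.Deserializers;

static class ChdSketch
{
    static Header? ReadHeader(string path)
    {
        using Stream stream = File.OpenRead(path);
        var deserializer = new CHD();
        // Returns HeaderV1..HeaderV5 depending on the detected version,
        // or null when the tag, length, or version does not match
        return deserializer.Deserialize(stream);
    }
}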

View File

@@ -70,7 +70,7 @@ namespace SabreTools.Serialization.Deserializers
var deserializer = new ClrMamePro();
return deserializer.Deserialize(data, quotes);
}
/// <inheritdoc/>
public override MetadataFile? Deserialize(Stream? data)
=> Deserialize(data, true);
@@ -102,9 +102,6 @@ namespace SabreTools.Serialization.Deserializers
var videos = new List<Video>();
var dipSwitches = new List<DipSwitch>();
var additional = new List<string>();
var headerAdditional = new List<string>();
var gameAdditional = new List<string>();
while (!reader.EndOfStream)
{
// If we have no next line
@@ -120,12 +117,6 @@ namespace SabreTools.Serialization.Deserializers
case CmpRowType.EndTopLevel:
switch (lastTopLevel)
{
case "doscenter":
if (dat.ClrMamePro != null)
dat.ClrMamePro.ADDITIONAL_ELEMENTS = [.. headerAdditional];
headerAdditional.Clear();
break;
case "game":
case "machine":
case "resource":
@@ -142,7 +133,6 @@ namespace SabreTools.Serialization.Deserializers
game.Chip = [.. chips];
game.Video = [.. videos];
game.DipSwitch = [.. dipSwitches];
game.ADDITIONAL_ELEMENTS = [.. gameAdditional];
games.Add(game);
game = null;
@@ -158,10 +148,6 @@ namespace SabreTools.Serialization.Deserializers
chips.Clear();
videos.Clear();
dipSwitches.Clear();
gameAdditional.Clear();
break;
default:
// No-op
break;
}
continue;
@@ -188,10 +174,6 @@ namespace SabreTools.Serialization.Deserializers
case "set":
game = new Set();
break;
default:
if (reader.CurrentLine != null)
additional.Add(reader.CurrentLine);
break;
}
}
@@ -249,10 +231,6 @@ namespace SabreTools.Serialization.Deserializers
case "forcepacking":
dat.ClrMamePro.ForcePacking = reader.Standalone?.Value;
break;
default:
if (reader.CurrentLine != null)
headerAdditional.Add(reader.CurrentLine);
break;
}
}
@@ -303,14 +281,9 @@ namespace SabreTools.Serialization.Deserializers
var sample = new Sample
{
Name = reader.Standalone?.Value ?? string.Empty,
ADDITIONAL_ELEMENTS = [],
};
samples.Add(sample);
break;
default:
if (reader.CurrentLine != null)
gameAdditional.Add(reader.CurrentLine);
break;
}
}
@@ -391,22 +364,13 @@ namespace SabreTools.Serialization.Deserializers
game.Driver = driver;
break;
default:
if (reader.CurrentLine != null)
gameAdditional.Add(reader.CurrentLine);
continue;
}
}
else
{
if (reader.CurrentLine != null)
additional.Add(reader.CurrentLine);
}
}
// Add extra pieces and return
dat.Game = [.. games];
dat.ADDITIONAL_ELEMENTS = [.. additional];
return dat;
}
@@ -447,7 +411,6 @@ namespace SabreTools.Serialization.Deserializers
}
}
release.ADDITIONAL_ELEMENTS = [.. itemAdditional];
return release;
}
@@ -461,7 +424,6 @@ namespace SabreTools.Serialization.Deserializers
if (reader.Internal == null)
return null;
var itemAdditional = new List<string>();
var biosset = new BiosSet();
foreach (var kvp in reader.Internal)
{
@@ -476,13 +438,9 @@ namespace SabreTools.Serialization.Deserializers
case "default":
biosset.Default = kvp.Value;
break;
default:
itemAdditional.Add($"{kvp.Key}: {kvp.Value}");
break;
}
}
biosset.ADDITIONAL_ELEMENTS = [.. itemAdditional];
return biosset;
}
@@ -496,7 +454,6 @@ namespace SabreTools.Serialization.Deserializers
if (reader.Internal == null)
return null;
var itemAdditional = new List<string>();
var rom = new Rom();
foreach (var kvp in reader.Internal)
{
@@ -565,13 +522,9 @@ namespace SabreTools.Serialization.Deserializers
case "mia":
rom.MIA = kvp.Value;
break;
default:
itemAdditional.Add($"{kvp.Key}: {kvp.Value}");
break;
}
}
rom.ADDITIONAL_ELEMENTS = [.. itemAdditional];
return rom;
}
@@ -585,7 +538,6 @@ namespace SabreTools.Serialization.Deserializers
if (reader.Internal == null)
return null;
var itemAdditional = new List<string>();
var disk = new Disk();
foreach (var kvp in reader.Internal)
{
@@ -609,13 +561,9 @@ namespace SabreTools.Serialization.Deserializers
case "flags":
disk.Flags = kvp.Value;
break;
default:
itemAdditional.Add($"{kvp.Key}: {kvp.Value}");
break;
}
}
disk.ADDITIONAL_ELEMENTS = [.. itemAdditional];
return disk;
}
@@ -629,7 +577,6 @@ namespace SabreTools.Serialization.Deserializers
if (reader.Internal == null)
return null;
var itemAdditional = new List<string>();
var media = new Media();
foreach (var kvp in reader.Internal)
{
@@ -650,13 +597,9 @@ namespace SabreTools.Serialization.Deserializers
case "spamsum":
media.SpamSum = kvp.Value;
break;
default:
itemAdditional.Add($"{kvp.Key}: {kvp.Value}");
break;
}
}
media.ADDITIONAL_ELEMENTS = [.. itemAdditional];
return media;
}
@@ -670,7 +613,6 @@ namespace SabreTools.Serialization.Deserializers
if (reader.Internal == null)
return null;
var itemAdditional = new List<string>();
var sample = new Sample();
foreach (var kvp in reader.Internal)
{
@@ -679,13 +621,9 @@ namespace SabreTools.Serialization.Deserializers
case "name":
sample.Name = kvp.Value;
break;
default:
itemAdditional.Add($"{kvp.Key}: {kvp.Value}");
break;
}
}
sample.ADDITIONAL_ELEMENTS = [.. itemAdditional];
return sample;
}
@@ -699,7 +637,6 @@ namespace SabreTools.Serialization.Deserializers
if (reader.Internal == null)
return null;
var itemAdditional = new List<string>();
var archive = new Archive();
foreach (var kvp in reader.Internal)
{
@@ -708,13 +645,9 @@ namespace SabreTools.Serialization.Deserializers
case "name":
archive.Name = kvp.Value;
break;
default:
itemAdditional.Add($"{kvp.Key}: {kvp.Value}");
break;
}
}
archive.ADDITIONAL_ELEMENTS = [.. itemAdditional];
return archive;
}
@@ -728,7 +661,6 @@ namespace SabreTools.Serialization.Deserializers
if (reader.Internal == null)
return null;
var itemAdditional = new List<string>();
var chip = new Chip();
foreach (var kvp in reader.Internal)
{
@@ -746,13 +678,9 @@ namespace SabreTools.Serialization.Deserializers
case "clock":
chip.Clock = kvp.Value;
break;
default:
itemAdditional.Add($"{kvp.Key}: {kvp.Value}");
break;
}
}
chip.ADDITIONAL_ELEMENTS = itemAdditional.ToArray();
return chip;
}
@@ -766,7 +694,6 @@ namespace SabreTools.Serialization.Deserializers
if (reader.Internal == null)
return null;
var itemAdditional = new List<string>();
var video = new Video();
foreach (var kvp in reader.Internal)
{
@@ -793,13 +720,9 @@ namespace SabreTools.Serialization.Deserializers
case "freq":
video.Freq = kvp.Value;
break;
default:
itemAdditional.Add($"{kvp.Key}: {kvp.Value}");
break;
}
}
video.ADDITIONAL_ELEMENTS = itemAdditional.ToArray();
return video;
}
@@ -813,7 +736,6 @@ namespace SabreTools.Serialization.Deserializers
if (reader.Internal == null)
return null;
var itemAdditional = new List<string>();
var sound = new Sound();
foreach (var kvp in reader.Internal)
{
@@ -822,13 +744,9 @@ namespace SabreTools.Serialization.Deserializers
case "channels":
sound.Channels = kvp.Value;
break;
default:
itemAdditional.Add($"{kvp.Key}: {kvp.Value}");
break;
}
}
sound.ADDITIONAL_ELEMENTS = itemAdditional.ToArray();
return sound;
}
@@ -842,7 +760,6 @@ namespace SabreTools.Serialization.Deserializers
if (reader.Internal == null)
return null;
var itemAdditional = new List<string>();
var input = new Input();
foreach (var kvp in reader.Internal)
{
@@ -866,13 +783,9 @@ namespace SabreTools.Serialization.Deserializers
case "service":
input.Service = kvp.Value;
break;
default:
itemAdditional.Add($"{kvp.Key}: {kvp.Value}");
break;
}
}
input.ADDITIONAL_ELEMENTS = itemAdditional.ToArray();
return input;
}
@@ -886,7 +799,6 @@ namespace SabreTools.Serialization.Deserializers
if (reader.Internal == null)
return null;
var itemAdditional = new List<string>();
var dipswitch = new DipSwitch();
var entries = new List<string>();
foreach (var kvp in reader.Internal)
@@ -902,14 +814,10 @@ namespace SabreTools.Serialization.Deserializers
case "default":
dipswitch.Default = kvp.Value;
break;
default:
itemAdditional.Add($"{kvp.Key}: {kvp.Value}");
break;
}
}
dipswitch.Entry = [.. entries];
dipswitch.ADDITIONAL_ELEMENTS = itemAdditional.ToArray();
return dipswitch;
}
@@ -923,7 +831,6 @@ namespace SabreTools.Serialization.Deserializers
if (reader.Internal == null)
return null;
var itemAdditional = new List<string>();
var driver = new Driver();
foreach (var kvp in reader.Internal)
{
@@ -944,13 +851,9 @@ namespace SabreTools.Serialization.Deserializers
case "blit":
driver.Blit = kvp.Value;
break;
default:
itemAdditional.Add($"{kvp.Key}: {kvp.Value}");
break;
}
}
driver.ADDITIONAL_ELEMENTS = itemAdditional.ToArray();
return driver;
}

View File

@@ -1,7 +1,6 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text.RegularExpressions;
using SabreTools.IO.Extensions;
using SabreTools.Models.CueSheets;
@@ -44,11 +43,13 @@ namespace SabreTools.Serialization.Deserializers
continue;
// http://stackoverflow.com/questions/554013/regular-expression-to-split-on-spaces-unless-in-quotes
string[] splitLine = Regex
.Matches(line, @"[^\s""]+|""[^""]*""")
.Cast<Match>()
.Select(m => m.Groups[0].Value)
.ToArray();
var matchCol = Regex.Matches(line, @"[^\s""]+|""[^""]*""");
var splitLine = new List<string>();
foreach (Match? match in matchCol)
{
if (match != null)
splitLine.Add(match.Groups[0].Value);
}
switch (splitLine[0])
{
@@ -59,7 +60,7 @@ namespace SabreTools.Serialization.Deserializers
// Read MCN
case "CATALOG":
if (splitLine.Length < 2)
if (splitLine.Count < 2)
throw new FormatException($"CATALOG line malformed: {line}");
cueSheet.Catalog = splitLine[1].Trim('"');
@@ -67,7 +68,7 @@ namespace SabreTools.Serialization.Deserializers
// Read external CD-Text file path
case "CDTEXTFILE":
if (splitLine.Length < 2)
if (splitLine.Count < 2)
throw new FormatException($"CDTEXTFILE line malformed: {line}");
cueSheet.CdTextFile = splitLine[1].Trim('"');
@@ -75,7 +76,7 @@ namespace SabreTools.Serialization.Deserializers
// Read CD-Text enhanced performer
case "PERFORMER":
if (splitLine.Length < 2)
if (splitLine.Count < 2)
throw new FormatException($"PERFORMER line malformed: {line}");
cueSheet.Performer = splitLine[1].Trim('"');
@@ -83,7 +84,7 @@ namespace SabreTools.Serialization.Deserializers
// Read CD-Text enhanced songwriter
case "SONGWRITER":
if (splitLine.Length < 2)
if (splitLine.Count < 2)
throw new FormatException($"SONGWRITER line malformed: {line}");
cueSheet.Songwriter = splitLine[1].Trim('"');
@@ -91,7 +92,7 @@ namespace SabreTools.Serialization.Deserializers
// Read CD-Text enhanced title
case "TITLE":
if (splitLine.Length < 2)
if (splitLine.Count < 2)
throw new FormatException($"TITLE line malformed: {line}");
cueSheet.Title = splitLine[1].Trim('"');
@@ -99,7 +100,7 @@ namespace SabreTools.Serialization.Deserializers
// Read file information
case "FILE":
if (splitLine.Length < 3)
if (splitLine.Count < 3)
throw new FormatException($"FILE line malformed: {line}");
var file = CreateCueFile(splitLine[1], splitLine[2], data, out lastLine);
@@ -152,11 +153,13 @@ namespace SabreTools.Serialization.Deserializers
continue;
// http://stackoverflow.com/questions/554013/regular-expression-to-split-on-spaces-unless-in-quotes
string[] splitLine = Regex
.Matches(line, @"[^\s""]+|""[^""]*""")
.Cast<Match>()
.Select(m => m.Groups[0].Value)
.ToArray();
var matchCol = Regex.Matches(line, @"[^\s""]+|""[^""]*""");
var splitLine = new List<string>();
foreach (Match? match in matchCol)
{
if (match != null)
splitLine.Add(match.Groups[0].Value);
}
switch (splitLine[0])
{
@@ -167,7 +170,7 @@ namespace SabreTools.Serialization.Deserializers
// Read track information
case "TRACK":
if (splitLine.Length < 3)
if (splitLine.Count < 3)
throw new FormatException($"TRACK line malformed: {line}");
var track = CreateCueTrack(splitLine[1], splitLine[2], data, out lastLine);
@@ -237,12 +240,13 @@ namespace SabreTools.Serialization.Deserializers
continue;
// http://stackoverflow.com/questions/554013/regular-expression-to-split-on-spaces-unless-in-quotes
string[] splitLine = Regex
.Matches(line, @"[^\s""]+|""[^""]*""")
.Cast<Match>()
.Select(m => m.Groups[0].Value)
.ToArray();
var matchCol = Regex.Matches(line, @"[^\s""]+|""[^""]*""");
var splitLine = new List<string>();
foreach (Match? match in matchCol)
{
if (match != null)
splitLine.Add(match.Groups[0].Value);
}
switch (splitLine[0])
{
// Read comments
@@ -252,15 +256,15 @@ namespace SabreTools.Serialization.Deserializers
// Read flag information
case "FLAGS":
if (splitLine.Length < 2)
if (splitLine.Count < 2)
throw new FormatException($"FLAGS line malformed: {line}");
cueTrack.Flags = GetFlags(splitLine);
cueTrack.Flags = GetFlags([.. splitLine]);
break;
// Read International Standard Recording Code
case "ISRC":
if (splitLine.Length < 2)
if (splitLine.Count < 2)
throw new FormatException($"ISRC line malformed: {line}");
cueTrack.ISRC = splitLine[1].Trim('"');
@@ -268,7 +272,7 @@ namespace SabreTools.Serialization.Deserializers
// Read CD-Text enhanced performer
case "PERFORMER":
if (splitLine.Length < 2)
if (splitLine.Count < 2)
throw new FormatException($"PERFORMER line malformed: {line}");
cueTrack.Performer = splitLine[1].Trim('"');
@@ -276,7 +280,7 @@ namespace SabreTools.Serialization.Deserializers
// Read CD-Text enhanced songwriter
case "SONGWRITER":
if (splitLine.Length < 2)
if (splitLine.Count < 2)
throw new FormatException($"SONGWRITER line malformed: {line}");
cueTrack.Songwriter = splitLine[1].Trim('"');
@@ -284,7 +288,7 @@ namespace SabreTools.Serialization.Deserializers
// Read CD-Text enhanced title
case "TITLE":
if (splitLine.Length < 2)
if (splitLine.Count < 2)
throw new FormatException($"TITLE line malformed: {line}");
cueTrack.Title = splitLine[1].Trim('"');
@@ -292,7 +296,7 @@ namespace SabreTools.Serialization.Deserializers
// Read pregap information
case "PREGAP":
if (splitLine.Length < 2)
if (splitLine.Count < 2)
throw new FormatException($"PREGAP line malformed: {line}");
var pregap = CreatePreGap(splitLine[1]);
@@ -304,7 +308,7 @@ namespace SabreTools.Serialization.Deserializers
// Read index information
case "INDEX":
if (splitLine.Length < 3)
if (splitLine.Count < 3)
throw new FormatException($"INDEX line malformed: {line}");
var index = CreateCueIndex(splitLine[1], splitLine[2]);
@@ -316,7 +320,7 @@ namespace SabreTools.Serialization.Deserializers
// Read postgap information
case "POSTGAP":
if (splitLine.Length < 2)
if (splitLine.Count < 2)
throw new FormatException($"POSTGAP line malformed: {line}");
var postgap = CreatePostGap(splitLine[1]);
@@ -356,7 +360,7 @@ namespace SabreTools.Serialization.Deserializers
throw new ArgumentException("Length was null or whitespace");
// Ignore lines that don't contain the correct information
if (length!.Length != 8 || length.Count(c => c == ':') != 2)
if (length!.Length != 8)
throw new FormatException($"Length was not in a recognized format: {length}");
// Split the line
@@ -413,7 +417,7 @@ namespace SabreTools.Serialization.Deserializers
throw new ArgumentException("Start time was null or whitespace");
// Ignore lines that don't contain the correct information
if (startTime!.Length != 8 || startTime.Count(c => c == ':') != 2)
if (startTime!.Length != 8)
throw new FormatException($"Start time was not in a recognized format: {startTime}");
// Split the line
@@ -464,7 +468,7 @@ namespace SabreTools.Serialization.Deserializers
throw new ArgumentException("Length was null or whitespace");
// Ignore lines that don't contain the correct information
if (length!.Length != 8 || length.Count(c => c == ':') != 2)
if (length!.Length != 8)
throw new FormatException($"Length was not in a recognized format: {length}");
// Split the line
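A sketch (hypothetical input) of the quote-aware split used throughout this file; the pattern from the linked StackOverflow answer keeps quoted segments intact, which is why the callers later call Trim('"').

using System.Collections.Generic;
using System.Text.RegularExpressions;

static class CueSplitSketch
{
    // Split("FILE \"disc (Track 1).bin\" BINARY") => ["FILE", "\"disc (Track 1).bin\"", "BINARY"]
    static List<string> Split(string line)
    {
        var splitLine = new List<string>();
        foreach (Match match in Regex.Matches(line, @"[^\s""]+|""[^""]*"""))
            splitLine.Add(match.Groups[0].Value);
        return splitLine;
    }
}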

View File

@@ -26,9 +26,6 @@ namespace SabreTools.Serialization.Deserializers
var games = new List<Game>();
var files = new List<Models.DosCenter.File>();
var additional = new List<string>();
var headerAdditional = new List<string>();
var gameAdditional = new List<string>();
while (!reader.EndOfStream)
{
// If we have no next line
@@ -44,26 +41,15 @@ namespace SabreTools.Serialization.Deserializers
case CmpRowType.EndTopLevel:
switch (lastTopLevel)
{
case "doscenter":
if (dat.DosCenter != null)
dat.DosCenter.ADDITIONAL_ELEMENTS = headerAdditional.ToArray();
headerAdditional.Clear();
break;
case "game":
if (game != null)
{
game.File = files.ToArray();
game.ADDITIONAL_ELEMENTS = gameAdditional.ToArray();
game.File = [.. files];
games.Add(game);
}
game = null;
files.Clear();
gameAdditional.Clear();
break;
default:
// No-op
break;
}
continue;
@@ -81,10 +67,6 @@ namespace SabreTools.Serialization.Deserializers
case "game":
game = new Game();
break;
default:
if (reader.CurrentLine != null)
additional.Add(reader.CurrentLine);
break;
}
}
@@ -117,10 +99,6 @@ namespace SabreTools.Serialization.Deserializers
case "comment:":
dat.DosCenter.Comment = reader.Standalone?.Value;
break;
default:
if (reader.CurrentLine != null)
headerAdditional.Add(item: reader.CurrentLine);
break;
}
}
@@ -135,10 +113,6 @@ namespace SabreTools.Serialization.Deserializers
case "name":
game.Name = reader.Standalone?.Value;
break;
default:
if (reader.CurrentLine != null)
gameAdditional.Add(item: reader.CurrentLine);
break;
}
}
@@ -147,28 +121,17 @@ namespace SabreTools.Serialization.Deserializers
{
// If we have an unknown type, log it
if (reader.InternalName != "file")
{
if (reader.CurrentLine != null)
gameAdditional.Add(reader.CurrentLine);
continue;
}
// Create the file and add to the list
var file = CreateFile(reader);
if (file != null)
files.Add(file);
}
else
{
if (reader.CurrentLine != null)
additional.Add(item: reader.CurrentLine);
}
}
// Add extra pieces and return
dat.Game = games.ToArray();
dat.ADDITIONAL_ELEMENTS = additional.ToArray();
dat.Game = [.. games];
return dat;
}
@@ -182,7 +145,6 @@ namespace SabreTools.Serialization.Deserializers
if (reader.Internal == null)
return null;
var itemAdditional = new List<string>();
var file = new Models.DosCenter.File();
foreach (var kvp in reader.Internal)
{
@@ -200,14 +162,9 @@ namespace SabreTools.Serialization.Deserializers
case "date":
file.Date = kvp.Value;
break;
default:
if (reader.CurrentLine != null)
itemAdditional.Add(item: reader.CurrentLine);
break;
}
}
file.ADDITIONAL_ELEMENTS = itemAdditional.ToArray();
return file;
}
}

View File

@@ -1,6 +1,5 @@
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using SabreTools.IO.Readers;
using SabreTools.Models.EverdriveSMDB;
@@ -47,15 +46,11 @@ namespace SabreTools.Serialization.Deserializers
if (reader.Line.Count > 5)
row.Size = reader.Line[5];
// If we have additional fields
if (reader.Line.Count > 6)
row.ADDITIONAL_ELEMENTS = reader.Line.Skip(5).ToArray();
rows.Add(row);
}
// Assign the rows to the Dat and return
dat.Row = rows.ToArray();
dat.Row = [.. rows];
return dat;
}
}

View File

@@ -1,7 +1,6 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using SabreTools.Hashing;
using SabreTools.Models.Hashfile;
@@ -60,7 +59,7 @@ namespace SabreTools.Serialization.Deserializers
using var stream = PathProcessor.OpenStream(path);
return DeserializeStream(stream, hash);
}
#endregion
#region IStreamDeserializer
@@ -71,7 +70,7 @@ namespace SabreTools.Serialization.Deserializers
var deserializer = new Hashfile();
return deserializer.Deserialize(data, hash);
}
/// <inheritdoc/>
public override Models.Hashfile.Hashfile? Deserialize(Stream? data)
=> Deserialize(data, HashType.CRC32);
@@ -86,19 +85,22 @@ namespace SabreTools.Serialization.Deserializers
// Setup the reader and output
var reader = new StreamReader(data);
var dat = new Models.Hashfile.Hashfile();
var additional = new List<string>();
// Create lists for each hash type
var sfvList = new List<SFV>();
var md5List = new List<MD5>();
var sha1List = new List<SHA1>();
var sha256List = new List<SHA256>();
var sha384List = new List<SHA384>();
var sha512List = new List<SHA512>();
var spamsumList = new List<SpamSum>();
// Loop through the rows and parse out values
var hashes = new List<object>();
while (!reader.EndOfStream)
{
// Read and split the line
string? line = reader.ReadLine();
#if NETFRAMEWORK || NETCOREAPP3_1
string[]? lineParts = line?.Split(new char[] { ' ' }, StringSplitOptions.RemoveEmptyEntries);
#else
string[]? lineParts = line?.Split(' ', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries);
#endif
string[]? lineParts = line?.Split([' '], StringSplitOptions.RemoveEmptyEntries);
if (lineParts == null)
continue;
@@ -106,93 +108,60 @@ namespace SabreTools.Serialization.Deserializers
switch (hash)
{
case HashType.CRC32:
case HashType.CRC32_ISO:
case HashType.CRC32_Naive:
case HashType.CRC32_Optimized:
case HashType.CRC32_Parallel:
var sfv = new SFV
{
#if NETFRAMEWORK
File = string.Join(" ", lineParts.Take(lineParts.Length - 1).ToArray()),
File = string.Join(" ", lineParts, 0, lineParts.Length - 1),
Hash = lineParts[lineParts.Length - 1],
#else
File = string.Join(" ", lineParts[..^1]),
Hash = lineParts[^1],
#endif
};
hashes.Add(sfv);
sfvList.Add(sfv);
break;
case HashType.MD5:
var md5 = new MD5
{
Hash = lineParts[0],
#if NETFRAMEWORK
File = string.Join(" ", lineParts.Skip(1).ToArray()),
#else
File = string.Join(" ", lineParts[1..]),
#endif
File = string.Join(" ", lineParts, 1, lineParts.Length - 1),
};
hashes.Add(md5);
md5List.Add(md5);
break;
case HashType.SHA1:
var sha1 = new SHA1
{
Hash = lineParts[0],
#if NETFRAMEWORK
File = string.Join(" ", lineParts.Skip(1).ToArray()),
#else
File = string.Join(" ", lineParts[1..]),
#endif
File = string.Join(" ", lineParts, 1, lineParts.Length - 1),
};
hashes.Add(sha1);
sha1List.Add(sha1);
break;
case HashType.SHA256:
var sha256 = new SHA256
{
Hash = lineParts[0],
#if NETFRAMEWORK
File = string.Join(" ", lineParts.Skip(1).ToArray()),
#else
File = string.Join(" ", lineParts[1..]),
#endif
File = string.Join(" ", lineParts, 1, lineParts.Length - 1),
};
hashes.Add(sha256);
sha256List.Add(sha256);
break;
case HashType.SHA384:
var sha384 = new SHA384
{
Hash = lineParts[0],
#if NETFRAMEWORK
File = string.Join(" ", lineParts.Skip(1).ToArray()),
#else
File = string.Join(" ", lineParts[1..]),
#endif
File = string.Join(" ", lineParts, 1, lineParts.Length - 1),
};
hashes.Add(sha384);
sha384List.Add(sha384);
break;
case HashType.SHA512:
var sha512 = new SHA512
{
Hash = lineParts[0],
#if NETFRAMEWORK
File = string.Join(" ", lineParts.Skip(1).ToArray()),
#else
File = string.Join(" ", lineParts[1..]),
#endif
File = string.Join(" ", lineParts, 1, lineParts.Length - 1),
};
hashes.Add(sha512);
sha512List.Add(sha512);
break;
case HashType.SpamSum:
var spamSum = new SpamSum
{
Hash = lineParts[0],
#if NETFRAMEWORK
File = string.Join(" ", lineParts.Skip(1).ToArray()),
#else
File = string.Join(" ", lineParts[1..]),
#endif
File = string.Join(" ", lineParts, 1, lineParts.Length - 1),
};
hashes.Add(spamSum);
spamsumList.Add(spamSum);
break;
}
}
@@ -201,32 +170,28 @@ namespace SabreTools.Serialization.Deserializers
switch (hash)
{
case HashType.CRC32:
case HashType.CRC32_ISO:
case HashType.CRC32_Naive:
case HashType.CRC32_Optimized:
case HashType.CRC32_Parallel:
dat.SFV = hashes.Cast<SFV>().ToArray();
dat.SFV = [.. sfvList];
break;
case HashType.MD5:
dat.MD5 = hashes.Cast<MD5>().ToArray();
dat.MD5 = [.. md5List];
break;
case HashType.SHA1:
dat.SHA1 = hashes.Cast<SHA1>().ToArray();
dat.SHA1 = [.. sha1List];
break;
case HashType.SHA256:
dat.SHA256 = hashes.Cast<SHA256>().ToArray();
dat.SHA256 = [.. sha256List];
break;
case HashType.SHA384:
dat.SHA384 = hashes.Cast<SHA384>().ToArray();
dat.SHA384 = [.. sha384List];
break;
case HashType.SHA512:
dat.SHA512 = hashes.Cast<SHA512>().ToArray();
dat.SHA512 = [.. sha512List];
break;
case HashType.SpamSum:
dat.SpamSum = hashes.Cast<SpamSum>().ToArray();
dat.SpamSum = [.. spamsumList];
break;
}
dat.ADDITIONAL_ELEMENTS = [.. additional];
return dat;
}
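A brief sketch (hypothetical sample data) of the string.Join overload used above: joining a sub-range of the array directly avoids the intermediate arrays that Skip/Take plus ToArray would allocate on older frameworks.

static class JoinSketch
{
    static (string Hash, string File) Demo()
    {
        string[] lineParts = { "3bc27a42", "some", "file name.bin" };
        // Joins elements 1..end in one call: "some file name.bin"
        return (lineParts[0], string.Join(" ", lineParts, 1, lineParts.Length - 1));
    }
}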

View File

@@ -1,5 +1,6 @@
using System.Collections.Generic;
using System.IO;
using System.Text;
using SabreTools.IO.Extensions;
using SabreTools.Models.InstallShieldArchiveV3;
@@ -114,7 +115,17 @@ namespace SabreTools.Serialization.Deserializers
/// <returns>Filled directory on success, null on error</returns>
public static Models.InstallShieldArchiveV3.Directory? ParseDirectory(Stream data)
{
return data.ReadType<Models.InstallShieldArchiveV3.Directory>();
var directory = new Models.InstallShieldArchiveV3.Directory();
directory.FileCount = data.ReadUInt16();
directory.ChunkSize = data.ReadUInt16();
// TODO: Is there any equivalent automatic type for UInt16-prefixed ANSI?
ushort nameLength = data.ReadUInt16();
byte[] nameBytes = data.ReadBytes(nameLength);
directory.Name = Encoding.ASCII.GetString(nameBytes);
return directory;
}
/// <summary>
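One possible answer to the TODO above, sketched as a hypothetical helper (not part of the library): it assumes the ReadUInt16/ReadBytes extensions from SabreTools.IO.Extensions that the surrounding code already uses.

using System.IO;
using System.Text;
using SabreTools.IO.Extensions;

static class PrefixedStringSketch
{
    // Reads a UInt16 length prefix followed by that many ANSI bytes,
    // mirroring the inline ParseDirectory logic above.
    static string ReadUInt16PrefixedAnsiString(Stream data)
    {
        ushort nameLength = data.ReadUInt16();
        byte[] nameBytes = data.ReadBytes(nameLength);
        return Encoding.ASCII.GetString(nameBytes);
    }
}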

View File

@@ -1,3 +1,4 @@
using System;
using System.Collections.Generic;
using System.IO;
using SabreTools.IO.Extensions;
@@ -447,14 +448,9 @@ namespace SabreTools.Serialization.Deserializers
var fileGroup = new FileGroup();
fileGroup.NameOffset = data.ReadUInt32();
fileGroup.ExpandedSize = data.ReadUInt32();
fileGroup.Reserved0 = data.ReadBytes(4);
fileGroup.CompressedSize = data.ReadUInt32();
fileGroup.Reserved1 = data.ReadBytes(4);
fileGroup.Reserved2 = data.ReadBytes(2);
fileGroup.Attribute1 = data.ReadUInt16();
fileGroup.Attribute2 = data.ReadUInt16();
fileGroup.Attributes = (FileGroupAttributes)data.ReadUInt16();
// TODO: Figure out what data lives in this area for V5 and below
if (majorVersion <= 5)
@@ -462,19 +458,19 @@ namespace SabreTools.Serialization.Deserializers
fileGroup.FirstFile = data.ReadUInt32();
fileGroup.LastFile = data.ReadUInt32();
fileGroup.UnknownOffset = data.ReadUInt32();
fileGroup.Var4Offset = data.ReadUInt32();
fileGroup.Var1Offset = data.ReadUInt32();
fileGroup.UnknownStringOffset = data.ReadUInt32();
fileGroup.OperatingSystemOffset = data.ReadUInt32();
fileGroup.LanguageOffset = data.ReadUInt32();
fileGroup.HTTPLocationOffset = data.ReadUInt32();
fileGroup.FTPLocationOffset = data.ReadUInt32();
fileGroup.MiscOffset = data.ReadUInt32();
fileGroup.Var2Offset = data.ReadUInt32();
fileGroup.TargetDirectoryOffset = data.ReadUInt32();
fileGroup.Reserved3 = data.ReadBytes(2);
fileGroup.Reserved4 = data.ReadBytes(2);
fileGroup.Reserved5 = data.ReadBytes(2);
fileGroup.Reserved6 = data.ReadBytes(2);
fileGroup.Reserved7 = data.ReadBytes(2);
fileGroup.OverwriteFlags = (FileGroupFlags)data.ReadUInt32();
fileGroup.Reserved = new uint[4];
for (int i = 0; i < fileGroup.Reserved.Length; i++)
{
fileGroup.Reserved[i] = data.ReadUInt32();
}
// Cache the current position
long currentPosition = data.Position;
@@ -489,7 +485,7 @@ namespace SabreTools.Serialization.Deserializers
if (majorVersion >= 17)
fileGroup.Name = data.ReadNullTerminatedUnicodeString();
else
fileGroup.Name = data.ReadNullTerminatedUnicodeString();
fileGroup.Name = data.ReadNullTerminatedAnsiString();
}
// Seek back to the correct offset
@@ -512,15 +508,19 @@ namespace SabreTools.Serialization.Deserializers
component.IdentifierOffset = data.ReadUInt32();
component.DescriptorOffset = data.ReadUInt32();
component.DisplayNameOffset = data.ReadUInt32();
component.Reserved0 = data.ReadUInt16();
component.ReservedOffset0 = data.ReadUInt32();
component.ReservedOffset1 = data.ReadUInt32();
component.Status = (ComponentStatus)data.ReadUInt16();
component.PasswordOffset = data.ReadUInt32();
component.MiscOffset = data.ReadUInt32();
component.ComponentIndex = data.ReadUInt16();
component.NameOffset = data.ReadUInt32();
component.ReservedOffset2 = data.ReadUInt32();
component.ReservedOffset3 = data.ReadUInt32();
component.ReservedOffset4 = data.ReadUInt32();
component.Reserved1 = data.ReadBytes(32);
component.CDRomFolderOffset = data.ReadUInt32();
component.HTTPLocationOffset = data.ReadUInt32();
component.FTPLocationOffset = data.ReadUInt32();
component.Guid = new Guid[2];
for (int i = 0; i < component.Guid.Length; i++)
{
component.Guid[i] = data.ReadGuid();
}
component.CLSIDOffset = data.ReadUInt32();
component.Reserved2 = data.ReadBytes(28);
component.Reserved3 = data.ReadBytes(majorVersion <= 5 ? 2 : 1);
@@ -533,10 +533,10 @@ namespace SabreTools.Serialization.Deserializers
component.SubComponentsCount = data.ReadUInt16();
component.SubComponentsOffset = data.ReadUInt32();
component.NextComponentOffset = data.ReadUInt32();
component.ReservedOffset5 = data.ReadUInt32();
component.ReservedOffset6 = data.ReadUInt32();
component.ReservedOffset7 = data.ReadUInt32();
component.ReservedOffset8 = data.ReadUInt32();
component.OnInstallingOffset = data.ReadUInt32();
component.OnInstalledOffset = data.ReadUInt32();
component.OnUninstallingOffset = data.ReadUInt32();
component.OnUninstalledOffset = data.ReadUInt32();
// Cache the current position
long currentPosition = data.Position;

View File

@@ -23,7 +23,6 @@ namespace SabreTools.Serialization.Deserializers
var sets = new List<Set>();
var rows = new List<Row>();
var additional = new List<string>();
while (!reader.EndOfStream)
{
// Read the line and don't split yet
@@ -33,7 +32,7 @@ namespace SabreTools.Serialization.Deserializers
// If we have a set to process
if (set != null)
{
set.Row = rows.ToArray();
set.Row = [.. rows];
sets.Add(set);
set = null;
rows.Clear();
@@ -215,7 +214,6 @@ namespace SabreTools.Serialization.Deserializers
default:
row = null;
additional.Add(line);
break;
}
@@ -226,15 +224,14 @@ namespace SabreTools.Serialization.Deserializers
// If we have a set to process
if (set != null)
{
set.Row = rows.ToArray();
set.Row = [.. rows];
sets.Add(set);
set = null;
rows.Clear();
}
// Add extra pieces and return
dat.Set = sets.ToArray();
dat.ADDITIONAL_ELEMENTS = additional.ToArray();
dat.Set = [.. sets];
return dat;
}
}

View File

@@ -102,7 +102,7 @@ namespace SabreTools.Serialization.Deserializers
hashTable.Add(hashEntry);
}
archive.HashTable = hashTable.ToArray();
archive.HashTable = [.. hashTable];
}
}
@@ -131,7 +131,7 @@ namespace SabreTools.Serialization.Deserializers
hashTable.Add(hashEntry);
}
archive.HashTable = hashTable.ToArray();
archive.HashTable = [.. hashTable];
}
}
@@ -160,7 +160,7 @@ namespace SabreTools.Serialization.Deserializers
hashTable.Add(hashEntry);
}
archive.HashTable = hashTable.ToArray();
archive.HashTable = [.. hashTable];
}
}
@@ -193,7 +193,7 @@ namespace SabreTools.Serialization.Deserializers
blockTable.Add(blockEntry);
}
archive.BlockTable = blockTable.ToArray();
archive.BlockTable = [.. blockTable];
}
}
@@ -222,7 +222,7 @@ namespace SabreTools.Serialization.Deserializers
blockTable.Add(blockEntry);
}
archive.BlockTable = blockTable.ToArray();
archive.BlockTable = [.. blockTable];
}
}
@@ -251,7 +251,7 @@ namespace SabreTools.Serialization.Deserializers
blockTable.Add(blockEntry);
}
archive.BlockTable = blockTable.ToArray();
archive.BlockTable = [.. blockTable];
}
}
@@ -278,7 +278,7 @@ namespace SabreTools.Serialization.Deserializers
hiBlockTable.Add(hiBlockEntry);
}
archive.HiBlockTable = hiBlockTable.ToArray();
archive.HiBlockTable = [.. hiBlockTable];
}
}

View File

@@ -21,7 +21,7 @@ namespace SabreTools.Serialization.Deserializers
return null;
// Cache the current offset
int initialOffset = (int)data.Position;
long initialOffset = data.Position;
// Create a new cart image to fill
var cart = new Cart();
@@ -62,118 +62,74 @@ namespace SabreTools.Serialization.Deserializers
#endregion
#region Partitions
// Create the partition table
cart.Partitions = new NCCHHeader[8];
// Iterate and build the partitions
for (int i = 0; i < 8; i++)
{
cart.Partitions[i] = ParseNCCHHeader(data);
}
#endregion
// Cache the media unit size for further use
long mediaUnitSize = 0;
if (header.PartitionFlags != null)
mediaUnitSize = (uint)(0x200 * Math.Pow(2, header.PartitionFlags[(int)NCSDFlags.MediaUnitSize]));
#region Extended Headers
#region Partitions
// Create the extended header table
cart.ExtendedHeaders = new NCCHExtendedHeader[8];
// Create the tables
cart.Partitions = new NCCHHeader[8];
cart.ExtendedHeaders = new NCCHExtendedHeader?[8];
cart.ExeFSHeaders = new ExeFSHeader?[8];
cart.RomFSHeaders = new RomFSHeader?[8];
// Iterate and build the extended headers
// Iterate and build the partitions
for (int i = 0; i < 8; i++)
{
// If we have an encrypted or invalid partition
if (cart.Partitions[i]!.MagicID != NCCHMagicNumber)
// Find the offset to the partition
long partitionOffset = cart.Header.PartitionsTable?[i]?.Offset ?? 0;
partitionOffset *= mediaUnitSize;
if (partitionOffset == 0)
continue;
// If we have no partitions table
if (cart.Header!.PartitionsTable == null)
// Seek to the start of the partition
data.Seek(partitionOffset, SeekOrigin.Begin);
// Handle the normal header
var partition = ParseNCCHHeader(data);
if (partition == null || partition.MagicID != NCCHMagicNumber)
continue;
// Get the extended header offset
long offset = (cart.Header.PartitionsTable[i]!.Offset * mediaUnitSize) + 0x200;
if (offset < 0 || offset >= data.Length)
continue;
// Set the normal header
cart.Partitions[i] = partition;
// Seek to the extended header
data.Seek(offset, SeekOrigin.Begin);
// Handle the extended header, if it exists
if (partition.ExtendedHeaderSizeInBytes > 0)
{
var extendedHeader = ParseNCCHExtendedHeader(data);
if (extendedHeader != null)
cart.ExtendedHeaders[i] = extendedHeader;
}
// Parse the extended header
var extendedHeader = ParseNCCHExtendedHeader(data);
if (extendedHeader != null)
cart.ExtendedHeaders[i] = extendedHeader;
}
// Handle the ExeFS, if it exists
if (partition.ExeFSSizeInMediaUnits > 0)
{
long offset = partition.ExeFSOffsetInMediaUnits * mediaUnitSize;
data.Seek(partitionOffset + offset, SeekOrigin.Begin);
#endregion
var exeFsHeader = ParseExeFSHeader(data);
if (exeFsHeader == null)
return null;
#region ExeFS Headers
cart.ExeFSHeaders[i] = exeFsHeader;
}
// Create the ExeFS header table
cart.ExeFSHeaders = new ExeFSHeader[8];
// Handle the RomFS, if it exists
if (partition.RomFSSizeInMediaUnits > 0)
{
long offset = partition.RomFSOffsetInMediaUnits * mediaUnitSize;
data.Seek(partitionOffset + offset, SeekOrigin.Begin);
// Iterate and build the ExeFS headers
for (int i = 0; i < 8; i++)
{
// If we have an encrypted or invalid partition
if (cart.Partitions[i]!.MagicID != NCCHMagicNumber)
continue;
var romFsHeader = ParseRomFSHeader(data);
if (romFsHeader == null)
continue;
else if (romFsHeader.MagicString != RomFSMagicNumber || romFsHeader.MagicNumber != RomFSSecondMagicNumber)
continue;
// If we have no partitions table
if (cart.Header!.PartitionsTable == null)
continue;
// Get the ExeFS header offset
long offset = (cart.Header.PartitionsTable[i]!.Offset + cart.Partitions[i]!.ExeFSOffsetInMediaUnits) * mediaUnitSize;
if (offset < 0 || offset >= data.Length)
continue;
// Seek to the ExeFS header
data.Seek(offset, SeekOrigin.Begin);
// Parse the ExeFS header
var exeFsHeader = ParseExeFSHeader(data);
if (exeFsHeader == null)
return null;
cart.ExeFSHeaders[i] = exeFsHeader;
}
#endregion
#region RomFS Headers
// Create the RomFS header table
cart.RomFSHeaders = new RomFSHeader[8];
// Iterate and build the RomFS headers
for (int i = 0; i < 8; i++)
{
// If we have an encrypted or invalid partition
if (cart.Partitions[i]!.MagicID != NCCHMagicNumber)
continue;
// If we have no partitions table
if (cart.Header!.PartitionsTable == null)
continue;
// Get the RomFS header offset
long offset = (cart.Header.PartitionsTable[i]!.Offset + cart.Partitions[i]!.RomFSOffsetInMediaUnits) * mediaUnitSize;
if (offset < 0 || offset >= data.Length)
continue;
// Seek to the RomFS header
data.Seek(offset, SeekOrigin.Begin);
// Parse the RomFS header
var romFsHeader = ParseRomFSHeader(data);
if (romFsHeader != null)
cart.RomFSHeaders[i] = romFsHeader;
}
}
#endregion
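
The rebuilt partition loop converts table offsets expressed in media units into absolute byte offsets before seeking; a small sketch of that arithmetic, assuming only the NCSD rule mediaUnitSize = 0x200 * 2^exponent used above (the helper name is illustrative):

// Sketch only (hypothetical helper): media-unit offset to byte offset.
internal static class MediaUnitSketch
{
    public static long ToByteOffset(long offsetInMediaUnits, int mediaUnitSizeExponent)
        => offsetInMediaUnits * (0x200L << mediaUnitSizeExponent);   // 0x200 * 2^exponent
}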
@@ -259,7 +215,43 @@ namespace SabreTools.Serialization.Deserializers
/// <returns>Filled card info header on success, null on error</returns>
public static CardInfoHeader? ParseCardInfoHeader(Stream data)
{
return data.ReadType<CardInfoHeader>();
// TODO: Use marshalling here instead of building
var header = new CardInfoHeader();
header.WritableAddressMediaUnits = data.ReadUInt32();
header.CardInfoBitmask = data.ReadUInt32();
header.Reserved1 = data.ReadBytes(0xF8);
header.FilledSize = data.ReadUInt32();
header.Reserved2 = data.ReadBytes(0x0C);
header.TitleVersion = data.ReadUInt16();
header.CardRevision = data.ReadUInt16();
header.Reserved3 = data.ReadBytes(0x0C);
header.CVerTitleID = data.ReadBytes(0x08);
header.CVerVersionNumber = data.ReadUInt16();
header.Reserved4 = data.ReadBytes(0xCD6);
header.InitialData = ParseInitialData(data);
return header;
}
/// <summary>
/// Parse a Stream into initial data
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled initial data on success, null on error</returns>
public static InitialData? ParseInitialData(Stream data)
{
// TODO: Use marshalling here instead of building
var id = new InitialData();
id.CardSeedKeyY = data.ReadBytes(0x10);
id.EncryptedCardSeed = data.ReadBytes(0x10);
id.CardSeedAESMAC = data.ReadBytes(0x10);
id.CardSeedNonce = data.ReadBytes(0x0C);
id.Reserved = data.ReadBytes(0xC4);
id.BackupHeader = ParseNCCHHeader(data, skipSignature: true);
return id;
}
/// <summary>
@@ -339,7 +331,51 @@ namespace SabreTools.Serialization.Deserializers
/// <returns>Filled NCCH extended header on success, null on error</returns>
public static NCCHExtendedHeader? ParseNCCHExtendedHeader(Stream data)
{
return data.ReadType<NCCHExtendedHeader>();
// TODO: Replace with `data.ReadType<NCCHExtendedHeader>();` when enum serialization fixed
var header = new NCCHExtendedHeader();
header.SCI = data.ReadType<SystemControlInfo>();
header.ACI = ParseAccessControlInfo(data);
header.AccessDescSignature = data.ReadBytes(0x100);
header.NCCHHDRPublicKey = data.ReadBytes(0x100);
header.ACIForLimitations = ParseAccessControlInfo(data);
return header;
}
/// <summary>
/// Parse a Stream into an access control info
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled access control info on success, null on error</returns>
public static AccessControlInfo? ParseAccessControlInfo(Stream data)
{
var aci = new AccessControlInfo();
aci.ARM11LocalSystemCapabilities = data.ReadType<ARM11LocalSystemCapabilities>();
aci.ARM11KernelCapabilities = data.ReadType<ARM11KernelCapabilities>();
aci.ARM9AccessControl = ParseARM9AccessControl(data);
return aci;
}
/// <summary>
/// Parse a Stream into an ARM9 access control
/// </summary>
/// <param name="data">Stream to parse</param>
/// <returns>Filled ARM9 access control on success, null on error</returns>
public static ARM9AccessControl? ParseARM9AccessControl(Stream data)
{
var a9ac = new ARM9AccessControl();
a9ac.Descriptors = new ARM9AccessControlDescriptors[15];
for (int i = 0; i < a9ac.Descriptors.Length; i++)
{
a9ac.Descriptors[i] = (ARM9AccessControlDescriptors)data.ReadByteValue();
}
a9ac.DescriptorVersion = data.ReadByteValue();
return a9ac;
}
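
ParseARM9AccessControl reads each descriptor byte individually rather than deserializing the enum array in one call; a sketch of the same cast-per-byte idea with Array.ConvertAll, assuming using System; the enum here is hypothetical, not the model type:

// Sketch only: build an enum array from raw bytes without enum-array marshalling.
internal enum MyDescriptor : byte { None = 0, MountNand = 1, MountSdmc = 2 }

internal static class DescriptorSketch
{
    public static MyDescriptor[] ToDescriptors(byte[] raw)
        => Array.ConvertAll(raw, b => (MyDescriptor)b);
}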
/// <summary>

View File

@@ -1,6 +1,5 @@
using System.Collections.Generic;
using System.IO;
using System.Linq;
using SabreTools.IO.Extensions;
using SabreTools.Models.NewExecutable;
using static SabreTools.Models.NewExecutable.Constants;
@@ -303,19 +302,34 @@ namespace SabreTools.Serialization.Deserializers
resourceTable.ResourceTypes = [.. resourceTypes];
// Get the full list of unique string offsets
var stringOffsets = resourceTable.ResourceTypes
.Where(rt => rt != null)
.Where(rt => !rt!.IsIntegerType() && rt!.TypeID != 0)
.Select(rt => rt!.TypeID)
.Union(resourceTable.ResourceTypes
.Where(rt => rt != null && rt!.TypeID != 0)
.SelectMany(rt => rt!.Resources ?? [])
.Where(r => !r!.IsIntegerType())
.Select(r => r!.ResourceID))
.Distinct()
.Where(o => o != 0)
.OrderBy(o => o)
.ToList();
var stringOffsets = new List<ushort>();
foreach (var rtie in resourceTable.ResourceTypes)
{
// Skip invalid entries
if (rtie == null || rtie.TypeID == 0)
continue;
// Handle offset types
if (!rtie.IsIntegerType() && !stringOffsets.Contains(rtie.TypeID))
stringOffsets.Add(rtie.TypeID);
// Handle types with resources
foreach (var rtre in rtie.Resources ?? [])
{
// Skip invalid entries
if (rtre == null || rtre.IsIntegerType() || rtre.ResourceID == 0)
continue;
// Skip already added entries
if (stringOffsets.Contains(rtre.ResourceID))
continue;
stringOffsets.Add(rtre.ResourceID);
}
}
// Order the offsets list
stringOffsets.Sort();
// Populate the type and name string dictionary
resourceTable.TypeAndNameStrings = [];
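
The unrolled loops above deduplicate with List<ushort>.Contains; if only uniqueness and ascending order matter, a SortedSet<ushort> gives the same result in one pass — a sketch under that assumption, not the library's code (assumes using System.Collections.Generic):

// Sketch only: collect unique, sorted offsets without repeated Contains scans.
var offsets = new SortedSet<ushort>();
foreach (ushort candidate in new ushort[] { 0x30, 0x10, 0x30, 0x22 })
{
    if (candidate != 0)
        offsets.Add(candidate);           // duplicates are ignored automatically
}
ushort[] ordered = [.. offsets];          // ascending order, no duplicates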

View File

@@ -110,7 +110,7 @@ namespace SabreTools.Serialization.Deserializers
}
// Set the file allocation table
cart.FileAllocationTable = fileAllocationTable.ToArray();
cart.FileAllocationTable = [.. fileAllocationTable];
#endregion
@@ -170,7 +170,7 @@ namespace SabreTools.Serialization.Deserializers
}
// Assign the folder allocation table
nameTable.FolderAllocationTable = folderAllocationTable.ToArray();
nameTable.FolderAllocationTable = [.. folderAllocationTable];
// Create a variable-length table
var nameList = new List<NameListEntry>();
@@ -184,7 +184,7 @@ namespace SabreTools.Serialization.Deserializers
}
// Assign the name list
nameTable.NameList = nameList.ToArray();
nameTable.NameList = [.. nameList];
return nameTable;
}

View File

@@ -43,7 +43,7 @@ namespace SabreTools.Serialization.Deserializers
}
// Assign the units and return
di.Units = diUnits.ToArray();
di.Units = [.. diUnits];
return di;
}

View File

@@ -1,7 +1,9 @@
using System;
using System.Collections.Generic;
using System.IO;
#if NET35_OR_GREATER || NETCOREAPP
using System.Linq;
#endif
using System.Text;
using SabreTools.IO.Extensions;
using SabreTools.Models.PortableExecutable;
@@ -678,7 +680,7 @@ namespace SabreTools.Serialization.Deserializers
_ = data.ReadByteValue();
}
return attributeCertificateTable.ToArray();
return [.. attributeCertificateTable];
}
/// <summary>
@@ -913,7 +915,7 @@ namespace SabreTools.Serialization.Deserializers
break;
}
importTable.ImportDirectoryTable = importDirectoryTable.ToArray();
importTable.ImportDirectoryTable = [.. importDirectoryTable];
// Names
for (int i = 0; i < importTable.ImportDirectoryTable.Length; i++)
@@ -981,7 +983,7 @@ namespace SabreTools.Serialization.Deserializers
break;
}
importLookupTables[i] = entryLookupTable.ToArray();
importLookupTables[i] = [.. entryLookupTable];
}
importTable.ImportLookupTables = importLookupTables;
@@ -1035,7 +1037,7 @@ namespace SabreTools.Serialization.Deserializers
break;
}
importAddressTables[i] = addressLookupTable.ToArray();
importAddressTables[i] = [.. addressLookupTable];
}
importTable.ImportAddressTables = importAddressTables;
@@ -1047,38 +1049,70 @@ namespace SabreTools.Serialization.Deserializers
|| importTable.ImportAddressTables != null && importTable.ImportAddressTables.Count > 0)
{
// Get the addresses of the hint/name table entries
List<int> hintNameTableEntryAddresses = new List<int>();
var hintNameTableEntryAddresses = new List<int>();
// If we have import lookup tables
if (importTable.ImportLookupTables != null && importLookupTables.Count > 0)
{
#if NET20
var addresses = new List<int>();
foreach (var kvp in importTable.ImportLookupTables)
{
if (kvp.Value == null)
continue;
var vaddrs = Array.ConvertAll(kvp.Value,
ilte => ilte == null ? 0 : (int)ilte.HintNameTableRVA.ConvertVirtualAddress(sections));
addresses.AddRange(vaddrs);
}
#else
var addresses = importTable.ImportLookupTables
.Where(kvp => kvp.Value != null)
.SelectMany(kvp => kvp.Value!)
.SelectMany(kvp => kvp.Value ?? [])
.Where(ilte => ilte != null)
.Select(ilte => (int)ilte!.HintNameTableRVA.ConvertVirtualAddress(sections));
#endif
hintNameTableEntryAddresses.AddRange(addresses);
}
// If we have import address tables
if (importTable.ImportAddressTables != null && importTable.ImportAddressTables.Count > 0)
{
#if NET20
var addresses = new List<int>();
foreach (var kvp in importTable.ImportAddressTables)
{
if (kvp.Value == null)
continue;
var vaddrs = Array.ConvertAll(kvp.Value,
iate => iate == null ? 0 : (int)iate.HintNameTableRVA.ConvertVirtualAddress(sections));
addresses.AddRange(vaddrs);
}
#else
var addresses = importTable.ImportAddressTables
.Where(kvp => kvp.Value != null)
.SelectMany(kvp => kvp.Value!)
.SelectMany(kvp => kvp.Value ?? [])
.Where(iate => iate != null)
.Select(iate => (int)iate!.HintNameTableRVA.ConvertVirtualAddress(sections));
#endif
hintNameTableEntryAddresses.AddRange(addresses);
}
// Sanitize the addresses
hintNameTableEntryAddresses = hintNameTableEntryAddresses.Where(addr => addr != 0)
.Distinct()
.OrderBy(a => a)
.ToList();
hintNameTableEntryAddresses = hintNameTableEntryAddresses.FindAll(addr => addr != 0);
#if NET20
var temp = new List<int>();
foreach (int value in hintNameTableEntryAddresses)
{
if (!temp.Contains(value))
temp.Add(value);
}
#else
hintNameTableEntryAddresses = hintNameTableEntryAddresses.Distinct().ToList();
#endif
hintNameTableEntryAddresses.Sort();
// If we have any addresses, add them to the table
if (hintNameTableEntryAddresses.Any())
if (hintNameTableEntryAddresses.Count > 0)
{
for (int i = 0; i < hintNameTableEntryAddresses.Count; i++)
{
@@ -1216,11 +1250,12 @@ namespace SabreTools.Serialization.Deserializers
return resourceDirectoryTable;
// If we're not aligned to a section
if (!sections.Any(s => s != null && s.PointerToRawData == initialOffset))
var firstSection = Array.Find(sections, s => s != null && s.PointerToRawData == initialOffset);
if (firstSection == null)
return resourceDirectoryTable;
// Get the section size
int size = (int)sections.First(s => s != null && s.PointerToRawData == initialOffset)!.SizeOfRawData;
int size = (int)firstSection.SizeOfRawData;
// Align to the 512-byte boundary, until we find the start of an MS-DOS header or reach the end of the file
while (data.Position - initialOffset < size && data.Position % 0x200 != 0 && data.Position < data.Length - 1)
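
The resource-alignment change above collapses an Any/First pair into a single Array.Find; a short sketch of the relevant semantics, namely that Array.Find returns the type's default instead of throwing when nothing matches (assumes using System):

// Sketch only: one scan both tests for presence and fetches the match.
string[] names = { "alpha", "beta" };
string? match = Array.Find(names, n => n.StartsWith("b"));
if (match != null)
    Console.WriteLine(match);             // null means "not found", no exception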

View File

@@ -1,6 +1,5 @@
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using SabreTools.IO.Readers;
using SabreTools.Models.RomCenter;
@@ -25,11 +24,6 @@ namespace SabreTools.Serialization.Deserializers
// Loop through and parse out the values
var roms = new List<Rom>();
var additional = new List<string>();
var creditsAdditional = new List<string>();
var datAdditional = new List<string>();
var emulatorAdditional = new List<string>();
var gamesAdditional = new List<string>();
while (!reader.EndOfStream)
{
// If we have no next line
@@ -57,10 +51,6 @@ namespace SabreTools.Serialization.Deserializers
case "games":
dat.Games ??= new Games();
break;
default:
if (reader.CurrentLine != null)
additional.Add(reader.CurrentLine);
break;
}
continue;
}
@@ -94,10 +84,6 @@ namespace SabreTools.Serialization.Deserializers
case "comment":
dat.Credits.Comment = reader.KeyValuePair?.Value;
break;
default:
if (reader.CurrentLine != null)
creditsAdditional.Add(reader.CurrentLine);
break;
}
}
@@ -121,10 +107,6 @@ namespace SabreTools.Serialization.Deserializers
case "merge":
dat.Dat.Merge = reader.KeyValuePair?.Value;
break;
default:
if (reader.CurrentLine != null)
datAdditional.Add(reader.CurrentLine);
break;
}
}
@@ -142,10 +124,6 @@ namespace SabreTools.Serialization.Deserializers
case "version":
dat.Emulator.Version = reader.KeyValuePair?.Value;
break;
default:
if (reader.CurrentLine != null)
emulatorAdditional.Add(reader.CurrentLine);
break;
}
}
@@ -156,13 +134,12 @@ namespace SabreTools.Serialization.Deserializers
dat.Games ??= new Games();
// If the line doesn't contain the delimiter
#if NETFRAMEWORK
if (!(reader.CurrentLine?.Contains("¬") ?? false))
#else
if (!(reader.CurrentLine?.Contains('¬') ?? false))
{
if (reader.CurrentLine != null)
gamesAdditional.Add(reader.CurrentLine);
#endif
continue;
}
// Otherwise, separate out the line
string[] splitLine = reader.CurrentLine.Split('¬');
@@ -181,32 +158,14 @@ namespace SabreTools.Serialization.Deserializers
// EMPTY = splitLine[10]
};
if (splitLine.Length > 11)
rom.ADDITIONAL_ELEMENTS = splitLine.Skip(11).ToArray();
roms.Add(rom);
}
else
{
if (reader.CurrentLine != null)
additional.Add(reader.CurrentLine);
}
}
// Add extra pieces and return
dat.ADDITIONAL_ELEMENTS = additional.Where(s => s != null).ToArray();
if (dat.Credits != null)
dat.Credits.ADDITIONAL_ELEMENTS = creditsAdditional.Where(s => s != null).ToArray();
if (dat.Dat != null)
dat.Dat.ADDITIONAL_ELEMENTS = datAdditional.Where(s => s != null).ToArray();
if (dat.Emulator != null)
dat.Emulator.ADDITIONAL_ELEMENTS = emulatorAdditional.Where(s => s != null).ToArray();
if (dat.Games != null)
{
dat.Games.Rom = roms.ToArray();
dat.Games.ADDITIONAL_ELEMENTS = gamesAdditional.Where(s => s != null).Select(s => s).ToArray();
}
dat.Games.Rom = [.. roms];
return dat;
}
}

View File

@@ -1,6 +1,5 @@
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using SabreTools.IO.Readers;
using SabreTools.Models.SeparatedValue;
@@ -104,7 +103,7 @@ namespace SabreTools.Serialization.Deserializers
if (!reader.ReadHeader() || reader.HeaderValues == null)
return null;
dat.Header = reader.HeaderValues.ToArray();
dat.Header = [.. reader.HeaderValues];
// Loop through the rows and parse out values
var rows = new List<Row>();
@@ -135,10 +134,6 @@ namespace SabreTools.Serialization.Deserializers
SHA256 = reader.Line[12],
Status = reader.Line[13],
};
// If we have additional fields
if (reader.Line.Count > HeaderWithoutExtendedHashesCount)
row.ADDITIONAL_ELEMENTS = reader.Line.Skip(HeaderWithoutExtendedHashesCount).ToArray();
}
else
{
@@ -162,16 +157,12 @@ namespace SabreTools.Serialization.Deserializers
SpamSum = reader.Line[15],
Status = reader.Line[16],
};
// If we have additional fields
if (reader.Line.Count > HeaderWithExtendedHashesCount)
row.ADDITIONAL_ELEMENTS = reader.Line.Skip(HeaderWithExtendedHashesCount).ToArray();
}
rows.Add(row);
}
// Assign the rows to the Dat and return
dat.Row = rows.ToArray();
dat.Row = [.. rows];
return dat;
}

View File

@@ -195,7 +195,7 @@ namespace SabreTools.Serialization.Deserializers
}
}
return directoryItems.ToArray();
return [.. directoryItems];
}
/// <summary>

View File

@@ -184,7 +184,7 @@ namespace SabreTools.Serialization.Deserializers
private static DirectoryItem ParseDirectoryItem(Stream data)
{
// TODO: Use marshalling here instead of building
DirectoryItem directoryItem = new DirectoryItem();
var directoryItem = new DirectoryItem();
directoryItem.FileNameCRC = data.ReadUInt32();
directoryItem.NameOffset = data.ReadUInt32();

View File

@@ -0,0 +1,9 @@
#if NET20
namespace System.Runtime.CompilerServices
{
[AttributeUsage(AttributeTargets.Assembly | AttributeTargets.Class | AttributeTargets.Method)]
internal sealed class ExtensionAttribute : Attribute {}
}
#endif
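
This shim declares the attribute the compiler emits for extension methods, so the net20 target can compile them; a minimal sketch of an extension method that the shim enables (the extension itself is illustrative, not part of the library):

// Sketch only: compiles under NET20 once ExtensionAttribute is defined.
internal static class StreamExtensionsSketch
{
    public static bool IsAtEnd(this System.IO.Stream stream)
        => stream.Position >= stream.Length;
}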

View File

@@ -1,7 +1,6 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Xml.Serialization;
using SabreTools.IO.Extensions;
@@ -28,7 +27,7 @@ namespace SabreTools.Serialization
return 0;
// If the RVA matches a section start exactly, use that
var matchingSection = sections.FirstOrDefault(s => s != null && s.VirtualAddress == rva);
var matchingSection = Array.Find(sections, s => s != null && s.VirtualAddress == rva);
if (matchingSection != null)
return rva - matchingSection.VirtualAddress + matchingSection.PointerToRawData;
@@ -165,7 +164,9 @@ namespace SabreTools.Serialization
if (string.IsNullOrEmpty(addD.Version))
offset = originalOffset + 0x10;
addD.Build = data.ReadBytes(ref offset, 4)?.Select(b => (char)b)?.ToArray();
var buildBytes = data.ReadBytes(ref offset, 4);
var buildChars = Array.ConvertAll(buildBytes, b => (char)b);
addD.Build = buildChars;
// Distinguish between v1 and v2
int bytesToRead = 112; // v2

View File

@@ -39,6 +39,7 @@ namespace SabreTools.Serialization
Wrapper.BFPK item => item.PrettyPrint(),
Wrapper.BSP item => item.PrettyPrint(),
Wrapper.CFB item => item.PrettyPrint(),
Wrapper.CHD item => item.PrettyPrint(),
Wrapper.CIA item => item.PrettyPrint(),
Wrapper.GCF item => item.PrettyPrint(),
Wrapper.InstallShieldCabinet item => item.PrettyPrint(),
@@ -83,6 +84,7 @@ namespace SabreTools.Serialization
Wrapper.BFPK item => item.ExportJSON(),
Wrapper.BSP item => item.ExportJSON(),
Wrapper.CFB item => item.ExportJSON(),
Wrapper.CHD item => item.ExportJSON(),
Wrapper.CIA item => item.ExportJSON(),
Wrapper.GCF item => item.ExportJSON(),
Wrapper.InstallShieldCabinet item => item.ExportJSON(),
@@ -167,6 +169,16 @@ namespace SabreTools.Serialization
return builder;
}
/// <summary>
/// Export the item information as pretty-printed text
/// </summary>
private static StringBuilder PrettyPrint(this Wrapper.CHD item)
{
var builder = new StringBuilder();
CHD.Print(builder, item.Model);
return builder;
}
/// <summary>
/// Export the item information as pretty-printed text
/// </summary>

View File

@@ -0,0 +1,159 @@
using System;
using System.Collections.Generic;
using System.Text;
using SabreTools.Models.CHD;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Printers
{
public class CHD : IPrinter<Header>
{
/// <inheritdoc/>
public void PrintInformation(StringBuilder builder, Header model)
=> Print(builder, model);
public static void Print(StringBuilder builder, Header header)
{
builder.AppendLine("CHD Header Information:");
builder.AppendLine("-------------------------");
if (header == null)
{
builder.AppendLine("No header");
builder.AppendLine();
return;
}
switch (header)
{
case HeaderV1 v1:
Print(builder, v1);
break;
case HeaderV2 v2:
Print(builder, v2);
break;
case HeaderV3 v3:
Print(builder, v3);
break;
case HeaderV4 v4:
Print(builder, v4);
break;
case HeaderV5 v5:
Print(builder, v5);
break;
default:
builder.AppendLine("Unrecognized header type");
builder.AppendLine();
break;
}
}
private static void Print(StringBuilder builder, HeaderV1 header)
{
builder.AppendLine(header.Tag, $"Tag");
builder.AppendLine(header.Length, $"Length");
builder.AppendLine(header.Version, $"Version");
builder.AppendLine($" Flags: {header.Flags} (0x{header.Flags:X})");
builder.AppendLine($" Compression: {header.Compression} (0x{header.Compression:X})");
builder.AppendLine(header.HunkSize, $"Hunk size");
builder.AppendLine(header.TotalHunks, $"Total hunks");
builder.AppendLine(header.Cylinders, $"Cylinders");
builder.AppendLine(header.Heads, $"Heads");
builder.AppendLine(header.Sectors, $"Sectors");
builder.AppendLine(header.MD5, $"MD5");
builder.AppendLine(header.ParentMD5, $"Parent MD5");
builder.AppendLine();
}
private static void Print(StringBuilder builder, HeaderV2 header)
{
builder.AppendLine(header.Tag, $"Tag");
builder.AppendLine(header.Length, $"Length");
builder.AppendLine(header.Version, $"Version");
builder.AppendLine($" Flags: {header.Flags} (0x{header.Flags:X})");
builder.AppendLine($" Compression: {header.Compression} (0x{header.Compression:X})");
builder.AppendLine(header.HunkSize, $"Hunk size");
builder.AppendLine(header.TotalHunks, $"Total hunks");
builder.AppendLine(header.Cylinders, $"Cylinders");
builder.AppendLine(header.Heads, $"Heads");
builder.AppendLine(header.Sectors, $"Sectors");
builder.AppendLine(header.MD5, $"MD5");
builder.AppendLine(header.ParentMD5, $"Parent MD5");
builder.AppendLine(header.BytesPerSector, $"Bytes per sector");
builder.AppendLine();
}
private static void Print(StringBuilder builder, HeaderV3 header)
{
builder.AppendLine(header.Tag, $"Tag");
builder.AppendLine(header.Length, $"Length");
builder.AppendLine(header.Version, $"Version");
builder.AppendLine($" Flags: {header.Flags} (0x{header.Flags:X})");
builder.AppendLine($" Compression: {header.Compression} (0x{header.Compression:X})");
builder.AppendLine(header.TotalHunks, $"Total hunks");
builder.AppendLine(header.LogicalBytes, $"Logical bytes");
builder.AppendLine(header.MetaOffset, $"Meta offset");
builder.AppendLine(header.MD5, $"MD5");
builder.AppendLine(header.ParentMD5, $"Parent MD5");
builder.AppendLine(header.HunkBytes, $"Hunk bytes");
builder.AppendLine(header.SHA1, $"SHA-1");
builder.AppendLine(header.ParentSHA1, $"Parent SHA-1");
builder.AppendLine();
}
private static void Print(StringBuilder builder, HeaderV4 header)
{
builder.AppendLine(header.Tag, $"Tag");
builder.AppendLine(header.Length, $"Length");
builder.AppendLine(header.Version, $"Version");
builder.AppendLine($" Flags: {header.Flags} (0x{header.Flags:X})");
builder.AppendLine($" Compression: {header.Compression} (0x{header.Compression:X})");
builder.AppendLine(header.TotalHunks, $"Total hunks");
builder.AppendLine(header.LogicalBytes, $"Logical bytes");
builder.AppendLine(header.MetaOffset, $"Meta offset");
builder.AppendLine(header.HunkBytes, $"Hunk bytes");
builder.AppendLine(header.SHA1, $"SHA-1");
builder.AppendLine(header.ParentSHA1, $"Parent SHA-1");
builder.AppendLine(header.RawSHA1, $"Raw SHA-1");
builder.AppendLine();
}
private static void Print(StringBuilder builder, HeaderV5 header)
{
builder.AppendLine(header.Tag, $"Tag");
builder.AppendLine(header.Length, $"Length");
builder.AppendLine(header.Version, $"Version");
string compressorsLine = "Compressors: ";
if (header.Compressors == null)
{
compressorsLine += "[NULL]";
}
else
{
var compressors = new List<string>();
for (int i = 0; i < header.Compressors.Length; i++)
{
uint compressor = (uint)header.Compressors[i];
byte[] compressorBytes = BitConverter.GetBytes(compressor);
Array.Reverse(compressorBytes);
string compressorString = Encoding.ASCII.GetString(compressorBytes);
compressors.Add(compressorString);
}
compressorsLine += string.Join(", ", [.. compressors]);
}
builder.AppendLine(compressorsLine);
builder.AppendLine(header.LogicalBytes, $"Logical bytes");
builder.AppendLine(header.MapOffset, $"Map offset");
builder.AppendLine(header.MetaOffset, $"Meta offset");
builder.AppendLine(header.HunkBytes, $"Hunk bytes");
builder.AppendLine(header.UnitBytes, $"Unit bytes");
builder.AppendLine(header.RawSHA1, $"Raw SHA-1");
builder.AppendLine(header.SHA1, $"SHA-1");
builder.AppendLine(header.ParentSHA1, $"Parent SHA-1");
builder.AppendLine();
}
}
}
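
The V5 compressor loop treats each compressor value as a big-endian FourCC tag; a sketch of that decoding on a little-endian host, assuming using System and System.Text (0x7A6C6962 decodes to "zlib"):

// Sketch only: decode a CHD V5 compressor code into its ASCII tag.
internal static class FourCCSketch
{
    public static string Decode(uint code)
    {
        byte[] bytes = BitConverter.GetBytes(code);  // machine order (little-endian on common hosts)
        Array.Reverse(bytes);                        // restore big-endian FourCC order
        return Encoding.ASCII.GetString(bytes);
    }
}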

View File

@@ -315,27 +315,19 @@ namespace SabreTools.Serialization.Printers
builder.AppendLine(fileGroup.NameOffset, " Name offset");
builder.AppendLine(fileGroup.Name, " Name");
builder.AppendLine(fileGroup.ExpandedSize, " Expanded size");
builder.AppendLine(fileGroup.Reserved0, " Reserved 0");
builder.AppendLine(fileGroup.CompressedSize, " Compressed size");
builder.AppendLine(fileGroup.Reserved1, " Reserved 1");
builder.AppendLine(fileGroup.Reserved2, " Reserved 2");
builder.AppendLine(fileGroup.Attribute1, " Attribute 1");
builder.AppendLine(fileGroup.Attribute2, " Attribute 2");
builder.AppendLine($" Attributes: {fileGroup.Attributes} (0x{fileGroup.Attributes:X})");
builder.AppendLine(fileGroup.FirstFile, " First file");
builder.AppendLine(fileGroup.LastFile, " Last file");
builder.AppendLine(fileGroup.UnknownOffset, " Unknown offset");
builder.AppendLine(fileGroup.Var4Offset, " Var 4 offset");
builder.AppendLine(fileGroup.Var1Offset, " Var 1 offset");
builder.AppendLine(fileGroup.UnknownStringOffset, " Unknown string offset");
builder.AppendLine(fileGroup.OperatingSystemOffset, " Operating system offset");
builder.AppendLine(fileGroup.LanguageOffset, " Language offset");
builder.AppendLine(fileGroup.HTTPLocationOffset, " HTTP location offset");
builder.AppendLine(fileGroup.FTPLocationOffset, " FTP location offset");
builder.AppendLine(fileGroup.MiscOffset, " Misc. offset");
builder.AppendLine(fileGroup.Var2Offset, " Var 2 offset");
builder.AppendLine(fileGroup.TargetDirectoryOffset, " Target directory offset");
builder.AppendLine(fileGroup.Reserved3, " Reserved 3");
builder.AppendLine(fileGroup.Reserved4, " Reserved 4");
builder.AppendLine(fileGroup.Reserved5, " Reserved 5");
builder.AppendLine(fileGroup.Reserved6, " Reserved 6");
builder.AppendLine(fileGroup.Reserved7, " Reserved 7");
builder.AppendLine($" Overwrite flags: {fileGroup.OverwriteFlags} (0x{fileGroup.OverwriteFlags:X})");
builder.AppendLine(fileGroup.Reserved, " Reserved");
}
builder.AppendLine();
}
@@ -366,16 +358,16 @@ namespace SabreTools.Serialization.Printers
builder.AppendLine(component.DescriptorOffset, " Descriptor offset");
builder.AppendLine(component.DisplayNameOffset, " Display name offset");
builder.AppendLine(component.DisplayName, " Display name");
builder.AppendLine(component.Reserved0, " Reserved 0");
builder.AppendLine(component.ReservedOffset0, " Reserved offset 0");
builder.AppendLine(component.ReservedOffset1, " Reserved offset 1");
builder.AppendLine($" Status: {component.Status} (0x{component.Status:X})");
builder.AppendLine(component.PasswordOffset, " Password offset");
builder.AppendLine(component.MiscOffset, " Misc. offset");
builder.AppendLine(component.ComponentIndex, " Component index");
builder.AppendLine(component.NameOffset, " Name offset");
builder.AppendLine(component.Name, " Name");
builder.AppendLine(component.ReservedOffset2, " Reserved offset 2");
builder.AppendLine(component.ReservedOffset3, " Reserved offset 3");
builder.AppendLine(component.ReservedOffset4, " Reserved offset 4");
builder.AppendLine(component.Reserved1, " Reserved 1");
builder.AppendLine(component.CDRomFolderOffset, " CD-ROM folder offset");
builder.AppendLine(component.HTTPLocationOffset, " HTTP location offset");
builder.AppendLine(component.FTPLocationOffset, " FTP location offset");
builder.AppendLine(component.Guid, " GUIDs");
builder.AppendLine(component.CLSIDOffset, " CLSID offset");
builder.AppendLine(component.CLSID, " CLSID");
builder.AppendLine(component.Reserved2, " Reserved 2");
@@ -406,10 +398,10 @@ namespace SabreTools.Serialization.Printers
builder.AppendLine(component.SubComponentsCount, " Sub-components count");
builder.AppendLine(component.SubComponentsOffset, " Sub-components offset");
builder.AppendLine(component.NextComponentOffset, " Next component offset");
builder.AppendLine(component.ReservedOffset5, " Reserved offset 5");
builder.AppendLine(component.ReservedOffset6, " Reserved offset 6");
builder.AppendLine(component.ReservedOffset7, " Reserved offset 7");
builder.AppendLine(component.ReservedOffset8, " Reserved offset 8");
builder.AppendLine(component.OnInstallingOffset, " On installing offset");
builder.AppendLine(component.OnInstalledOffset, " On installed offset");
builder.AppendLine(component.OnUninstallingOffset, " On uninstalling offset");
builder.AppendLine(component.OnUninstalledOffset, " On uninstalled offset");
}
builder.AppendLine();
}

View File

@@ -1,3 +1,4 @@
using System;
using System.Text;
using SabreTools.Models.N3DS;
using SabreTools.Serialization.Interfaces;
@@ -44,26 +45,26 @@ namespace SabreTools.Serialization.Printers
builder.AppendLine(header.PartitionsCryptType, " Partitions crypt type");
builder.AppendLine();
builder.AppendLine(" Partition table:");
builder.AppendLine(" -------------------------");
builder.AppendLine(" Partition table:");
builder.AppendLine(" -------------------------");
if (header.PartitionsTable == null || header.PartitionsTable.Length == 0)
{
builder.AppendLine(" No partition table entries");
builder.AppendLine(" No partition table entries");
}
else
{
for (int i = 0; i < header.PartitionsTable.Length; i++)
{
var partitionTableEntry = header.PartitionsTable[i];
builder.AppendLine($" Partition table entry {i}");
builder.AppendLine($" Partition table entry {i}");
if (partitionTableEntry == null)
{
builder.AppendLine(" [NULL]");
builder.AppendLine(" [NULL]");
continue;
}
builder.AppendLine(partitionTableEntry.Offset, " Offset");
builder.AppendLine(partitionTableEntry.Length, " Length");
builder.AppendLine(partitionTableEntry.Offset, " Offset");
builder.AppendLine(partitionTableEntry.Length, " Length");
}
}
builder.AppendLine();
@@ -77,17 +78,17 @@ namespace SabreTools.Serialization.Printers
builder.AppendLine(header.PartitionFlags, " Partition flags");
builder.AppendLine();
builder.AppendLine(" Partition ID table:");
builder.AppendLine(" -------------------------");
builder.AppendLine(" Partition ID table:");
builder.AppendLine(" -------------------------");
if (header.PartitionIdTable == null || header.PartitionIdTable.Length == 0)
{
builder.AppendLine(" No partition ID table entries");
builder.AppendLine(" No partition ID table entries");
}
else
{
for (int i = 0; i < header.PartitionIdTable.Length; i++)
{
builder.AppendLine(header.PartitionIdTable[i], $" Partition {i} ID");
builder.AppendLine(header.PartitionIdTable[i], $" Partition {i} ID");
}
}
builder.AppendLine();
@@ -131,6 +132,8 @@ namespace SabreTools.Serialization.Printers
builder.AppendLine(header.CVerVersionNumber, " Version number of CVer in included update partition");
builder.AppendLine(header.Reserved4, " Reserved 4");
builder.AppendLine();
Print(builder, header.InitialData);
}
private static void Print(StringBuilder builder, DevelopmentCardInfoHeader? header)
@@ -144,62 +147,6 @@ namespace SabreTools.Serialization.Printers
return;
}
builder.AppendLine();
builder.AppendLine(" Initial Data:");
builder.AppendLine(" -------------------------");
if (header.InitialData == null)
{
builder.AppendLine(" No initial data");
}
else
{
builder.AppendLine(header.InitialData.CardSeedKeyY, " Card seed keyY");
builder.AppendLine(header.InitialData.EncryptedCardSeed, " Encrypted card seed");
builder.AppendLine(header.InitialData.CardSeedAESMAC, " Card seed AES-MAC");
builder.AppendLine(header.InitialData.CardSeedNonce, " Card seed nonce");
builder.AppendLine(header.InitialData.Reserved, " Reserved");
builder.AppendLine();
builder.AppendLine(" Backup Header:");
builder.AppendLine(" -------------------------");
if (header.InitialData.BackupHeader == null)
{
builder.AppendLine(" No backup header");
}
else
{
builder.AppendLine(header.InitialData.BackupHeader.MagicID, " Magic ID");
builder.AppendLine(header.InitialData.BackupHeader.ContentSizeInMediaUnits, " Content size in media units");
builder.AppendLine(header.InitialData.BackupHeader.PartitionId, " Partition ID");
builder.AppendLine(header.InitialData.BackupHeader.MakerCode, " Maker code");
builder.AppendLine(header.InitialData.BackupHeader.Version, " Version");
builder.AppendLine(header.InitialData.BackupHeader.VerificationHash, " Verification hash");
builder.AppendLine(header.InitialData.BackupHeader.ProgramId, " Program ID");
builder.AppendLine(header.InitialData.BackupHeader.Reserved1, " Reserved 1");
builder.AppendLine(header.InitialData.BackupHeader.LogoRegionHash, " Logo region SHA-256 hash");
builder.AppendLine(header.InitialData.BackupHeader.ProductCode, " Product code");
builder.AppendLine(header.InitialData.BackupHeader.ExtendedHeaderHash, " Extended header SHA-256 hash");
builder.AppendLine(header.InitialData.BackupHeader.ExtendedHeaderSizeInBytes, " Extended header size in bytes");
builder.AppendLine(header.InitialData.BackupHeader.Reserved2, " Reserved 2");
builder.AppendLine($" Flags: {header.InitialData.BackupHeader.Flags} (0x{header.InitialData.BackupHeader.Flags:X})");
builder.AppendLine(header.InitialData.BackupHeader.PlainRegionOffsetInMediaUnits, " Plain region offset, in media units");
builder.AppendLine(header.InitialData.BackupHeader.PlainRegionSizeInMediaUnits, " Plain region size, in media units");
builder.AppendLine(header.InitialData.BackupHeader.LogoRegionOffsetInMediaUnits, " Logo region offset, in media units");
builder.AppendLine(header.InitialData.BackupHeader.LogoRegionSizeInMediaUnits, " Logo region size, in media units");
builder.AppendLine(header.InitialData.BackupHeader.ExeFSOffsetInMediaUnits, " ExeFS offset, in media units");
builder.AppendLine(header.InitialData.BackupHeader.ExeFSSizeInMediaUnits, " ExeFS size, in media units");
builder.AppendLine(header.InitialData.BackupHeader.ExeFSHashRegionSizeInMediaUnits, " ExeFS hash region size, in media units");
builder.AppendLine(header.InitialData.BackupHeader.Reserved3, " Reserved 3");
builder.AppendLine(header.InitialData.BackupHeader.RomFSOffsetInMediaUnits, " RomFS offset, in media units");
builder.AppendLine(header.InitialData.BackupHeader.RomFSSizeInMediaUnits, " RomFS size, in media units");
builder.AppendLine(header.InitialData.BackupHeader.RomFSHashRegionSizeInMediaUnits, " RomFS hash region size, in media units");
builder.AppendLine(header.InitialData.BackupHeader.Reserved4, " Reserved 4");
builder.AppendLine(header.InitialData.BackupHeader.ExeFSSuperblockHash, " ExeFS superblock SHA-256 hash");
builder.AppendLine(header.InitialData.BackupHeader.RomFSSuperblockHash, " RomFS superblock SHA-256 hash");
}
}
builder.AppendLine();
builder.AppendLine(header.CardDeviceReserved1, " Card device reserved 1");
builder.AppendLine(header.TitleKey, " Title key");
builder.AppendLine(header.CardDeviceReserved2, " Card device reserved 2");
@@ -227,6 +174,96 @@ namespace SabreTools.Serialization.Printers
builder.AppendLine();
}
private static void Print(StringBuilder builder, InitialData? id)
{
builder.AppendLine(" Initial Data Information:");
builder.AppendLine(" -------------------------");
if (id == null)
{
builder.AppendLine(" No initial data");
builder.AppendLine();
return;
}
builder.AppendLine(id.CardSeedKeyY, " Card seed KeyY");
builder.AppendLine(id.EncryptedCardSeed, " Encrypted card seed");
builder.AppendLine(id.CardSeedAESMAC, " Card seed AES-MAC");
builder.AppendLine(id.CardSeedNonce, " Card seed nonce");
builder.AppendLine(id.Reserved, " Reserved");
builder.AppendLine();
PrintBackup(builder, id.BackupHeader);
}
private static void PrintBackup(StringBuilder builder, NCCHHeader? header)
{
builder.AppendLine(" Backup NCCH Header Information:");
builder.AppendLine(" -------------------------");
if (header == null)
{
builder.AppendLine(" No backup NCCH header");
builder.AppendLine();
return;
}
if (header.MagicID == string.Empty)
{
builder.AppendLine(" Empty backup header, no data can be parsed");
}
else if (header.MagicID != Constants.NCCHMagicNumber)
{
builder.AppendLine(" Unrecognized backup header, no data can be parsed");
}
else
{
// Backup header omits RSA signature
builder.AppendLine(header.MagicID, " Magic ID");
builder.AppendLine(header.ContentSizeInMediaUnits, " Content size in media units");
builder.AppendLine(header.PartitionId, " Partition ID");
builder.AppendLine(header.MakerCode, " Maker code");
builder.AppendLine(header.Version, " Version");
builder.AppendLine(header.VerificationHash, " Verification hash");
builder.AppendLine(header.ProgramId, " Program ID");
builder.AppendLine(header.Reserved1, " Reserved 1");
builder.AppendLine(header.LogoRegionHash, " Logo region SHA-256 hash");
builder.AppendLine(header.ProductCode, " Product code");
builder.AppendLine(header.ExtendedHeaderHash, " Extended header SHA-256 hash");
builder.AppendLine(header.ExtendedHeaderSizeInBytes, " Extended header size in bytes");
builder.AppendLine(header.Reserved2, " Reserved 2");
builder.AppendLine(" Flags:");
if (header.Flags == null)
{
builder.AppendLine(" [NULL]");
}
else
{
builder.AppendLine(header.Flags.Reserved0, " Reserved 0");
builder.AppendLine(header.Flags.Reserved1, " Reserved 1");
builder.AppendLine(header.Flags.Reserved2, " Reserved 2");
builder.AppendLine($" Crypto method: {header.Flags.CryptoMethod} (0x{header.Flags.CryptoMethod:X})");
builder.AppendLine($" Content platform: {header.Flags.ContentPlatform} (0x{header.Flags.ContentPlatform:X})");
builder.AppendLine($" Content type: {header.Flags.MediaPlatformIndex} (0x{header.Flags.MediaPlatformIndex:X})");
builder.AppendLine(header.Flags.ContentUnitSize, " Content unit size");
builder.AppendLine($" Bitmasks: {header.Flags.BitMasks} (0x{header.Flags.BitMasks:X})");
}
builder.AppendLine(header.PlainRegionOffsetInMediaUnits, " Plain region offset, in media units");
builder.AppendLine(header.PlainRegionSizeInMediaUnits, " Plain region size, in media units");
builder.AppendLine(header.LogoRegionOffsetInMediaUnits, " Logo region offset, in media units");
builder.AppendLine(header.LogoRegionSizeInMediaUnits, " Logo region size, in media units");
builder.AppendLine(header.ExeFSOffsetInMediaUnits, " ExeFS offset, in media units");
builder.AppendLine(header.ExeFSSizeInMediaUnits, " ExeFS size, in media units");
builder.AppendLine(header.ExeFSHashRegionSizeInMediaUnits, " ExeFS hash region size, in media units");
builder.AppendLine(header.Reserved3, " Reserved 3");
builder.AppendLine(header.RomFSOffsetInMediaUnits, " RomFS offset, in media units");
builder.AppendLine(header.RomFSSizeInMediaUnits, " RomFS size, in media units");
builder.AppendLine(header.RomFSHashRegionSizeInMediaUnits, " RomFS hash region size, in media units");
builder.AppendLine(header.Reserved4, " Reserved 4");
builder.AppendLine(header.ExeFSSuperblockHash, " ExeFS superblock SHA-256 hash");
builder.AppendLine(header.RomFSSuperblockHash, " RomFS superblock SHA-256 hash");
}
builder.AppendLine();
}
private static void Print(StringBuilder builder, NCCHHeader?[]? entries)
{
builder.AppendLine(" NCCH Partition Header Information:");
@@ -455,14 +492,20 @@ namespace SabreTools.Serialization.Printers
}
else
{
builder.AppendLine(entry.ACI.ARM9AccessControl.Descriptors, " Descriptors");
string descriptorsStr = "[NULL]";
if (entry.ACI.ARM9AccessControl.Descriptors != null)
{
var descriptors = Array.ConvertAll(entry.ACI.ARM9AccessControl.Descriptors, d => d.ToString());
descriptorsStr = string.Join(", ", descriptors);
}
builder.AppendLine(descriptorsStr, " Descriptors");
builder.AppendLine(entry.ACI.ARM9AccessControl.DescriptorVersion, " Descriptor version");
}
builder.AppendLine(entry.AccessDescSignature, " AccessDesc signature (RSA-2048-SHA256)");
builder.AppendLine(entry.NCCHHDRPublicKey, " NCCH HDR RSA-2048 public key");
}
builder.AppendLine(entry.AccessDescSignature, " AccessDesc signature (RSA-2048-SHA256)");
builder.AppendLine(entry.NCCHHDRPublicKey, " NCCH HDR RSA-2048 public key");
builder.AppendLine(" Access control info (for limitations of first ACI):");
if (entry.ACIForLimitations == null)
{
@@ -523,7 +566,13 @@ namespace SabreTools.Serialization.Printers
}
else
{
builder.AppendLine(entry.ACIForLimitations.ARM9AccessControl.Descriptors, " Descriptors");
string descriptorsStr = "[NULL]";
if (entry.ACIForLimitations.ARM9AccessControl.Descriptors != null)
{
var descriptors = Array.ConvertAll(entry.ACIForLimitations.ARM9AccessControl.Descriptors, d => d.ToString());
descriptorsStr = string.Join(", ", descriptors);
}
builder.AppendLine(descriptorsStr, " Descriptors");
builder.AppendLine(entry.ACIForLimitations.ARM9AccessControl.DescriptorVersion, " Descriptor version");
}
}

View File

@@ -1,13 +1,11 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Xml;
using SabreTools.ASN1;
using SabreTools.IO.Extensions;
using SabreTools.Models.PortableExecutable;
using SabreTools.Serialization.Interfaces;
using static SabreTools.Serialization.Extensions;
namespace SabreTools.Serialization.Printers
{
@@ -254,7 +252,7 @@ namespace SabreTools.Serialization.Printers
}
if (header.DelayImportDescriptor != null)
{
builder.AppendLine(" Delay Import Descriptior (14)");
builder.AppendLine(" Delay Import Descriptor (14)");
builder.AppendLine(header.DelayImportDescriptor.VirtualAddress, " Virtual address");
builder.AppendLine(header.DelayImportDescriptor.VirtualAddress.ConvertVirtualAddress(table), " Physical address");
builder.AppendLine(header.DelayImportDescriptor.Size, " Size");
@@ -999,7 +997,8 @@ namespace SabreTools.Serialization.Printers
string padding = new(' ', (level + 1) * 2);
// TODO: Use ordered list of base types to determine the shape of the data
builder.AppendLine($"{padding}Base types: {string.Join(", ", types.Select(t => t.ToString()).ToArray())}");
var baseTypes = Array.ConvertAll(types.ToArray(), t => t.ToString());
builder.AppendLine($"{padding}Base types: {string.Join(", ", baseTypes)}");
builder.AppendLine(level, $"{padding}Entry level");
builder.AppendLine(entry.DataRVA, $"{padding}Data RVA");

View File

@@ -1,37 +1,39 @@
<Project Sdk="Microsoft.NET.Sdk">
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<!-- Assembly Properties -->
<TargetFrameworks>net20;net35;net40;net452;net462;net472;net48;netcoreapp3.1;net5.0;net6.0;net7.0;net8.0</TargetFrameworks>
<RuntimeIdentifiers>win-x86;win-x64;win-arm64;linux-x64;linux-arm64;osx-x64;osx-arm64</RuntimeIdentifiers>
<AllowUnsafeBlocks>true</AllowUnsafeBlocks>
<LangVersion>latest</LangVersion>
<Nullable>enable</Nullable>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<Version>1.6.5</Version>
<PropertyGroup>
<!-- Assembly Properties -->
<TargetFrameworks>net20;net35;net40;net452;net462;net472;net48;netcoreapp3.1;net5.0;net6.0;net7.0;net8.0;net9.0</TargetFrameworks>
<AllowUnsafeBlocks>true</AllowUnsafeBlocks>
<CheckEolTargetFramework>false</CheckEolTargetFramework>
<IncludeSourceRevisionInInformationalVersion>false</IncludeSourceRevisionInInformationalVersion>
<LangVersion>latest</LangVersion>
<Nullable>enable</Nullable>
<SuppressTfmSupportBuildWarnings>true</SuppressTfmSupportBuildWarnings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<Version>1.7.5</Version>
<!-- Package Properties -->
<Authors>Matt Nadareski</Authors>
<Description>Serialization and deserialization helpers for various types</Description>
<Copyright>Copyright (c) Matt Nadareski 2019-2024</Copyright>
<PackageProjectUrl>https://github.com/SabreTools/</PackageProjectUrl>
<PackageReadmeFile>README.md</PackageReadmeFile>
<RepositoryUrl>https://github.com/SabreTools/SabreTools.Serialization</RepositoryUrl>
<RepositoryType>git</RepositoryType>
<PackageTags>serialize serialization deserialize deserialization file stream</PackageTags>
<PackageLicenseExpression>MIT</PackageLicenseExpression>
</PropertyGroup>
<!-- Package Properties -->
<Authors>Matt Nadareski</Authors>
<Description>Serialization and deserialization helpers for various types</Description>
<Copyright>Copyright (c) Matt Nadareski 2019-2024</Copyright>
<PackageProjectUrl>https://github.com/SabreTools/</PackageProjectUrl>
<PackageReadmeFile>README.md</PackageReadmeFile>
<RepositoryUrl>https://github.com/SabreTools/SabreTools.Serialization</RepositoryUrl>
<RepositoryType>git</RepositoryType>
<PackageTags>serialize serialization deserialize deserialization file stream</PackageTags>
<PackageLicenseExpression>MIT</PackageLicenseExpression>
</PropertyGroup>
<ItemGroup>
<None Include="../README.md" Pack="true" PackagePath="" />
</ItemGroup>
<ItemGroup>
<None Include="../README.md" Pack="true" PackagePath="" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Newtonsoft.Json" Version="13.0.3" />
<PackageReference Include="SabreTools.ASN1" Version="1.3.2" />
<PackageReference Include="SabreTools.Hashing" Version="1.2.0" />
<PackageReference Include="SabreTools.IO" Version="1.4.11" />
<PackageReference Include="SabreTools.Models" Version="1.4.8" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Newtonsoft.Json" Version="13.0.3" />
<PackageReference Include="SabreTools.ASN1" Version="1.4.1" />
<PackageReference Include="SabreTools.Hashing" Version="1.4.0" />
<PackageReference Include="SabreTools.IO" Version="1.5.1" />
<PackageReference Include="SabreTools.Models" Version="1.5.1" />
</ItemGroup>
</Project>
</Project>

View File

@@ -1,5 +1,4 @@
using System.IO;
using System.Linq;
using System.Text;
using SabreTools.IO.Writers;
using SabreTools.Models.AttractMode;
@@ -84,7 +83,7 @@ namespace SabreTools.Serialization.Serializers
return null;
byte[] bytes = new byte[stream.Length];
stream.Read(bytes, 0, bytes.Length);
int read = stream.Read(bytes, 0, bytes.Length);
return bytes;
}
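
The change above captures the return value of Stream.Read, which may legally be smaller than the requested count; a defensive sketch that loops until the buffer is filled or the stream ends — an assumption about intent, not the library's implementation (assumes using System.IO):

// Sketch only: read exactly buffer.Length bytes, or stop early at end of stream.
internal static class StreamReadSketch
{
    public static int ReadFully(Stream stream, byte[] buffer)
    {
        int total = 0;
        while (total < buffer.Length)
        {
            int read = stream.Read(buffer, total, buffer.Length - total);
            if (read == 0)
                break;                    // end of stream
            total += read;
        }
        return total;
    }
}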
@@ -170,7 +169,7 @@ namespace SabreTools.Serialization.Serializers
private static void WriteRows(Row?[]? rows, SeparatedValueWriter writer, bool longHeader)
{
// If the games information is missing, we can't do anything
if (rows == null || !rows.Any())
if (rows == null || rows.Length == 0)
return;
// Loop through and write out the rows

View File

@@ -1,7 +1,5 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Reflection;
using SabreTools.Serialization.Interfaces;
@@ -27,7 +25,7 @@ namespace SabreTools.Serialization.Serializers
return null;
byte[] bytes = new byte[stream.Length];
stream.Read(bytes, 0, bytes.Length);
int read = stream.Read(bytes, 0, bytes.Length);
return bytes;
}
@@ -107,12 +105,13 @@ namespace SabreTools.Serialization.Serializers
if (serializerName == null)
return default;
// If the serializer has no model type
Type? modelType = typeof(TSerializer).GetGenericArguments()?.FirstOrDefault();
if (modelType == null)
// If the serializer has no generic arguments
var genericArgs = typeof(TSerializer).GetGenericArguments();
if (genericArgs == null || genericArgs.Length == 0)
return default;
// Loop through all loaded assemblies
Type modelType = genericArgs[0];
foreach (var assembly in AppDomain.CurrentDomain.GetAssemblies())
{
// If the assembly is invalid
@@ -120,19 +119,23 @@ namespace SabreTools.Serialization.Serializers
return default;
// If not all types can be loaded, use the ones that could be
List<Type> assemblyTypes = [];
Type?[] assemblyTypes = [];
try
{
assemblyTypes = assembly.GetTypes().ToList<Type>();
assemblyTypes = assembly.GetTypes();
}
catch (ReflectionTypeLoadException rtle)
{
assemblyTypes = rtle.Types.Where(t => t != null)!.ToList<Type>();
assemblyTypes = rtle.Types ?? [];
}
// Loop through all types
foreach (Type type in assemblyTypes)
foreach (Type? type in assemblyTypes)
{
// If the type is invalid
if (type == null)
continue;
// If the type isn't a class
if (!type.IsClass)
continue;

View File

@@ -1,5 +1,4 @@
using System.IO;
using System.Linq;
using System.Text;
using SabreTools.IO.Writers;
using SabreTools.Models.ClrMamePro;
@@ -29,7 +28,7 @@ namespace SabreTools.Serialization.Serializers
return null;
byte[] bytes = new byte[stream.Length];
stream.Read(bytes, 0, bytes.Length);
int read = stream.Read(bytes, 0, bytes.Length);
return bytes;
}
@@ -141,7 +140,7 @@ namespace SabreTools.Serialization.Serializers
private static void WriteGames(GameBase?[]? games, ClrMameProWriter writer)
{
// If the games information is missing, we can't do anything
if (games == null || !games.Any())
if (games == null || games.Length == 0)
return;
// Loop through and write out the games

View File

@@ -1,5 +1,4 @@
using System.IO;
using System.Linq;
using System.Text;
using SabreTools.IO.Writers;
using SabreTools.Models.DosCenter;
@@ -66,7 +65,7 @@ namespace SabreTools.Serialization.Serializers
private static void WriteGames(Game[]? games, ClrMameProWriter writer)
{
// If the games information is missing, we can't do anything
if (games == null || !games.Any())
if (games == null || games.Length == 0)
return;
// Loop through and write out the games

View File

@@ -1,6 +1,5 @@
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using SabreTools.IO.Writers;
using SabreTools.Models.EverdriveSMDB;
@@ -36,7 +35,7 @@ namespace SabreTools.Serialization.Serializers
private static void WriteRows(Row[]? rows, SeparatedValueWriter writer)
{
// If the games information is missing, we can't do anything
if (rows == null || !rows.Any())
if (rows == null || rows.Length == 0)
return;
// Loop through and write out the rows
@@ -57,7 +56,7 @@ namespace SabreTools.Serialization.Serializers
if (row.Size != null)
rowArray.Add(row.Size);
writer.WriteValues(rowArray.ToArray());
writer.WriteValues([.. rowArray]);
writer.Flush();
}
}

View File

@@ -1,6 +1,5 @@
using System;
using System.IO;
using System.Linq;
using System.Text;
using SabreTools.Hashing;
using SabreTools.IO.Writers;
@@ -32,7 +31,7 @@ namespace SabreTools.Serialization.Serializers
return null;
byte[] bytes = new byte[stream.Length];
stream.Read(bytes, 0, bytes.Length);
int read = stream.Read(bytes, 0, bytes.Length);
return bytes;
}
@@ -101,10 +100,6 @@ namespace SabreTools.Serialization.Serializers
switch (hash)
{
case HashType.CRC32:
case HashType.CRC32_ISO:
case HashType.CRC32_Naive:
case HashType.CRC32_Optimized:
case HashType.CRC32_Parallel:
WriteSFV(obj.SFV, writer);
break;
case HashType.MD5:
@@ -142,7 +137,7 @@ namespace SabreTools.Serialization.Serializers
private static void WriteSFV(SFV[]? sfvs, SeparatedValueWriter writer)
{
// If the item information is missing, we can't do anything
if (sfvs == null || !sfvs.Any())
if (sfvs == null || sfvs.Length == 0)
return;
// Loop through and write out the items
@@ -166,7 +161,7 @@ namespace SabreTools.Serialization.Serializers
private static void WriteMD5(MD5[]? md5s, SeparatedValueWriter writer)
{
// If the item information is missing, we can't do anything
if (md5s == null || !md5s.Any())
if (md5s == null || md5s.Length == 0)
return;
// Loop through and write out the items
@@ -190,7 +185,7 @@ namespace SabreTools.Serialization.Serializers
private static void WriteSHA1(SHA1[]? sha1s, SeparatedValueWriter writer)
{
// If the item information is missing, we can't do anything
if (sha1s == null || !sha1s.Any())
if (sha1s == null || sha1s.Length == 0)
return;
// Loop through and write out the items
@@ -214,7 +209,7 @@ namespace SabreTools.Serialization.Serializers
private static void WriteSHA256(SHA256[]? sha256s, SeparatedValueWriter writer)
{
// If the item information is missing, we can't do anything
if (sha256s == null || !sha256s.Any())
if (sha256s == null || sha256s.Length == 0)
return;
// Loop through and write out the items
@@ -238,7 +233,7 @@ namespace SabreTools.Serialization.Serializers
private static void WriteSHA384(SHA384[]? sha384s, SeparatedValueWriter writer)
{
// If the item information is missing, we can't do anything
if (sha384s == null || !sha384s.Any())
if (sha384s == null || sha384s.Length == 0)
return;
// Loop through and write out the items
@@ -262,7 +257,7 @@ namespace SabreTools.Serialization.Serializers
private static void WriteSHA512(SHA512[]? sha512s, SeparatedValueWriter writer)
{
// If the item information is missing, we can't do anything
if (sha512s == null || !sha512s.Any())
if (sha512s == null || sha512s.Length == 0)
return;
// Loop through and write out the items
@@ -286,7 +281,7 @@ namespace SabreTools.Serialization.Serializers
private static void WriteSpamSum(SpamSum[]? spamsums, SeparatedValueWriter writer)
{
// If the item information is missing, we can't do anything
if (spamsums == null || !spamsums.Any())
if (spamsums == null || spamsums.Length == 0)
return;
// Loop through and write out the items

View File

@@ -1,6 +1,5 @@
using System;
using System.IO;
using System.Linq;
using System.Text;
namespace SabreTools.Serialization.Serializers
@@ -38,11 +37,11 @@ namespace SabreTools.Serialization.Serializers
return null;
if (obj.Footer == null || obj.Footer.Length != obj.FooterLength)
return null;
if (obj.RegionHashes == null || obj.RegionHashes.Length != obj.RegionCount || obj.RegionHashes.Any(h => h == null || h.Length != 16))
if (obj.RegionHashes == null || obj.RegionHashes.Length != obj.RegionCount || !Array.TrueForAll(obj.RegionHashes, h => h != null && h.Length == 16))
return null;
if (obj.FileKeys == null || obj.FileKeys.Length != obj.FileCount)
return null;
if (obj.FileHashes == null || obj.FileHashes.Length != obj.FileCount || obj.FileHashes.Any(h => h == null || h.Length != 16))
if (obj.FileHashes == null || obj.FileHashes.Length != obj.FileCount || !Array.TrueForAll(obj.FileHashes, h => h != null && h.Length == 16))
return null;
if (obj.PIC == null || obj.PIC.Length != 115)
return null;
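
The validation above moves from Enumerable.Any to Array.TrueForAll; because the two are duals, the predicate must be negated per De Morgan when converting, as in this sketch (assumes using System and System.Linq):

// Sketch only: the two forms agree once the predicate is inverted.
byte[][] hashes = { new byte[16], new byte[16] };
bool anyBadWithLinq = hashes.Any(h => h == null || h.Length != 16);
bool anyBadWithoutLinq = !Array.TrueForAll(hashes, h => h != null && h.Length == 16);
// anyBadWithLinq == anyBadWithoutLinq for every input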

View File

@@ -30,7 +30,7 @@ namespace SabreTools.Serialization.Serializers
return null;
byte[] bytes = new byte[stream.Length];
stream.Read(bytes, 0, bytes.Length);
int read = stream.Read(bytes, 0, bytes.Length);
return bytes;
}

View File

@@ -1,5 +1,4 @@
using System.IO;
using System.Linq;
using System.Text;
using SabreTools.Models.Listrom;
@@ -34,7 +33,7 @@ namespace SabreTools.Serialization.Serializers
private static void WriteSets(Set[]? sets, StreamWriter writer)
{
// If the games information is missing, we can't do anything
if (sets == null || !sets.Any())
if (sets == null || sets.Length == 0)
return;
// Loop through and write out the games
@@ -58,7 +57,7 @@ namespace SabreTools.Serialization.Serializers
if (!string.IsNullOrEmpty(set.Driver))
{
if (set.Row != null && set.Row.Any())
if (set.Row != null && set.Row.Length > 0)
{
writer.WriteLine($"ROMs required for driver \"{set.Driver}\".");
writer.WriteLine("Name Size Checksum");
@@ -78,7 +77,7 @@ namespace SabreTools.Serialization.Serializers
}
else if (!string.IsNullOrEmpty(set.Device))
{
if (set.Row != null && set.Row.Any())
if (set.Row != null && set.Row.Length > 0)
{
writer.WriteLine($"ROMs required for device \"{set.Device}\".");
writer.WriteLine("Name Size Checksum");

View File

@@ -1,5 +1,4 @@
using System.IO;
using System.Linq;
using System.Text;
using SabreTools.IO.Writers;
using SabreTools.Models.RomCenter;
@@ -124,7 +123,7 @@ namespace SabreTools.Serialization.Serializers
private static void WriteGames(Games? games, IniWriter writer)
{
// If the games information is missing, we can't do anything
if (games?.Rom == null || !games.Rom.Any())
if (games?.Rom == null || games.Rom.Length == 0)
return;
writer.WriteSection("GAMES");

View File

@@ -1,5 +1,4 @@
using System.IO;
using System.Linq;
using System.Text;
using SabreTools.IO.Writers;
using SabreTools.Models.SeparatedValue;
@@ -30,7 +29,7 @@ namespace SabreTools.Serialization.Serializers
return null;
byte[] bytes = new byte[stream.Length];
stream.Read(bytes, 0, bytes.Length);
int read = stream.Read(bytes, 0, bytes.Length);
return bytes;
}
@@ -166,7 +165,7 @@ namespace SabreTools.Serialization.Serializers
private static void WriteRows(Row[]? rows, SeparatedValueWriter writer, bool longHeader)
{
// If the games information is missing, we can't do anything
if (rows == null || !rows.Any())
if (rows == null || rows.Length == 0)
return;
// Loop through and write out the rows

View File

@@ -33,7 +33,7 @@ namespace SabreTools.Serialization.Serializers
return null;
byte[] bytes = new byte[stream.Length];
stream.Read(bytes, 0, bytes.Length);
int read = stream.Read(bytes, 0, bytes.Length);
return bytes;
}

View File

@@ -1,5 +1,4 @@
using System;
using System.Linq;
using System.Text;
namespace SabreTools.Serialization
@@ -147,7 +146,13 @@ namespace SabreTools.Serialization
/// </summary>
public static StringBuilder AppendLine(this StringBuilder sb, char[]? value, string prefixString)
{
string valueString = (value == null ? "[NULL]" : string.Join(", ", value.Select(c => c.ToString()).ToArray()));
string valueString = "[NULL]";
if (value != null)
{
var valueArr = Array.ConvertAll(value, c => c.ToString());
valueString = string.Join(", ", valueArr);
}
return sb.AppendLine($"{prefixString}: {valueString}");
}
@@ -156,7 +161,13 @@ namespace SabreTools.Serialization
/// </summary>
public static StringBuilder AppendLine(this StringBuilder sb, short[]? value, string prefixString)
{
string valueString = (value == null ? "[NULL]" : string.Join(", ", value.Select(s => s.ToString()).ToArray()));
string valueString = "[NULL]";
if (value != null)
{
var valueArr = Array.ConvertAll(value, s => s.ToString());
valueString = string.Join(", ", valueArr);
}
return sb.AppendLine($"{prefixString}: {valueString}");
}
@@ -165,7 +176,13 @@ namespace SabreTools.Serialization
/// </summary>
public static StringBuilder AppendLine(this StringBuilder sb, ushort[]? value, string prefixString)
{
string valueString = (value == null ? "[NULL]" : string.Join(", ", value.Select(u => u.ToString()).ToArray()));
string valueString = "[NULL]";
if (value != null)
{
var valueArr = Array.ConvertAll(value, u => u.ToString());
valueString = string.Join(", ", valueArr);
}
return sb.AppendLine($"{prefixString}: {valueString}");
}
@@ -174,7 +191,13 @@ namespace SabreTools.Serialization
/// </summary>
public static StringBuilder AppendLine(this StringBuilder sb, int[]? value, string prefixString)
{
string valueString = (value == null ? "[NULL]" : string.Join(", ", value.Select(i => i.ToString()).ToArray()));
string valueString = "[NULL]";
if (value != null)
{
var valueArr = Array.ConvertAll(value, i => i.ToString());
valueString = string.Join(", ", valueArr);
}
return sb.AppendLine($"{prefixString}: {valueString}");
}
@@ -183,7 +206,13 @@ namespace SabreTools.Serialization
/// </summary>
public static StringBuilder AppendLine(this StringBuilder sb, uint[]? value, string prefixString)
{
string valueString = (value == null ? "[NULL]" : string.Join(", ", value.Select(u => u.ToString()).ToArray()));
string valueString = "[NULL]";
if (value != null)
{
var valueArr = Array.ConvertAll(value, u => u.ToString());
valueString = string.Join(", ", valueArr);
}
return sb.AppendLine($"{prefixString}: {valueString}");
}
@@ -192,7 +221,13 @@ namespace SabreTools.Serialization
/// </summary>
public static StringBuilder AppendLine(this StringBuilder sb, long[]? value, string prefixString)
{
string valueString = (value == null ? "[NULL]" : string.Join(", ", value.Select(l => l.ToString()).ToArray()));
string valueString = "[NULL]";
if (value != null)
{
var valueArr = Array.ConvertAll(value, l => l.ToString());
valueString = string.Join(", ", valueArr);
}
return sb.AppendLine($"{prefixString}: {valueString}");
}
@@ -201,7 +236,28 @@ namespace SabreTools.Serialization
/// </summary>
public static StringBuilder AppendLine(this StringBuilder sb, ulong[]? value, string prefixString)
{
string valueString = (value == null ? "[NULL]" : string.Join(", ", value.Select(u => u.ToString()).ToArray()));
string valueString = "[NULL]";
if (value != null)
{
var valueArr = Array.ConvertAll(value, u => u.ToString());
valueString = string.Join(", ", valueArr);
}
return sb.AppendLine($"{prefixString}: {valueString}");
}
/// <summary>
/// Append a line containing a Guid[] value to a StringBuilder
/// </summary>
public static StringBuilder AppendLine(this StringBuilder sb, Guid[]? value, string prefixString)
{
string valueString = "[NULL]";
if (value != null)
{
var valueArr = Array.ConvertAll(value, g => g.ToString());
valueString = string.Join(", ", valueArr);
}
return sb.AppendLine($"{prefixString}: {valueString}");
}
}
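Every overload above follows the same shape: convert the array to strings without LINQ, then join. Array.ConvertAll replaces Select(...).ToArray() and conveniently produces the string[] that the net2.0 string.Join overload requires. A condensed sketch using a single generic helper; AppendArrayLine is an illustrative name, while the real extensions keep one overload per element type:

using System;
using System.Text;

internal static class AppendLineExample
{
    // Array.ConvertAll(value, ...) replaces value.Select(...).ToArray()
    private static StringBuilder AppendArrayLine<T>(this StringBuilder sb, T[]? value, string prefixString)
    {
        string valueString = "[NULL]";
        if (value != null)
        {
            var valueArr = Array.ConvertAll(value, x => $"{x}");
            valueString = string.Join(", ", valueArr);
        }
        return sb.AppendLine($"{prefixString}: {valueString}");
    }

    private static void Main()
    {
        var sb = new StringBuilder();
        sb.AppendArrayLine(new[] { 1, 2, 3 }, "Values"); // Values: 1, 2, 3
        sb.AppendArrayLine<Guid>(null, "GUIDs");         // GUIDs: [NULL]
        Console.Write(sb);
    }
}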

View File

@@ -18,12 +18,12 @@ namespace SabreTools.Serialization.Wrappers
/// <summary>
/// Normal sector size in bytes
/// </summary>
public long SectorSize => (long)Math.Pow(2, this.Model.Header?.SectorShift ?? 0);
public long SectorSize => (long)Math.Pow(2, Model.Header?.SectorShift ?? 0);
/// <summary>
/// Mini sector size in bytes
/// </summary>
public long MiniSectorSize => (long)Math.Pow(2, this.Model.Header?.MiniSectorShift ?? 0);
public long MiniSectorSize => (long)Math.Pow(2, Model.Header?.MiniSectorShift ?? 0);
#endregion
@@ -101,7 +101,7 @@ namespace SabreTools.Serialization.Wrappers
public List<Models.CFB.SectorNumber?>? GetFATSectorChain(Models.CFB.SectorNumber? startingSector)
{
// If we have an invalid sector
if (startingSector == null || startingSector < 0 || this.Model.FATSectorNumbers == null || (long)startingSector >= this.Model.FATSectorNumbers.Length)
if (startingSector == null || startingSector < 0 || Model.FATSectorNumbers == null || (long)startingSector >= Model.FATSectorNumbers.Length)
return null;
// Setup the returned list
@@ -114,10 +114,10 @@ namespace SabreTools.Serialization.Wrappers
break;
// Get the next sector from the lookup table
var nextSector = this.Model.FATSectorNumbers[(uint)lastSector!.Value];
var nextSector = Model.FATSectorNumbers[(uint)lastSector!.Value];
// If we have an end of chain or free sector
if (nextSector == SabreTools.Models.CFB.SectorNumber.ENDOFCHAIN || nextSector == SabreTools.Models.CFB.SectorNumber.FREESECT)
if (nextSector == Models.CFB.SectorNumber.ENDOFCHAIN || nextSector == Models.CFB.SectorNumber.FREESECT)
break;
// Add the next sector to the list and replace the last sector
@@ -158,7 +158,7 @@ namespace SabreTools.Serialization.Wrappers
data.AddRange(sectorData);
}
return data.ToArray();
return [.. data];
}
/// <summary>
@@ -188,7 +188,7 @@ namespace SabreTools.Serialization.Wrappers
public List<Models.CFB.SectorNumber?>? GetMiniFATSectorChain(Models.CFB.SectorNumber? startingSector)
{
// If we have an invalid sector
if (startingSector == null || startingSector < 0 || this.Model.MiniFATSectorNumbers == null || (long)startingSector >= this.Model.MiniFATSectorNumbers.Length)
if (startingSector == null || startingSector < 0 || Model.MiniFATSectorNumbers == null || (long)startingSector >= Model.MiniFATSectorNumbers.Length)
return null;
// Setup the returned list
@@ -201,10 +201,10 @@ namespace SabreTools.Serialization.Wrappers
break;
// Get the next sector from the lookup table
var nextSector = this.Model.MiniFATSectorNumbers[(uint)lastSector!.Value];
var nextSector = Model.MiniFATSectorNumbers[(uint)lastSector!.Value];
// If we have an end of chain or free sector
if (nextSector == SabreTools.Models.CFB.SectorNumber.ENDOFCHAIN || nextSector == SabreTools.Models.CFB.SectorNumber.FREESECT)
if (nextSector == Models.CFB.SectorNumber.ENDOFCHAIN || nextSector == Models.CFB.SectorNumber.FREESECT)
break;
// Add the next sector to the list and replace the last sector
@@ -245,7 +245,7 @@ namespace SabreTools.Serialization.Wrappers
data.AddRange(sectorData);
}
return data.ToArray();
return [.. data];
}
/// <summary>
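In the compound file (CFB) wrapper above, a sector's byte size is two raised to the header's shift value, and a stream is recovered by following the FAT chain until the ENDOFCHAIN marker. A standalone sketch of the size arithmetic and the chain walk over a plain array; the layout here is illustrative rather than the wrapper's API:

using System;
using System.Collections.Generic;

internal static class FatChainExample
{
    private const uint EndOfChain = 0xFFFFFFFE; // CFB ENDOFCHAIN sentinel

    // Sector size in bytes = 2^SectorShift (9 => 512, 12 => 4096)
    private static long SectorSize(ushort sectorShift) => 1L << sectorShift;

    // Follow next-sector links until end of chain (bounded to avoid cycles)
    private static List<uint> WalkChain(uint[] fat, uint start)
    {
        var chain = new List<uint>();
        uint current = start;
        while (current != EndOfChain && current < fat.Length && chain.Count <= fat.Length)
        {
            chain.Add(current);
            current = fat[current];
        }
        return chain;
    }

    private static void Main()
    {
        Console.WriteLine(SectorSize(9)); // 512
        uint[] fat = { 1, 2, EndOfChain, EndOfChain };
        Console.WriteLine(string.Join(" -> ", WalkChain(fat, 0))); // 0 -> 1 -> 2
    }
}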

View File

@@ -0,0 +1,121 @@
using System.IO;
using SabreTools.Models.CHD;
namespace SabreTools.Serialization.Wrappers
{
public class CHD : WrapperBase<Header>
{
#region Descriptive Properties
/// <inheritdoc/>
public override string DescriptionString => "MAME Compressed Hunks of Data";
#endregion
#region Extension Properties
/// <summary>
/// Internal MD5 hash, if available
/// </summary>
public byte[]? MD5
{
get
{
return Model switch
{
HeaderV1 v1 => v1.MD5,
HeaderV2 v2 => v2.MD5,
HeaderV3 v3 => v3.MD5,
HeaderV4 v4 => null,
HeaderV5 v5 => null,
_ => null,
};
}
}
/// <summary>
/// Internal SHA1 hash, if available
/// </summary>
public byte[]? SHA1
{
get
{
return Model switch
{
HeaderV1 v1 => null,
HeaderV2 v2 => null,
HeaderV3 v3 => v3.SHA1,
HeaderV4 v4 => v4.SHA1,
HeaderV5 v5 => v5.SHA1,
_ => null,
};
}
}
#endregion
#region Constructors
/// <inheritdoc/>
public CHD(Header? model, byte[]? data, int offset)
: base(model, data, offset)
{
// All logic is handled by the base class
}
/// <inheritdoc/>
public CHD(Header? model, Stream? data)
: base(model, data)
{
// All logic is handled by the base class
}
/// <summary>
/// Create a CHD header from a byte array and offset
/// </summary>
/// <param name="data">Byte array representing the archive</param>
/// <param name="offset">Offset within the array to parse</param>
/// <returns>A CHD header wrapper on success, null on failure</returns>
public static CHD? Create(byte[]? data, int offset)
{
// If the data is invalid
if (data == null)
return null;
// If the offset is out of bounds
if (offset < 0 || offset >= data.Length)
return null;
// Create a memory stream and use that
var dataStream = new MemoryStream(data, offset, data.Length - offset);
return Create(dataStream);
}
/// <summary>
/// Create a CHD header from a Stream
/// </summary>
/// <param name="data">Stream representing the archive</param>
/// <returns>A CHD header wrapper on success, null on failure</returns>
public static CHD? Create(Stream? data)
{
// If the data is invalid
if (data == null || data.Length == 0 || !data.CanSeek || !data.CanRead)
return null;
var header = Deserializers.CHD.DeserializeStream(data);
if (header == null)
return null;
try
{
return new CHD(header, data);
}
catch
{
return null;
}
}
#endregion
}
}
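A minimal usage sketch for the new CHD wrapper, assuming the caller supplies a path to a .chd file on the command line. Which internal hash is populated depends on the header version (MD5 for v1 through v3, SHA-1 for v3 through v5), so both properties are checked:

using System;
using System.IO;
using SabreTools.Serialization.Wrappers;

internal static class ChdExample
{
    private static void Main(string[] args)
    {
        using Stream stream = File.OpenRead(args[0]);

        var chd = CHD.Create(stream);
        if (chd == null)
        {
            Console.WriteLine("Not a valid CHD header");
            return;
        }

        // Either hash may be null depending on the header version
        Console.WriteLine($"MD5:  {Format(chd.MD5)}");
        Console.WriteLine($"SHA1: {Format(chd.SHA1)}");
    }

    private static string Format(byte[]? hash)
        => hash == null ? "[not present]" : BitConverter.ToString(hash).Replace("-", string.Empty);
}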

View File

@@ -1,4 +1,5 @@
using System.IO;
using SabreTools.Models.N3DS;
namespace SabreTools.Serialization.Wrappers
{
@@ -74,5 +75,36 @@ namespace SabreTools.Serialization.Wrappers
}
#endregion
// TODO: Hook these up for external use
#region Currently Unused Extensions
#region Ticket
/// <summary>
/// Denotes whether the ticket represents a demo
/// </summary>
public static bool IsDemo(Ticket? ticket)
{
if (ticket?.Limits == null || ticket.Limits.Length == 0)
return false;
return ticket.Limits[0] == 0x0004;
}
/// <summary>
/// Gets the maximum play count for a demo
/// </summary>
public static uint PlayCount(Ticket? ticket)
{
if (ticket?.Limits == null || ticket.Limits.Length < 2)
return 0;
return ticket.Limits[1];
}
#endregion
#endregion
}
}

View File

@@ -26,16 +26,16 @@ namespace SabreTools.Serialization.Wrappers
return _files;
// If we don't have a required property
if (this.Model.DirectoryEntries == null || this.Model.DirectoryMapEntries == null || this.Model.BlockEntries == null)
if (Model.DirectoryEntries == null || Model.DirectoryMapEntries == null || Model.BlockEntries == null)
return null;
// Otherwise, scan and build the files
var files = new List<FileInfo>();
for (int i = 0; i < this.Model.DirectoryEntries.Length; i++)
for (int i = 0; i < Model.DirectoryEntries.Length; i++)
{
// Get the directory entry
var directoryEntry = this.Model.DirectoryEntries[i];
var directoryMapEntry = this.Model.DirectoryMapEntries[i];
var directoryEntry = Model.DirectoryEntries[i];
var directoryMapEntry = Model.DirectoryMapEntries[i];
if (directoryEntry == null || directoryMapEntry == null)
continue;
@@ -57,26 +57,26 @@ namespace SabreTools.Serialization.Wrappers
Encrypted = directoryEntry.DirectoryFlags.HasFlag(Models.GCF.HL_GCF_FLAG.HL_GCF_FLAG_ENCRYPTED),
#endif
};
var pathParts = new List<string> { this.Model.DirectoryNames![directoryEntry.NameOffset] ?? string.Empty };
var pathParts = new List<string> { Model.DirectoryNames![directoryEntry.NameOffset] ?? string.Empty };
var blockEntries = new List<Models.GCF.BlockEntry?>();
// Traverse the parent tree
uint index = directoryEntry.ParentIndex;
while (index != 0xFFFFFFFF)
{
var parentDirectoryEntry = this.Model.DirectoryEntries[index];
var parentDirectoryEntry = Model.DirectoryEntries[index];
if (parentDirectoryEntry == null)
break;
pathParts.Add(this.Model.DirectoryNames![parentDirectoryEntry.NameOffset] ?? string.Empty);
pathParts.Add(Model.DirectoryNames![parentDirectoryEntry.NameOffset] ?? string.Empty);
index = parentDirectoryEntry.ParentIndex;
}
// Traverse the block entries
index = directoryMapEntry.FirstBlockIndex;
while (index != this.Model.DataBlockHeader?.BlockCount)
while (index != Model.DataBlockHeader?.BlockCount)
{
var nextBlock = this.Model.BlockEntries[index];
var nextBlock = Model.BlockEntries[index];
if (nextBlock == null)
break;
@@ -89,7 +89,7 @@ namespace SabreTools.Serialization.Wrappers
// Build the remaining file info
#if NET20 || NET35
var pathArray = pathParts.ToArray();
string[] pathArray = [.. pathParts];
string tempPath = string.Empty;
if (pathArray.Length == 0 || pathArray.Length == 1)
@@ -108,7 +108,7 @@ namespace SabreTools.Serialization.Wrappers
}
fileInfo.Path = tempPath;
#else
fileInfo.Path = Path.Combine(pathParts.ToArray());
fileInfo.Path = Path.Combine([.. pathParts]);
#endif
fileInfo.BlockEntries = [.. blockEntries];
@@ -134,14 +134,14 @@ namespace SabreTools.Serialization.Wrappers
return _dataBlockOffsets;
// If we don't have a block count, offset, or size
if (this.Model.DataBlockHeader?.BlockCount == null || this.Model.DataBlockHeader?.FirstBlockOffset == null || this.Model.DataBlockHeader?.BlockSize == null)
if (Model.DataBlockHeader?.BlockCount == null || Model.DataBlockHeader?.FirstBlockOffset == null || Model.DataBlockHeader?.BlockSize == null)
return null;
// Otherwise, build the data block set
_dataBlockOffsets = new long[this.Model.DataBlockHeader.BlockCount];
for (int i = 0; i < this.Model.DataBlockHeader.BlockCount; i++)
_dataBlockOffsets = new long[Model.DataBlockHeader.BlockCount];
for (int i = 0; i < Model.DataBlockHeader.BlockCount; i++)
{
long dataBlockOffset = this.Model.DataBlockHeader.FirstBlockOffset + (i * this.Model.DataBlockHeader.BlockSize);
long dataBlockOffset = Model.DataBlockHeader.FirstBlockOffset + (i * Model.DataBlockHeader.BlockSize);
_dataBlockOffsets[i] = dataBlockOffset;
}
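The cached offsets above come straight from the data block header: block i starts at FirstBlockOffset + i * BlockSize. A standalone sketch of that arithmetic with hypothetical header values:

using System;

internal static class GcfBlockOffsetExample
{
    // offset_i = firstBlockOffset + i * blockSize
    private static long[] BuildBlockOffsets(uint blockCount, uint firstBlockOffset, uint blockSize)
    {
        var offsets = new long[blockCount];
        for (int i = 0; i < blockCount; i++)
            offsets[i] = firstBlockOffset + (long)i * blockSize;
        return offsets;
    }

    private static void Main()
    {
        long[] offsets = BuildBlockOffsets(blockCount: 4, firstBlockOffset: 0x2000, blockSize: 0x2000);
        Console.WriteLine(string.Join(", ", offsets)); // 8192, 16384, 24576, 32768
    }
}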

View File

@@ -1,5 +1,5 @@
using System;
using System.IO;
using System.Linq;
using SabreTools.Models.InstallShieldCabinet;
namespace SabreTools.Serialization.Wrappers
@@ -53,7 +53,7 @@ namespace SabreTools.Serialization.Wrappers
{
get
{
uint majorVersion = this.Model.CommonHeader?.Version ?? 0;
uint majorVersion = Model.CommonHeader?.Version ?? 0;
if (majorVersion >> 24 == 1)
{
majorVersion = (majorVersion >> 12) & 0x0F;
@@ -177,7 +177,7 @@ namespace SabreTools.Serialization.Wrappers
/// Get the directory index for the given file index
/// </summary>
/// <returns>Directory index if found, UInt32.MaxValue on error</returns>
public uint GetFileDirectoryIndex(int index)
public uint GetDirectoryIndexFromFile(int index)
{
FileDescriptor? descriptor = GetFileDescriptor(index);
if (descriptor != null)
@@ -289,26 +289,46 @@ namespace SabreTools.Serialization.Wrappers
if (Model.FileGroups == null)
return null;
return Model.FileGroups.FirstOrDefault(fg => fg != null && string.Equals(fg.Name, name));
return Array.Find(Model.FileGroups, fg => fg != null && string.Equals(fg.Name, name));
}
/// <summary>
/// Get the file group for the given file index, if possible
/// </summary>
public FileGroup? GetFileGroupFromFile(int index)
{
if (Model.FileGroups == null)
return null;
if (index < 0 || index >= FileCount)
return null;
for (int i = 0; i < FileGroupCount; i++)
{
var fileGroup = GetFileGroup(i);
if (fileGroup == null)
continue;
if (fileGroup.FirstFile > index || fileGroup.LastFile < index)
continue;
return fileGroup;
}
return null;
}
/// <summary>
/// Get the file group name at a given index, if possible
/// </summary>
public string? GetFileGroupName(int index)
{
if (Model.FileGroups == null)
return null;
=> GetFileGroup(index)?.Name;
if (index < 0 || index >= Model.FileGroups.Length)
return null;
var fileGroup = Model.FileGroups[index];
if (fileGroup == null)
return null;
return fileGroup.Name;
}
/// <summary>
/// Get the file group name at a given file index, if possible
/// </summary>
public string? GetFileGroupNameFromFile(int index)
=> GetFileGroupFromFile(index)?.Name;
#endregion
}
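GetFileGroupFromFile above selects the group whose FirstFile..LastFile range contains the file index. A compact sketch of the same range test with illustrative data; the wrapper works against Model.FileGroups rather than a local record type:

using System;

internal static class FileGroupExample
{
    private sealed record Group(string Name, uint FirstFile, uint LastFile);

    // Return the first group whose [FirstFile, LastFile] range contains the index
    private static Group? FindGroupForFile(Group[] groups, int fileIndex)
        => Array.Find(groups, g => g.FirstFile <= fileIndex && g.LastFile >= fileIndex);

    private static void Main()
    {
        var groups = new[]
        {
            new Group("Program", 0, 9),
            new Group("Help", 10, 14),
        };

        Console.WriteLine(FindGroupForFile(groups, 12)?.Name ?? "[none]"); // Help
        Console.WriteLine(FindGroupForFile(groups, 99)?.Name ?? "[none]"); // [none]
    }
}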

View File

@@ -86,13 +86,13 @@ namespace SabreTools.Serialization.Wrappers
/// </summary>
private static uint ChecksumData(byte[] data)
{
uint[] C = new uint[4]
{
uint[] C =
[
S(data, 1, data.Length),
S(data, 2, data.Length),
S(data, 3, data.Length),
S(data, 4, data.Length),
};
];
return C[0] ^ C[1] ^ C[2] ^ C[3];
}
@@ -124,11 +124,11 @@ namespace SabreTools.Serialization.Wrappers
public DateTime? GetDateTime(int fileIndex)
{
// If we have an invalid file index
if (fileIndex < 0 || this.Model.Files == null || fileIndex >= this.Model.Files.Length)
if (fileIndex < 0 || Model.Files == null || fileIndex >= Model.Files.Length)
return null;
// Get the file header
var file = this.Model.Files[fileIndex];
var file = Model.Files[fileIndex];
if (file == null)
return null;

View File

@@ -1,8 +1,11 @@
using System;
using System.IO;
using SabreTools.Models.N3DS;
using static SabreTools.Models.N3DS.Constants;
namespace SabreTools.Serialization.Wrappers
{
public class N3DS : WrapperBase<Models.N3DS.Cart>
public class N3DS : WrapperBase<Cart>
{
#region Descriptive Properties
@@ -11,17 +14,209 @@ namespace SabreTools.Serialization.Wrappers
#endregion
#region Extension Properties
/// <summary>
/// Backup header
/// </summary>
public NCCHHeader? BackupHeader => Model.CardInfoHeader?.InitialData?.BackupHeader;
/// <summary>
/// ExeFS headers
/// </summary>
public ExeFSHeader?[] ExeFSHeaders => Model.ExeFSHeaders ?? [];
/// <summary>
/// Media unit size in bytes
/// </summary>
public uint MediaUnitSize
{
get
{
if (Model.Header?.PartitionFlags == null)
return default;
return (uint)(0x200 * Math.Pow(2, Model.Header.PartitionFlags[(int)NCSDFlags.MediaUnitSize]));
}
}
/// <summary>
/// Partitions data table
/// </summary>
public NCCHHeader?[] Partitions => Model.Partitions ?? [];
/// <summary>
/// Partitions header table
/// </summary>
public PartitionTableEntry?[] PartitionsTable => Model.Header?.PartitionsTable ?? [];
#region Named Partition Entries
/// <summary>
/// Partition table entry for Executable Content (CXI)
/// </summary>
public PartitionTableEntry? ExecutableContentEntry
{
get
{
if (PartitionsTable == null || PartitionsTable.Length == 0)
return null;
return PartitionsTable[0];
}
}
/// <summary>
/// Partition table entry for E-Manual (CFA)
/// </summary>
public PartitionTableEntry? EManualEntry
{
get
{
if (PartitionsTable == null || PartitionsTable.Length == 0)
return null;
return PartitionsTable[1];
}
}
/// <summary>
/// Partition table entry for Download Play Child container (CFA)
/// </summary>
public PartitionTableEntry? DownloadPlayChildContainerEntry
{
get
{
if (PartitionsTable == null || PartitionsTable.Length == 0)
return null;
return PartitionsTable[2];
}
}
/// <summary>
/// Partition table entry for New3DS Update Data (CFA)
/// </summary>
public PartitionTableEntry? New3DSUpdateDataEntry
{
get
{
if (PartitionsTable == null || PartitionsTable.Length == 0)
return null;
return PartitionsTable[6];
}
}
/// <summary>
/// Partition table entry for Update Data (CFA)
/// </summary>
public PartitionTableEntry? UpdateDataEntry
{
get
{
if (PartitionsTable == null || PartitionsTable.Length == 0)
return null;
return PartitionsTable[7];
}
}
#endregion
/// <summary>
/// Partitions flags
/// </summary>
public byte[] PartitionFlags => Model.Header?.PartitionFlags ?? [];
#region Partition Flags
/// <summary>
/// Backup Write Wait Time (the time to wait before writing the save to backup after the card is
/// recognized, 0-255 seconds). NATIVE_FIRM loads this flag from the gamecard NCSD header starting with 6.0.0-11.
/// </summary>
public byte BackupWriteWaitTime
{
get
{
if (PartitionFlags == null || PartitionFlags.Length == 0)
return default;
return PartitionFlags[(int)NCSDFlags.BackupWriteWaitTime];
}
}
/// <summary>
/// Media Card Device (1 = NOR Flash, 2 = None, 3 = BT) (Only SDK 2.X)
/// </summary>
public MediaCardDeviceType MediaCardDevice2X
{
get
{
if (PartitionFlags == null || PartitionFlags.Length == 0)
return default;
return (MediaCardDeviceType)PartitionFlags[(int)NCSDFlags.MediaCardDevice2X];
}
}
/// <summary>
/// Media Card Device (1 = NOR Flash, 2 = None, 3 = BT) (SDK 3.X+)
/// </summary>
public MediaCardDeviceType MediaCardDevice3X
{
get
{
if (PartitionFlags == null || PartitionFlags.Length == 0)
return default;
return (MediaCardDeviceType)PartitionFlags[(int)NCSDFlags.MediaCardDevice3X];
}
}
/// <summary>
/// Media Platform Index (1 = CTR)
/// </summary>
public MediaPlatformIndex MediaPlatformIndex
{
get
{
if (PartitionFlags == null || PartitionFlags.Length == 0)
return default;
return (MediaPlatformIndex)PartitionFlags[(int)NCSDFlags.MediaPlatformIndex];
}
}
/// <summary>
/// Media Type Index (0 = Inner Device, 1 = Card1, 2 = Card2, 3 = Extended Device)
/// </summary>
public MediaTypeIndex MediaTypeIndex
{
get
{
if (PartitionFlags == null || PartitionFlags.Length == 0)
return default;
return (MediaTypeIndex)PartitionFlags[(int)NCSDFlags.MediaTypeIndex];
}
}
#endregion
#endregion
#region Constructors
/// <inheritdoc/>
public N3DS(Models.N3DS.Cart? model, byte[]? data, int offset)
public N3DS(Cart? model, byte[]? data, int offset)
: base(model, data, offset)
{
// All logic is handled by the base class
}
/// <inheritdoc/>
public N3DS(Models.N3DS.Cart? model, Stream? data)
public N3DS(Cart? model, Stream? data)
: base(model, data)
{
// All logic is handled by the base class
@@ -74,5 +269,408 @@ namespace SabreTools.Serialization.Wrappers
}
#endregion
#region Data
/// <summary>
/// Get the bit masks for a partition
/// </summary>
public BitMasks GetBitMasks(int index)
{
if (Partitions == null)
return 0;
if (index < 0 || index >= Partitions.Length)
return 0;
var partition = Partitions[index];
if (partition?.Flags == null)
return 0;
return partition.Flags.BitMasks;
}
/// <summary>
/// Get the crypto method for a partition
/// </summary>
public CryptoMethod GetCryptoMethod(int index)
{
if (Partitions == null)
return 0;
if (index < 0 || index >= Partitions.Length)
return 0;
var partition = Partitions[index];
if (partition?.Flags == null)
return 0;
return partition.Flags.CryptoMethod;
}
/// <summary>
/// Determines if a file header represents a CODE block
/// </summary>
public bool IsCodeBinary(int fsIndex, int headerIndex)
{
if (ExeFSHeaders == null)
return false;
if (fsIndex < 0 || fsIndex >= ExeFSHeaders.Length)
return false;
var fsHeader = ExeFSHeaders[fsIndex];
if (fsHeader?.FileHeaders == null)
return false;
if (headerIndex < 0 || headerIndex >= fsHeader.FileHeaders.Length)
return false;
var fileHeader = fsHeader.FileHeaders[headerIndex];
if (fileHeader == null)
return false;
return fileHeader.FileName == ".code";
}
/// <summary>
/// Get if the NoCrypto bit is set
/// </summary>
public bool PossiblyDecrypted(int index)
{
var bitMasks = GetBitMasks(index);
#if NET20 || NET35
return (bitMasks & BitMasks.NoCrypto) != 0;
#else
return bitMasks.HasFlag(BitMasks.NoCrypto);
#endif
}
#endregion
#region Encryption
/// <summary>
/// Get the initial value for the ExeFS counter
/// </summary>
public byte[] ExeFSIV(int index)
{
if (Partitions == null)
return [];
if (index < 0 || index >= Partitions.Length)
return [];
var header = Partitions[index];
if (header == null || header.MagicID != NCCHMagicNumber)
return [];
byte[] partitionIdBytes = BitConverter.GetBytes(header.PartitionId);
Array.Reverse(partitionIdBytes);
return [.. partitionIdBytes, .. ExefsCounter];
}
/// <summary>
/// Get the initial value for the plain counter
/// </summary>
public byte[] PlainIV(int index)
{
if (Partitions == null)
return [];
if (index < 0 || index >= Partitions.Length)
return [];
var header = Partitions[index];
if (header == null || header.MagicID != NCCHMagicNumber)
return [];
byte[] partitionIdBytes = BitConverter.GetBytes(header.PartitionId);
Array.Reverse(partitionIdBytes);
return [.. partitionIdBytes, .. PlainCounter];
}
/// <summary>
/// Get the initial value for the RomFS counter
/// </summary>
public byte[] RomFSIV(int index)
{
if (Partitions == null)
return [];
if (index < 0 || index >= Partitions.Length)
return [];
var header = Partitions[index];
if (header == null || header.MagicID != NCCHMagicNumber)
return [];
byte[] partitionIdBytes = BitConverter.GetBytes(header.PartitionId);
Array.Reverse(partitionIdBytes);
return [.. partitionIdBytes, .. RomfsCounter];
}
#endregion
#region Offsets
/// <summary>
/// Get the offset of a partition ExeFS
/// </summary>
/// <returns>Offset to the ExeFS of the partition, 0 on error</returns>
public uint GetExeFSOffset(int index)
{
// No partitions means no offset is available
if (PartitionsTable == null || Partitions == null)
return 0;
if (index < 0 || index >= Partitions.Length)
return 0;
// Invalid partition table entry means no offset is available
var entry = PartitionsTable[index];
if (entry == null)
return 0;
// Invalid partition means no offset is available
var header = Partitions[index];
if (header == null || header.MagicID != NCCHMagicNumber)
return 0;
// If the offset is 0, return 0
uint exeFsOffsetMU = header.ExeFSOffsetInMediaUnits;
if (exeFsOffsetMU == 0)
return 0;
// Return the adjusted offset
uint partitionOffsetMU = entry.Offset;
return (partitionOffsetMU + exeFsOffsetMU) * MediaUnitSize;
}
/// <summary>
/// Get the offset of a partition logo region
/// </summary>
/// <returns>Offset to the logo region of the partition, 0 on error</returns>
public uint GetLogoRegionOffset(int index)
{
// No partitions means no offset is available
if (PartitionsTable == null || Partitions == null)
return 0;
if (index < 0 || index >= Partitions.Length)
return 0;
// Invalid partition table entry means no offset is available
var entry = PartitionsTable[index];
if (entry == null)
return 0;
// Invalid partition means no offset is available
var header = Partitions[index];
if (header == null || header.MagicID != NCCHMagicNumber)
return 0;
// If the offset is 0, return 0
uint logoOffsetMU = header.LogoRegionOffsetInMediaUnits;
if (logoOffsetMU == 0)
return 0;
// Return the adjusted offset
uint partitionOffsetMU = entry.Offset;
return (partitionOffsetMU + logoOffsetMU) * MediaUnitSize;
}
/// <summary>
/// Get the offset of a partition
/// </summary>
/// <returns>Offset to the partition, 0 on error</returns>
public uint GetPartitionOffset(int index)
{
// No partition table means no offset is available
if (PartitionsTable == null)
return 0;
if (index < 0 || index >= PartitionsTable.Length)
return 0;
// Invalid partition table entry means no offset is available
var entry = PartitionsTable[index];
if (entry == null)
return 0;
// If the offset is 0, return 0
uint partitionOffsetMU = entry.Offset;
if (entry.Offset == 0)
return 0;
// Return the adjusted offset
return partitionOffsetMU * MediaUnitSize;
}
/// <summary>
/// Get the offset of a partition plain region
/// </summary>
/// <returns>Offset to the plain region of the partition, 0 on error</returns>
public uint GetPlainRegionOffset(int index)
{
// No partitions means no offset is available
if (PartitionsTable == null || Partitions == null)
return 0;
if (index < 0 || index >= Partitions.Length)
return 0;
// Invalid partition table entry means no offset is available
var entry = PartitionsTable[index];
if (entry == null)
return 0;
// Invalid partition means no offset is available
var header = Partitions[index];
if (header == null || header.MagicID != NCCHMagicNumber)
return 0;
// If the offset is 0, return 0
uint prOffsetMU = header.PlainRegionOffsetInMediaUnits;
if (prOffsetMU == 0)
return 0;
// Return the adjusted offset
uint partitionOffsetMU = entry.Offset;
return (partitionOffsetMU + prOffsetMU) * MediaUnitSize;
}
/// <summary>
/// Get the offset of a partition RomFS
/// </summary>
/// <returns>Offset to the RomFS of the partition, 0 on error</returns>
public uint GetRomFSOffset(int index)
{
// No partitions means no offset is available
if (PartitionsTable == null || Partitions == null)
return 0;
if (index < 0 || index >= Partitions.Length)
return 0;
// Invalid partition table entry means no offset is available
var entry = PartitionsTable[index];
if (entry == null)
return 0;
// Invalid partition means no offset is available
var header = Partitions[index];
if (header == null || header.MagicID != NCCHMagicNumber)
return 0;
// If the offset is 0, return 0
uint romFsOffsetMU = header.RomFSOffsetInMediaUnits;
if (romFsOffsetMU == 0)
return 0;
// Return the adjusted offset
uint partitionOffsetMU = entry.Offset;
return (partitionOffsetMU + romFsOffsetMU) * MediaUnitSize;
}
#endregion
#region Sizes
/// <summary>
/// Get the size of a partition ExeFS
/// </summary>
/// <returns>Size of the partition ExeFS in bytes, 0 on error</returns>
public uint GetExeFSSize(int index)
{
// Empty partitions array means no size is available
if (Partitions == null)
return 0;
if (index < 0 || index >= Partitions.Length)
return 0;
// Invalid partition header means no size is available
var header = Partitions[index];
if (header == null)
return 0;
// Return the adjusted size
return header.ExeFSSizeInMediaUnits * MediaUnitSize;
}
/// <summary>
/// Get the size of a partition extended header
/// </summary>
/// <returns>Size of the partition extended header in bytes, 0 on error</returns>
public uint GetExtendedHeaderSize(int index)
{
// Empty partitions array means no size is available
if (Partitions == null)
return 0;
if (index < 0 || index >= Partitions.Length)
return 0;
// Invalid partition header means no size is available
var header = Partitions[index];
if (header == null)
return 0;
// Return the adjusted size
return header.ExtendedHeaderSizeInBytes;
}
/// <summary>
/// Get the size of a partition logo region
/// </summary>
/// <returns>Size of the partition logo region in bytes, 0 on error</returns>
public uint GetLogoRegionSize(int index)
{
// Empty partitions array means no size is available
if (Partitions == null)
return 0;
if (index < 0 || index >= Partitions.Length)
return 0;
// Invalid partition header means no size is available
var header = Partitions[index];
if (header == null)
return 0;
// Return the adjusted size
return header.LogoRegionSizeInMediaUnits * MediaUnitSize;
}
/// <summary>
/// Get the size of a partition plain region
/// </summary>
/// <returns>Size of the partition plain region in bytes, 0 on error</returns>
public uint GetPlainRegionSize(int index)
{
// Empty partitions array means no size is available
if (Partitions == null)
return 0;
if (index < 0 || index >= Partitions.Length)
return 0;
// Invalid partition header means no size is available
var header = Partitions[index];
if (header == null)
return 0;
// Return the adjusted size
return header.PlainRegionSizeInMediaUnits * MediaUnitSize;
}
/// <summary>
/// Get the size of a partition RomFS
/// </summary>
/// <returns>Size of the partition RomFS in bytes, 0 on error</returns>
public uint GetRomFSSize(int index)
{
// Empty partitions array means no size is available
if (Partitions == null)
return 0;
if (index < 0 || index >= Partitions.Length)
return 0;
// Invalid partition header means no size is available
var header = Partitions[index];
if (header == null)
return 0;
// Return the adjusted size
return header.RomFSSizeInMediaUnits * MediaUnitSize;
}
#endregion
}
}
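The offset and size helpers above all reduce to one conversion: the NCSD header stores offsets and sizes in media units, and a media unit is 0x200 * 2^flag bytes, where flag is the MediaUnitSize partition flag. A worked sketch of the arithmetic with hypothetical flag and table values:

using System;

internal static class MediaUnitExample
{
    // One media unit = 0x200 * 2^flag bytes; flag 0 gives the common 0x200
    private static uint MediaUnitSize(byte mediaUnitFlag) => (uint)(0x200 << mediaUnitFlag);

    // Byte offset of a region = (partition offset + region offset) in media units, times the unit size
    private static uint RegionByteOffset(uint partitionOffsetMU, uint regionOffsetMU, byte mediaUnitFlag)
        => (partitionOffsetMU + regionOffsetMU) * MediaUnitSize(mediaUnitFlag);

    private static void Main()
    {
        Console.WriteLine($"0x{MediaUnitSize(0):X}");                // 0x200
        Console.WriteLine($"0x{RegionByteOffset(0x27, 0x2E, 0):X}"); // 0xAA00
    }
}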

File diff suppressed because it is too large

View File

@@ -1,5 +1,5 @@
using System;
using System.IO;
using System.Linq;
using static SabreTools.Models.VPK.Constants;
namespace SabreTools.Serialization.Wrappers
@@ -43,13 +43,16 @@ namespace SabreTools.Serialization.Wrappers
return null;
// Get the archive count
int archiveCount = this.Model.DirectoryItems == null
? 0
: this.Model.DirectoryItems
.Select(di => di?.DirectoryEntry)
.Select(de => de?.ArchiveIndex ?? 0)
.Where(ai => ai != HL_VPK_NO_ARCHIVE)
.Max();
ushort archiveCount = 0;
foreach (var di in Model.DirectoryItems ?? [])
{
if (di?.DirectoryEntry == null)
continue;
if (di.DirectoryEntry.ArchiveIndex == HL_VPK_NO_ARCHIVE)
continue;
archiveCount = Math.Max(archiveCount, di.DirectoryEntry.ArchiveIndex);
}
// Build the list of archive filenames to populate
_archiveFilenames = new string[archiveCount];
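The unrolled loop above computes the same maximum archive index as the old Select/Where/Max chain without the intermediate enumerators, and it no longer throws when every entry is filtered out (Max() on an empty sequence raises InvalidOperationException; the loop simply leaves 0). A sketch of the loop in isolation; the data is illustrative and NoArchive stands in for the model's HL_VPK_NO_ARCHIVE constant with an assumed value:

using System;

internal static class VpkArchiveCountExample
{
    private const ushort NoArchive = 0x7FFF; // illustrative stand-in for HL_VPK_NO_ARCHIVE

    private static ushort MaxArchiveIndex(ushort?[] archiveIndices)
    {
        ushort max = 0;
        foreach (var index in archiveIndices)
        {
            if (index == null || index == NoArchive)
                continue;
            max = Math.Max(max, index.Value);
        }
        return max;
    }

    private static void Main()
    {
        ushort?[] indices = { 0, 3, null, NoArchive, 1 };
        Console.WriteLine(MaxArchiveIndex(indices)); // 3

        // All entries filtered: the loop returns 0 instead of throwing
        Console.WriteLine(MaxArchiveIndex(new ushort?[] { NoArchive })); // 0
    }
}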

View File

@@ -1,3 +1,8 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
using SabreTools.IO.Extensions;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Wrappers
@@ -27,4 +32,332 @@ namespace SabreTools.Serialization.Wrappers
#endregion
}
public abstract class WrapperBase<T> : WrapperBase, IWrapper<T>
{
#region Properties
/// <inheritdoc/>
public T GetModel() => Model;
/// <summary>
/// Internal model
/// </summary>
public T Model { get; private set; }
#endregion
#region Instance Variables
/// <summary>
/// Source of the original data
/// </summary>
protected DataSource _dataSource = DataSource.UNKNOWN;
/// <summary>
/// Lock object for reading from the source
/// </summary>
private readonly object _streamDataLock = new();
/// <summary>
/// Source byte array data
/// </summary>
/// <remarks>This is only populated if <see cref="_dataSource"/> is <see cref="DataSource.ByteArray"/></remarks>
protected byte[]? _byteArrayData = null;
/// <summary>
/// Source byte array data offset
/// </summary>
/// <remarks>This is only populated if <see cref="_dataSource"/> is <see cref="DataSource.ByteArray"/></remarks>
protected int _byteArrayOffset = -1;
/// <summary>
/// Source Stream data
/// </summary>
/// <remarks>This is only populated if <see cref="_dataSource"/> is <see cref="DataSource.Stream"/></remarks>
protected Stream? _streamData = null;
#if !NETFRAMEWORK
/// <summary>
/// JSON serializer options for output printing
/// </summary>
protected System.Text.Json.JsonSerializerOptions _jsonSerializerOptions
{
get
{
#if NETCOREAPP3_1
var serializer = new System.Text.Json.JsonSerializerOptions { WriteIndented = true };
#else
var serializer = new System.Text.Json.JsonSerializerOptions { IncludeFields = true, WriteIndented = true };
#endif
serializer.Converters.Add(new ConcreteAbstractSerializer());
serializer.Converters.Add(new ConcreteInterfaceSerializer());
serializer.Converters.Add(new System.Text.Json.Serialization.JsonStringEnumConverter());
return serializer;
}
}
#endif
#endregion
#region Constructors
/// <summary>
/// Construct a new instance of the wrapper from a byte array
/// </summary>
protected WrapperBase(T? model, byte[]? data, int offset)
{
if (model == null)
throw new ArgumentNullException(nameof(model));
if (data == null)
throw new ArgumentNullException(nameof(data));
if (offset < 0 || offset >= data.Length)
throw new ArgumentOutOfRangeException(nameof(offset));
Model = model;
_dataSource = DataSource.ByteArray;
_byteArrayData = data;
_byteArrayOffset = offset;
}
/// <summary>
/// Construct a new instance of the wrapper from a Stream
/// </summary>
protected WrapperBase(T? model, Stream? data)
{
if (model == null)
throw new ArgumentNullException(nameof(model));
if (data == null)
throw new ArgumentNullException(nameof(data));
if (data.Length == 0 || !data.CanSeek || !data.CanRead)
throw new ArgumentOutOfRangeException(nameof(data));
Model = model;
_dataSource = DataSource.Stream;
_streamData = data;
}
#endregion
#region Data
/// <summary>
/// Validate the backing data source
/// </summary>
/// <returns>True if the data source is valid, false otherwise</returns>
public bool DataSourceIsValid()
{
return _dataSource switch
{
// Byte array data requires both a valid array and offset
DataSource.ByteArray => _byteArrayData != null && _byteArrayOffset >= 0,
// Stream data requires a readable, seekable stream
DataSource.Stream => _streamData != null && _streamData.CanRead && _streamData.CanSeek,
// Everything else is invalid
_ => false,
};
}
/// <summary>
/// Check if a data segment is valid in the data source
/// </summary>
/// <param name="position">Position in the source</param>
/// <param name="length">Length of the data to check</param>
/// <returns>True if the positional data is valid, false otherwise</returns>
public bool SegmentValid(int position, int length)
{
// Validate the data source
if (!DataSourceIsValid())
return false;
// If we have an invalid position
if (position < 0 || position >= GetEndOfFile())
return false;
return _dataSource switch
{
DataSource.ByteArray => _byteArrayOffset + position + length <= _byteArrayData!.Length,
DataSource.Stream => position + length <= _streamData!.Length,
// Everything else is invalid
_ => false,
};
}
/// <summary>
/// Read data from the source
/// </summary>
/// <param name="position">Position in the source to read from</param>
/// <param name="length">Length of the requested data</param>
/// <returns>Byte array containing the requested data, null on error</returns>
public byte[]? ReadFromDataSource(int position, int length)
{
// Validate the data source
if (!DataSourceIsValid())
return null;
// Validate the requested segment
if (!SegmentValid(position, length))
return null;
// Read and return the data
byte[]? sectionData = null;
switch (_dataSource)
{
case DataSource.ByteArray:
sectionData = new byte[length];
Array.Copy(_byteArrayData!, _byteArrayOffset + position, sectionData, 0, length);
break;
case DataSource.Stream:
lock (_streamDataLock)
{
long currentLocation = _streamData!.Position;
_streamData.Seek(position, SeekOrigin.Begin);
sectionData = _streamData.ReadBytes(length);
_streamData.Seek(currentLocation, SeekOrigin.Begin);
break;
}
}
return sectionData;
}
/// <summary>
/// Read string data from the source
/// </summary>
/// <param name="position">Position in the source to read from</param>
/// <param name="length">Length of the requested data</param>
/// <param name="charLimit">Number of characters needed to be a valid string</param>
/// <returns>String list containing the requested data, null on error</returns>
public List<string>? ReadStringsFromDataSource(int position, int length, int charLimit = 5)
{
// Read the data as a byte array first
byte[]? sourceData = ReadFromDataSource(position, length);
if (sourceData == null)
return null;
// Check for ASCII strings
var asciiStrings = ReadStringsWithEncoding(sourceData, charLimit, Encoding.ASCII);
// Check for UTF-8 strings
// We are limiting the check for Unicode characters with a second byte of 0x00 for now
var utf8Strings = ReadStringsWithEncoding(sourceData, charLimit, Encoding.UTF8);
// Check for Unicode strings
// We are limiting the check for Unicode characters with a second byte of 0x00 for now
var unicodeStrings = ReadStringsWithEncoding(sourceData, charLimit, Encoding.Unicode);
// Combine the strings found across encodings
List<string> sourceStrings = [.. asciiStrings, .. utf8Strings, .. unicodeStrings];
// Sort the strings and return
sourceStrings.Sort();
return sourceStrings;
}
/// <summary>
/// Get the ending offset of the source
/// </summary>
/// <returns>Value greater than 0 for a valid end of file, -1 on error</returns>
public int GetEndOfFile()
{
// Validate the data source
if (!DataSourceIsValid())
return -1;
// Return the effective endpoint
return _dataSource switch
{
DataSource.ByteArray => _byteArrayData!.Length - _byteArrayOffset,
DataSource.Stream => (int)_streamData!.Length,
_ => -1,
};
}
/// <summary>
/// Read string data from the source with an encoding
/// </summary>
/// <param name="sourceData">Byte array representing the source data</param>
/// <param name="charLimit">Number of characters needed to be a valid string</param>
/// <param name="encoding">Character encoding to use for checking</param>
/// <returns>String list containing the requested data, empty on error</returns>
/// <remarks>TODO: Move to IO?</remarks>
#if NET20
private List<string> ReadStringsWithEncoding(byte[] sourceData, int charLimit, Encoding encoding)
#else
private HashSet<string> ReadStringsWithEncoding(byte[] sourceData, int charLimit, Encoding encoding)
#endif
{
// If we have an invalid character limit, default to 5
if (charLimit <= 0)
charLimit = 5;
// Create the string hash set to return
#if NET20
var sourceStrings = new List<string>();
#else
var sourceStrings = new HashSet<string>();
#endif
// Setup cached data
int sourceDataIndex = 0;
List<char> cachedChars = [];
// Check for strings
while (sourceDataIndex < sourceData.Length)
{
// Read the next character
char ch = encoding.GetChars(sourceData, sourceDataIndex, 1)[0];
// If we have a control character or an invalid byte
bool isValid = !char.IsControl(ch) && (ch & 0xFF00) == 0;
if (!isValid)
{
// Advance past the invalid byte so the scan cannot loop forever
sourceDataIndex++;
// If we have no cached string
if (cachedChars.Count == 0)
continue;
// If we have a cached string greater than the limit
if (cachedChars.Count >= charLimit)
sourceStrings.Add(new string([.. cachedChars]));
cachedChars.Clear();
continue;
}
// If a long repeating string is found, discard it
if (cachedChars.Count >= 64 && cachedChars.TrueForAll(c => c == cachedChars[0]))
{
cachedChars.Clear();
continue;
}
// Append the character to the cached string
cachedChars.Add(ch);
sourceDataIndex++;
}
// If we have a cached string greater than the limit
if (cachedChars.Count >= charLimit)
sourceStrings.Add(new string([.. cachedChars]));
return sourceStrings;
}
#endregion
#region JSON Export
#if !NETFRAMEWORK
/// <summary>
/// Export the item information as JSON
/// </summary>
public override string ExportJSON() => System.Text.Json.JsonSerializer.Serialize(Model, _jsonSerializerOptions);
#endif
#endregion
}
}
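The rewritten string scanner above keys on one rule: a character joins the current run when it is not a control character and its upper byte is zero, and a run is kept only when it reaches the character limit (long single-character repeats are discarded as noise). A simplified ASCII-only sketch of that rule; the wrapper's version is parameterized over encodings and deduplicates with a HashSet:

using System;
using System.Collections.Generic;
using System.Text;

internal static class StringScanExample
{
    // Collect printable ASCII runs of at least charLimit characters
    private static List<string> FindStrings(byte[] data, int charLimit = 5)
    {
        var found = new List<string>();
        var current = new StringBuilder();

        foreach (byte b in data)
        {
            char ch = (char)b;
            if (b < 0x80 && !char.IsControl(ch))
            {
                current.Append(ch);
                continue;
            }

            // Run ended; keep it only if it is long enough
            if (current.Length >= charLimit)
                found.Add(current.ToString());
            current.Clear();
        }

        if (current.Length >= charLimit)
            found.Add(current.ToString());
        return found;
    }

    private static void Main()
    {
        byte[] data = Encoding.ASCII.GetBytes("junk\0\0hello world\0x\0");
        Console.WriteLine(string.Join(" | ", FindStrings(data))); // hello world
    }
}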

View File

@@ -1,350 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using SabreTools.IO.Extensions;
using SabreTools.Serialization.Interfaces;
namespace SabreTools.Serialization.Wrappers
{
public abstract class WrapperBase<T> : WrapperBase, IWrapper<T>
{
#region Properties
/// <inheritdoc/>
public T GetModel() => Model;
/// <summary>
/// Internal model
/// </summary>
public T Model { get; private set; }
#endregion
#region Instance Variables
/// <summary>
/// Source of the original data
/// </summary>
protected DataSource _dataSource = DataSource.UNKNOWN;
/// <summary>
/// Source byte array data
/// </summary>
/// <remarks>This is only populated if <see cref="_dataSource"/> is <see cref="DataSource.ByteArray"/></remarks>
protected byte[]? _byteArrayData = null;
/// <summary>
/// Source byte array data offset
/// </summary>
/// <remarks>This is only populated if <see cref="_dataSource"/> is <see cref="DataSource.ByteArray"/></remarks>
protected int _byteArrayOffset = -1;
/// <summary>
/// Source Stream data
/// </summary>
/// <remarks>This is only populated if <see cref="_dataSource"/> is <see cref="DataSource.Stream"/></remarks>
protected Stream? _streamData = null;
#if !NETFRAMEWORK
/// <summary>
/// JSON serializer options for output printing
/// </summary>
protected System.Text.Json.JsonSerializerOptions _jsonSerializerOptions
{
get
{
#if NETCOREAPP3_1
var serializer = new System.Text.Json.JsonSerializerOptions { WriteIndented = true };
#else
var serializer = new System.Text.Json.JsonSerializerOptions { IncludeFields = true, WriteIndented = true };
#endif
serializer.Converters.Add(new ConcreteAbstractSerializer());
serializer.Converters.Add(new ConcreteInterfaceSerializer());
serializer.Converters.Add(new System.Text.Json.Serialization.JsonStringEnumConverter());
return serializer;
}
}
#endif
#endregion
#region Constructors
/// <summary>
/// Construct a new instance of the wrapper from a byte array
/// </summary>
protected WrapperBase(T? model, byte[]? data, int offset)
{
if (model == null)
throw new ArgumentNullException(nameof(model));
if (data == null)
throw new ArgumentNullException(nameof(data));
if (offset < 0 || offset >= data.Length)
throw new ArgumentOutOfRangeException(nameof(offset));
this.Model = model;
_dataSource = DataSource.ByteArray;
_byteArrayData = data;
_byteArrayOffset = offset;
}
/// <summary>
/// Construct a new instance of the wrapper from a Stream
/// </summary>
protected WrapperBase(T? model, Stream? data)
{
if (model == null)
throw new ArgumentNullException(nameof(model));
if (data == null)
throw new ArgumentNullException(nameof(data));
if (data.Length == 0 || !data.CanSeek || !data.CanRead)
throw new ArgumentOutOfRangeException(nameof(data));
this.Model = model;
_dataSource = DataSource.Stream;
_streamData = data;
}
#endregion
#region Data
/// <summary>
/// Validate the backing data source
/// </summary>
/// <returns>True if the data source is valid, false otherwise</returns>
public bool DataSourceIsValid()
{
switch (_dataSource)
{
// Byte array data requires both a valid array and offset
case DataSource.ByteArray:
return _byteArrayData != null && _byteArrayOffset >= 0;
// Stream data requires both a valid stream
case DataSource.Stream:
return _streamData != null && _streamData.CanRead && _streamData.CanSeek;
// Everything else is invalid
case DataSource.UNKNOWN:
default:
return false;
}
}
/// <summary>
/// Check if a data segment is valid in the data source
/// </summary>
/// <param name="position">Position in the source</param>
/// <param name="length">Length of the data to check</param>
/// <returns>True if the positional data is valid, false otherwise</returns>
public bool SegmentValid(int position, int length)
{
// Validate the data souece
if (!DataSourceIsValid())
return false;
// If we have an invalid position
if (position < 0 || position >= GetEndOfFile())
return false;
switch (_dataSource)
{
case DataSource.ByteArray:
return _byteArrayOffset + position + length <= _byteArrayData!.Length;
case DataSource.Stream:
return position + length <= _streamData!.Length;
// Everything else is invalid
case DataSource.UNKNOWN:
default:
return false;
}
}
/// <summary>
/// Read data from the source
/// </summary>
/// <param name="position">Position in the source to read from</param>
/// <param name="length">Length of the requested data</param>
/// <returns>Byte array containing the requested data, null on error</returns>
public byte[]? ReadFromDataSource(int position, int length)
{
// Validate the data source
if (!DataSourceIsValid())
return null;
// Validate the requested segment
if (!SegmentValid(position, length))
return null;
// Read and return the data
byte[]? sectionData = null;
switch (_dataSource)
{
case DataSource.ByteArray:
sectionData = new byte[length];
Array.Copy(_byteArrayData!, _byteArrayOffset + position, sectionData, 0, length);
break;
case DataSource.Stream:
long currentLocation = _streamData!.Position;
_streamData.Seek(position, SeekOrigin.Begin);
sectionData = _streamData.ReadBytes(length);
_streamData.Seek(currentLocation, SeekOrigin.Begin);
break;
}
return sectionData;
}
/// <summary>
/// Read string data from the source
/// </summary>
/// <param name="position">Position in the source to read from</param>
/// <param name="length">Length of the requested data</param>
/// <param name="charLimit">Number of characters needed to be a valid string</param>
/// <returns>String list containing the requested data, null on error</returns>
public List<string>? ReadStringsFromDataSource(int position, int length, int charLimit = 5)
{
// Read the data as a byte array first
byte[]? sourceData = ReadFromDataSource(position, length);
if (sourceData == null)
return null;
// If we have an invalid character limit, default to 5
if (charLimit <= 0)
charLimit = 5;
// Create the string list to return
var sourceStrings = new List<string>();
// Setup cached data
int sourceDataIndex = 0;
string cachedString = string.Empty;
// Check for ASCII strings
while (sourceDataIndex < sourceData.Length)
{
// If we have a control character or an invalid byte
if (sourceData[sourceDataIndex] < 0x20 || sourceData[sourceDataIndex] > 0x7F)
{
// If we have no cached string
if (cachedString.Length == 0)
{
sourceDataIndex++;
continue;
}
// If we have a cached string greater than the limit
if (cachedString.Length >= charLimit)
sourceStrings.Add(cachedString);
cachedString = string.Empty;
sourceDataIndex++;
continue;
}
// All other characters get read in
cachedString += Encoding.ASCII.GetString(sourceData, sourceDataIndex, 1);
sourceDataIndex++;
}
// If we have a cached string greater than the limit
if (cachedString.Length >= charLimit)
sourceStrings.Add(cachedString);
// Reset cached data
sourceDataIndex = 0;
cachedString = string.Empty;
// We are limiting the check for Unicode characters with a second byte of 0x00 for now
// Check for Unicode strings
while (sourceDataIndex < sourceData.Length)
{
// Unicode characters are always 2 bytes
if (sourceDataIndex == sourceData.Length - 1)
break;
ushort ch = BitConverter.ToUInt16(sourceData, sourceDataIndex);
// If we have a null terminator or "invalid" character
if (ch == 0x0000 || (ch & 0xFF00) != 0)
{
// If we have no cached string
if (cachedString.Length == 0)
{
sourceDataIndex += 2;
continue;
}
// If we have a cached string greater than the limit
if (cachedString.Length >= charLimit)
sourceStrings.Add(cachedString);
cachedString = string.Empty;
sourceDataIndex += 2;
continue;
}
// All other characters get read in
cachedString += Encoding.Unicode.GetString(sourceData, sourceDataIndex, 2);
sourceDataIndex += 2;
}
// If we have a cached string greater than the limit
if (cachedString.Length >= charLimit)
sourceStrings.Add(cachedString);
// Deduplicate the string list for storage
sourceStrings = sourceStrings.Distinct().OrderBy(s => s).ToList();
// TODO: Complete implementation of string finding
return sourceStrings;
}
/// <summary>
/// Get the ending offset of the source
/// </summary>
/// <returns>Value greater than 0 for a valid end of file, -1 on error</returns>
public int GetEndOfFile()
{
// Validate the data souece
if (!DataSourceIsValid())
return -1;
// Return the effective endpoint
switch (_dataSource)
{
case DataSource.ByteArray:
return _byteArrayData!.Length - _byteArrayOffset;
case DataSource.Stream:
return (int)_streamData!.Length;
case DataSource.UNKNOWN:
default:
return -1;
}
}
#endregion
#region JSON Export
#if !NETFRAMEWORK
/// <summary>
/// Export the item information as JSON
/// </summary>
public override string ExportJSON() => System.Text.Json.JsonSerializer.Serialize(Model, _jsonSerializerOptions);
#endif
#endregion
}
}

Some files were not shown because too many files have changed in this diff