Compare commits

...

110 Commits
1.2.0 ... 1.6.5

Author SHA1 Message Date
Matt Nadareski
60ce7cbfa0 Bump version 2025-04-30 20:51:59 -04:00
Deterous
39fe46a162 Remove obsolete PhilipsCDiDigitalVideo system (#10)
* Remove obsolete PhilipsCDiDigitalVideo system

* Delete obsolete system
2025-04-17 08:04:02 -04:00
Matt Nadareski
a8674a21e4 Add 30 second timeout to web operations 2025-04-13 21:20:00 -04:00
Matt Nadareski
65f2d53a3f Fix how conditions are used for references 2025-02-25 21:16:16 -05:00
Matt Nadareski
0bc869543a Bump version 2024-12-31 21:10:46 -05:00
Matt Nadareski
aa7d513d2c Add Ring Perfect Audio Offset pseudo-tag 2024-12-31 21:09:11 -05:00
Matt Nadareski
3d35129529 Update copyright 2024-12-30 21:27:26 -05:00
Matt Nadareski
ec563938ba Remove unnecessary action step 2024-12-30 21:26:16 -05:00
Matt Nadareski
f0f3a1a194 Bump version 2024-12-28 13:52:24 -05:00
Deterous
55f5262198 Add new Protection pseudo site code (#9)
* Add Protection pseudo site tag

* Use new sitecode in redumplib
2024-12-27 12:33:35 -05:00
Matt Nadareski
1d247b1f6f Use string comparison on tab replacement when possible 2024-12-25 22:13:26 -05:00
Matt Nadareski
32c57736ae Duplicate write offset field for convenience (fixes #8) 2024-12-25 22:03:55 -05:00
Deterous
8ab312ba8b Convert <Tab> (#7) 2024-12-24 21:07:51 -05:00
Matt Nadareski
3ea01ca933 Ensure .NET versions are installed for testing 2024-12-19 10:52:22 -05:00
Matt Nadareski
27d99f7429 Bump version 2024-12-16 14:35:02 -05:00
Matt Nadareski
8b147f2041 Change empty language list message 2024-12-16 14:33:22 -05:00
Matt Nadareski
9c7a143d52 Add to publish scripts, not rolling build 2024-12-16 14:26:55 -05:00
Matt Nadareski
30bbef7bba Add RedumpTool as a non-building component 2024-12-16 14:23:43 -05:00
Matt Nadareski
17da564b00 Fix old .NET support 2024-12-16 14:22:07 -05:00
Matt Nadareski
073176cccb Update Models to 1.5.8 2024-12-16 14:21:53 -05:00
Matt Nadareski
0434e63e34 Allow symbols to be packed 2024-12-16 14:21:34 -05:00
Matt Nadareski
2b75eb44cd Use publish script and update README 2024-12-06 11:34:34 -05:00
Matt Nadareski
10eecc866e Bump version 2024-12-05 22:11:48 -05:00
Matt Nadareski
84fa2f93ea Fix consecutive empty line logic 2024-12-05 21:15:26 -05:00
Matt Nadareski
5a92c0fc98 Bump version 2024-12-01 22:58:47 -05:00
Matt Nadareski
4ffc1b3160 Fix multi-newline formatting, add tests 2024-12-01 22:44:12 -05:00
Matt Nadareski
ffa8f2b16e Update ToDiscType and add tests 2024-12-01 22:31:19 -05:00
Matt Nadareski
70e3e074cc Update some extensions, update tests 2024-12-01 22:14:49 -05:00
Matt Nadareski
4858b4e459 None of these are TODOs on my part 2024-12-01 21:32:45 -05:00
Matt Nadareski
9495cd32c7 Handle some TODO items 2024-12-01 21:31:58 -05:00
Matt Nadareski
071571870e Add ToYesNo tests 2024-12-01 21:20:47 -05:00
Matt Nadareski
f03cd40181 Use automatic system name mapping 2024-12-01 21:14:08 -05:00
Matt Nadareski
ea51726645 Fill out more tests 2024-12-01 21:09:15 -05:00
Matt Nadareski
f0633d5aa7 Framework only matters for executable 2024-11-30 21:39:44 -05:00
Matt Nadareski
4c076aec0c Update packages 2024-11-30 21:38:41 -05:00
Matt Nadareski
2ec9d6a4a0 Use more targeted library for old .NET 2024-11-18 19:53:44 -05:00
Matt Nadareski
415b488005 Bump version 2024-11-14 22:23:20 -05:00
Matt Nadareski
5d300c9975 Make download helpers public for ease 2024-11-14 22:22:52 -05:00
Matt Nadareski
304236774f Bump version 2024-11-13 01:54:40 -05:00
Matt Nadareski
9924289c48 Fix casting issues 2024-11-13 01:54:24 -05:00
Matt Nadareski
240eb74ead Bump version 2024-11-13 01:30:42 -05:00
Matt Nadareski
a64b109d2c Remove unnecessary Linq usage 2024-11-13 01:29:36 -05:00
Matt Nadareski
3e0f9b5410 Add .NET 9 to target frameworks 2024-11-13 00:59:20 -05:00
Matt Nadareski
668be418ac Bump version 2024-10-18 12:30:30 -04:00
Matt Nadareski
7d184a634e Always return ID list, if possible 2024-10-18 12:26:36 -04:00
Matt Nadareski
67aed0899d Don't null foreign title if missing 2024-10-18 11:58:29 -04:00
Matt Nadareski
9fbaf1a187 Bump version 2024-10-04 01:42:58 -04:00
Matt Nadareski
fe8686a2bb Allow forward slashes in queries sometimes 2024-10-04 01:41:05 -04:00
Matt Nadareski
652270c8c7 Add publish scripts 2024-10-01 13:56:40 -04:00
Matt Nadareski
905d8a94fb Bump version 2024-10-01 13:55:33 -04:00
Matt Nadareski
3ee8416695 Remove unnecessary tuples 2024-10-01 13:53:03 -04:00
Matt Nadareski
49fa06da55 Remove threading bridge package (unused) 2024-10-01 04:27:31 -04:00
Matt Nadareski
70e29afd89 Remove Linq requirement from old .NET 2024-10-01 04:25:35 -04:00
Matt Nadareski
2a402a53db Remove ValueTuple packages (unused) 2024-10-01 03:21:07 -04:00
Matt Nadareski
66bb3b75b2 Bump version 2024-07-24 11:05:25 -04:00
Matt Nadareski
9d7d46673a Fix deserializing submission from file 2024-07-23 22:18:06 -04:00
Matt Nadareski
16d196c902 Bump version 2024-07-16 14:13:28 -04:00
Matt Nadareski
c93da92f19 Add new helper class for site interaction 2024-07-16 14:12:28 -04:00
Matt Nadareski
a219d0c5de Add some client helper classes 2024-07-16 14:07:18 -04:00
Matt Nadareski
02e6f0e85f Port tests from MPF 2024-07-16 13:08:56 -04:00
Deterous
9dfec64e4e Bump version (#6) 2024-07-09 11:09:28 -04:00
Deterous
4c12693a33 Update Sharp X68000 shortname to x68k (#5) 2024-07-08 22:35:28 -04:00
Deterous
acea06c05f Update Triforce shortname to trf (#4) 2024-07-08 21:43:40 -04:00
Matt Nadareski
067c5cfbbc Move project to subfolder 2024-06-27 16:49:18 -04:00
Matt Nadareski
ff04b8ec6f Fix helper classes 2024-06-27 09:43:01 -04:00
Matt Nadareski
e09c895cf1 Add retry count constructor to client 2024-06-27 09:34:58 -04:00
Matt Nadareski
1b68253089 Remove now-redundant classes 2024-06-27 01:41:17 -04:00
Matt Nadareski
5b9a2d6b74 CookieWebClient should be internal 2024-06-27 01:33:15 -04:00
Matt Nadareski
d5c7ef74d4 Reduce code complexity in new client (nw) 2024-06-27 01:30:30 -04:00
Matt Nadareski
f60cd2985d Add unified Client (nw) 2024-06-27 01:12:57 -04:00
Matt Nadareski
7fcb6aa949 Create separate cookie web client 2024-06-27 00:34:15 -04:00
Matt Nadareski
f22b1b036b Minor fixes to organization, add sleep 2024-06-27 00:18:33 -04:00
Matt Nadareski
ec045448c5 Make retry count into a field, sync clients 2024-06-27 00:05:53 -04:00
Matt Nadareski
93873ea204 Add dumping parameters field 2024-06-26 10:02:28 -04:00
Matt Nadareski
341edc56bd Bump version 2024-05-15 17:05:15 -04:00
Matt Nadareski
da4bdac6e2 Update Models 2024-05-15 17:04:55 -04:00
Matt Nadareski
7fe595ee0a Fix ordering of site codes 2024-05-15 16:44:07 -04:00
Matt Nadareski
8a9f62f5a4 Add new tags to formatting list 2024-05-15 16:40:01 -04:00
Matt Nadareski
dbb7cf7ef9 Add missing comment fields to listing 2024-05-15 16:38:17 -04:00
Matt Nadareski
d591ee1550 Bump version 2024-05-15 16:16:37 -04:00
Matt Nadareski
9153c931a5 Include comment/content markers 2024-05-15 16:16:07 -04:00
Matt Nadareski
99ebd1f3ac Add extensions for comment and content codes 2024-05-15 16:09:57 -04:00
Matt Nadareski
844f5506f5 Add Applications pseudotag 2024-05-15 16:09:19 -04:00
Matt Nadareski
4be01b25ab Add EidosID pseudotag 2024-05-15 16:07:00 -04:00
Matt Nadareski
22e2e73f65 Add BethesdaID pseudotag 2024-05-15 16:06:06 -04:00
Matt Nadareski
831ea86d4f Add CompatibleOS pseudotag 2024-05-15 16:04:47 -04:00
Matt Nadareski
4475dba94c Bump version 2024-03-15 12:48:36 -04:00
Matt Nadareski
63a758c005 Fix missing fields from output 2024-03-15 12:48:18 -04:00
Matt Nadareski
7e81f723ca Bump version 2024-03-05 12:30:59 -05:00
Matt Nadareski
f69c7e6bb2 Add new dumping info fields 2024-03-02 19:41:19 -05:00
Matt Nadareski
9d2803a6df Add nuget package and PR workflows 2024-02-27 19:16:31 -05:00
Matt Nadareski
23f1ceac99 Bump version 2024-02-26 20:15:29 -05:00
Deterous
55c621b615 Fill PS3 Disc Key from redump (#3)
* Fill PS3 Disc Key from redump

* Deny NULL rather than only allow hex keys

* Update Builder.cs

Co-authored-by: Matt Nadareski <mnadareski@outlook.com>

---------

Co-authored-by: Matt Nadareski <mnadareski@outlook.com>
2024-02-26 17:04:02 -08:00
Matt Nadareski
29cce4d4b9 Bump version 2024-02-02 11:20:18 -05:00
Matt Nadareski
1f37ece28d Fix typo 2024-02-02 11:20:08 -05:00
Matt Nadareski
6acac60376 Add CD support for GameWave 2024-02-02 11:18:44 -05:00
Matt Nadareski
3f9b09b943 Bump version 2023-12-05 11:04:02 -05:00
Matt Nadareski
2b1ee393d4 Fix some formatting issues 2023-12-04 14:33:56 -05:00
Matt Nadareski
887c443a17 Correct log statement for universal hash 2023-12-04 14:15:36 -05:00
Matt Nadareski
5ad9bebf88 Bump version 2023-11-22 23:13:09 -05:00
Matt Nadareski
8c8e624ac2 Update compatibility libraries 2023-11-22 23:12:44 -05:00
Matt Nadareski
d7b2d13d8b Bump version 2023-11-22 10:12:09 -05:00
Matt Nadareski
8c8ae49a3b Support .NET Framework 2.0 2023-11-22 10:11:56 -05:00
Matt Nadareski
b77eec5063 Use different set of libraries 2023-11-22 09:44:04 -05:00
Matt Nadareski
5d992566b5 Update SabreTools.Models 2023-11-22 09:42:15 -05:00
Matt Nadareski
46bde960f3 Perform some prep for .NET Framework 3.5 2023-11-20 21:45:27 -05:00
Matt Nadareski
ba2c3a592f Support async in .NET Framework 4.0 2023-11-20 21:18:36 -05:00
Matt Nadareski
f0bca60d63 Bump version 2023-11-16 12:21:19 -05:00
Matt Nadareski
a81dc6d680 Rename Writer to Formatter 2023-11-16 09:36:00 -05:00
Matt Nadareski
1a6ebfdbf0 Add more logic from MPF, update syntax 2023-11-16 00:59:40 -05:00
53 changed files with 10121 additions and 3377 deletions

.github/workflows/build_and_test.yml

@@ -0,0 +1,40 @@
name: Build and Test
on:
push:
branches: [ "main" ]
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
with:
submodules: recursive
- name: Setup .NET
uses: actions/setup-dotnet@v4
with:
dotnet-version: |
6.0.x
8.0.x
9.0.x
- name: Run tests
run: dotnet test
- name: Run publish script
run: ./publish-nix.sh -d
- name: Upload to rolling
uses: ncipollo/release-action@v1.14.0
with:
allowUpdates: True
artifacts: "*.nupkg,*.snupkg"
body: 'Last built commit: ${{ github.sha }}'
name: 'Rolling Release'
prerelease: True
replacesArtifacts: True
tag: "rolling"
updateOnlyUnreleased: True

.github/workflows/check_pr.yml

@@ -0,0 +1,23 @@
name: Build PR
on: [pull_request]
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Setup .NET
uses: actions/setup-dotnet@v4
with:
dotnet-version: |
6.0.x
8.0.x
9.0.x
- name: Build
run: dotnet build
- name: Run tests
run: dotnet test

File diff suppressed because it is too large

README.md

@@ -1,5 +1,13 @@
# SabreTools.RedumpLib
[![Build and Test](https://github.com/SabreTools/SabreTools.RedumpLib/actions/workflows/build_and_test.yml/badge.svg)](https://github.com/SabreTools/SabreTools.RedumpLib/actions/workflows/build_and_test.yml)
This library comprises interaction logic for [Redump](http://redump.org/). Because there is no formal API for the site, this library interacts with the site through normal HTTP methods. It includes a fairly comprehensive reference of supported parts of the site, including URLs, page information, and packs.
Find the link to the Nuget package [here](https://www.nuget.org/packages/SabreTools.RedumpLib).
## Releases
For the most recent stable build, download the latest release here: [Releases Page](https://github.com/SabreTools/SabreTools.RedumpLib/releases)
For the latest WIP build here: [Rolling Release](https://github.com/SabreTools/SabreTools.RedumpLib/releases/rolling)
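As a rough sketch of what consuming the library described above looks like: the snippet below drives the Downloader type the same way RedumpTool/Program.cs (later in this diff) does. The property and method names are taken from that file; the credentials, the output path, and treating the result of Download() as a list of processed IDs are assumptions made for illustration only.

using System;
using SabreTools.RedumpLib;
using SabreTools.RedumpLib.Data;

// Minimal sketch: pull site pages 1 through 10 into a local folder.
// Username, password, and output directory are placeholders.
var downloader = new Downloader
{
    Feature = Feature.Site,
    MinimumId = 1,
    MaximumId = 10,
    OutDir = "redump-pages",
    Username = "user",
    Password = "pass",
};

// Program.cs waits on the task synchronously; the result exposes Count
// and ConvertAll, so it is treated here as a list of processed IDs.
var processed = downloader.Download().Result;
Console.WriteLine($"Processed {processed.Count} IDs");

Consumers would presumably pull the SabreTools.RedumpLib package from the NuGet link above or reference the project directly.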

RedumpTool/Program.cs

@@ -0,0 +1,278 @@
using System;
using System.IO;
using SabreTools.RedumpLib;
using SabreTools.RedumpLib.Data;
namespace RedumpTool
{
public class Program
{
static void Main(string[] args)
{
// Show help if nothing is input
if (args == null || args.Length == 0)
{
ShowHelp();
return;
}
// Derive the feature, if possible
Feature feature = DeriveFeature(args[0]);
if (feature == Feature.NONE)
{
ShowHelp();
return;
}
// Create a new Downloader
var downloader = CreateDownloader(feature, args);
if (downloader == null)
{
ShowHelp();
return;
}
// Run the download task
var downloaderTask = downloader.Download();
downloaderTask.Wait();
// Get the downloader task results and print, if necessary
var downloaderResult = downloaderTask.Result;
if (downloaderResult.Count > 0)
{
string processedIds = string.Join(", ", [.. downloaderResult.ConvertAll(i => i.ToString())]);
Console.WriteLine($"Processed IDs: {processedIds}");
}
else if (downloaderResult.Count == 0 && downloader.Feature != Feature.Packs)
{
ShowHelp();
}
}
/// <summary>
/// Derive the feature from the supplied argument
/// </summary>
/// <param name="feature">Possible feature name to derive from</param>
/// <returns>True if the feature was set, false otherwise</returns>
private static Feature DeriveFeature(string feature)
{
return feature.ToLowerInvariant() switch
{
"site" => Feature.Site,
"wip" => Feature.WIP,
"packs" => Feature.Packs,
"user" => Feature.User,
"search" => Feature.Quicksearch,
"query" => Feature.Quicksearch,
_ => Feature.NONE,
};
}
/// <summary>
/// Create a Downloader from a feature and a set of arguments
/// </summary>
/// <param name="feature">Primary feature to use</param>
/// <param name="args">Arguments list to parse</param>
/// <returns>Initialized Downloader on success, null otherwise</returns>
private static Downloader? CreateDownloader(Feature feature, string[] args)
{
// Set temporary internal variables
string? outDir = null;
string? username = null;
string? password = null;
int minimumId = -1;
int maximumId = -1;
string? queryString = null;
bool useSubfolders = false;
bool onlyNew = false;
bool onlyList = false;
bool noSlash = false;
bool force = false;
// Now loop through all of the arguments
try
{
for (int i = 1; i < args.Length; i++)
{
switch (args[i])
{
// Output directory
case "-o":
case "--output":
outDir = args[++i].Trim('"');
break;
// Username
case "-u":
case "--username":
username = args[++i];
break;
// Password
case "-p":
case "--password":
password = args[++i];
break;
// Minimum Redump ID
case "-min":
case "--minimum":
if (!int.TryParse(args[++i], out minimumId))
minimumId = -1;
break;
// Maximum Redump ID
case "-max":
case "--maximum":
if (!int.TryParse(args[++i], out maximumId))
maximumId = -1;
break;
// Quicksearch text
case "-q":
case "--query":
queryString = args[++i];
break;
// Packs subfolders
case "-s":
case "--subfolders":
useSubfolders = true;
break;
// Use last modified
case "-n":
case "--onlynew":
onlyNew = true;
break;
// List instead of download
case "-l":
case "--list":
onlyList = true;
break;
// Don't filter forward slashes from queries
case "-ns":
case "--noslash":
noSlash = true;
break;
// Force continuation
case "-f":
case "--force":
force = true;
break;
// Everything else
default:
Console.WriteLine($"Unrecognized flag: {args[i]}");
break;
}
}
}
catch (Exception ex)
{
Console.WriteLine($"An exception has occurred: {ex}");
return null;
}
// Output directory validation
if (!onlyList && string.IsNullOrEmpty(outDir))
{
Console.WriteLine("No output directory set!");
return null;
}
else if (!onlyList && !string.IsNullOrEmpty(outDir))
{
// Create the output directory, if it doesn't exist
try
{
if (!Directory.Exists(outDir))
Directory.CreateDirectory(outDir);
}
catch (Exception ex)
{
Console.WriteLine($"An exception has occurred: {ex}");
return null;
}
}
// Range verification
if (feature == Feature.Site && !onlyNew && (minimumId < 0 || maximumId < 0))
{
Console.WriteLine("Please enter a valid range of Redump IDs");
return null;
}
else if (feature == Feature.WIP && !onlyNew && (minimumId < 0 || maximumId < 0))
{
Console.WriteLine("Please enter a valid range of WIP IDs");
return null;
}
// Query verification (and cleanup)
if (feature == Feature.Quicksearch && string.IsNullOrEmpty(queryString))
{
Console.WriteLine("Please enter a query for searching");
return null;
}
// Create and return the downloader
var downloader = new Downloader()
{
Feature = feature,
MinimumId = minimumId,
MaximumId = maximumId,
QueryString = queryString,
OutDir = outDir,
UseSubfolders = useSubfolders,
OnlyNew = onlyNew,
OnlyList = onlyList,
Force = force,
NoSlash = noSlash,
Username = username,
Password = password,
};
return downloader;
}
/// <summary>
/// Show the commandline help for the program
/// </summary>
private static void ShowHelp()
{
Console.WriteLine("RedumpTool - A Redump.org recovery tool");
Console.WriteLine();
Console.WriteLine("Usage: RedumpTool <feature> [options]");
Console.WriteLine();
Console.WriteLine("Common Options");
Console.WriteLine(" -o <folder>, --output <folder> - Set the base output directory");
Console.WriteLine(" -u <username>, --username <username> - Redump username");
Console.WriteLine(" -p <pass>, --password <pass> - Redump password");
Console.WriteLine();
Console.WriteLine("site - Download pages and related files from the main site");
Console.WriteLine(" -min <MinId>, --minimum <MinId> - Lower bound for page numbers (cannot be used with only new)");
Console.WriteLine(" -max <MaxId>, --maximum <MaxId> - Upper bound for page numbers (cannot be used with only new)");
Console.WriteLine(" -n, --onlynew - Use the last modified view (cannot be used with min and max)");
Console.WriteLine(" -f, --force - Force continuing downloads until user cancels (used with only new)");
Console.WriteLine();
Console.WriteLine("wip - Download pages and related files from the WIP list");
Console.WriteLine(" -min <MinId>, --minimum <MinId> - Lower bound for page numbers (cannot be used with only new)");
Console.WriteLine(" -max <MaxId>, --maximum <MaxId> - Upper bound for page numbers (cannot be used with only new)");
Console.WriteLine(" -n, --onlynew - Use the last modified view (cannot be used with min and max)");
Console.WriteLine();
Console.WriteLine("packs - Download available packs");
Console.WriteLine(" -s, --subfolders - Download packs to named subfolders");
Console.WriteLine();
Console.WriteLine("user - Download pages and related files for a particular user");
Console.WriteLine(" -n, --onlynew - Use the last modified view instead of sequential parsing");
Console.WriteLine(" -l, --list - Only list the page IDs for that user");
Console.WriteLine();
Console.WriteLine("query - Download pages and related files from a Redump-compatible query");
Console.WriteLine(" -q, --query - Redump-compatible query to run");
Console.WriteLine(" -l, --list - Only list the page IDs for that query");
Console.WriteLine(" -ns, --noslash - Don't replace forward slashes with '-'");
Console.WriteLine();
}
}
}
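For quick reference, two illustrative invocations assembled from the help text above; the executable name, the credentials, and the paths are placeholders tied to this sketch, not documented defaults:

RedumpTool site -u myuser -p mypass -o ./site-pages -min 1 -max 100
RedumpTool query -u myuser -p mypass -o ./query-results -q "search terms"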

RedumpTool/RedumpTool.csproj

@@ -0,0 +1,33 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFrameworks>net20;net35;net40;net452;net462;net472;net48;netcoreapp3.1;net5.0;net6.0;net7.0;net8.0;net9.0</TargetFrameworks>
<OutputType>Exe</OutputType>
<CheckEolTargetFramework>false</CheckEolTargetFramework>
<IncludeSourceRevisionInInformationalVersion>false</IncludeSourceRevisionInInformationalVersion>
<LangVersion>latest</LangVersion>
<Nullable>enable</Nullable>
<SuppressTfmSupportBuildWarnings>true</SuppressTfmSupportBuildWarnings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<Version>1.6.5</Version>
</PropertyGroup>
<!-- Support All Frameworks -->
<PropertyGroup Condition="$(TargetFramework.StartsWith(`net2`)) OR $(TargetFramework.StartsWith(`net3`)) OR $(TargetFramework.StartsWith(`net4`))">
<RuntimeIdentifiers>win-x86;win-x64</RuntimeIdentifiers>
</PropertyGroup>
<PropertyGroup Condition="$(TargetFramework.StartsWith(`netcoreapp`)) OR $(TargetFramework.StartsWith(`net5`))">
<RuntimeIdentifiers>win-x86;win-x64;win-arm64;linux-x64;linux-arm64;osx-x64</RuntimeIdentifiers>
</PropertyGroup>
<PropertyGroup Condition="$(TargetFramework.StartsWith(`net6`)) OR $(TargetFramework.StartsWith(`net7`)) OR $(TargetFramework.StartsWith(`net8`)) OR $(TargetFramework.StartsWith(`net9`))">
<RuntimeIdentifiers>win-x86;win-x64;win-arm64;linux-x64;linux-arm64;osx-x64;osx-arm64</RuntimeIdentifiers>
</PropertyGroup>
<PropertyGroup Condition="$(RuntimeIdentifier.StartsWith(`osx-arm`))">
<TargetFrameworks>net6.0;net7.0;net8.0;net9.0</TargetFrameworks>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\SabreTools.RedumpLib\SabreTools.RedumpLib.csproj" />
</ItemGroup>
</Project>

BuilderTests.cs

@@ -0,0 +1,191 @@
using System;
using System.IO;
using SabreTools.RedumpLib.Data;
using Xunit;
namespace SabreTools.RedumpLib.Test
{
public class BuilderTests
{
[Theory]
[InlineData("success_complete.json", false)]
[InlineData("success_invalid.json", false)] // Fully invalid returns a default object
[InlineData("success_partial.json", false)]
[InlineData("fail_invalid.json", true)]
public void CreateFromFileTest(string filename, bool expectNull)
{
// Get the full path to the test file
string path = Path.Combine(Environment.CurrentDirectory, "TestData", filename);
// Try to create the submission info from file
var si = Builder.CreateFromFile(path);
// Check for an expected result
Assert.Equal(expectNull, si == null);
}
[Fact]
public void EnsureAllSections_Null_Filled()
{
SubmissionInfo? si = null;
var actual = Builder.EnsureAllSections(si);
Assert.NotNull(actual);
Assert.NotNull(actual.CommonDiscInfo);
Assert.NotNull(actual.CommonDiscInfo.CommentsSpecialFields);
Assert.NotNull(actual.CommonDiscInfo.ContentsSpecialFields);
Assert.NotNull(actual.VersionAndEditions);
Assert.NotNull(actual.EDC);
Assert.NotNull(actual.ParentCloneRelationship);
Assert.NotNull(actual.Extras);
Assert.NotNull(actual.CopyProtection);
Assert.NotNull(actual.DumpersAndStatus);
Assert.NotNull(actual.TracksAndWriteOffsets);
Assert.NotNull(actual.SizeAndChecksums);
Assert.NotNull(actual.DumpingInfo);
Assert.NotNull(actual.Artifacts);
}
[Fact]
public void EnsureAllSections_Empty_Filled()
{
SubmissionInfo? si = new SubmissionInfo
{
CommonDiscInfo = null,
VersionAndEditions = null,
EDC = null,
ParentCloneRelationship = null,
Extras = null,
CopyProtection = null,
DumpersAndStatus = null,
TracksAndWriteOffsets = null,
SizeAndChecksums = null,
DumpingInfo = null,
Artifacts = null,
};
var actual = Builder.EnsureAllSections(si);
Assert.NotNull(actual);
Assert.NotNull(actual.CommonDiscInfo);
Assert.NotNull(actual.CommonDiscInfo.CommentsSpecialFields);
Assert.NotNull(actual.CommonDiscInfo.ContentsSpecialFields);
Assert.NotNull(actual.VersionAndEditions);
Assert.NotNull(actual.EDC);
Assert.NotNull(actual.ParentCloneRelationship);
Assert.NotNull(actual.Extras);
Assert.NotNull(actual.CopyProtection);
Assert.NotNull(actual.DumpersAndStatus);
Assert.NotNull(actual.TracksAndWriteOffsets);
Assert.NotNull(actual.SizeAndChecksums);
Assert.NotNull(actual.DumpingInfo);
Assert.NotNull(actual.Artifacts);
}
[Fact]
public void EnsureAllSections_Filled_Filled()
{
SubmissionInfo? si = new SubmissionInfo
{
CommonDiscInfo = new CommonDiscInfoSection
{
CommentsSpecialFields = [],
ContentsSpecialFields = [],
},
VersionAndEditions = new VersionAndEditionsSection(),
EDC = new EDCSection(),
ParentCloneRelationship = new ParentCloneRelationshipSection(),
Extras = new ExtrasSection(),
CopyProtection = new CopyProtectionSection(),
DumpersAndStatus = new DumpersAndStatusSection(),
TracksAndWriteOffsets = new TracksAndWriteOffsetsSection(),
SizeAndChecksums = new SizeAndChecksumsSection(),
DumpingInfo = new DumpingInfoSection(),
Artifacts = [],
};
var actual = Builder.EnsureAllSections(si);
Assert.NotNull(actual);
Assert.NotNull(actual.CommonDiscInfo);
Assert.NotNull(actual.CommonDiscInfo.CommentsSpecialFields);
Assert.NotNull(actual.CommonDiscInfo.ContentsSpecialFields);
Assert.NotNull(actual.VersionAndEditions);
Assert.NotNull(actual.EDC);
Assert.NotNull(actual.ParentCloneRelationship);
Assert.NotNull(actual.Extras);
Assert.NotNull(actual.CopyProtection);
Assert.NotNull(actual.DumpersAndStatus);
Assert.NotNull(actual.TracksAndWriteOffsets);
Assert.NotNull(actual.SizeAndChecksums);
Assert.NotNull(actual.DumpingInfo);
Assert.NotNull(actual.Artifacts);
}
[Fact]
public void InjectSubmissionInformation_BothNull_Null()
{
SubmissionInfo? si = null;
SubmissionInfo? seed = null;
var actual = Builder.InjectSubmissionInformation(si, seed);
Assert.Null(actual);
}
[Fact]
public void InjectSubmissionInformation_ValidInputNullSeed_Valid()
{
SubmissionInfo? si = new SubmissionInfo();
SubmissionInfo? seed = null;
var actual = Builder.InjectSubmissionInformation(si, seed);
Assert.NotNull(actual);
}
[Fact]
public void InjectSubmissionInformation_BothValid_Valid()
{
SubmissionInfo? si = new SubmissionInfo();
SubmissionInfo? seed = new SubmissionInfo();
var actual = Builder.InjectSubmissionInformation(si, seed);
Assert.NotNull(actual);
}
[Fact]
public void ReplaceHtmlWithSiteCodes_EmptyString_Empty()
{
string original = string.Empty;
string actual = Builder.ReplaceHtmlWithSiteCodes(original);
Assert.Empty(actual);
}
[Fact]
public void ReplaceHtmlWithSiteCodes_NoReplace_Identical()
{
string original = "<p>Nothing here will be replaced</p>";
string actual = Builder.ReplaceHtmlWithSiteCodes(original);
Assert.Equal(original, actual);
}
[Fact]
public void ReplaceHtmlWithSiteCodes_StandardCode_Replaced()
{
string original = "<b>ISBN</b>: 000-0-00-000000-0";
string expected = "[T:ISBN] 000-0-00-000000-0";
string actual = Builder.ReplaceHtmlWithSiteCodes(original);
Assert.Equal(expected, actual);
}
[Fact]
public void ReplaceHtmlWithSiteCodes_OutdatedCode_Replaced()
{
string original = "XMID: AB12345C";
string expected = "<b>XMID</b>: AB12345C";
string actual = Builder.ReplaceHtmlWithSiteCodes(original);
Assert.Equal(expected, actual);
}
}
}

File diff suppressed because it is too large

DownloaderTests.cs

@@ -0,0 +1,8 @@
namespace SabreTools.RedumpLib.Test
{
public class DownloaderTests
{
// Tests here will require installing and using the Moq library
// to mock the RedumpClient type.
}
}
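As a sketch of the Moq pattern the comment above refers to, assuming Moq is added to the test project: the IRedumpClient interface and its DownloadStringAsync method below are hypothetical stand-ins, since the real RedumpClient surface is not part of this diff.

using System.Threading.Tasks;
using Moq;
using Xunit;

namespace SabreTools.RedumpLib.Test
{
    // Hypothetical abstraction standing in for RedumpClient; the real type's
    // members are not shown in this diff.
    public interface IRedumpClient
    {
        Task<string?> DownloadStringAsync(string url);
    }

    public class DownloaderMockSketch
    {
        [Fact]
        public async Task MockedClient_ReturnsCannedPage()
        {
            // Arrange: return a canned page body for any URL.
            var mock = new Mock<IRedumpClient>();
            mock.Setup(c => c.DownloadStringAsync(It.IsAny<string>()))
                .ReturnsAsync("<html>canned page</html>");

            // Act: code under test would receive mock.Object instead of a live client.
            string? page = await mock.Object.DownloadStringAsync("http://redump.org/disc/1/");

            // Assert: canned content came back and the call happened exactly once.
            Assert.Equal("<html>canned page</html>", page);
            mock.Verify(c => c.DownloadStringAsync(It.IsAny<string>()), Times.Once());
        }
    }
}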

FormatterTests.cs

@@ -0,0 +1,877 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using SabreTools.RedumpLib.Data;
using Xunit;
namespace SabreTools.RedumpLib.Test
{
public class FormatterTests
{
#region ProcessSpecialFields
// TODO: Write tests for ProcessSpecialFields
#endregion
#region CommonDiscInfoSection
// TODO: Write tests for FormatOutputData(CommonDiscInfoSection)
[Fact]
public void FormatOutputData_CDINullSACNullTAWONull_Minimal()
{
string expected = "Common Disc Info:\n\tRegion: SPACE! (CHANGE THIS)\n\tLanguages: ADD LANGUAGES HERE (ONLY IF YOU TESTED)\n\n\tRingcode Information:\n\n\n";
var builder = new StringBuilder();
CommonDiscInfoSection? section = null;
SizeAndChecksumsSection? sac = null;
TracksAndWriteOffsetsSection? tawo = null;
int? fullyMatchedID = null;
List<int>? partiallyMatchedIDs = null;
Formatter.FormatOutputData(builder,
section,
sac,
tawo,
fullyMatchedID,
partiallyMatchedIDs);
string actual = builder.ToString();
Assert.Equal(expected, actual);
}
#endregion
#region VersionAndEditionsSection
[Fact]
public void FormatOutputData_VAENull_Minimal()
{
string expected = "Version and Editions:\n";
var builder = new StringBuilder();
VersionAndEditionsSection? section = null;
Formatter.FormatOutputData(builder, section);
string actual = builder.ToString();
Assert.Equal(expected, actual);
}
[Fact]
public void FormatOutputData_VAE_Formatted()
{
string expected = "Version and Editions:\n\tVersion: XXXXXX\n\tEdition/Release: XXXXXX\n";
var builder = new StringBuilder();
VersionAndEditionsSection? section = new VersionAndEditionsSection
{
Version = "XXXXXX",
OtherEditions = "XXXXXX",
};
Formatter.FormatOutputData(builder, section);
string actual = builder.ToString();
Assert.Equal(expected, actual);
}
#endregion
#region EDCSection
[Fact]
public void FormatOutputData_EDCNull_Minimal()
{
string expected = "EDC:\n";
var builder = new StringBuilder();
EDCSection? section = null;
RedumpSystem? system = RedumpSystem.SonyPlayStation;
Formatter.FormatOutputData(builder, section, system);
string actual = builder.ToString();
Assert.Equal(expected, actual);
}
[Fact]
public void FormatOutputData_EDCInvalidSystem_Empty()
{
string expected = string.Empty;
var builder = new StringBuilder();
EDCSection? section = null;
RedumpSystem? system = RedumpSystem.IBMPCcompatible;
Formatter.FormatOutputData(builder, section, system);
string actual = builder.ToString();
Assert.Equal(expected, actual);
}
[Fact]
public void FormatOutputData_EDC_Formatted()
{
string expected = "EDC:\n\tEDC: Yes\n";
var builder = new StringBuilder();
EDCSection? section = new EDCSection { EDC = YesNo.Yes };
RedumpSystem? system = RedumpSystem.SonyPlayStation;
Formatter.FormatOutputData(builder, section, system);
string actual = builder.ToString();
Assert.Equal(expected, actual);
}
#endregion
#region ExtrasSection
[Fact]
public void FormatOutputData_ExtrasNull_Empty()
{
string expected = string.Empty;
var builder = new StringBuilder();
ExtrasSection? section = null;
Formatter.FormatOutputData(builder, section);
string actual = builder.ToString();
Assert.Equal(expected, actual);
}
[Fact]
public void FormatOutputData_ExtrasInvalid_Empty()
{
string expected = string.Empty;
var builder = new StringBuilder();
ExtrasSection? section = new ExtrasSection
{
PVD = null,
PIC = null,
BCA = null,
SecuritySectorRanges = null,
};
Formatter.FormatOutputData(builder, section);
string actual = builder.ToString();
Assert.Equal(expected, actual);
}
[Fact]
public void FormatOutputData_Extras_Formatted()
{
string expected = "Extras:\n\tPrimary Volume Descriptor (PVD): XXXXXX\n\tDisc Key: XXXXXX\n\tDisc ID: XXXXXX\n\tPermanent Information & Control (PIC): XXXXXX\n\tHeader: XXXXXX\n\tBCA: XXXXXX\n\tSecurity Sector Ranges: XXXXXX\n";
var builder = new StringBuilder();
ExtrasSection? section = new ExtrasSection
{
PVD = "XXXXXX",
DiscKey = "XXXXXX",
DiscID = "XXXXXX",
PIC = "XXXXXX",
Header = "XXXXXX",
BCA = "XXXXXX",
SecuritySectorRanges = "XXXXXX",
};
Formatter.FormatOutputData(builder, section);
string actual = builder.ToString();
Assert.Equal(expected, actual);
}
#endregion
#region CopyProtectionSection
[Fact]
public void FormatOutputData_COPNull_Empty()
{
string expected = string.Empty;
var builder = new StringBuilder();
CopyProtectionSection? section = null;
RedumpSystem? system = RedumpSystem.IBMPCcompatible;
Formatter.FormatOutputData(builder, section, system);
string actual = builder.ToString();
Assert.Equal(expected, actual);
}
[Fact]
public void FormatOutputData_COPInvalid_Empty()
{
string expected = string.Empty;
var builder = new StringBuilder();
CopyProtectionSection? section = new CopyProtectionSection
{
Protection = null,
AntiModchip = null,
LibCrypt = null,
LibCryptData = null,
SecuROMData = null,
};
RedumpSystem? system = RedumpSystem.IBMPCcompatible;
Formatter.FormatOutputData(builder, section, system);
string actual = builder.ToString();
Assert.Equal(expected, actual);
}
[Fact]
public void FormatOutputData_COP_Formatted()
{
string expected = "Copy Protection:\n\tCopy Protection: XXXXXX\n\tSubIntention Data (SecuROM/LibCrypt): XXXXXX\n";
var builder = new StringBuilder();
CopyProtectionSection? section = new CopyProtectionSection
{
AntiModchip = YesNo.Yes,
LibCrypt = YesNo.Yes,
LibCryptData = "XXXXXX",
Protection = "XXXXXX",
SecuROMData = "XXXXXX",
};
RedumpSystem? system = RedumpSystem.IBMPCcompatible;
Formatter.FormatOutputData(builder, section, system);
string actual = builder.ToString();
Assert.Equal(expected, actual);
}
[Fact]
public void FormatOutputData_COPPSX_Formatted()
{
string expected = "Copy Protection:\n\tAnti-modchip: Yes\n\tLibCrypt: Yes\n\tSubIntention Data (SecuROM/LibCrypt): XXXXXX\n\tCopy Protection: XXXXXX\n\tSubIntention Data (SecuROM/LibCrypt): XXXXXX\n";
var builder = new StringBuilder();
CopyProtectionSection? section = new CopyProtectionSection
{
AntiModchip = YesNo.Yes,
LibCrypt = YesNo.Yes,
LibCryptData = "XXXXXX",
Protection = "XXXXXX",
SecuROMData = "XXXXXX",
};
RedumpSystem? system = RedumpSystem.SonyPlayStation;
Formatter.FormatOutputData(builder, section, system);
string actual = builder.ToString();
Assert.Equal(expected, actual);
}
#endregion
#region TracksAndWriteOffsetsSection
[Fact]
public void FormatOutputData_TAWOInvalid_Minimal()
{
string expected = "Tracks and Write Offsets:\n\tDAT:\n\n\n\n\n";
var builder = new StringBuilder();
TracksAndWriteOffsetsSection? section = new TracksAndWriteOffsetsSection();
Formatter.FormatOutputData(builder, section);
string actual = builder.ToString();
Assert.Equal(expected, actual);
}
[Fact]
public void FormatOutputData_TAWO_Formatted()
{
string expected = "Tracks and Write Offsets:\n\tDAT:\n\nXXXXXX\n\n\n\tCuesheet: XXXXXX\n\tWrite Offset: XXXXXX\n";
var builder = new StringBuilder();
TracksAndWriteOffsetsSection? section = new TracksAndWriteOffsetsSection
{
ClrMameProData = "XXXXXX",
Cuesheet = "XXXXXX",
OtherWriteOffsets = "XXXXXX",
};
Formatter.FormatOutputData(builder, section);
string actual = builder.ToString();
Assert.Equal(expected, actual);
}
#endregion
#region SizeAndChecksumsSection
// TODO: Write tests for FormatOutputData(SizeAndChecksumsSection)
#endregion
#region DumpingInfoSection
[Fact]
public void FormatOutputData_DINull_Minimal()
{
string expected = "Dumping Info:\n";
var builder = new StringBuilder();
DumpingInfoSection? section = null;
Formatter.FormatOutputData(builder, section);
string actual = builder.ToString();
Assert.Equal(expected, actual);
}
[Fact]
public void FormatOutputData_DI_Formatted()
{
string expected = "Dumping Info:\n\tFrontend Version: XXXXXX\n\tDumping Program: XXXXXX\n\tDate: XXXXXX\n\tParameters: XXXXXX\n\tManufacturer: XXXXXX\n\tModel: XXXXXX\n\tFirmware: XXXXXX\n\tReported Disc Type: XXXXXX\n\tC2 Error Count: XXXXXX\n";
var builder = new StringBuilder();
DumpingInfoSection? section = new DumpingInfoSection
{
FrontendVersion = "XXXXXX",
DumpingProgram = "XXXXXX",
DumpingDate = "XXXXXX",
DumpingParameters = "XXXXXX",
Manufacturer = "XXXXXX",
Model = "XXXXXX",
Firmware = "XXXXXX",
ReportedDiscType = "XXXXXX",
C2ErrorsCount = "XXXXXX",
};
Formatter.FormatOutputData(builder, section);
string actual = builder.ToString();
Assert.Equal(expected, actual);
}
#endregion
#region FormatSiteTag
[Fact]
public void FormatSiteTag_NoValue_Empty()
{
SiteCode code = SiteCode.AlternativeTitle;
string value = string.Empty;
string actual = Formatter.FormatSiteTag(code, value);
Assert.Empty(actual);
}
[Fact]
public void FormatSiteTag_Standard_Formatted()
{
string expected = "[T:ALT] XXXXXX";
SiteCode code = SiteCode.AlternativeTitle;
string value = "XXXXXX";
string actual = Formatter.FormatSiteTag(code, value);
Assert.Equal(expected, actual);
}
[Fact]
public void FormatSiteTag_BooleanTrue_Formatted()
{
string expected = "[T:VCD]";
SiteCode code = SiteCode.VCD;
string value = "True";
string actual = Formatter.FormatSiteTag(code, value);
Assert.Equal(expected, actual);
}
[Fact]
public void FormatSiteTag_BooleanFalse_Empty()
{
SiteCode code = SiteCode.VCD;
string value = "XXXXXX";
string actual = Formatter.FormatSiteTag(code, value);
Assert.Empty(actual);
}
[Fact]
public void FormatSiteTag_Multiline_Formatted()
{
string expected = "[T:X]\nXXXXXX\n";
SiteCode code = SiteCode.Extras;
string value = "XXXXXX";
string actual = Formatter.FormatSiteTag(code, value);
Assert.Equal(expected, actual);
}
#endregion
#region GetFixedMediaType
[Fact]
public void GetFixedMediaType_NullType_Null()
{
MediaType? mediaType = null;
string? picIdentifier = null;
long? size = null;
long? layerbreak = null;
long? layerbreak2 = null;
long? layerbreak3 = null;
string? actual = Formatter.GetFixedMediaType(mediaType,
picIdentifier,
size,
layerbreak,
layerbreak2,
layerbreak3);
Assert.Null(actual);
}
[Fact]
public void GetFixedMediaType_UnformattedType_Formatted()
{
string? expected = "CD-ROM";
MediaType? mediaType = MediaType.CDROM;
string? picIdentifier = null;
long? size = null;
long? layerbreak = null;
long? layerbreak2 = null;
long? layerbreak3 = null;
string? actual = Formatter.GetFixedMediaType(mediaType,
picIdentifier,
size,
layerbreak,
layerbreak2,
layerbreak3);
Assert.Equal(expected, actual);
}
[Fact]
public void GetFixedMediaType_DVD9_Formatted()
{
string? expected = "DVD-ROM-9";
MediaType? mediaType = MediaType.DVD;
string? picIdentifier = null;
long? size = null;
long? layerbreak = 12345;
long? layerbreak2 = null;
long? layerbreak3 = null;
string? actual = Formatter.GetFixedMediaType(mediaType,
picIdentifier,
size,
layerbreak,
layerbreak2,
layerbreak3);
Assert.Equal(expected, actual);
}
[Fact]
public void GetFixedMediaType_DVD5_Formatted()
{
string? expected = "DVD-ROM-5";
MediaType? mediaType = MediaType.DVD;
string? picIdentifier = null;
long? size = null;
long? layerbreak = null;
long? layerbreak2 = null;
long? layerbreak3 = null;
string? actual = Formatter.GetFixedMediaType(mediaType,
picIdentifier,
size,
layerbreak,
layerbreak2,
layerbreak3);
Assert.Equal(expected, actual);
}
[Fact]
public void GetFixedMediaType_BD128_Formatted()
{
string? expected = "BD-ROM-128";
MediaType? mediaType = MediaType.BluRay;
string? picIdentifier = null;
long? size = null;
long? layerbreak = null;
long? layerbreak2 = null;
long? layerbreak3 = 12345;
string? actual = Formatter.GetFixedMediaType(mediaType,
picIdentifier,
size,
layerbreak,
layerbreak2,
layerbreak3);
Assert.Equal(expected, actual);
}
[Fact]
public void GetFixedMediaType_BD100_Formatted()
{
string? expected = "BD-ROM-100";
MediaType? mediaType = MediaType.BluRay;
string? picIdentifier = null;
long? size = null;
long? layerbreak = null;
long? layerbreak2 = 12345;
long? layerbreak3 = null;
string? actual = Formatter.GetFixedMediaType(mediaType,
picIdentifier,
size,
layerbreak,
layerbreak2,
layerbreak3);
Assert.Equal(expected, actual);
}
[Fact]
public void GetFixedMediaType_BD66PIC_Formatted()
{
string? expected = "BD-ROM-66";
MediaType? mediaType = MediaType.BluRay;
string? picIdentifier = Models.PIC.Constants.DiscTypeIdentifierROMUltra;
long? size = null;
long? layerbreak = 12345;
long? layerbreak2 = null;
long? layerbreak3 = null;
string? actual = Formatter.GetFixedMediaType(mediaType,
picIdentifier,
size,
layerbreak,
layerbreak2,
layerbreak3);
Assert.Equal(expected, actual);
}
[Fact]
public void GetFixedMediaType_BD66Size_Formatted()
{
string? expected = "BD-ROM-66";
MediaType? mediaType = MediaType.BluRay;
string? picIdentifier = null;
long? size = 53_687_063_713;
long? layerbreak = 12345;
long? layerbreak2 = null;
long? layerbreak3 = null;
string? actual = Formatter.GetFixedMediaType(mediaType,
picIdentifier,
size,
layerbreak,
layerbreak2,
layerbreak3);
Assert.Equal(expected, actual);
}
[Fact]
public void GetFixedMediaType_BD50_Formatted()
{
string? expected = "BD-ROM-50";
MediaType? mediaType = MediaType.BluRay;
string? picIdentifier = null;
long? size = null;
long? layerbreak = 12345;
long? layerbreak2 = null;
long? layerbreak3 = null;
string? actual = Formatter.GetFixedMediaType(mediaType,
picIdentifier,
size,
layerbreak,
layerbreak2,
layerbreak3);
Assert.Equal(expected, actual);
}
[Fact]
public void GetFixedMediaType_BD33PIC_Formatted()
{
string? expected = "BD-ROM-33";
MediaType? mediaType = MediaType.BluRay;
string? picIdentifier = Models.PIC.Constants.DiscTypeIdentifierROMUltra;
long? size = null;
long? layerbreak = null;
long? layerbreak2 = null;
long? layerbreak3 = null;
string? actual = Formatter.GetFixedMediaType(mediaType,
picIdentifier,
size,
layerbreak,
layerbreak2,
layerbreak3);
Assert.Equal(expected, actual);
}
[Fact]
public void GetFixedMediaType_BD33Size_Formatted()
{
string? expected = "BD-ROM-33";
MediaType? mediaType = MediaType.BluRay;
string? picIdentifier = null;
long? size = 26_843_531_857;
long? layerbreak = null;
long? layerbreak2 = null;
long? layerbreak3 = null;
string? actual = Formatter.GetFixedMediaType(mediaType,
picIdentifier,
size,
layerbreak,
layerbreak2,
layerbreak3);
Assert.Equal(expected, actual);
}
[Fact]
public void GetFixedMediaType_BD25_Formatted()
{
string? expected = "BD-ROM-25";
MediaType? mediaType = MediaType.BluRay;
string? picIdentifier = null;
long? size = null;
long? layerbreak = null;
long? layerbreak2 = null;
long? layerbreak3 = null;
string? actual = Formatter.GetFixedMediaType(mediaType,
picIdentifier,
size,
layerbreak,
layerbreak2,
layerbreak3);
Assert.Equal(expected, actual);
}
[Fact]
public void GetFixedMediaType_HDDVDDL_Formatted()
{
string? expected = "HD-DVD-ROM-DL";
MediaType? mediaType = MediaType.HDDVD;
string? picIdentifier = null;
long? size = null;
long? layerbreak = 12345;
long? layerbreak2 = null;
long? layerbreak3 = null;
string? actual = Formatter.GetFixedMediaType(mediaType,
picIdentifier,
size,
layerbreak,
layerbreak2,
layerbreak3);
Assert.Equal(expected, actual);
}
[Fact]
public void GetFixedMediaType_HDDVDSL_Formatted()
{
string? expected = "HD-DVD-ROM-SL";
MediaType? mediaType = MediaType.HDDVD;
string? picIdentifier = null;
long? size = null;
long? layerbreak = null;
long? layerbreak2 = null;
long? layerbreak3 = null;
string? actual = Formatter.GetFixedMediaType(mediaType,
picIdentifier,
size,
layerbreak,
layerbreak2,
layerbreak3);
Assert.Equal(expected, actual);
}
[Fact]
public void GetFixedMediaType_UMDDL_Formatted()
{
string? expected = "UMD-DL";
MediaType? mediaType = MediaType.UMD;
string? picIdentifier = null;
long? size = null;
long? layerbreak = 12345;
long? layerbreak2 = null;
long? layerbreak3 = null;
string? actual = Formatter.GetFixedMediaType(mediaType,
picIdentifier,
size,
layerbreak,
layerbreak2,
layerbreak3);
Assert.Equal(expected, actual);
}
[Fact]
public void GetFixedMediaType_UMDSL_Formatted()
{
string? expected = "UMD-SL";
MediaType? mediaType = MediaType.UMD;
string? picIdentifier = null;
long? size = null;
long? layerbreak = null;
long? layerbreak2 = null;
long? layerbreak3 = null;
string? actual = Formatter.GetFixedMediaType(mediaType,
picIdentifier,
size,
layerbreak,
layerbreak2,
layerbreak3);
Assert.Equal(expected, actual);
}
#endregion
#region OrderCommentTags
[Fact]
public void OrderCommentTags_Empty_Empty()
{
Dictionary<SiteCode, string> tags = [];
var actual = Formatter.OrderCommentTags(tags);
Assert.Empty(actual);
}
[Fact]
public void OrderCommentTags_NoMatch_Empty()
{
var tags = new Dictionary<SiteCode, string>
{
{ SiteCode.Applications, "XXXXXX" },
};
var actual = Formatter.OrderCommentTags(tags);
Assert.Empty(actual);
}
[Fact]
public void OrderCommentTags_All_Ordered()
{
Dictionary<SiteCode, string> tags = [];
foreach (SiteCode code in Enum.GetValues<SiteCode>())
{
tags[code] = "XXXXXX";
}
var actual = Formatter.OrderCommentTags(tags);
Assert.NotEmpty(actual);
var actualCodes = actual.Select(kvp => kvp.Key);
Assert.True(Formatter.OrderedCommentCodes.SequenceEqual(actualCodes));
}
#endregion
#region OrderContentTags
[Fact]
public void OrderContentTags_Empty_Empty()
{
Dictionary<SiteCode, string> tags = [];
var actual = Formatter.OrderContentTags(tags);
Assert.Empty(actual);
}
[Fact]
public void OrderContentTags_NoMatch_Empty()
{
var tags = new Dictionary<SiteCode, string>
{
{ SiteCode.AlternativeTitle, "XXXXXX" },
};
var actual = Formatter.OrderContentTags(tags);
Assert.Empty(actual);
}
[Fact]
public void OrderContentTags_All_Ordered()
{
Dictionary<SiteCode, string> tags = [];
foreach (SiteCode code in Enum.GetValues<SiteCode>())
{
tags[code] = "XXXXXX";
}
var actual = Formatter.OrderContentTags(tags);
Assert.NotEmpty(actual);
var actualCodes = actual.Select(kvp => kvp.Key);
Assert.True(Formatter.OrderedContentCodes.SequenceEqual(actualCodes));
}
#endregion
#region RemoveConsecutiveEmptyLines
[Fact]
public void RemoveConsecutiveEmptyLines_Linux_Removed()
{
string expected = "data\n\nbase";
string newlines = "data\n\n\n\n\n\n\n\n\n\nbase";
string actual = Formatter.RemoveConsecutiveEmptyLines(newlines);
Assert.Equal(expected, actual);
}
[Fact]
public void RemoveConsecutiveEmptyLines_Windows_Removed()
{
string expected = "data\r\n\r\nbase";
string newlines = "data\r\n\r\n\r\n\r\n\r\nbase";
string actual = Formatter.RemoveConsecutiveEmptyLines(newlines);
Assert.Equal(expected, actual);
}
#endregion
}
}

SabreTools.RedumpLib.Test.csproj

@@ -0,0 +1,47 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFrameworks>net6.0;net8.0;net9.0</TargetFrameworks>
<CheckEolTargetFramework>false</CheckEolTargetFramework>
<LangVersion>latest</LangVersion>
<Nullable>enable</Nullable>
<SuppressTfmSupportBuildWarnings>true</SuppressTfmSupportBuildWarnings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\SabreTools.RedumpLib\SabreTools.RedumpLib.csproj" />
</ItemGroup>
<ItemGroup>
<None Remove="TestData\*" />
</ItemGroup>
<ItemGroup>
<Content Include="TestData\*">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</Content>
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.CodeCoverage" Version="17.13.0" />
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.13.0" />
<PackageReference Include="xunit" Version="2.9.3" />
<PackageReference Include="xunit.abstractions" Version="2.0.3" />
<PackageReference Include="xunit.analyzers" Version="1.20.0" />
<PackageReference Include="xunit.assert" Version="2.9.3" />
<PackageReference Include="xunit.core" Version="2.9.3" />
<PackageReference Include="xunit.extensibility.core" Version="2.9.3" />
<PackageReference Include="xunit.extensibility.execution" Version="2.9.3" />
<PackageReference Include="xunit.runner.console" Version="2.9.3">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="xunit.runner.visualstudio" Version="3.0.2">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
</ItemGroup>
</Project>

SubmissionInfoTests.cs

@@ -0,0 +1,181 @@
using System;
using System.Collections.Generic;
using Newtonsoft.Json;
using SabreTools.RedumpLib.Data;
using Xunit;
namespace SabreTools.RedumpLib.Test
{
public class SubmissionInfoTests
{
[Fact]
public void EmptySerializationTest()
{
var submissionInfo = new SubmissionInfo();
string json = JsonConvert.SerializeObject(submissionInfo, Formatting.Indented);
Assert.NotNull(json);
}
[Fact]
public void PartialSerializationTest()
{
var submissionInfo = new SubmissionInfo()
{
CommonDiscInfo = new CommonDiscInfoSection(),
VersionAndEditions = new VersionAndEditionsSection(),
EDC = new EDCSection(),
ParentCloneRelationship = new ParentCloneRelationshipSection(),
Extras = new ExtrasSection(),
CopyProtection = new CopyProtectionSection(),
DumpersAndStatus = new DumpersAndStatusSection(),
TracksAndWriteOffsets = new TracksAndWriteOffsetsSection(),
SizeAndChecksums = new SizeAndChecksumsSection(),
};
string json = JsonConvert.SerializeObject(submissionInfo, Formatting.Indented);
Assert.NotNull(json);
}
[Fact]
public void FullSerializationTest()
{
var submissionInfo = new SubmissionInfo()
{
SchemaVersion = 1,
FullyMatchedID = 3,
PartiallyMatchedIDs = new List<int> { 0, 1, 2, 3 },
Added = DateTime.UtcNow,
LastModified = DateTime.UtcNow,
CommonDiscInfo = new CommonDiscInfoSection()
{
System = RedumpSystem.IBMPCcompatible,
Media = DiscType.CD,
Title = "Game Title",
ForeignTitleNonLatin = "Foreign Game Title",
DiscNumberLetter = "1",
DiscTitle = "Install Disc",
Category = DiscCategory.Games,
Region = Region.World,
Languages = new Language?[] { Language.English, Language.Spanish, Language.French },
LanguageSelection = new LanguageSelection?[] { LanguageSelection.BiosSettings },
Serial = "Disc Serial",
Layer0MasteringRing = "L0 Mastering Ring",
Layer0MasteringSID = "L0 Mastering SID",
Layer0ToolstampMasteringCode = "L0 Toolstamp",
Layer0MouldSID = "L0 Mould SID",
Layer0AdditionalMould = "L0 Additional Mould",
Layer1MasteringRing = "L1 Mastering Ring",
Layer1MasteringSID = "L1 Mastering SID",
Layer1ToolstampMasteringCode = "L1 Toolstamp",
Layer1MouldSID = "L1 Mould SID",
Layer1AdditionalMould = "L1 Additional Mould",
Layer2MasteringRing = "L2 Mastering Ring",
Layer2MasteringSID = "L2 Mastering SID",
Layer2ToolstampMasteringCode = "L2 Toolstamp",
Layer3MasteringRing = "L3 Mastering Ring",
Layer3MasteringSID = "L3 Mastering SID",
Layer3ToolstampMasteringCode = "L3 Toolstamp",
RingWriteOffset = "+12",
Barcode = "UPC Barcode",
EXEDateBuildDate = "19xx-xx-xx",
ErrorsCount = "0",
Comments = "Comment data line 1\r\nComment data line 2",
CommentsSpecialFields = new Dictionary<SiteCode, string>()
{
[SiteCode.ISBN] = "ISBN",
},
Contents = "Special contents 1\r\nSpecial contents 2",
ContentsSpecialFields = new Dictionary<SiteCode, string>()
{
[SiteCode.PlayableDemos] = "Game Demo 1",
},
},
VersionAndEditions = new VersionAndEditionsSection()
{
Version = "Original",
VersionDatfile = "Alt",
CommonEditions = new string[] { "Taikenban" },
OtherEditions = "Rerelease",
},
EDC = new EDCSection()
{
EDC = YesNo.Yes,
},
ParentCloneRelationship = new ParentCloneRelationshipSection()
{
ParentID = "12345",
RegionalParent = false,
},
Extras = new ExtrasSection()
{
PVD = "PVD",
DiscKey = "Disc key",
DiscID = "Disc ID",
PIC = "PIC",
Header = "Header",
BCA = "BCA",
SecuritySectorRanges = "SSv1 Ranges",
},
CopyProtection = new CopyProtectionSection()
{
AntiModchip = YesNo.Yes,
LibCrypt = YesNo.No,
LibCryptData = "LibCrypt data",
Protection = "List of protections",
SecuROMData = "SecuROM data",
},
DumpersAndStatus = new DumpersAndStatusSection()
{
Status = DumpStatus.TwoOrMoreGreen,
Dumpers = new string[] { "Dumper1", "Dumper2" },
OtherDumpers = "Dumper3",
},
TracksAndWriteOffsets = new TracksAndWriteOffsetsSection()
{
ClrMameProData = "Datfile",
Cuesheet = "Cuesheet",
CommonWriteOffsets = new int[] { 0, 12, -12 },
OtherWriteOffsets = "-2",
},
SizeAndChecksums = new SizeAndChecksumsSection()
{
Layerbreak = 0,
Layerbreak2 = 1,
Layerbreak3 = 2,
Size = 12345,
CRC32 = "CRC32",
MD5 = "MD5",
SHA1 = "SHA1",
},
DumpingInfo = new DumpingInfoSection()
{
DumpingProgram = "DiscImageCreator 20500101",
DumpingDate = DateTime.UtcNow.ToString("yyyy-MM-dd HH:mm:ss"),
DumpingParameters = "cd dvd bd sacd fd hdd",
Manufacturer = "ATAPI",
Model = "Optical Drive",
Firmware = "1.23",
ReportedDiscType = "CD-R",
},
Artifacts = new Dictionary<string, string>()
{
["Sample Artifact"] = "Sample Data",
},
};
string json = JsonConvert.SerializeObject(submissionInfo, Formatting.Indented);
Assert.NotNull(json);
}
}
}

TestData/fail_invalid.json

@@ -0,0 +1 @@
This isn't even JSON, I lied.

TestData/success_complete.json

@@ -0,0 +1,96 @@
{
"schema_version": 3,
"common_disc_info":
{
"d_system": "ajcd",
"d_media": "cd",
"d_title": "Test Title",
"d_title_foreign": "Foreign Title",
"d_number": "1",
"d_label": "Install",
"d_category": "Games",
"d_region": "U",
"d_languages":
[
"en",
"fr",
"es"
],
"d_languages_selection": [],
"d_serial": "Serial",
"d_ring_0_ma1": "Ringcode 0 Layer 0",
"d_ring_0_ma1_sid": "SID 0 Layer 0",
"d_ring_0_ts1": "Toolstamp 0 Layer 0",
"d_ring_0_mo1_sid": "Mould SID 0 Layer 0",
"d_ring_0_mo1": "Additional Mould 0 Layer 0",
"d_ring_0_ma2": "Ringcode 0 Layer 1",
"d_ring_0_ma2_sid": "SID 0 Layer 1",
"d_ring_0_ts2": "Toolstamp 0 Layer 1",
"d_ring_0_mo2_sid": "Mould SID 0 Layer 1",
"d_ring_0_mo2": "Additional Mould 0 Layer 1",
"d_ring_0_ma3": "Ringcode 0 Layer 2",
"d_ring_0_ma3_sid": "SID 0 Layer 2",
"d_ring_0_ts3": "Toolstamp 0 Layer 2",
"d_ring_0_ma4": "Ringcode 0 Layer 3",
"d_ring_0_ma4_sid": "SID 0 Layer 2",
"d_ring_0_ts4": "Toolstamp 0 Layer 2",
"d_ring_0_offsets": "-22",
"d_ring_0_0_value": "-21",
"d_barcode": "0 12345 67890 1",
"d_date": "1980-01-01",
"d_errors": "0",
"d_comments": "This is a comment\nwith a newline",
"d_contents": "These are contents, sorry"
},
"versions_and_editions":
{
"d_version": "1.0.0.0",
"d_version_datfile": "1.00",
"d_editions_text": "Demo"
},
"edc":
{
"d_edc": false
},
"parent_clone_relationship":
{
"d_parent_id": "12345",
"d_is_regional_parent": false
},
"extras":
{
"d_pvd": "Pretend\nthis\nis\na\nPVD",
"d_d1_key": "Disc key",
"d_d2_key": "Disc ID",
"d_pic_data": "Pretend\nthis\nis\na\nPIC",
"d_header": "Pretend\nthis\nis\na\nHeader",
"d_bca": "Pretend\nthis\nis\na\nBCA",
"d_ssranges": "Pretend\nthis\nis\na\nsecurity_range"
},
"copy_protection":
{
"d_protection_a": false,
"d_protection_1": false,
"d_libcrypt": "Definitely\nLibCrypt\nData",
"d_protection": "Super easy to find protection",
"d_securom": "Definitely\nSecuROM\nData"
},
"tracks_and_write_offsets":
{
"d_tracks": "Hash data",
"d_cue": "Real cuesheet",
"d_offset_text": "-22"
},
"size_and_checksums":
{
"d_layerbreak": 1,
"d_layerbreak_2": 2,
"d_layerbreak_3": 3,
"d_pic_identifier": "Pretend\nthis\nis\na\nPIC",
"d_size": 123456,
"d_crc32": "cbf43926",
"d_md5": "d41d8cd98f00b204e9800998ecf8427e",
"d_sha1": "da39a3ee5e6b4b0d3255bfef95601890afd80709"
}
}

TestData/success_invalid.json

@@ -0,0 +1,4 @@
{
"invalid_key": "invalid_value",
"invalid_x": 12345
}

TestData/success_partial.json

@@ -0,0 +1,7 @@
{
"schema_version": 3,
"common_disc_info":
{
"d_title": "Test Title"
}
}

ValidatorTests.cs

@@ -0,0 +1,304 @@
using SabreTools.RedumpLib.Data;
using Xunit;
namespace SabreTools.RedumpLib.Test
{
public class ValidatorTests
{
// Most tests here will require installing and using the Moq library
// to mock the RedumpClient type.
[Fact]
public void NormalizeDiscType_InvalidMedia_Untouched()
{
SubmissionInfo si = new SubmissionInfo
{
CommonDiscInfo = new CommonDiscInfoSection { Media = null }
};
Validator.NormalizeDiscType(si);
Assert.Null(si.CommonDiscInfo.Media);
}
[Fact]
public void NormalizeDiscType_InvalidSizeChecksums_Untouched()
{
DiscType expected = DiscType.CD;
SubmissionInfo si = new SubmissionInfo
{
CommonDiscInfo = new CommonDiscInfoSection { Media = DiscType.CD },
SizeAndChecksums = null,
};
Validator.NormalizeDiscType(si);
Assert.Equal(expected, si.CommonDiscInfo.Media);
}
[Fact]
public void NormalizeDiscType_UnformattedType_Fixed()
{
DiscType expected = DiscType.CD;
SubmissionInfo si = new SubmissionInfo
{
CommonDiscInfo = new CommonDiscInfoSection { Media = DiscType.CD },
SizeAndChecksums = new SizeAndChecksumsSection(),
};
Validator.NormalizeDiscType(si);
Assert.Equal(expected, si.CommonDiscInfo.Media);
}
[Theory]
[InlineData(DiscType.DVD5)]
[InlineData(DiscType.DVD9)]
public void NormalizeDiscType_DVD9_Fixed(DiscType type)
{
DiscType expected = DiscType.DVD9;
SubmissionInfo si = new SubmissionInfo
{
CommonDiscInfo = new CommonDiscInfoSection { Media = type },
SizeAndChecksums = new SizeAndChecksumsSection { Layerbreak = 12345 },
};
Validator.NormalizeDiscType(si);
Assert.Equal(expected, si.CommonDiscInfo.Media);
}
[Theory]
[InlineData(DiscType.DVD5)]
[InlineData(DiscType.DVD9)]
public void NormalizeDiscType_DVD5_Fixed(DiscType type)
{
DiscType expected = DiscType.DVD5;
SubmissionInfo si = new SubmissionInfo
{
CommonDiscInfo = new CommonDiscInfoSection { Media = type },
SizeAndChecksums = new SizeAndChecksumsSection(),
};
Validator.NormalizeDiscType(si);
Assert.Equal(expected, si.CommonDiscInfo.Media);
}
[Theory]
[InlineData(DiscType.BD25)]
[InlineData(DiscType.BD33)]
[InlineData(DiscType.BD50)]
[InlineData(DiscType.BD66)]
[InlineData(DiscType.BD100)]
[InlineData(DiscType.BD128)]
public void NormalizeDiscType_BD128_Fixed(DiscType type)
{
DiscType expected = DiscType.BD128;
SubmissionInfo si = new SubmissionInfo
{
CommonDiscInfo = new CommonDiscInfoSection { Media = type },
SizeAndChecksums = new SizeAndChecksumsSection { Layerbreak3 = 12345 },
};
Validator.NormalizeDiscType(si);
Assert.Equal(expected, si.CommonDiscInfo.Media);
}
[Theory]
[InlineData(DiscType.BD25)]
[InlineData(DiscType.BD33)]
[InlineData(DiscType.BD50)]
[InlineData(DiscType.BD66)]
[InlineData(DiscType.BD100)]
[InlineData(DiscType.BD128)]
public void NormalizeDiscType_BD100_Fixed(DiscType type)
{
DiscType expected = DiscType.BD100;
SubmissionInfo si = new SubmissionInfo
{
CommonDiscInfo = new CommonDiscInfoSection { Media = type },
SizeAndChecksums = new SizeAndChecksumsSection { Layerbreak2 = 12345 },
};
Validator.NormalizeDiscType(si);
Assert.Equal(expected, si.CommonDiscInfo.Media);
}
[Theory]
[InlineData(DiscType.BD25)]
[InlineData(DiscType.BD33)]
[InlineData(DiscType.BD50)]
[InlineData(DiscType.BD66)]
[InlineData(DiscType.BD100)]
[InlineData(DiscType.BD128)]
public void NormalizeDiscType_BD66PIC_Fixed(DiscType type)
{
DiscType expected = DiscType.BD66;
SubmissionInfo si = new SubmissionInfo
{
CommonDiscInfo = new CommonDiscInfoSection { Media = type },
SizeAndChecksums = new SizeAndChecksumsSection
{
Layerbreak = 12345,
PICIdentifier = Models.PIC.Constants.DiscTypeIdentifierROMUltra,
},
};
Validator.NormalizeDiscType(si);
Assert.Equal(expected, si.CommonDiscInfo.Media);
}
[Theory]
[InlineData(DiscType.BD25)]
[InlineData(DiscType.BD33)]
[InlineData(DiscType.BD50)]
[InlineData(DiscType.BD66)]
[InlineData(DiscType.BD100)]
[InlineData(DiscType.BD128)]
public void NormalizeDiscType_BD66Size_Fixed(DiscType type)
{
DiscType expected = DiscType.BD66;
SubmissionInfo si = new SubmissionInfo
{
CommonDiscInfo = new CommonDiscInfoSection { Media = type },
SizeAndChecksums = new SizeAndChecksumsSection
{
Layerbreak = 12345,
Size = 50_050_629_633,
},
};
Validator.NormalizeDiscType(si);
Assert.Equal(expected, si.CommonDiscInfo.Media);
}
[Theory]
[InlineData(DiscType.BD25)]
[InlineData(DiscType.BD33)]
[InlineData(DiscType.BD50)]
[InlineData(DiscType.BD66)]
[InlineData(DiscType.BD100)]
[InlineData(DiscType.BD128)]
public void NormalizeDiscType_BD50_Fixed(DiscType type)
{
DiscType expected = DiscType.BD50;
SubmissionInfo si = new SubmissionInfo
{
CommonDiscInfo = new CommonDiscInfoSection { Media = type },
SizeAndChecksums = new SizeAndChecksumsSection { Layerbreak = 12345 },
};
Validator.NormalizeDiscType(si);
Assert.Equal(expected, si.CommonDiscInfo.Media);
}
[Theory]
[InlineData(DiscType.BD25)]
[InlineData(DiscType.BD33)]
[InlineData(DiscType.BD50)]
[InlineData(DiscType.BD66)]
[InlineData(DiscType.BD100)]
[InlineData(DiscType.BD128)]
public void NormalizeDiscType_BD33PIC_Fixed(DiscType type)
{
DiscType expected = DiscType.BD33;
SubmissionInfo si = new SubmissionInfo
{
CommonDiscInfo = new CommonDiscInfoSection { Media = type },
SizeAndChecksums = new SizeAndChecksumsSection
{
PICIdentifier = Models.PIC.Constants.DiscTypeIdentifierROMUltra,
},
};
Validator.NormalizeDiscType(si);
Assert.Equal(expected, si.CommonDiscInfo.Media);
}
[Theory]
[InlineData(DiscType.BD25)]
[InlineData(DiscType.BD33)]
[InlineData(DiscType.BD50)]
[InlineData(DiscType.BD66)]
[InlineData(DiscType.BD100)]
[InlineData(DiscType.BD128)]
public void NormalizeDiscType_BD33Size_Fixed(DiscType type)
{
DiscType expected = DiscType.BD33;
SubmissionInfo si = new SubmissionInfo
{
CommonDiscInfo = new CommonDiscInfoSection { Media = type },
SizeAndChecksums = new SizeAndChecksumsSection
{
Size = 25_025_314_817,
},
};
Validator.NormalizeDiscType(si);
Assert.Equal(expected, si.CommonDiscInfo.Media);
}
[Theory]
[InlineData(DiscType.BD25)]
[InlineData(DiscType.BD33)]
[InlineData(DiscType.BD50)]
[InlineData(DiscType.BD66)]
[InlineData(DiscType.BD100)]
[InlineData(DiscType.BD128)]
public void NormalizeDiscType_BD25_Fixed(DiscType type)
{
DiscType expected = DiscType.BD25;
SubmissionInfo si = new SubmissionInfo
{
CommonDiscInfo = new CommonDiscInfoSection { Media = type },
SizeAndChecksums = new SizeAndChecksumsSection(),
};
Validator.NormalizeDiscType(si);
Assert.Equal(expected, si.CommonDiscInfo.Media);
}
[Theory]
[InlineData(DiscType.UMDSL)]
[InlineData(DiscType.UMDDL)]
public void NormalizeDiscType_UMDDL_Fixed(DiscType type)
{
DiscType expected = DiscType.UMDDL;
SubmissionInfo si = new SubmissionInfo
{
CommonDiscInfo = new CommonDiscInfoSection { Media = type },
SizeAndChecksums = new SizeAndChecksumsSection { Layerbreak = 12345 },
};
Validator.NormalizeDiscType(si);
Assert.Equal(expected, si.CommonDiscInfo.Media);
}
[Theory]
[InlineData(DiscType.UMDSL)]
[InlineData(DiscType.UMDDL)]
public void NormalizeDiscType_UMDSL_Fixed(DiscType type)
{
DiscType expected = DiscType.UMDSL;
SubmissionInfo si = new SubmissionInfo
{
CommonDiscInfo = new CommonDiscInfoSection { Media = type },
SizeAndChecksums = new SizeAndChecksumsSection(),
};
Validator.NormalizeDiscType(si);
Assert.Equal(expected, si.CommonDiscInfo.Media);
}
}
}
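
A minimal sketch of how the normalization exercised by these tests might be driven from calling code; the namespace layout (SabreTools.RedumpLib for Validator, SabreTools.RedumpLib.Data for the model types) is assumed from the rest of the diff rather than confirmed by it.

// Sketch only: promoting a disc type from layerbreak data, matching the BD50 theory above.
using SabreTools.RedumpLib;
using SabreTools.RedumpLib.Data;
public static class NormalizeSketch
{
    public static DiscType? Normalize()
    {
        var si = new SubmissionInfo
        {
            CommonDiscInfo = new CommonDiscInfoSection { Media = DiscType.BD25 },
            // A lone layerbreak with no PIC identifier or oversize hint resolves to BD50
            SizeAndChecksums = new SizeAndChecksumsSection { Layerbreak = 12345 },
        };
        Validator.NormalizeDiscType(si);
        return si.CommonDiscInfo.Media; // DiscType.BD50 per the test cases above
    }
}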

View File

@@ -1,37 +0,0 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<!-- Assembly Properties -->
<TargetFrameworks>net40;net452;net462;net472;net48;netcoreapp3.1;net5.0;net6.0;net7.0;net8.0</TargetFrameworks>
<RuntimeIdentifiers>win-x86;win-x64;win-arm64;linux-x64;linux-arm64;osx-x64;osx-arm64</RuntimeIdentifiers>
<LangVersion>latest</LangVersion>
<Nullable>enable</Nullable>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<Version>1.2.0</Version>
<!-- Package Properties -->
<Authors>Matt Nadareski</Authors>
<Description>Code to interact with redump.org</Description>
<Copyright>Copyright (c) Matt Nadareski 2020-2023</Copyright>
<PackageProjectUrl>https://github.com/SabreTools/</PackageProjectUrl>
<PackageReadmeFile>README.md</PackageReadmeFile>
<RepositoryUrl>https://github.com/SabreTools/SabreTools.RedumpLib</RepositoryUrl>
<RepositoryType>git</RepositoryType>
<PackageTags>web client redump</PackageTags>
<PackageLicenseExpression>MIT</PackageLicenseExpression>
</PropertyGroup>
<ItemGroup>
<None Include="README.md" Pack="true" PackagePath=""/>
</ItemGroup>
<!-- Support for old .NET versions -->
<ItemGroup Condition="$(TargetFramework.StartsWith(`net4`))">
<PackageReference Include="System.ValueTuple" Version="4.5.0" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Newtonsoft.Json" Version="13.0.3" />
</ItemGroup>
</Project>

View File

@@ -3,7 +3,11 @@ Microsoft Visual Studio Solution File, Format Version 12.00
# Visual Studio Version 17
VisualStudioVersion = 17.0.31903.59
MinimumVisualStudioVersion = 10.0.40219.1
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "SabreTools.RedumpLib", "SabreTools.RedumpLib.csproj", "{235D3A36-CA69-4348-9EC4-649B27ACFBB8}"
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "SabreTools.RedumpLib", "SabreTools.RedumpLib\SabreTools.RedumpLib.csproj", "{235D3A36-CA69-4348-9EC4-649B27ACFBB8}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "SabreTools.RedumpLib.Test", "SabreTools.RedumpLib.Test\SabreTools.RedumpLib.Test.csproj", "{63519DEA-0C3D-4F0E-95EB-E9B6E1D55378}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "RedumpTool", "RedumpTool\RedumpTool.csproj", "{556F7AA0-00D7-4D5B-96EB-D436FEFD9810}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
@@ -18,5 +22,13 @@ Global
{235D3A36-CA69-4348-9EC4-649B27ACFBB8}.Debug|Any CPU.Build.0 = Debug|Any CPU
{235D3A36-CA69-4348-9EC4-649B27ACFBB8}.Release|Any CPU.ActiveCfg = Release|Any CPU
{235D3A36-CA69-4348-9EC4-649B27ACFBB8}.Release|Any CPU.Build.0 = Release|Any CPU
{63519DEA-0C3D-4F0E-95EB-E9B6E1D55378}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{63519DEA-0C3D-4F0E-95EB-E9B6E1D55378}.Debug|Any CPU.Build.0 = Debug|Any CPU
{63519DEA-0C3D-4F0E-95EB-E9B6E1D55378}.Release|Any CPU.ActiveCfg = Release|Any CPU
{63519DEA-0C3D-4F0E-95EB-E9B6E1D55378}.Release|Any CPU.Build.0 = Release|Any CPU
{556F7AA0-00D7-4D5B-96EB-D436FEFD9810}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{556F7AA0-00D7-4D5B-96EB-D436FEFD9810}.Debug|Any CPU.Build.0 = Debug|Any CPU
{556F7AA0-00D7-4D5B-96EB-D436FEFD9810}.Release|Any CPU.ActiveCfg = Release|Any CPU
{556F7AA0-00D7-4D5B-96EB-D436FEFD9810}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
EndGlobal

View File

@@ -1,5 +1,4 @@
using System;
using System.Linq;
namespace SabreTools.RedumpLib.Attributes
{
@@ -23,26 +22,26 @@ namespace SabreTools.RedumpLib.Attributes
// If the value returns a null on ToString, just return null
string? valueStr = value?.ToString();
if (string.IsNullOrWhiteSpace(valueStr))
if (string.IsNullOrEmpty(valueStr))
return null;
// Get the member info array
var memberInfos = enumType?.GetMember(valueStr);
if (memberInfos == null)
return null;
// Get the enum value info from the array, if possible
var enumValueMemberInfo = memberInfos.FirstOrDefault(m => m.DeclaringType == enumType);
var enumValueMemberInfo = Array.Find(memberInfos, m => m.DeclaringType == enumType);
if (enumValueMemberInfo == null)
return null;
// Try to get the relevant attribute
var attributes = enumValueMemberInfo.GetCustomAttributes(typeof(HumanReadableAttribute), true);
if (attributes == null)
if (attributes == null || attributes.Length == 0)
return null;
// Return the first attribute, if possible
return attributes.FirstOrDefault() as HumanReadableAttribute;
return attributes[0] as HumanReadableAttribute;
}
}
}
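
Illustrative only: the Array.Find call is the allocation-free equivalent of the removed FirstOrDefault, so the attribute lookup behaves the same; SiteCode.DMIHash is used below purely as a sample enum member.

// Both calls return the first MemberInfo declared on SiteCode, or null if none match.
using System;
using System.Linq;
using System.Reflection;
using SabreTools.RedumpLib.Data;
MemberInfo[] memberInfos = typeof(SiteCode).GetMember(nameof(SiteCode.DMIHash));
MemberInfo? viaLinq = memberInfos.FirstOrDefault(m => m.DeclaringType == typeof(SiteCode)); // before
MemberInfo? viaFind = Array.Find(memberInfos, m => m.DeclaringType == typeof(SiteCode));    // after
Console.WriteLine(ReferenceEquals(viaLinq, viaFind)); // True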

View File

@@ -0,0 +1,783 @@
using System;
using System.Collections.Generic;
using System.IO;
#if NET40_OR_GREATER || NETCOREAPP
using System.Net;
#endif
using System.Text;
using System.Text.RegularExpressions;
using System.Threading.Tasks;
using System.Xml;
using Newtonsoft.Json;
using SabreTools.RedumpLib.Data;
using SabreTools.RedumpLib.Web;
namespace SabreTools.RedumpLib
{
public static class Builder
{
#region Creation
/// <summary>
/// Create a SubmissionInfo from a JSON file path
/// </summary>
/// <param name="path">Path to the SubmissionInfo JSON</param>
/// <returns>Filled SubmissionInfo on success, null on error</returns>
public static SubmissionInfo? CreateFromFile(string? path)
{
// If the path is invalid
if (string.IsNullOrEmpty(path))
return null;
// If the file doesn't exist
if (!File.Exists(path))
return null;
// Try to open and deserialize the file
try
{
byte[] data = File.ReadAllBytes(path);
string dataString = Encoding.UTF8.GetString(data);
return JsonConvert.DeserializeObject<SubmissionInfo>(dataString);
}
catch
{
// We don't care what the exception was
return null;
}
}
/// <summary>
/// Create a new SubmissionInfo object from a disc page
/// </summary>
/// <param name="discData">String containing the HTML disc data</param>
/// <returns>Filled SubmissionInfo object on success, null on error</returns>
/// <remarks>Not currently working</remarks>
private static SubmissionInfo? CreateFromID(string discData)
{
var info = new SubmissionInfo()
{
CommonDiscInfo = new CommonDiscInfoSection(),
VersionAndEditions = new VersionAndEditionsSection(),
};
// No disc data means we can't parse it
if (string.IsNullOrEmpty(discData))
return null;
try
{
// Load the current disc page into an XML document
var redumpPage = new XmlDocument() { PreserveWhitespace = true };
redumpPage.LoadXml(discData);
// If the current page isn't valid, we can't parse it
if (!redumpPage.HasChildNodes)
return null;
// Get the body node, if possible
var bodyNode = redumpPage["html"]?["body"];
if (bodyNode == null || !bodyNode.HasChildNodes)
return null;
// Loop through and get the main node, if possible
XmlNode? mainNode = null;
foreach (XmlNode? tempNode in bodyNode.ChildNodes)
{
// Invalid nodes are skipped
if (tempNode == null)
continue;
// We only care about div elements
if (!string.Equals(tempNode.Name, "div", StringComparison.OrdinalIgnoreCase))
continue;
// We only care if it has attributes
if (tempNode.Attributes == null)
continue;
// The main node has a class of "main"
if (string.Equals(tempNode.Attributes["class"]?.Value, "main", StringComparison.OrdinalIgnoreCase))
{
mainNode = tempNode;
break;
}
}
// If the main node is invalid, we can't do anything
if (mainNode == null || !mainNode.HasChildNodes)
return null;
// Try to find elements as we're going
foreach (XmlNode? childNode in mainNode.ChildNodes)
{
// Invalid nodes are skipped
if (childNode == null)
continue;
// The title is the only thing in h1 tags
if (string.Equals(childNode.Name, "h1", StringComparison.OrdinalIgnoreCase))
info.CommonDiscInfo.Title = childNode.InnerText;
// Most things are div elements but can be hard to parse out
else if (!string.Equals(childNode.Name, "div", StringComparison.OrdinalIgnoreCase))
continue;
// Only 2 of the internal divs have classes attached and one is not used here
if (childNode.Attributes != null && string.Equals(childNode.Attributes["class"]?.Value, "game",
StringComparison.OrdinalIgnoreCase))
{
// If we don't have children nodes, skip this one over
if (!childNode.HasChildNodes)
continue;
// The game node contains multiple other elements
foreach (XmlNode? gameNode in childNode.ChildNodes)
{
// Invalid nodes are skipped
if (gameNode == null)
continue;
// Table elements contain multiple other parts of information
if (string.Equals(gameNode.Name, "table", StringComparison.OrdinalIgnoreCase))
{
// All tables have some attribute we can use
if (gameNode.Attributes == null)
continue;
// The gameinfo node contains most of the major information
if (string.Equals(gameNode.Attributes["class"]?.Value, "gameinfo",
StringComparison.OrdinalIgnoreCase))
{
// If we don't have children nodes, skip this one over
if (!gameNode.HasChildNodes)
continue;
// Loop through each of the rows
foreach (XmlNode? gameInfoNode in gameNode.ChildNodes)
{
// Invalid nodes are skipped
if (gameInfoNode == null)
continue;
// If we run into anything not a row, ignore it
if (!string.Equals(gameInfoNode.Name, "tr", StringComparison.OrdinalIgnoreCase))
continue;
// If we don't have the required nodes, ignore it
if (gameInfoNode["th"] == null || gameInfoNode["td"] == null)
continue;
var gameInfoNodeHeader = gameInfoNode["th"];
var gameInfoNodeData = gameInfoNode["td"];
if (gameInfoNodeHeader == null || gameInfoNodeData == null)
{
// No-op for invalid data
}
else if (string.Equals(gameInfoNodeHeader.InnerText, "System", StringComparison.OrdinalIgnoreCase))
{
info.CommonDiscInfo.System = Extensions.ToRedumpSystem(gameInfoNodeData["a"]?.InnerText ?? string.Empty);
}
else if (string.Equals(gameInfoNodeHeader.InnerText, "Media", StringComparison.OrdinalIgnoreCase))
{
info.CommonDiscInfo.Media = Extensions.ToDiscType(gameInfoNodeData.InnerText);
}
else if (string.Equals(gameInfoNodeHeader.InnerText, "Category", StringComparison.OrdinalIgnoreCase))
{
info.CommonDiscInfo.Category = Extensions.ToDiscCategory(gameInfoNodeData.InnerText);
}
else if (string.Equals(gameInfoNodeHeader.InnerText, "Region", StringComparison.OrdinalIgnoreCase))
{
// TODO: COMPLETE
}
else if (string.Equals(gameInfoNodeHeader.InnerText, "Languages", StringComparison.OrdinalIgnoreCase))
{
// TODO: COMPLETE
}
else if (string.Equals(gameInfoNodeHeader.InnerText, "Edition", StringComparison.OrdinalIgnoreCase))
{
info.VersionAndEditions.OtherEditions = gameInfoNodeData.InnerText;
}
else if (string.Equals(gameInfoNodeHeader.InnerText, "Added", StringComparison.OrdinalIgnoreCase))
{
if (DateTime.TryParse(gameInfoNodeData.InnerText, out DateTime added))
info.Added = added;
}
else if (string.Equals(gameInfoNodeHeader.InnerText, "Last modified", StringComparison.OrdinalIgnoreCase))
{
if (DateTime.TryParse(gameInfoNodeData.InnerText, out DateTime lastModified))
info.LastModified = lastModified;
}
}
}
// The gamecomments node contains way more than it implies
if (string.Equals(gameNode.Attributes["class"]?.Value, "gamecomments", StringComparison.OrdinalIgnoreCase))
{
// TODO: COMPLETE
}
// TODO: COMPLETE
}
// The only other supported elements are divs
else if (!string.Equals(gameNode.Name, "div", StringComparison.OrdinalIgnoreCase))
{
continue;
}
// Check the div for dumper info
// TODO: COMPLETE
}
}
// Figure out what the div contains, if possible
// TODO: COMPLETE
}
}
catch
{
return null;
}
return info;
}
/// <summary>
/// Fill out an existing SubmissionInfo object based on a disc page
/// </summary>
/// <param name="rc">RedumpClient for making the connection</param>
/// <param name="info">Existing SubmissionInfo object to fill</param>
/// <param name="id">Redump disc ID to retrieve</param>
/// <param name="includeAllData">True to include all pullable information, false to do bare minimum</param>
public async static Task<bool> FillFromId(RedumpClient rc, SubmissionInfo info, int id, bool includeAllData)
{
// Ensure that required sections exist
info = EnsureAllSections(info);
var discData = await rc.DownloadSingleSiteID(id);
if (string.IsNullOrEmpty(discData))
return false;
// Title, Disc Number/Letter, Disc Title
var match = Constants.TitleRegex.Match(discData);
if (match.Success)
{
string? title = WebUtility.HtmlDecode(match.Groups[1].Value);
// If we have parentheses, the title is everything before the first one
int firstParenLocation = title?.IndexOf(" (") ?? -1;
if (title != null && firstParenLocation >= 0)
{
info.CommonDiscInfo!.Title = title.Substring(0, firstParenLocation);
var submatches = Constants.DiscNumberLetterRegex.Matches(title);
foreach (Match? submatch in submatches)
{
if (submatch == null)
continue;
var submatchValue = submatch.Groups[1].Value;
// Disc number or letter
if (submatchValue.StartsWith("Disc"))
info.CommonDiscInfo.DiscNumberLetter = submatchValue.Remove(0, "Disc ".Length);
// Issue number
else if (ulong.TryParse(submatchValue, out _))
info.CommonDiscInfo.Title += $" ({submatchValue})";
// Disc title
else
info.CommonDiscInfo.DiscTitle = submatchValue;
}
}
// Otherwise, leave the title as-is
else
{
info.CommonDiscInfo!.Title = title;
}
}
// Foreign Title
match = Constants.ForeignTitleRegex.Match(discData);
if (match.Success)
info.CommonDiscInfo!.ForeignTitleNonLatin = WebUtility.HtmlDecode(match.Groups[1].Value);
// Category
match = Constants.CategoryRegex.Match(discData);
if (match.Success)
info.CommonDiscInfo!.Category = Extensions.ToDiscCategory(match.Groups[1].Value);
else
info.CommonDiscInfo!.Category = DiscCategory.Games;
// Region
if (info.CommonDiscInfo.Region == null)
{
match = Constants.RegionRegex.Match(discData);
if (match.Success)
info.CommonDiscInfo.Region = Extensions.ToRegion(match.Groups[1].Value);
}
// Languages
var matches = Constants.LanguagesRegex.Matches(discData);
if (matches.Count > 0)
{
var tempLanguages = new List<Language?>();
foreach (Match? submatch in matches)
{
if (submatch == null)
continue;
var language = Extensions.ToLanguage(submatch.Groups[1].Value);
if (language != null)
tempLanguages.Add(language);
}
info.CommonDiscInfo.Languages = [.. tempLanguages];
}
// Serial
if (includeAllData)
{
// TODO: Re-enable if there's a way of verifying against a disc
//match = Constants.SerialRegex.Match(discData);
//if (match.Success)
// info.CommonDiscInfo.Serial = $"(VERIFY THIS) {WebUtility.HtmlDecode(match.Groups[1].Value)}";
}
// Error count
if (string.IsNullOrEmpty(info.CommonDiscInfo.ErrorsCount))
{
match = Constants.ErrorCountRegex.Match(discData);
if (match.Success)
info.CommonDiscInfo.ErrorsCount = match.Groups[1].Value;
}
// Version
if (info.VersionAndEditions!.Version == null)
{
match = Constants.VersionRegex.Match(discData);
if (match.Success)
info.VersionAndEditions.Version = $"(VERIFY THIS) {WebUtility.HtmlDecode(match.Groups[1].Value)}";
}
// Dumpers
matches = Constants.DumpersRegex.Matches(discData);
if (matches.Count > 0)
{
// Start with any currently listed dumpers
var tempDumpers = new List<string>();
if (info.DumpersAndStatus!.Dumpers != null && info.DumpersAndStatus.Dumpers.Length > 0)
{
foreach (string dumper in info.DumpersAndStatus.Dumpers)
tempDumpers.Add(dumper);
}
foreach (Match? submatch in matches)
{
if (submatch == null)
continue;
string? dumper = WebUtility.HtmlDecode(submatch.Groups[1].Value);
if (dumper != null)
tempDumpers.Add(dumper);
}
info.DumpersAndStatus.Dumpers = [.. tempDumpers];
}
// PS3 DiscKey
if (string.IsNullOrEmpty(info.Extras!.DiscKey))
{
// Validate key is not NULL
match = Constants.PS3DiscKey.Match(discData);
if (match.Success && match.Groups[1].Value != "<span class=\"null\">NULL</span>")
info.Extras.DiscKey = match.Groups[1].Value;
}
// TODO: Unify handling of fields that can include site codes (Comments/Contents)
// Comments
if (includeAllData)
{
match = Constants.CommentsRegex.Match(discData);
if (match.Success)
{
// Process the old comments block
string oldComments = info.CommonDiscInfo.Comments
+ (string.IsNullOrEmpty(info.CommonDiscInfo.Comments) ? string.Empty : "\n")
+ (WebUtility.HtmlDecode(match.Groups[1].Value) ?? string.Empty)
.Replace("\r\n", "\n")
.Replace("<br />\n", "\n")
.Replace("<br />", string.Empty)
.Replace("</div>", string.Empty)
.Replace("[+]", string.Empty)
.ReplaceHtmlWithSiteCodes();
oldComments = Regex.Replace(oldComments, @"<div .*?>", string.Empty, RegexOptions.Compiled);
// Create state variables
bool addToLast = false;
SiteCode? lastSiteCode = null;
string newComments = string.Empty;
// Process the comments block line-by-line
string[] commentsSeparated = oldComments.Split('\n');
for (int i = 0; i < commentsSeparated.Length; i++)
{
string commentLine = commentsSeparated[i].Trim();
// If we have an empty line, we want to treat this as intentional
if (string.IsNullOrEmpty(commentLine))
{
addToLast = false;
lastSiteCode = null;
newComments += $"{commentLine}\n";
continue;
}
// Otherwise, we need to find what tag is in use
bool foundTag = false;
foreach (SiteCode? siteCode in Enum.GetValues(typeof(SiteCode)))
{
// If we have a null site code, just skip
if (siteCode == null)
continue;
// If the line doesn't contain this tag, just skip
var shortName = siteCode.ShortName();
if (shortName == null || !commentLine.Contains(shortName))
continue;
// Mark as having found a tag
foundTag = true;
// Cache the current site code
lastSiteCode = siteCode;
// A subset of tags can be multiline
addToLast = siteCode.IsMultiLine();
// Skip certain site codes because of data issues
switch (siteCode)
{
// Multiple
case SiteCode.InternalSerialName:
case SiteCode.Multisession:
case SiteCode.VolumeLabel:
continue;
// Audio CD
case SiteCode.RingNonZeroDataStart:
case SiteCode.RingPerfectAudioOffset:
case SiteCode.UniversalHash:
continue;
// Microsoft Xbox and Xbox 360
case SiteCode.DMIHash:
case SiteCode.PFIHash:
case SiteCode.SSHash:
case SiteCode.SSVersion:
case SiteCode.XMID:
case SiteCode.XeMID:
continue;
// Microsoft Xbox One and Series X/S
case SiteCode.Filename:
continue;
// Nintendo GameCube
case SiteCode.InternalName:
continue;
// Protection
case SiteCode.Protection:
continue;
}
// If we don't already have this site code, add it to the dictionary
if (!info.CommonDiscInfo.CommentsSpecialFields!.ContainsKey(siteCode.Value))
info.CommonDiscInfo.CommentsSpecialFields[siteCode.Value] = $"(VERIFY THIS) {commentLine.Replace(shortName, string.Empty).Trim()}";
// Otherwise, append the value to the existing key
else
info.CommonDiscInfo.CommentsSpecialFields[siteCode.Value] += $", {commentLine.Replace(shortName, string.Empty).Trim()}";
break;
}
// If we didn't find a known tag, keep the line anyway, just in case
if (!foundTag)
{
if (addToLast && lastSiteCode != null)
{
if (!string.IsNullOrEmpty(info.CommonDiscInfo.CommentsSpecialFields![lastSiteCode.Value]))
info.CommonDiscInfo.CommentsSpecialFields[lastSiteCode.Value] += "\n";
info.CommonDiscInfo.CommentsSpecialFields[lastSiteCode.Value] += commentLine;
}
else
{
newComments += $"{commentLine}\n";
}
}
}
// Set the new comments field
info.CommonDiscInfo.Comments = newComments;
}
}
// Contents
if (includeAllData)
{
match = Constants.ContentsRegex.Match(discData);
if (match.Success)
{
// Process the old contents block
string oldContents = info.CommonDiscInfo.Contents
+ (string.IsNullOrEmpty(info.CommonDiscInfo.Contents) ? string.Empty : "\n")
+ (WebUtility.HtmlDecode(match.Groups[1].Value) ?? string.Empty)
.Replace("\r\n", "\n")
.Replace("<br />\n", "\n")
.Replace("<br />", string.Empty)
.Replace("</div>", string.Empty)
.Replace("[+]", string.Empty)
.ReplaceHtmlWithSiteCodes();
oldContents = Regex.Replace(oldContents, @"<div .*?>", string.Empty, RegexOptions.Compiled);
// Create state variables
bool addToLast = false;
SiteCode? lastSiteCode = null;
string newContents = string.Empty;
// Process the contents block line-by-line
string[] contentsSeparated = oldContents.Split('\n');
for (int i = 0; i < contentsSeparated.Length; i++)
{
string contentLine = contentsSeparated[i].Trim();
// If we have an empty line, we want to treat this as intentional
if (string.IsNullOrEmpty(contentLine))
{
addToLast = false;
lastSiteCode = null;
newContents += $"{contentLine}\n";
continue;
}
// Otherwise, we need to find what tag is in use
bool foundTag = false;
foreach (SiteCode? siteCode in Enum.GetValues(typeof(SiteCode)))
{
// If we have a null site code, just skip
if (siteCode == null)
continue;
// If the line doesn't contain this tag, just skip
var shortName = siteCode.ShortName();
if (shortName == null || !contentLine.Contains(shortName))
continue;
// Cache the current site code
lastSiteCode = siteCode;
// If we don't already have this site code, add it to the dictionary
if (!info.CommonDiscInfo.ContentsSpecialFields!.ContainsKey(siteCode.Value))
info.CommonDiscInfo.ContentsSpecialFields[siteCode.Value] = $"(VERIFY THIS) {contentLine.Replace(shortName, string.Empty).Trim()}";
// A subset of tags can be multiline
addToLast = siteCode.IsMultiLine();
// Mark as having found a tag
foundTag = true;
break;
}
// If we didn't find a known tag, keep the line anyway, just in case
if (!foundTag)
{
if (addToLast && lastSiteCode != null)
{
if (!string.IsNullOrEmpty(info.CommonDiscInfo.ContentsSpecialFields![lastSiteCode.Value]))
info.CommonDiscInfo.ContentsSpecialFields[lastSiteCode.Value] += "\n";
info.CommonDiscInfo.ContentsSpecialFields[lastSiteCode.Value] += contentLine;
}
else
{
newContents += $"{contentLine}\n";
}
}
}
// Set the new contents field
info.CommonDiscInfo.Contents = newContents;
}
}
// Added
match = Constants.AddedRegex.Match(discData);
if (match.Success)
{
if (DateTime.TryParse(match.Groups[1].Value, out DateTime added))
info.Added = added;
else
info.Added = null;
}
// Last Modified
match = Constants.LastModifiedRegex.Match(discData);
if (match.Success)
{
if (DateTime.TryParse(match.Groups[1].Value, out DateTime lastModified))
info.LastModified = lastModified;
else
info.LastModified = null;
}
return true;
}
/// <summary>
/// Ensure all required sections in a submission info exist
/// </summary>
/// <param name="info">SubmissionInfo object to verify</param>
public static SubmissionInfo EnsureAllSections(SubmissionInfo? info)
{
// If there's no info, create one
info ??= new SubmissionInfo();
// Ensure all sections
info.CommonDiscInfo ??= new CommonDiscInfoSection();
info.VersionAndEditions ??= new VersionAndEditionsSection();
info.EDC ??= new EDCSection();
info.ParentCloneRelationship ??= new ParentCloneRelationshipSection();
info.Extras ??= new ExtrasSection();
info.CopyProtection ??= new CopyProtectionSection();
info.DumpersAndStatus ??= new DumpersAndStatusSection();
info.TracksAndWriteOffsets ??= new TracksAndWriteOffsetsSection();
info.SizeAndChecksums ??= new SizeAndChecksumsSection();
info.DumpingInfo ??= new DumpingInfoSection();
info.Artifacts ??= [];
// Ensure special dictionaries
info.CommonDiscInfo.CommentsSpecialFields ??= [];
info.CommonDiscInfo.ContentsSpecialFields ??= [];
return info;
}
/// <summary>
/// Inject information from a seed SubmissionInfo into the existing one
/// </summary>
/// <param name="info">Existing submission information</param>
/// <param name="seed">User-supplied submission information</param>
public static SubmissionInfo? InjectSubmissionInformation(SubmissionInfo? info, SubmissionInfo? seed)
{
// If we have any invalid info
if (seed == null)
return info;
// Ensure that required sections exist
info = EnsureAllSections(info);
// Otherwise, inject information as necessary
if (info.CommonDiscInfo != null && seed.CommonDiscInfo != null)
{
// Info that only overwrites if supplied
if (!string.IsNullOrEmpty(seed.CommonDiscInfo.Title)) info.CommonDiscInfo.Title = seed.CommonDiscInfo.Title;
if (!string.IsNullOrEmpty(seed.CommonDiscInfo.ForeignTitleNonLatin)) info.CommonDiscInfo.ForeignTitleNonLatin = seed.CommonDiscInfo.ForeignTitleNonLatin;
if (!string.IsNullOrEmpty(seed.CommonDiscInfo.DiscNumberLetter)) info.CommonDiscInfo.DiscNumberLetter = seed.CommonDiscInfo.DiscNumberLetter;
if (!string.IsNullOrEmpty(seed.CommonDiscInfo.DiscTitle)) info.CommonDiscInfo.DiscTitle = seed.CommonDiscInfo.DiscTitle;
if (seed.CommonDiscInfo.Category != null) info.CommonDiscInfo.Category = seed.CommonDiscInfo.Category;
if (seed.CommonDiscInfo.Region != null) info.CommonDiscInfo.Region = seed.CommonDiscInfo.Region;
if (seed.CommonDiscInfo.Languages != null) info.CommonDiscInfo.Languages = seed.CommonDiscInfo.Languages;
if (seed.CommonDiscInfo.LanguageSelection != null) info.CommonDiscInfo.LanguageSelection = seed.CommonDiscInfo.LanguageSelection;
if (!string.IsNullOrEmpty(seed.CommonDiscInfo.Serial)) info.CommonDiscInfo.Serial = seed.CommonDiscInfo.Serial;
if (!string.IsNullOrEmpty(seed.CommonDiscInfo.Barcode)) info.CommonDiscInfo.Barcode = seed.CommonDiscInfo.Barcode;
if (!string.IsNullOrEmpty(seed.CommonDiscInfo.Comments)) info.CommonDiscInfo.Comments = seed.CommonDiscInfo.Comments;
if (seed.CommonDiscInfo.CommentsSpecialFields != null) info.CommonDiscInfo.CommentsSpecialFields = seed.CommonDiscInfo.CommentsSpecialFields;
if (!string.IsNullOrEmpty(seed.CommonDiscInfo.Contents)) info.CommonDiscInfo.Contents = seed.CommonDiscInfo.Contents;
if (seed.CommonDiscInfo.ContentsSpecialFields != null) info.CommonDiscInfo.ContentsSpecialFields = seed.CommonDiscInfo.ContentsSpecialFields;
// Info that always overwrites
info.CommonDiscInfo.Layer0MasteringRing = seed.CommonDiscInfo.Layer0MasteringRing;
info.CommonDiscInfo.Layer0MasteringSID = seed.CommonDiscInfo.Layer0MasteringSID;
info.CommonDiscInfo.Layer0ToolstampMasteringCode = seed.CommonDiscInfo.Layer0ToolstampMasteringCode;
info.CommonDiscInfo.Layer0MouldSID = seed.CommonDiscInfo.Layer0MouldSID;
info.CommonDiscInfo.Layer0AdditionalMould = seed.CommonDiscInfo.Layer0AdditionalMould;
info.CommonDiscInfo.Layer1MasteringRing = seed.CommonDiscInfo.Layer1MasteringRing;
info.CommonDiscInfo.Layer1MasteringSID = seed.CommonDiscInfo.Layer1MasteringSID;
info.CommonDiscInfo.Layer1ToolstampMasteringCode = seed.CommonDiscInfo.Layer1ToolstampMasteringCode;
info.CommonDiscInfo.Layer1MouldSID = seed.CommonDiscInfo.Layer1MouldSID;
info.CommonDiscInfo.Layer1AdditionalMould = seed.CommonDiscInfo.Layer1AdditionalMould;
info.CommonDiscInfo.Layer2MasteringRing = seed.CommonDiscInfo.Layer2MasteringRing;
info.CommonDiscInfo.Layer2MasteringSID = seed.CommonDiscInfo.Layer2MasteringSID;
info.CommonDiscInfo.Layer2ToolstampMasteringCode = seed.CommonDiscInfo.Layer2ToolstampMasteringCode;
info.CommonDiscInfo.Layer3MasteringRing = seed.CommonDiscInfo.Layer3MasteringRing;
info.CommonDiscInfo.Layer3MasteringSID = seed.CommonDiscInfo.Layer3MasteringSID;
info.CommonDiscInfo.Layer3ToolstampMasteringCode = seed.CommonDiscInfo.Layer3ToolstampMasteringCode;
}
if (info.VersionAndEditions != null && seed.VersionAndEditions != null)
{
// Info that only overwrites if supplied
if (!string.IsNullOrEmpty(seed.VersionAndEditions.Version)) info.VersionAndEditions.Version = seed.VersionAndEditions.Version;
if (!string.IsNullOrEmpty(seed.VersionAndEditions.OtherEditions)) info.VersionAndEditions.OtherEditions = seed.VersionAndEditions.OtherEditions;
}
if (info.CopyProtection != null && seed.CopyProtection != null)
{
// Info that only overwrites if supplied
if (!string.IsNullOrEmpty(seed.CopyProtection.Protection)) info.CopyProtection.Protection = seed.CopyProtection.Protection;
}
return info;
}
#endregion
#region Helpers
/// <summary>
/// Process a text block and replace with internal identifiers
/// </summary>
/// <param name="text">Text block to process</param>
/// <returns>Processed text block, if possible</returns>
internal static string ReplaceHtmlWithSiteCodes(this string text)
{
// Empty strings are ignored
if (text.Length == 0)
return text;
foreach (SiteCode? siteCode in Enum.GetValues(typeof(SiteCode)))
{
var longname = siteCode.LongName();
if (!string.IsNullOrEmpty(longname))
text = text.Replace(longname, siteCode.ShortName());
}
// For some outdated tags, we need to use alternate names
text = text.Replace("<b>Demos</b>:", ((SiteCode?)SiteCode.PlayableDemos).ShortName());
text = text.Replace("DMI:", ((SiteCode?)SiteCode.DMIHash).ShortName());
text = text.Replace("<b>LucasArts ID</b>:", ((SiteCode?)SiteCode.LucasArtsID).ShortName());
text = text.Replace("PFI:", ((SiteCode?)SiteCode.PFIHash).ShortName());
text = text.Replace("SS:", ((SiteCode?)SiteCode.SSHash).ShortName());
text = text.Replace("SSv1:", ((SiteCode?)SiteCode.SSHash).ShortName());
text = text.Replace("<b>SSv1</b>:", ((SiteCode?)SiteCode.SSHash).ShortName());
text = text.Replace("SSv2:", ((SiteCode?)SiteCode.SSHash).ShortName());
text = text.Replace("<b>SSv2</b>:", ((SiteCode?)SiteCode.SSHash).ShortName());
text = text.Replace("SS version:", ((SiteCode?)SiteCode.SSVersion).ShortName());
text = text.Replace("Universal Hash (SHA-1):", ((SiteCode?)SiteCode.UniversalHash).ShortName());
text = text.Replace("XeMID:", ((SiteCode?)SiteCode.XeMID).ShortName());
text = text.Replace("XMID:", ((SiteCode?)SiteCode.XMID).ShortName());
return text;
}
#endregion
}
}
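
A minimal usage sketch for the new Builder type, chaining CreateFromFile, EnsureAllSections, and FillFromId. Two assumptions not confirmed by this diff: RedumpClient exposes a parameterless constructor, and the requested disc page does not require a logged-in session.

// Sketch only: seed from a local JSON (if any), then fill from a Redump disc ID.
using System.Threading.Tasks;
using SabreTools.RedumpLib;
using SabreTools.RedumpLib.Data;
using SabreTools.RedumpLib.Web;
public static class BuilderSketch
{
    public static async Task<SubmissionInfo?> BuildAsync(string jsonPath, int redumpId)
    {
        // CreateFromFile returns null for a missing or invalid path; EnsureAllSections fills the gaps
        SubmissionInfo info = Builder.EnsureAllSections(Builder.CreateFromFile(jsonPath));
        var client = new RedumpClient(); // assumed constructor
        bool filled = await Builder.FillFromId(client, info, redumpId, includeAllData: true);
        return filled ? info : null;
    }
}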

View File

@@ -10,11 +10,21 @@ namespace SabreTools.RedumpLib.Converters
/// </summary>
public class DiscCategoryConverter : JsonConverter<DiscCategory?>
{
public override bool CanRead { get { return false; } }
public override bool CanRead { get { return true; } }
public override DiscCategory? ReadJson(JsonReader reader, Type objectType, DiscCategory? existingValue, bool hasExistingValue, JsonSerializer serializer)
{
throw new NotImplementedException();
// If we have a value already, don't overwrite it
if (hasExistingValue)
return existingValue;
// Read the value
string? value = reader.Value as string;
if (value == null)
return null;
// Try to parse the value
return Data.Extensions.ToDiscCategory(value);
}
public override void WriteJson(JsonWriter writer, DiscCategory? value, JsonSerializer serializer)

View File

@@ -10,11 +10,21 @@ namespace SabreTools.RedumpLib.Converters
/// </summary>
public class DiscTypeConverter : JsonConverter<DiscType?>
{
public override bool CanRead { get { return false; } }
public override bool CanRead { get { return true; } }
public override DiscType? ReadJson(JsonReader reader, Type objectType, DiscType? existingValue, bool hasExistingValue, JsonSerializer serializer)
{
throw new NotImplementedException();
// If we have a value already, don't overwrite it
if (hasExistingValue)
return existingValue;
// Read the value
string? value = reader.Value as string;
if (value == null)
return null;
// Try to parse the value
return Data.Extensions.ToDiscType(value);
}
public override void WriteJson(JsonWriter writer, DiscType? value, JsonSerializer serializer)

View File

@@ -1,4 +1,5 @@
using System;
using System.Collections.Generic;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
using SabreTools.RedumpLib.Data;
@@ -10,11 +11,31 @@ namespace SabreTools.RedumpLib.Converters
/// </summary>
public class LanguageConverter : JsonConverter<Language?[]>
{
public override bool CanRead { get { return false; } }
public override bool CanRead { get { return true; } }
public override Language?[] ReadJson(JsonReader reader, Type objectType, Language?[]? existingValue, bool hasExistingValue, JsonSerializer serializer)
{
throw new NotImplementedException();
// If we have a value already, don't overwrite it
if (hasExistingValue)
return existingValue ?? [];
// Get the current depth for checking
int currentDepth = reader.Depth;
// Read the array while it exists
List<Language> languages = [];
while (reader.Read() && reader.Depth > currentDepth)
{
string? value = reader.Value as string;
if (value == null)
continue;
Language? lang = Data.Extensions.ToLanguage(value);
if (lang != null)
languages.Add(lang.Value);
}
return [.. languages];
}
public override void WriteJson(JsonWriter writer, Language?[]? value, JsonSerializer serializer)

View File

@@ -1,4 +1,5 @@
using System;
using System.Collections.Generic;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
using SabreTools.RedumpLib.Data;
@@ -10,11 +11,31 @@ namespace SabreTools.RedumpLib.Converters
/// </summary>
public class LanguageSelectionConverter : JsonConverter<LanguageSelection?[]>
{
public override bool CanRead { get { return false; } }
public override bool CanRead { get { return true; } }
public override LanguageSelection?[] ReadJson(JsonReader reader, Type objectType, LanguageSelection?[]? existingValue, bool hasExistingValue, JsonSerializer serializer)
{
throw new NotImplementedException();
// If we have a value already, don't overwrite it
if (hasExistingValue)
return existingValue ?? [];
// Get the current depth for checking
int currentDepth = reader.Depth;
// Read the array while it exists
List<LanguageSelection> selections = [];
while (reader.Read() && reader.Depth > currentDepth)
{
string? value = reader.Value as string;
if (value == null)
continue;
LanguageSelection? sel = Data.Extensions.ToLanguageSelection(value);
if (sel != null)
selections.Add(sel.Value);
}
return [.. selections];
}
public override void WriteJson(JsonWriter writer, LanguageSelection?[]? value, JsonSerializer serializer)

View File

@@ -10,11 +10,21 @@ namespace SabreTools.RedumpLib.Converters
/// </summary>
public class RegionConverter : JsonConverter<Region?>
{
public override bool CanRead { get { return false; } }
public override bool CanRead { get { return true; } }
public override Region? ReadJson(JsonReader reader, Type objectType, Region? existingValue, bool hasExistingValue, JsonSerializer serializer)
{
throw new NotImplementedException();
// If we have a value already, don't overwrite it
if (hasExistingValue)
return existingValue;
// Read the value
string? value = reader.Value as string;
if (value == null)
return null;
// Try to parse the value
return Data.Extensions.ToRegion(value);
}
public override void WriteJson(JsonWriter writer, Region? value, JsonSerializer serializer)

View File

@@ -10,11 +10,21 @@ namespace SabreTools.RedumpLib.Converters
/// </summary>
public class SystemConverter : JsonConverter<RedumpSystem?>
{
public override bool CanRead { get { return false; } }
public override bool CanRead { get { return true; } }
public override RedumpSystem? ReadJson(JsonReader reader, Type objectType, RedumpSystem? existingValue, bool hasExistingValue, JsonSerializer serializer)
{
throw new NotImplementedException();
// If we have a value already, don't overwrite it
if (hasExistingValue)
return existingValue;
// Read the value
string? value = reader.Value as string;
if (value == null)
return null;
// Try to parse the value
return Data.Extensions.ToRedumpSystem(value);
}
public override void WriteJson(JsonWriter writer, RedumpSystem? value, JsonSerializer serializer)

View File

@@ -10,11 +10,21 @@ namespace SabreTools.RedumpLib.Converters
/// </summary>
public class YesNoConverter : JsonConverter<YesNo?>
{
public override bool CanRead { get { return false; } }
public override bool CanRead { get { return true; } }
public override YesNo? ReadJson(JsonReader reader, Type objectType, YesNo? existingValue, bool hasExistingValue, JsonSerializer serializer)
{
throw new NotImplementedException();
// If we have a value already, don't overwrite it
if (hasExistingValue)
return existingValue;
// Read the value
if (reader.Value is bool bVal)
return Data.Extensions.ToYesNo(bVal);
else if (reader.Value is string sVal)
return Data.Extensions.ToYesNo(sVal);
return null;
}
public override void WriteJson(JsonWriter writer, YesNo? value, JsonSerializer serializer)
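
With CanRead now returning true across these converters, the submission JSON the library writes can also be read back. A hedged round-trip sketch follows; it assumes the converters are attached to the SubmissionInfo model via [JsonConverter] attributes (implied by the existing JsonConvert-based CreateFromFile path, but not shown in this diff).

// Sketch only: serialize and re-read a SubmissionInfo with the enum converters in play.
using Newtonsoft.Json;
using SabreTools.RedumpLib.Data;
var original = new SubmissionInfo
{
    CommonDiscInfo = new CommonDiscInfoSection
    {
        System = RedumpSystem.PhilipsCDi,
        Media = DiscType.CD,
        Region = Region.Europe,
    },
};
string json = JsonConvert.SerializeObject(original);
// Previously ReadJson threw NotImplementedException; the values above now parse back.
SubmissionInfo? roundTripped = JsonConvert.DeserializeObject<SubmissionInfo>(json);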

View File

@@ -4,168 +4,172 @@ namespace SabreTools.RedumpLib.Data
{
public static class Constants
{
// TODO: Add RegexOptions.Compiled
#region Regular Expressions
/// <summary>
/// Regex matching the added field on a disc page
/// </summary>
public static Regex AddedRegex = new Regex(@"<tr><th>Added</th><td>(.*?)</td></tr>");
public static Regex AddedRegex = new(@"<tr><th>Added</th><td>(.*?)</td></tr>", RegexOptions.Compiled);
/// <summary>
/// Regex matching the barcode field on a disc page
/// </summary>
public static Regex BarcodeRegex = new Regex(@"<tr><th>Barcode</th></tr><tr><td>(.*?)</td></tr>");
public static Regex BarcodeRegex = new(@"<tr><th>Barcode</th></tr><tr><td>(.*?)</td></tr>", RegexOptions.Compiled);
/// <summary>
/// Regex matching the BCA field on a disc page
/// </summary>
public static Regex BcaRegex = new Regex(@"<h3>BCA .*?/></h3></td><td .*?></td></tr>"
public static Regex BcaRegex = new(@"<h3>BCA .*?/></h3></td><td .*?></td></tr>"
+ "<tr><th>Row</th><th>Contents</th><th>ASCII</th></tr>"
+ "<tr><td>(?<row1number>.*?)</td><td>(?<row1contents>.*?)</td><td>(?<row1ascii>.*?)</td></tr>"
+ "<tr><td>(?<row2number>.*?)</td><td>(?<row2contents>.*?)</td><td>(?<row2ascii>.*?)</td></tr>"
+ "<tr><td>(?<row3number>.*?)</td><td>(?<row3contents>.*?)</td><td>(?<row3ascii>.*?)</td></tr>"
+ "<tr><td>(?<row4number>.*?)</td><td>(?<row4contents>.*?)</td><td>(?<row4ascii>.*?)</td></tr>", RegexOptions.Singleline);
+ "<tr><td>(?<row4number>.*?)</td><td>(?<row4contents>.*?)</td><td>(?<row4ascii>.*?)</td></tr>", RegexOptions.Compiled | RegexOptions.Singleline);
/// <summary>
/// Regex matching the category field on a disc page
/// </summary>
public static Regex CategoryRegex = new Regex(@"<tr><th>Category</th><td>(.*?)</td></tr>");
public static Regex CategoryRegex = new(@"<tr><th>Category</th><td>(.*?)</td></tr>", RegexOptions.Compiled);
/// <summary>
/// Regex matching the comments field on a disc page
/// </summary>
public static Regex CommentsRegex = new Regex(@"<tr><th>Comments</th></tr><tr><td>(.*?)</td></tr>", RegexOptions.Singleline);
public static Regex CommentsRegex = new(@"<tr><th>Comments</th></tr><tr><td>(.*?)</td></tr>", RegexOptions.Compiled | RegexOptions.Singleline);
/// <summary>
/// Regex matching the contents field on a disc page
/// </summary>
public static Regex ContentsRegex = new Regex(@"<tr><th>Contents</th></tr><tr .*?><td>(.*?)</td></tr>", RegexOptions.Singleline);
public static Regex ContentsRegex = new(@"<tr><th>Contents</th></tr><tr .*?><td>(.*?)</td></tr>", RegexOptions.Compiled | RegexOptions.Singleline);
/// <summary>
/// Regex matching individual disc links on a results page
/// </summary>
public static Regex DiscRegex = new Regex(@"<a href=""/disc/(\d+)/"">");
public static Regex DiscRegex = new(@"<a href=""/disc/(\d+)/"">", RegexOptions.Compiled);
/// <summary>
/// Regex matching the disc number or letter field on a disc page
/// </summary>
public static Regex DiscNumberLetterRegex = new Regex(@"\((.*?)\)");
public static Regex DiscNumberLetterRegex = new(@"\((.*?)\)", RegexOptions.Compiled);
/// <summary>
/// Regex matching the dumpers on a disc page
/// </summary>
public static Regex DumpersRegex = new Regex(@"<a href=""/discs/dumper/(.*?)/"">");
public static Regex DumpersRegex = new(@"<a href=""/discs/dumper/(.*?)/"">", RegexOptions.Compiled);
/// <summary>
/// Regex matching the edition field on a disc page
/// </summary>
public static Regex EditionRegex = new Regex(@"<tr><th>Edition</th><td>(.*?)</td></tr>");
public static Regex EditionRegex = new(@"<tr><th>Edition</th><td>(.*?)</td></tr>", RegexOptions.Compiled);
/// <summary>
/// Regex matching the error count field on a disc page
/// </summary>
public static Regex ErrorCountRegex = new Regex(@"<tr><th>Errors count</th><td>(.*?)</td></tr>");
public static Regex ErrorCountRegex = new(@"<tr><th>Errors count</th><td>(.*?)</td></tr>", RegexOptions.Compiled);
/// <summary>
/// Regex matching the foreign title field on a disc page
/// </summary>
public static Regex ForeignTitleRegex = new Regex(@"<h2>(.*?)</h2>");
public static Regex ForeignTitleRegex = new(@"<h2>(.*?)</h2>", RegexOptions.Compiled);
/// <summary>
/// Regex matching the "full match" ID list from a WIP disc page
/// </summary>
public static Regex FullMatchRegex = new Regex(@"<td class=""static"">full match ids: (.*?)</td>");
public static Regex FullMatchRegex = new(@"<td class=""static"">full match ids: (.*?)</td>", RegexOptions.Compiled);
/// <summary>
/// Regex matching the languages field on a disc page
/// </summary>
public static Regex LanguagesRegex = new Regex(@"<img src=""/images/languages/(.*?)\.png"" alt="".*?"" title="".*?"" />\s*");
public static Regex LanguagesRegex = new(@"<img src=""/images/languages/(.*?)\.png"" alt="".*?"" title="".*?"" />\s*", RegexOptions.Compiled);
/// <summary>
/// Regex matching the last modified field on a disc page
/// </summary>
public static Regex LastModifiedRegex = new Regex(@"<tr><th>Last modified</th><td>(.*?)</td></tr>");
public static Regex LastModifiedRegex = new(@"<tr><th>Last modified</th><td>(.*?)</td></tr>", RegexOptions.Compiled);
/// <summary>
/// Regex matching the media field on a disc page
/// </summary>
public static Regex MediaRegex = new Regex(@"<tr><th>Media</th><td>(.*?)</td></tr>");
public static Regex MediaRegex = new(@"<tr><th>Media</th><td>(.*?)</td></tr>", RegexOptions.Compiled);
/// <summary>
/// Regex matching individual WIP disc links on a results page
/// </summary>
public static Regex NewDiscRegex = new Regex(@"<a (style=.*)?href=""/newdisc/(\d+)/"">");
public static Regex NewDiscRegex = new(@"<a (style=.*)?href=""/newdisc/(\d+)/"">", RegexOptions.Compiled);
/// <summary>
/// Regex matching the "partial match" ID list from a WIP disc page
/// </summary>
public static Regex PartialMatchRegex = new Regex(@"<td class=""static"">partial match ids: (.*?)</td>");
public static Regex PartialMatchRegex = new(@"<td class=""static"">partial match ids: (.*?)</td>", RegexOptions.Compiled);
/// <summary>
/// Regex matching the disc key on a PS3 disc page
/// </summary>
public static Regex PS3DiscKey = new(@"<th>Disc Key</th><th>Disc ID</th><th>Permanent Information & Control \(PIC\)</th></tr><tr><td>(.*?)</td><td>", RegexOptions.Compiled);
/// <summary>
/// Regex matching the PVD field on a disc page
/// </summary>
public static Regex PvdRegex = new Regex(@"<h3>Primary Volume Descriptor (PVD) <img .*?/></h3></td><td .*?></td></tr>"
public static Regex PvdRegex = new(@"<h3>Primary Volume Descriptor (PVD) <img .*?/></h3></td><td .*?></td></tr>"
+ @"<tr><th>Record / Entry</th><th>Contents</th><th>Date</th><th>Time</th><th>GMT</th></tr>"
+ @"<tr><td>Creation</td><td>(?<creationbytes>.*?)</td><td>(?<creationdate>.*?)</td><td>(?<creationtime>.*?)</td><td>(?<creationtimezone>.*?)</td></tr>"
+ @"<tr><td>Modification</td><td>(?<modificationbytes>.*?)</td><td>(?<modificationdate>.*?)</td><td>(?<modificationtime>.*?)</td><td>(?<modificationtimezone>.*?)</td></tr>"
+ @"<tr><td>Expiration</td><td>(?<expirationbytes>.*?)</td><td>(?<expirationdate>.*?)</td><td>(?<expirationtime>.*?)</td><td>(?<expirationtimezone>.*?)</td></tr>"
+ @"<tr><td>Effective</td><td>(?<effectivebytes>.*?)</td><td>(?<effectivedate>.*?)</td><td>(?<effectivetime>.*?)</td><td>(?<effectivetimezone>.*?)</td></tr>", RegexOptions.Singleline);
+ @"<tr><td>Effective</td><td>(?<effectivebytes>.*?)</td><td>(?<effectivedate>.*?)</td><td>(?<effectivetime>.*?)</td><td>(?<effectivetimezone>.*?)</td></tr>", RegexOptions.Compiled | RegexOptions.Singleline);
/// <summary>
/// Regex matching the region field on a disc page
/// </summary>
public static Regex RegionRegex = new Regex(@"<tr><th>Region</th><td><a href=""/discs/region/(.*?)/"">");
public static Regex RegionRegex = new(@"<tr><th>Region</th><td><a href=""/discs/region/(.*?)/"">", RegexOptions.Compiled);
/// <summary>
/// Regex matching a double-layer disc ringcode information
/// </summary>
public static Regex RingCodeDoubleRegex = new Regex(@"", RegexOptions.Singleline); // Varies based on available fields, like Additional Mould
public static Regex RingCodeDoubleRegex = new(@"", RegexOptions.Compiled | RegexOptions.Singleline); // Varies based on available fields, like Additional Mould
/// <summary>
/// Regex matching a single-layer disc ringcode information
/// </summary>
public static Regex RingCodeSingleRegex = new Regex(@"", RegexOptions.Singleline); // Varies based on available fields, like Additional Mould
public static Regex RingCodeSingleRegex = new(@"", RegexOptions.Compiled | RegexOptions.Singleline); // Varies based on available fields, like Additional Mould
/// <summary>
/// Regex matching the serial field on a disc page
/// </summary>
public static Regex SerialRegex = new Regex(@"<tr><th>Serial</th><td>(.*?)</td></tr>");
public static Regex SerialRegex = new(@"<tr><th>Serial</th><td>(.*?)</td></tr>", RegexOptions.Compiled);
/// <summary>
/// Regex matching the system field on a disc page
/// </summary>
public static Regex SystemRegex = new Regex(@"<tr><th>System</th><td><a href=""/discs/system/(.*?)/"">");
public static Regex SystemRegex = new(@"<tr><th>System</th><td><a href=""/discs/system/(.*?)/"">", RegexOptions.Compiled);
/// <summary>
/// Regex matching the title field on a disc page
/// </summary>
public static Regex TitleRegex = new Regex(@"<h1>(.*?)</h1>");
public static Regex TitleRegex = new(@"<h1>(.*?)</h1>", RegexOptions.Compiled);
/// <summary>
/// Regex matching the current nonce token for login
/// </summary>
public static Regex TokenRegex = new Regex(@"<input type=""hidden"" name=""csrf_token"" value=""(.*?)"" />");
public static Regex TokenRegex = new(@"<input type=""hidden"" name=""csrf_token"" value=""(.*?)"" />", RegexOptions.Compiled);
/// <summary>
/// Regex matching a single track on a disc page
/// </summary>
public static Regex TrackRegex = new Regex(@"<tr><td>(?<number>.*?)</td><td>(?<type>.*?)</td><td>(?<pregap>.*?)</td><td>(?<length>.*?)</td><td>(?<sectors>.*?)</td><td>(?<size>.*?)</td><td>(?<crc32>.*?)</td><td>(?<md5>.*?)</td><td>(?<sha1>.*?)</td></tr>", RegexOptions.Singleline);
public static Regex TrackRegex = new(@"<tr><td>(?<number>.*?)</td><td>(?<type>.*?)</td><td>(?<pregap>.*?)</td><td>(?<length>.*?)</td><td>(?<sectors>.*?)</td><td>(?<size>.*?)</td><td>(?<crc32>.*?)</td><td>(?<md5>.*?)</td><td>(?<sha1>.*?)</td></tr>", RegexOptions.Compiled | RegexOptions.Singleline);
/// <summary>
/// Regex matching the track count on a disc page
/// </summary>
public static Regex TrackCountRegex = new Regex(@"<tr><th>Number of tracks</th><td>(.*?)</td></tr>");
public static Regex TrackCountRegex = new(@"<tr><th>Number of tracks</th><td>(.*?)</td></tr>", RegexOptions.Compiled);
/// <summary>
/// Regex matching the version field on a disc page
/// </summary>
public static Regex VersionRegex = new Regex(@"<tr><th>Version</th><td>(.*?)</td></tr>");
public static Regex VersionRegex = new(@"<tr><th>Version</th><td>(.*?)</td></tr>", RegexOptions.Compiled);
/// <summary>
/// Regex matching the write offset field on a disc page
/// </summary>
public static Regex WriteOffsetRegex = new Regex(@"<tr><th>Write offset</th><td>(.*?)</td></tr>");
public static Regex WriteOffsetRegex = new(@"<tr><th>Write offset</th><td>(.*?)</td></tr>", RegexOptions.Compiled);
#endregion
@@ -301,6 +305,5 @@ namespace SabreTools.RedumpLib.Data
public const string Sha1Ext = "sha1/";
#endregion
}
}
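
A minimal sketch of consuming one of these compiled patterns, mirroring how Builder.FillFromId reads capture group 1 from each match.

// Sketch only: TitleRegex captures the page title in group 1.
using System;
using SabreTools.RedumpLib.Data;
string page = "<h1>Example Title (Disc 1)</h1>";
var match = Constants.TitleRegex.Match(page);
if (match.Success)
    Console.WriteLine(match.Groups[1].Value); // "Example Title (Disc 1)"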

View File

@@ -281,7 +281,7 @@ namespace SabreTools.RedumpLib.Data
[Language(LongName = "Bini; Edo", ThreeLetterCode = "bin")]
Bini,
[Language(LongName = "Bislama", TwoLetterCode = "bla", ThreeLetterCode = "bis")]
[Language(LongName = "Bislama", TwoLetterCode = "bi", ThreeLetterCode = "bis")]
Bislama,
// Blin; Bilin
@@ -2008,9 +2008,6 @@ namespace SabreTools.RedumpLib.Data
[System(Category = SystemCategory.DiscBasedConsole, LongName = "Philips CD-i", ShortName = "cdi", HasCues = true, HasDat = true)]
PhilipsCDi,
[System(Category = SystemCategory.DiscBasedConsole, LongName = "Philips CD-i Digital Video", ShortName = "cdi-video", IsBanned = true)]
PhilipsCDiDigitalVideo,
[System(Category = SystemCategory.DiscBasedConsole, Available = false, LongName = "Pioneer LaserActive")]
PioneerLaserActive,
@@ -2234,7 +2231,7 @@ namespace SabreTools.RedumpLib.Data
[System(Category = SystemCategory.Computer, LongName = "NEC PC-98 series", ShortName = "pc-98", HasCues = true, HasDat = true)]
NECPC98series,
[System(Category = SystemCategory.Computer, LongName = "Sharp X68000", ShortName = "x86kcd", HasCues = true, HasDat = true)]
[System(Category = SystemCategory.Computer, LongName = "Sharp X68000", ShortName = "x68k", HasCues = true, HasDat = true)]
SharpX68000,
// End of computer section delimiter
@@ -2331,7 +2328,7 @@ namespace SabreTools.RedumpLib.Data
[System(Category = SystemCategory.Arcade, Available = false, LongName = "Merit Industries MegaTouch XL")]
MeritIndustriesMegaTouchXL,
[System(Category = SystemCategory.Arcade, LongName = "Namco · Sega · Nintendo Triforce", ShortName = "triforce", HasCues = true, HasDat = true, HasGdi = true)]
[System(Category = SystemCategory.Arcade, LongName = "Namco · Sega · Nintendo Triforce", ShortName = "trf", HasCues = true, HasDat = true, HasGdi = true)]
NamcoSegaNintendoTriforce,
[System(Category = SystemCategory.Arcade, LongName = "Namco System 12", ShortName = "ns12")]
@@ -2488,8 +2485,105 @@ namespace SabreTools.RedumpLib.Data
/// </remarks>
public enum Region
{
// TODO: Should "regions" and multi-country sets be phased out?
// TODO: Should "regions" be moved to the end?
#region Aggregates - Redump Only
[HumanReadable(LongName = "Asia", ShortName = "A")]
Asia,
[HumanReadable(LongName = "Asia, Europe", ShortName = "A,E")]
AsiaEurope,
[HumanReadable(LongName = "Asia, USA", ShortName = "A,U")]
AsiaUSA,
[HumanReadable(LongName = "Australia, Germany", ShortName = "Au,G")]
AustraliaGermany,
[HumanReadable(LongName = "Australia, New Zealand", ShortName = "Au,Nz")]
AustraliaNewZealand,
[HumanReadable(LongName = "Austria, Switzerland", ShortName = "At,Ch")]
AustriaSwitzerland,
[HumanReadable(LongName = "Belgium, Netherlands", ShortName = "Be,N")]
BelgiumNetherlands,
[HumanReadable(LongName = "Europe", ShortName = "E")]
Europe,
[HumanReadable(LongName = "Europe, Asia", ShortName = "E,A")]
EuropeAsia,
[HumanReadable(LongName = "Europe, Australia", ShortName = "E,Au")]
EuropeAustralia,
[HumanReadable(LongName = "Europe, Canada", ShortName = "E,Ca")]
EuropeCanada,
[HumanReadable(LongName = "Europe, Germany", ShortName = "E,G")]
EuropeGermany,
[HumanReadable(LongName = "Export", ShortName = "Ex")]
Export,
[HumanReadable(LongName = "France, Spain", ShortName = "F,S")]
FranceSpain,
[HumanReadable(LongName = "Greater China", ShortName = "GC")]
GreaterChina,
[HumanReadable(LongName = "Japan, Asia", ShortName = "J,A")]
JapanAsia,
[HumanReadable(LongName = "Japan, Europe", ShortName = "J,E")]
JapanEurope,
[HumanReadable(LongName = "Japan, Korea", ShortName = "J,K")]
JapanKorea,
[HumanReadable(LongName = "Japan, USA", ShortName = "J,U")]
JapanUSA,
[HumanReadable(LongName = "Latin America", ShortName = "LAm")]
LatinAmerica,
[HumanReadable(LongName = "Scandinavia", ShortName = "Sca")]
Scandinavia,
[HumanReadable(LongName = "Spain, Portugal", ShortName = "S,Pt")]
SpainPortugal,
[HumanReadable(LongName = "UK, Australia", ShortName = "Uk,Au")]
UKAustralia,
[HumanReadable(LongName = "USA, Asia", ShortName = "U,A")]
USAAsia,
[HumanReadable(LongName = "USA, Australia", ShortName = "U,Au")]
USAAustralia,
[HumanReadable(LongName = "USA, Brazil", ShortName = "U,B")]
USABrazil,
[HumanReadable(LongName = "USA, Canada", ShortName = "U,Ca")]
USACanada,
[HumanReadable(LongName = "USA, Europe", ShortName = "U,E")]
USAEurope,
[HumanReadable(LongName = "USA, Germany", ShortName = "U,G")]
USAGermany,
[HumanReadable(LongName = "USA, Japan", ShortName = "U,J")]
USAJapan,
[HumanReadable(LongName = "USA, Korea", ShortName = "U,K")]
USAKorea,
[HumanReadable(LongName = "World", ShortName = "W")]
World,
#endregion
#region A
@@ -2535,30 +2629,12 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(LongName = "Ascension Island", ShortName = "Ac")]
AscensionIsland,
[HumanReadable(LongName = "Asia", ShortName = "A")]
Asia,
[HumanReadable(LongName = "Asia, Europe", ShortName = "A,E")]
AsiaEurope,
[HumanReadable(LongName = "Asia, USA", ShortName = "A,U")]
AsiaUSA,
[HumanReadable(LongName = "Australia", ShortName = "Au")]
Australia,
[HumanReadable(LongName = "Australia, Germany", ShortName = "Au,G")]
AustraliaGermany,
[HumanReadable(LongName = "Australia, New Zealand", ShortName = "Au,Nz")]
AustraliaNewZealand,
[HumanReadable(LongName = "Austria", ShortName = "At")]
Austria,
[HumanReadable(LongName = "Austria, Switzerland", ShortName = "At,Ch")]
AustriaSwitzerland,
[HumanReadable(LongName = "Azerbaijan", ShortName = "Az")]
Azerbaijan,
@@ -2584,9 +2660,6 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(LongName = "Belgium", ShortName = "Be")]
Belgium,
[HumanReadable(LongName = "Belgium, Netherlands", ShortName = "Be,N")]
BelgiumNetherlands,
[HumanReadable(LongName = "Belize", ShortName = "Bz")]
Belize,
@@ -2767,21 +2840,6 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(LongName = "Ethiopia", ShortName = "Et")]
Ethiopia,
[HumanReadable(LongName = "Europe", ShortName = "E")]
Europe,
[HumanReadable(LongName = "Europe, Asia", ShortName = "E,A")]
EuropeAsia,
[HumanReadable(LongName = "Europe, Australia", ShortName = "E,Au")]
EuropeAustralia,
[HumanReadable(LongName = "Europe, Canada", ShortName = "E,Ca")]
EuropeCanada,
[HumanReadable(LongName = "Europe, Germany", ShortName = "E,G")]
EuropeGermany,
// Commented out to avoid confusion
//[HumanReadable(LongName = "European Union", ShortName = "Eu")]
//EuropeanUnion,
@@ -2790,9 +2848,6 @@ namespace SabreTools.RedumpLib.Data
//[HumanReadable(LongName = "Eurozone", ShortName = "Ez")]
//Eurozone,
[HumanReadable(LongName = "Export", ShortName = "Ex")]
Export,
#endregion
#region F
@@ -2821,9 +2876,6 @@ namespace SabreTools.RedumpLib.Data
//[HumanReadable(LongName = "France, Metropolitan", ShortName = "Fx")]
//FranceMetropolitan,
[HumanReadable(LongName = "France, Spain", ShortName = "F,S")]
FranceSpain,
[HumanReadable(LongName = "French Guiana", ShortName = "Gf")]
FrenchGuiana,
@@ -2856,9 +2908,6 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(LongName = "Gibraltar", ShortName = "Gi")]
Gibraltar,
[HumanReadable(LongName = "Greater China", ShortName = "GC")]
GreaterChina,
[HumanReadable(LongName = "Greece", ShortName = "Gr")]
Greece,
@@ -2958,18 +3007,6 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(LongName = "Japan", ShortName = "J")]
Japan,
[HumanReadable(LongName = "Japan, Asia", ShortName = "J,A")]
JapanAsia,
[HumanReadable(LongName = "Japan, Europe", ShortName = "J,E")]
JapanEurope,
[HumanReadable(LongName = "Japan, Korea", ShortName = "J,K")]
JapanKorea,
[HumanReadable(LongName = "Japan, USA", ShortName = "J,U")]
JapanUSA,
[HumanReadable(LongName = "Jersey", ShortName = "Je")]
Jersey,
@@ -3009,9 +3046,6 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(LongName = "(Laos) Lao People's Democratic Republic", ShortName = "La")]
Laos,
[HumanReadable(LongName = "Latin America", ShortName = "LAm")]
LatinAmerica,
[HumanReadable(LongName = "Latvia", ShortName = "Lv")]
Latvia,
@@ -3264,9 +3298,6 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(LongName = "Saudi Arabia", ShortName = "Sa")]
SaudiArabia,
[HumanReadable(LongName = "Scandinavia", ShortName = "Sca")]
Scandinavia,
[HumanReadable(LongName = "Senegal", ShortName = "Sn")]
Senegal,
@@ -3310,9 +3341,6 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(LongName = "Spain", ShortName = "S")]
Spain,
[HumanReadable(LongName = "Spain, Portugal", ShortName = "S,Pt")]
SpainPortugal,
[HumanReadable(LongName = "Sri Lanka", ShortName = "Lk")]
SriLanka,
@@ -3397,9 +3425,6 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(LongName = "UK", ShortName = "Uk")]
UnitedKingdom,
[HumanReadable(LongName = "UK, Australia", ShortName = "Uk,Au")]
UKAustralia,
[HumanReadable(LongName = "Ukraine", ShortName = "Ue")]
Ukraine,
@@ -3424,30 +3449,6 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(LongName = "USA", ShortName = "U")]
UnitedStatesOfAmerica,
[HumanReadable(LongName = "USA, Asia", ShortName = "U,A")]
USAAsia,
[HumanReadable(LongName = "USA, Australia", ShortName = "U,Au")]
USAAustralia,
[HumanReadable(LongName = "USA, Brazil", ShortName = "U,B")]
USABrazil,
[HumanReadable(LongName = "USA, Canada", ShortName = "U,Ca")]
USACanada,
[HumanReadable(LongName = "USA, Europe", ShortName = "U,E")]
USAEurope,
[HumanReadable(LongName = "USA, Germany", ShortName = "U,G")]
USAGermany,
[HumanReadable(LongName = "USA, Japan", ShortName = "U,J")]
USAJapan,
[HumanReadable(LongName = "USA, Korea", ShortName = "U,K")]
USAKorea,
[HumanReadable(LongName = "USSR", ShortName = "Su")]
USSR,
@@ -3483,9 +3484,6 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(LongName = "Western Sahara", ShortName = "Eh")]
WesternSahara,
[HumanReadable(LongName = "World", ShortName = "W")]
World,
#endregion
#region Y
@@ -3526,34 +3524,50 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(ShortName = "[T:ALTF]", LongName = "<b>Alternative Foreign Title</b>:")]
AlternativeForeignTitle,
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>Applications</b>:", LongName = "<b>Applications</b>:")]
Applications,
[HumanReadable(ShortName = "[T:BID]", LongName = "<b>Bandai ID</b>:")]
BandaiID,
[HumanReadable(ShortName = "[T:BBFC]", LongName = "<b>BBFC Reg. No.</b>:")]
BBFCRegistrationNumber,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>Bethesda ID</b>:", LongName = "<b>Bethesda ID</b>:")]
BethesdaID,
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>CD Projekt ID</b>:", LongName = "<b>CD Projekt ID</b>:")]
CDProjektID,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>Compatible OS</b>:", LongName = "<b>Compatible OS</b>:")]
CompatibleOS,
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>Disc Hologram ID</b>:", LongName = "<b>Disc Hologram ID</b>:")]
DiscHologramID,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>DMI</b>:", LongName = "<b>DMI</b>:")]
DMIHash,
[HumanReadable(ShortName = "[T:DNAS]", LongName = "<b>DNAS Disc ID</b>:")]
DNASDiscID,
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>Eidos ID</b>:", LongName = "<b>Eidos ID</b>:")]
EidosID,
[HumanReadable(ShortName = "[T:EAID]", LongName = "<b>Electronic Arts ID</b>:")]
ElectronicArtsID,
[HumanReadable(ShortName = "[T:X]", LongName = "<b>Extras</b>:")]
Extras,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>Filename</b>:", LongName = "<b>Filename</b>:")]
Filename,
@@ -3563,7 +3577,7 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(ShortName = "[T:GF]", LongName = "<b>Game Footage</b>:")]
GameFootage,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>Games</b>:", LongName = "<b>Games</b>:")]
Games,
@@ -3573,7 +3587,7 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(ShortName = "[T:GTID]", LongName = "<b>GT Interactive ID</b>:")]
GTInteractiveID,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>Internal Name</b>:", LongName = "<b>Internal Name</b>:")]
InternalName,
@@ -3601,11 +3615,11 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(ShortName = "[T:LAID]", LongName = "<b>Lucas Arts ID</b>:")]
LucasArtsID,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>Microsoft ID</b>:", LongName = "<b>Microsoft ID</b>:")]
MicrosoftID,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>Multisession</b>:", LongName = "<b>Multisession</b>:")]
Multisession,
@@ -3627,7 +3641,7 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(ShortName = "[T:P]", LongName = "<b>Patches</b>:")]
Patches,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>PFI</b>:", LongName = "<b>PFI</b>:")]
PFIHash,
@@ -3643,10 +3657,18 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(ShortName = "[T:PPN]", LongName = "<b>PPN</b>:")]
PPN,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag for some systems yet
[HumanReadable(ShortName = "<b>Protection</b>:", LongName = "<b>Protection</b>:")]
Protection,
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>Ring non-zero data start</b>:", LongName = "<b>Ring non-zero data start</b>:")]
RingNonZeroDataStart,
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>Ring Perfect Audio Offset</b>:", LongName = "<b>Ring Perfect Audio Offset</b>:")]
RingPerfectAudioOffset,
[HumanReadable(ShortName = "[T:RD]", LongName = "<b>Rolling Demos</b>:")]
RollingDemos,
@@ -3662,15 +3684,15 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(ShortName = "[T:S]", LongName = "<b>Series</b>:")]
Series,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>Sierra ID</b>:", LongName = "<b>Sierra ID</b>:")]
SierraID,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>SS</b>:", LongName = "<b>SS</b>:")]
SSHash,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>SS version</b>:", LongName = "<b>SS version</b>:")]
SSVersion,
@@ -3683,7 +3705,7 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(ShortName = "[T:UID]", LongName = "<b>Ubisoft ID</b>:")]
UbisoftID,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>Universal Hash (SHA-1)</b>:", LongName = "<b>Universal Hash (SHA-1)</b>:")]
UniversalHash,
@@ -3702,11 +3724,11 @@ namespace SabreTools.RedumpLib.Data
[HumanReadable(ShortName = "[T:VCD]", LongName = "<b>V-CD</b>")]
VCD,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>XeMID</b>:", LongName = "<b>XeMID</b>:")]
XeMID,
// TODO: This doesn't have a site tag yet
// This doesn't have a site tag yet
[HumanReadable(ShortName = "<b>XMID</b>:", LongName = "<b>XMID</b>:")]
XMID,
}
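For reference, the ShortName/LongName pairs above are what the library's enum helpers surface at runtime. A minimal sketch, assuming the ShortName()/LongName() extension methods referenced later in Formatter also apply to SiteCode (the extension class itself is not shown in this diff):

using System;
using SabreTools.RedumpLib.Data;

// Values taken from the attributes above; the helper names are an assumption
SiteCode code = SiteCode.DNASDiscID;
Console.WriteLine(code.ShortName()); // "[T:DNAS]"
Console.WriteLine(code.LongName());  // "<b>DNAS Disc ID</b>:"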

File diff suppressed because it is too large


@@ -1,6 +1,8 @@
using System;
using System.Collections.Generic;
#if NET40_OR_GREATER || NETCOREAPP
using System.Linq;
#endif
using Newtonsoft.Json;
using SabreTools.RedumpLib.Converters;
@@ -69,10 +71,24 @@ namespace SabreTools.RedumpLib.Data
public DumpingInfoSection? DumpingInfo { get; set; } = new DumpingInfoSection();
[JsonProperty(PropertyName = "artifacts", DefaultValueHandling = DefaultValueHandling.Ignore)]
public Dictionary<string, string>? Artifacts { get; set; } = new Dictionary<string, string>();
public Dictionary<string, string>? Artifacts { get; set; } = [];
public object Clone()
{
#if NET20 || NET35
Dictionary<string, string>? artifacts = null;
if (this.Artifacts != null)
{
artifacts = new Dictionary<string, string>();
foreach (var kvp in this.Artifacts)
{
artifacts[kvp.Key] = kvp.Value;
}
}
#else
var artifacts = this.Artifacts?.ToDictionary(kvp => kvp.Key, kvp => kvp.Value);
#endif
return new SubmissionInfo
{
SchemaVersion = this.SchemaVersion,
@@ -90,7 +106,7 @@ namespace SabreTools.RedumpLib.Data
TracksAndWriteOffsets = this.TracksAndWriteOffsets?.Clone() as TracksAndWriteOffsetsSection,
SizeAndChecksums = this.SizeAndChecksums?.Clone() as SizeAndChecksumsSection,
DumpingInfo = this.DumpingInfo?.Clone() as DumpingInfoSection,
Artifacts = this.Artifacts?.ToDictionary(kvp => kvp.Key, kvp => kvp.Value),
Artifacts = artifacts,
};
}
}
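The #if NET20 || NET35 branch above copies the dictionary by hand because Enumerable.ToDictionary is unavailable on those targets; the same pattern repeats in the Clone() methods of the sections below. A minimal sketch of a shared helper that would express the idea once (hypothetical, not part of the library):

using System.Collections.Generic;

internal static class CloneHelper // hypothetical name
{
    public static Dictionary<TKey, TValue>? CopyDictionary<TKey, TValue>(Dictionary<TKey, TValue>? source)
    {
        // Manual copy works on every supported target, including net20/net35
        if (source == null)
            return null;

        var copy = new Dictionary<TKey, TValue>(source.Count);
        foreach (var kvp in source)
            copy[kvp.Key] = kvp.Value;
        return copy;
    }
}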
@@ -101,16 +117,16 @@ namespace SabreTools.RedumpLib.Data
public class CommonDiscInfoSection : ICloneable
{
// Name not defined by Redump
[JsonProperty(PropertyName = "d_system", Required = Required.AllowNull)]
[JsonProperty(PropertyName = "d_system", DefaultValueHandling = DefaultValueHandling.Include)]
[JsonConverter(typeof(SystemConverter))]
public RedumpSystem? System { get; set; }
// Name not defined by Redump
[JsonProperty(PropertyName = "d_media", Required = Required.AllowNull)]
[JsonProperty(PropertyName = "d_media", DefaultValueHandling = DefaultValueHandling.Include)]
[JsonConverter(typeof(DiscTypeConverter))]
public DiscType? Media { get; set; }
[JsonProperty(PropertyName = "d_title", Required = Required.AllowNull)]
[JsonProperty(PropertyName = "d_title", DefaultValueHandling = DefaultValueHandling.Include)]
public string? Title { get; set; }
[JsonProperty(PropertyName = "d_title_foreign", DefaultValueHandling = DefaultValueHandling.Ignore)]
@@ -122,15 +138,15 @@ namespace SabreTools.RedumpLib.Data
[JsonProperty(PropertyName = "d_label", NullValueHandling = NullValueHandling.Ignore)]
public string? DiscTitle { get; set; }
[JsonProperty(PropertyName = "d_category", Required = Required.AllowNull)]
[JsonProperty(PropertyName = "d_category", DefaultValueHandling = DefaultValueHandling.Include)]
[JsonConverter(typeof(DiscCategoryConverter))]
public DiscCategory? Category { get; set; }
[JsonProperty(PropertyName = "d_region", Required = Required.AllowNull)]
[JsonProperty(PropertyName = "d_region", DefaultValueHandling = DefaultValueHandling.Include)]
[JsonConverter(typeof(RegionConverter))]
public Region? Region { get; set; }
[JsonProperty(PropertyName = "d_languages", Required = Required.AllowNull)]
[JsonProperty(PropertyName = "d_languages", DefaultValueHandling = DefaultValueHandling.Include)]
[JsonConverter(typeof(LanguageConverter))]
public Language?[]? Languages { get; set; }
@@ -147,7 +163,7 @@ namespace SabreTools.RedumpLib.Data
[JsonProperty(PropertyName = "d_ring_0_id", NullValueHandling = NullValueHandling.Ignore)]
public string? RingId { get; private set; }
[JsonProperty(PropertyName = "d_ring_0_ma1", Required = Required.AllowNull)]
[JsonProperty(PropertyName = "d_ring_0_ma1", DefaultValueHandling = DefaultValueHandling.Include)]
public string? Layer0MasteringRing { get; set; }
[JsonProperty(PropertyName = "d_ring_0_ma1_sid", NullValueHandling = NullValueHandling.Ignore)]
@@ -162,7 +178,7 @@ namespace SabreTools.RedumpLib.Data
[JsonProperty(PropertyName = "d_ring_0_mo1", NullValueHandling = NullValueHandling.Ignore)]
public string? Layer0AdditionalMould { get; set; }
[JsonProperty(PropertyName = "d_ring_0_ma2", Required = Required.AllowNull)]
[JsonProperty(PropertyName = "d_ring_0_ma2", DefaultValueHandling = DefaultValueHandling.Include)]
public string? Layer1MasteringRing { get; set; }
[JsonProperty(PropertyName = "d_ring_0_ma2_sid", NullValueHandling = NullValueHandling.Ignore)]
@@ -177,7 +193,7 @@ namespace SabreTools.RedumpLib.Data
[JsonProperty(PropertyName = "d_ring_0_mo2", NullValueHandling = NullValueHandling.Ignore)]
public string? Layer1AdditionalMould { get; set; }
[JsonProperty(PropertyName = "d_ring_0_ma3", Required = Required.AllowNull)]
[JsonProperty(PropertyName = "d_ring_0_ma3", DefaultValueHandling = DefaultValueHandling.Include)]
public string? Layer2MasteringRing { get; set; }
[JsonProperty(PropertyName = "d_ring_0_ma3_sid", NullValueHandling = NullValueHandling.Ignore)]
@@ -186,7 +202,7 @@ namespace SabreTools.RedumpLib.Data
[JsonProperty(PropertyName = "d_ring_0_ts3", NullValueHandling = NullValueHandling.Ignore)]
public string? Layer2ToolstampMasteringCode { get; set; }
[JsonProperty(PropertyName = "d_ring_0_ma4", Required = Required.AllowNull)]
[JsonProperty(PropertyName = "d_ring_0_ma4", DefaultValueHandling = DefaultValueHandling.Include)]
public string? Layer3MasteringRing { get; set; }
[JsonProperty(PropertyName = "d_ring_0_ma4_sid", NullValueHandling = NullValueHandling.Ignore)]
@@ -233,6 +249,31 @@ namespace SabreTools.RedumpLib.Data
public object Clone()
{
#if NET20 || NET35
Dictionary<SiteCode, string>? commentsSpecialFields = null;
if (this.CommentsSpecialFields != null)
{
commentsSpecialFields = new Dictionary<SiteCode, string>();
foreach (var kvp in this.CommentsSpecialFields)
{
commentsSpecialFields[kvp.Key] = kvp.Value;
}
}
Dictionary<SiteCode, string>? contentsSpecialFields = null;
if (this.ContentsSpecialFields != null)
{
contentsSpecialFields = new Dictionary<SiteCode, string>();
foreach (var kvp in this.ContentsSpecialFields)
{
contentsSpecialFields[kvp.Key] = kvp.Value;
}
}
#else
var commentsSpecialFields = this.CommentsSpecialFields?.ToDictionary(kvp => kvp.Key, kvp => kvp.Value);
var contentsSpecialFields = this.ContentsSpecialFields?.ToDictionary(kvp => kvp.Key, kvp => kvp.Value);
#endif
return new CommonDiscInfoSection
{
System = this.System,
@@ -271,9 +312,9 @@ namespace SabreTools.RedumpLib.Data
EXEDateBuildDate = this.EXEDateBuildDate,
ErrorsCount = this.ErrorsCount,
Comments = this.Comments,
CommentsSpecialFields = this.CommentsSpecialFields?.ToDictionary(kvp => kvp.Key, kvp => kvp.Value),
CommentsSpecialFields = commentsSpecialFields,
Contents = this.Contents,
ContentsSpecialFields = this.ContentsSpecialFields?.ToDictionary(kvp => kvp.Key, kvp => kvp.Value),
ContentsSpecialFields = contentsSpecialFields,
};
}
}
@@ -414,13 +455,27 @@ namespace SabreTools.RedumpLib.Data
public object Clone()
{
#if NET20 || NET35
Dictionary<string, List<string>?>? fullProtections = null;
if (this.FullProtections != null)
{
fullProtections = new Dictionary<string, List<string>?>();
foreach (var kvp in this.FullProtections)
{
fullProtections[kvp.Key] = kvp.Value;
}
}
#else
var fullProtections = this.FullProtections?.ToDictionary(kvp => kvp.Key, kvp => kvp.Value);
#endif
return new CopyProtectionSection
{
AntiModchip = this.AntiModchip,
LibCrypt = this.LibCrypt,
LibCryptData = this.LibCryptData,
Protection = this.Protection,
FullProtections = this.FullProtections?.ToDictionary(kvp => kvp.Key, kvp => kvp.Value),
FullProtections = fullProtections,
SecuROMData = this.SecuROMData,
};
}
@@ -530,40 +585,55 @@ namespace SabreTools.RedumpLib.Data
/// </summary>
public class DumpingInfoSection : ICloneable
{
// Name not defined by Redump -- Only used with MPF
[JsonProperty(PropertyName = "d_frontend_version", DefaultValueHandling = DefaultValueHandling.Include)]
public string? FrontendVersion { get; set; }
// Name not defined by Redump
[JsonProperty(PropertyName = "d_dumping_program", Required = Required.AllowNull)]
[JsonProperty(PropertyName = "d_dumping_program", DefaultValueHandling = DefaultValueHandling.Include)]
public string? DumpingProgram { get; set; }
// Name not defined by Redump
[JsonProperty(PropertyName = "d_dumping_date", Required = Required.AllowNull)]
[JsonProperty(PropertyName = "d_dumping_date", DefaultValueHandling = DefaultValueHandling.Include)]
public string? DumpingDate { get; set; }
// Name not defined by Redump
[JsonProperty(PropertyName = "d_drive_manufacturer", Required = Required.AllowNull)]
[JsonProperty(PropertyName = "d_dumping_params", DefaultValueHandling = DefaultValueHandling.Include)]
public string? DumpingParameters { get; set; }
// Name not defined by Redump
[JsonProperty(PropertyName = "d_drive_manufacturer", DefaultValueHandling = DefaultValueHandling.Include)]
public string? Manufacturer { get; set; }
// Name not defined by Redump
[JsonProperty(PropertyName = "d_drive_model", Required = Required.AllowNull)]
[JsonProperty(PropertyName = "d_drive_model", DefaultValueHandling = DefaultValueHandling.Include)]
public string? Model { get; set; }
// Name not defined by Redump
[JsonProperty(PropertyName = "d_drive_firmware", Required = Required.AllowNull)]
[JsonProperty(PropertyName = "d_drive_firmware", DefaultValueHandling = DefaultValueHandling.Include)]
public string? Firmware { get; set; }
// Name not defined by Redump
[JsonProperty(PropertyName = "d_reported_disc_type", Required = Required.AllowNull)]
[JsonProperty(PropertyName = "d_reported_disc_type", DefaultValueHandling = DefaultValueHandling.Include)]
public string? ReportedDiscType { get; set; }
// Name not defined by Redump -- Only used with Redumper
[JsonProperty(PropertyName = "d_errors_c2", NullValueHandling = NullValueHandling.Ignore)]
public string? C2ErrorsCount { get; set; }
public object Clone()
{
return new DumpingInfoSection
{
FrontendVersion = this.FrontendVersion,
DumpingProgram = this.DumpingProgram,
DumpingDate = this.DumpingDate,
DumpingParameters = this.DumpingParameters,
Manufacturer = this.Manufacturer,
Model = this.Model,
Firmware = this.Firmware,
ReportedDiscType = this.ReportedDiscType,
C2ErrorsCount = this.C2ErrorsCount,
};
}
}


@@ -0,0 +1,75 @@
namespace SabreTools.RedumpLib.Data
{
/// <summary>
/// Template field values for submission info
/// </summary>
internal static class Template
{
// Manual information
public const string TitleField = "Title";
public const string ForeignTitleField = "Foreign Title (Non-latin)";
public const string DiscNumberField = "Disc Number / Letter";
public const string DiscTitleField = "Disc Title";
public const string SystemField = "System";
public const string MediaTypeField = "Media Type";
public const string CategoryField = "Category";
public const string RegionField = "Region";
public const string LanguagesField = "Languages";
public const string PlaystationLanguageSelectionViaField = "Language Selection Via";
public const string DiscSerialField = "Disc Serial";
public const string BarcodeField = "Barcode";
public const string CommentsField = "Comments";
public const string ContentsField = "Contents";
public const string VersionField = "Version";
public const string EditionField = "Edition/Release";
public const string PlayStation3WiiDiscKeyField = "Disc Key";
public const string PlayStation3DiscIDField = "Disc ID";
public const string GameCubeWiiBCAField = "BCA";
public const string CopyProtectionField = "Copy Protection";
public const string MasteringRingField = "Mastering Code (laser branded/etched)";
public const string MasteringSIDField = "Mastering SID Code";
public const string MouldSIDField = "Mould SID Code";
public const string AdditionalMouldField = "Additional Mould";
public const string ToolstampField = "Toolstamp or Mastering Code (engraved/stamped)";
// Automatic Information
public const string FrontendVersionField = "Frontend Version";
public const string DumpingProgramField = "Dumping Program";
public const string DumpingDateField = "Date";
public const string DumpingParametersField = "Parameters";
public const string DumpingDriveManufacturer = "Manufacturer";
public const string DumpingDriveModel = "Model";
public const string DumpingDriveFirmware = "Firmware";
public const string ReportedDiscType = "Reported Disc Type";
public const string C2ErrorCountField = "C2 Error Count";
public const string PVDField = "Primary Volume Descriptor (PVD)";
public const string DATField = "DAT";
public const string SizeField = "Size";
public const string CRC32Field = "CRC32";
public const string MD5Field = "MD5";
public const string SHA1Field = "SHA1";
public const string FullyMatchingIDField = "Fully Matching ID";
public const string PartiallyMatchingIDsField = "Partially Matching IDs";
public const string ErrorCountField = "Error Count";
public const string CuesheetField = "Cuesheet";
public const string SubIntentionField = "SubIntention Data (SecuROM/LibCrypt)";
public const string WriteOffsetField = "Write Offset";
public const string LayerbreakField = "Layerbreak";
public const string EXEDateBuildDate = "EXE/Build Date";
public const string HeaderField = "Header";
public const string PICField = "Permanent Information & Control (PIC)";
public const string PlayStationEDCField = "EDC";
public const string PlayStationAntiModchipField = "Anti-modchip";
public const string PlayStationLibCryptField = "LibCrypt";
public const string XBOXSSRanges = "Security Sector Ranges";
// Default values
public const string RequiredValue = "(REQUIRED)";
public const string RequiredIfExistsValue = "(REQUIRED, IF EXISTS)";
public const string OptionalValue = "(OPTIONAL)";
public const string DiscNotDetected = "Disc Not Detected";
}
}
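Because Template is internal, these constants are consumed inside the library itself, typically paired as "field: default value" lines when a blank submission template is built. A minimal sketch (hypothetical usage):

string blankTitle = $"{Template.TitleField}: {Template.RequiredValue}";
// "Title: (REQUIRED)"
string blankBarcode = $"{Template.BarcodeField}: {Template.RequiredIfExistsValue}";
// "Barcode: (REQUIRED, IF EXISTS)"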


@@ -0,0 +1,187 @@
using System.Collections.Generic;
using System.Threading.Tasks;
using SabreTools.RedumpLib.Data;
using SabreTools.RedumpLib.Web;
namespace SabreTools.RedumpLib
{
/// <summary>
/// Contains logic for dealing with downloads
/// </summary>
public class Downloader
{
#region Properties
/// <summary>
/// Which Redump feature is being used
/// </summary>
public Feature Feature { get; set; }
/// <summary>
/// Minimum ID for downloading page information (Feature.Site, Feature.WIP only)
/// </summary>
public int MinimumId { get; set; }
/// <summary>
/// Maximum ID for downloading page information (Feature.Site, Feature.WIP only)
/// </summary>
public int MaximumId { get; set; }
/// <summary>
/// Quicksearch text for downloading
/// </summary>
public string? QueryString { get; set; }
/// <summary>
/// Directory to save all outputted files to
/// </summary>
public string? OutDir { get; set; }
/// <summary>
/// Use named subfolders for discrete download sets (Feature.Packs only)
/// </summary>
public bool UseSubfolders { get; set; }
/// <summary>
/// Use the last modified page to try to grab all new discs (Feature.Site, Feature.WIP only)
/// </summary>
public bool OnlyNew { get; set; }
/// <summary>
/// Only list the page IDs but don't download
/// </summary>
public bool OnlyList { get; set; }
/// <summary>
/// Don't replace forward slashes with `-` in queries
/// </summary>
public bool NoSlash { get; set; }
/// <summary>
/// Force continuing downloads until user cancels or pages run out
/// </summary>
public bool Force { get; set; }
/// <summary>
/// Redump username
/// </summary>
public string? Username { get; set; }
/// <summary>
/// Redump password
/// </summary>
public string? Password { get; set; }
#endregion
#region Private Vars
/// <summary>
Current HTTP client to use
/// </summary>
private readonly RedumpClient _client;
#endregion
/// <summary>
/// Constructor
/// </summary>
public Downloader()
{
_client = new RedumpClient();
}
/// <summary>
/// Constructor
/// </summary>
/// <param name="client">Preconfigured client</param>
public Downloader(RedumpClient client)
{
_client = client;
}
/// <summary>
/// Run the downloads that should go
/// </summary>
/// <returns>List of IDs that were processed on success, empty on error</returns>
/// <remarks>Packs will never return anything other than empty</remarks>
public async Task<List<int>> Download()
{
// Login to Redump, if possible
if (!_client.LoggedIn)
await _client.Login(Username ?? string.Empty, Password ?? string.Empty);
// Create output list
List<int> processedIds = [];
switch (Feature)
{
case Feature.Packs:
await Packs.DownloadPacks(_client, OutDir, UseSubfolders);
break;
case Feature.Quicksearch:
processedIds = await ProcessQuicksearch();
break;
case Feature.Site:
processedIds = await ProcessSite();
break;
case Feature.User:
processedIds = await ProcessUser();
break;
case Feature.WIP:
processedIds = await ProcessWIP();
break;
default:
return [];
}
return processedIds;
}
/// <summary>
/// Process the Quicksearch feature
/// </summary>
private async Task<List<int>> ProcessQuicksearch()
{
if (OnlyList)
return await Search.ListSearchResults(_client, QueryString, NoSlash);
else
return await Search.DownloadSearchResults(_client, QueryString, OutDir, NoSlash);
}
/// <summary>
/// Process the Site feature
/// </summary>
private async Task<List<int>> ProcessSite()
{
if (OnlyNew)
return await Discs.DownloadLastModified(_client, OutDir, Force);
else
return await Discs.DownloadSiteRange(_client, OutDir, MinimumId, MaximumId);
}
/// <summary>
/// Process the User feature
/// </summary>
private async Task<List<int>> ProcessUser()
{
if (OnlyList)
return await User.ListUser(_client, Username);
else if (OnlyNew)
return await User.DownloadUserLastModified(_client, Username, OutDir);
else
return await User.DownloadUser(_client, Username, OutDir);
}
/// <summary>
/// Process the WIP feature
/// </summary>
private async Task<List<int>> ProcessWIP()
{
if (OnlyNew)
return await WIP.DownloadLastSubmitted(_client, OutDir);
else
return await WIP.DownloadWIPRange(_client, OutDir, MinimumId, MaximumId);
}
}
}
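A minimal usage sketch for the Downloader above, with hypothetical values. Feature, the ID range, and the credential properties come straight from the class; the concrete values (paths, IDs, user name) are placeholders, and Feature is assumed to live in SabreTools.RedumpLib.Data:

using System.Collections.Generic;
using System.Threading.Tasks;
using SabreTools.RedumpLib;
using SabreTools.RedumpLib.Data;

public static async Task<List<int>> DownloadExampleAsync()
{
    var downloader = new Downloader
    {
        Feature = Feature.Site,  // download individual disc pages by ID
        MinimumId = 1,           // hypothetical ID range
        MaximumId = 100,
        OutDir = "redump-out",   // hypothetical output directory
        Username = "someuser",   // hypothetical credentials
        Password = "somepass",
    };

    // Returns the processed IDs on success, or an empty list on error
    return await downloader.Download();
}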


@@ -0,0 +1,9 @@
#if NET20
namespace System.Runtime.CompilerServices
{
[AttributeUsage(AttributeTargets.Assembly | AttributeTargets.Class | AttributeTargets.Method)]
internal sealed class ExtensionAttribute : Attribute {}
}
#endif
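This shim only exists so that extension-method syntax compiles when targeting .NET Framework 2.0, which predates System.Core. A minimal sketch of what it enables (hypothetical helper, not part of the library):

#if NET20
// With the ExtensionAttribute shim above in scope, this compiles on net20
// and is callable as value.HasText()
internal static class StringExtensionsExample
{
    public static bool HasText(this string? value)
    {
        return value != null && value.Trim().Length > 0;
    }
}
#endif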


@@ -0,0 +1,781 @@
using System;
using System.Collections.Generic;
using System.Text;
using System.Text.RegularExpressions;
using SabreTools.RedumpLib.Data;
namespace SabreTools.RedumpLib
{
public static class Formatter
{
/// <summary>
/// Ordered set of comment codes for output
/// </summary>
internal static readonly SiteCode[] OrderedCommentCodes =
[
// Identifying Info
SiteCode.AlternativeTitle,
SiteCode.AlternativeForeignTitle,
SiteCode.InternalName,
SiteCode.InternalSerialName,
SiteCode.VolumeLabel,
SiteCode.Multisession,
SiteCode.UniversalHash,
SiteCode.RingNonZeroDataStart,
SiteCode.RingPerfectAudioOffset,
SiteCode.XMID,
SiteCode.XeMID,
SiteCode.DMIHash,
SiteCode.PFIHash,
SiteCode.SSHash,
SiteCode.SSVersion,
SiteCode.Filename,
SiteCode.Protection,
SiteCode.BBFCRegistrationNumber,
SiteCode.DiscHologramID,
SiteCode.DNASDiscID,
SiteCode.ISBN,
SiteCode.ISSN,
SiteCode.PPN,
SiteCode.VFCCode,
SiteCode.CompatibleOS,
SiteCode.Genre,
SiteCode.Series,
SiteCode.PostgapType,
SiteCode.VCD,
// Publisher / Company IDs
SiteCode.AcclaimID,
SiteCode.ActivisionID,
SiteCode.BandaiID,
SiteCode.BethesdaID,
SiteCode.CDProjektID,
SiteCode.EidosID,
SiteCode.ElectronicArtsID,
SiteCode.FoxInteractiveID,
SiteCode.GTInteractiveID,
SiteCode.JASRACID,
SiteCode.KingRecordsID,
SiteCode.KoeiID,
SiteCode.KonamiID,
SiteCode.LucasArtsID,
SiteCode.MicrosoftID,
SiteCode.NaganoID,
SiteCode.NamcoID,
SiteCode.NipponIchiSoftwareID,
SiteCode.OriginID,
SiteCode.PonyCanyonID,
SiteCode.SegaID,
SiteCode.SelenID,
SiteCode.SierraID,
SiteCode.TaitoID,
SiteCode.UbisoftID,
SiteCode.ValveID,
];
/// <summary>
/// Ordered set of content codes for output
/// </summary>
internal static readonly SiteCode[] OrderedContentCodes =
[
// Applications
SiteCode.Applications,
// Games
SiteCode.Games,
SiteCode.NetYarozeGames,
// Demos
SiteCode.PlayableDemos,
SiteCode.RollingDemos,
SiteCode.TechDemos,
// Video
SiteCode.GameFootage,
SiteCode.Videos,
// Miscellaneous
SiteCode.Patches,
SiteCode.Savegames,
SiteCode.Extras,
];
/// <summary>
/// Format the output data in a human readable way
/// </summary>
/// <param name="info">Information object that should contain normalized values</param>
/// <param name="enableRedumpCompatibility">True to enable Redump compatiblity, false otherwise</param>
/// <returns>String representing each line of an output file, null on error</returns>
public static string? FormatOutputData(SubmissionInfo? info, bool enableRedumpCompatibility, out string? status)
{
// Check to see if the inputs are valid
if (info == null)
{
status = "Submission information was missing";
return null;
}
try
{
// Create the string builder for output
var output = new StringBuilder();
// Preamble for submission
output.AppendLine("Users who wish to submit this information to Redump must ensure that all of the fields below are accurate for the exact media they have.");
output.AppendLine("Please double-check to ensure that there are no fields that need verification, such as the version or copy protection.");
output.AppendLine("If there are no fields in need of verification or all fields are accurate, this preamble can be removed before submission.");
output.AppendLine();
// Common Disc Info section
FormatOutputData(output,
info.CommonDiscInfo,
info.SizeAndChecksums,
info.TracksAndWriteOffsets,
info.FullyMatchedID,
info.PartiallyMatchedIDs);
output.AppendLine();
// Version and Editions section
FormatOutputData(output, info.VersionAndEditions);
output.AppendLine();
// EDC section
FormatOutputData(output, info.EDC, info.CommonDiscInfo?.System);
output.AppendLine();
// Extras section
FormatOutputData(output, info.Extras);
output.AppendLine();
// Copy Protection section
FormatOutputData(output, info.CopyProtection, info.CommonDiscInfo?.System);
output.AppendLine();
// Tracks and Write Offsets section
if (!string.IsNullOrEmpty(info.TracksAndWriteOffsets?.ClrMameProData))
{
FormatOutputData(output, info.TracksAndWriteOffsets!);
output.AppendLine();
}
// Size & Checksum section
else
{
FormatOutputData(output,
info.SizeAndChecksums,
info.CommonDiscInfo?.Media.ToMediaType(),
info.CommonDiscInfo?.System,
enableRedumpCompatibility);
output.AppendLine();
}
// Dumping Info section
FormatOutputData(output, info.DumpingInfo);
status = "Formatting complete!";
// Make sure there aren't any instances of two blank lines in a row
return RemoveConsecutiveEmptyLines(output.ToString());
}
catch (Exception ex)
{
status = $"Error formatting submission info: {ex}";
return null;
}
}
/// <summary>
/// Process any fields that have to be combined
/// </summary>
/// <param name="info">Information object to normalize</param>
public static void ProcessSpecialFields(SubmissionInfo info)
{
// If there is no submission info
if (info?.CommonDiscInfo == null)
return;
// Process the comments field
if (info.CommonDiscInfo.CommentsSpecialFields != null && info.CommonDiscInfo.CommentsSpecialFields.Count > 0)
{
// If the field is missing, add an empty one to fill in
info.CommonDiscInfo.Comments ??= string.Empty;
// Add all special fields before any comments
var orderedTags = OrderCommentTags(info.CommonDiscInfo.CommentsSpecialFields);
var formattedTags = Array.ConvertAll(orderedTags, kvp => FormatSiteTag(kvp.Key, kvp.Value));
info.CommonDiscInfo.Comments = string.Join("\n", formattedTags) + "\n" + info.CommonDiscInfo.Comments;
// Normalize the assembled string
info.CommonDiscInfo.Comments = info.CommonDiscInfo.Comments.Replace("\r\n", "\n");
info.CommonDiscInfo.Comments = info.CommonDiscInfo.Comments.Replace("\n\n", "\n");
info.CommonDiscInfo.Comments = info.CommonDiscInfo.Comments.Trim();
// Wipe out the special fields dictionary
info.CommonDiscInfo.CommentsSpecialFields = null;
}
// Process the contents field
if (info.CommonDiscInfo.ContentsSpecialFields != null && info.CommonDiscInfo.ContentsSpecialFields.Count > 0)
{
// If the field is missing, add an empty one to fill in
info.CommonDiscInfo.Contents ??= string.Empty;
// Add all special fields before any contents
var orderedTags = OrderContentTags(info.CommonDiscInfo.ContentsSpecialFields);
var formattedTags = Array.ConvertAll(orderedTags, kvp => FormatSiteTag(kvp.Key, kvp.Value));
info.CommonDiscInfo.Contents = string.Join("\n", formattedTags) + "\n" + info.CommonDiscInfo.Contents;
// Normalize the assembled string
info.CommonDiscInfo.Contents = info.CommonDiscInfo.Contents.Replace("\r\n", "\n");
info.CommonDiscInfo.Contents = info.CommonDiscInfo.Contents.Replace("\n\n", "\n");
info.CommonDiscInfo.Contents = info.CommonDiscInfo.Contents.Trim();
// Wipe out the special fields dictionary
info.CommonDiscInfo.ContentsSpecialFields = null;
}
}
/// <summary>
/// Format a CommonDiscInfoSection
/// </summary>
internal static void FormatOutputData(StringBuilder output,
CommonDiscInfoSection? section,
SizeAndChecksumsSection? sac,
TracksAndWriteOffsetsSection? tawo,
int? fullyMatchedID,
List<int>? partiallyMatchedIDs)
{
// Sony-printed discs have layers in the opposite order
var system = section?.System;
bool reverseOrder = system.HasReversedRingcodes();
output.AppendLine("Common Disc Info:");
AddIfExists(output, Template.TitleField, section?.Title, 1);
AddIfExists(output, Template.ForeignTitleField, section?.ForeignTitleNonLatin, 1);
AddIfExists(output, Template.DiscNumberField, section?.DiscNumberLetter, 1);
AddIfExists(output, Template.DiscTitleField, section?.DiscTitle, 1);
AddIfExists(output, Template.SystemField, section?.System.LongName(), 1);
AddIfExists(output, Template.MediaTypeField, GetFixedMediaType(
section?.Media.ToMediaType(),
sac?.PICIdentifier,
sac?.Size,
sac?.Layerbreak,
sac?.Layerbreak2,
sac?.Layerbreak3),
1);
AddIfExists(output, Template.CategoryField, section?.Category.LongName(), 1);
AddIfExists(output, Template.FullyMatchingIDField, fullyMatchedID?.ToString(), 1);
AddIfExists(output, Template.PartiallyMatchingIDsField, partiallyMatchedIDs, 1);
AddIfExists(output, Template.RegionField, section?.Region.LongName() ?? "SPACE! (CHANGE THIS)", 1);
AddIfExists(output, Template.LanguagesField,
Array.ConvertAll(section?.Languages ?? [null], l => l.LongName() ?? "ADD LANGUAGES HERE (ONLY IF YOU TESTED)"), 1);
AddIfExists(output, Template.PlaystationLanguageSelectionViaField,
Array.ConvertAll(section?.LanguageSelection ?? [], l => l.LongName()), 1);
AddIfExists(output, Template.DiscSerialField, section?.Serial, 1);
output.AppendLine();
// All ringcode information goes in an indented area
output.AppendLine("\tRingcode Information:");
output.AppendLine();
// If we have a quad-layer disc
if (sac?.Layerbreak3 != default && sac?.Layerbreak3 != default(long))
{
AddIfExists(output, (reverseOrder ? "Layer 0 (Outer) " : "Layer 0 (Inner) ") + Template.MasteringRingField, section?.Layer0MasteringRing, 0);
AddIfExists(output, (reverseOrder ? "Layer 0 (Outer) " : "Layer 0 (Inner) ") + Template.MasteringSIDField, section?.Layer0MasteringSID, 0);
AddIfExists(output, (reverseOrder ? "Layer 0 (Outer) " : "Layer 0 (Inner) ") + Template.ToolstampField, section?.Layer0ToolstampMasteringCode, 0);
AddIfExists(output, "Data Side " + Template.MouldSIDField, section?.Layer0MouldSID, 0);
AddIfExists(output, "Data Side " + Template.AdditionalMouldField, section?.Layer0AdditionalMould, 0);
AddIfExists(output, "Layer 1 " + Template.MasteringRingField, section?.Layer1MasteringRing, 0);
AddIfExists(output, "Layer 1 " + Template.MasteringSIDField, section?.Layer1MasteringSID, 0);
AddIfExists(output, "Layer 1 " + Template.ToolstampField, section?.Layer1ToolstampMasteringCode, 0);
AddIfExists(output, "Label Side " + Template.MouldSIDField, section?.Layer1MouldSID, 0);
AddIfExists(output, "Label Side " + Template.AdditionalMouldField, section?.Layer1AdditionalMould, 0);
AddIfExists(output, "Layer 2 " + Template.MasteringRingField, section?.Layer2MasteringRing, 0);
AddIfExists(output, "Layer 2 " + Template.MasteringSIDField, section?.Layer2MasteringSID, 0);
AddIfExists(output, "Layer 2 " + Template.ToolstampField, section?.Layer2ToolstampMasteringCode, 0);
AddIfExists(output, (reverseOrder ? "Layer 3 (Inner) " : "Layer 3 (Outer) ") + Template.MasteringRingField, section?.Layer3MasteringRing, 0);
AddIfExists(output, (reverseOrder ? "Layer 3 (Inner) " : "Layer 3 (Outer) ") + Template.MasteringSIDField, section?.Layer3MasteringSID, 0);
AddIfExists(output, (reverseOrder ? "Layer 3 (Inner) " : "Layer 3 (Outer) ") + Template.ToolstampField, section?.Layer3ToolstampMasteringCode, 0);
}
// If we have a triple-layer disc
else if (sac?.Layerbreak2 != default && sac?.Layerbreak2 != default(long))
{
AddIfExists(output, (reverseOrder ? "Layer 0 (Outer) " : "Layer 0 (Inner) ") + Template.MasteringRingField, section?.Layer0MasteringRing, 0);
AddIfExists(output, (reverseOrder ? "Layer 0 (Outer) " : "Layer 0 (Inner) ") + Template.MasteringSIDField, section?.Layer0MasteringSID, 0);
AddIfExists(output, (reverseOrder ? "Layer 0 (Outer) " : "Layer 0 (Inner) ") + Template.ToolstampField, section?.Layer0ToolstampMasteringCode, 0);
AddIfExists(output, "Data Side " + Template.MouldSIDField, section?.Layer0MouldSID, 0);
AddIfExists(output, "Data Side " + Template.AdditionalMouldField, section?.Layer0AdditionalMould, 0);
AddIfExists(output, "Layer 1 " + Template.MasteringRingField, section?.Layer1MasteringRing, 0);
AddIfExists(output, "Layer 1 " + Template.MasteringSIDField, section?.Layer1MasteringSID, 0);
AddIfExists(output, "Layer 1 " + Template.ToolstampField, section?.Layer1ToolstampMasteringCode, 0);
AddIfExists(output, "Label Side " + Template.MouldSIDField, section?.Layer1MouldSID, 0);
AddIfExists(output, "Label Side " + Template.AdditionalMouldField, section?.Layer1AdditionalMould, 0);
AddIfExists(output, (reverseOrder ? "Layer 2 (Inner) " : "Layer 2 (Outer) ") + Template.MasteringRingField, section?.Layer2MasteringRing, 0);
AddIfExists(output, (reverseOrder ? "Layer 2 (Inner) " : "Layer 2 (Outer) ") + Template.MasteringSIDField, section?.Layer2MasteringSID, 0);
AddIfExists(output, (reverseOrder ? "Layer 2 (Inner) " : "Layer 2 (Outer) ") + Template.ToolstampField, section?.Layer2ToolstampMasteringCode, 0);
}
// If we have a dual-layer disc
else if (sac?.Layerbreak != default && sac?.Layerbreak != default(long))
{
AddIfExists(output, (reverseOrder ? "Layer 0 (Outer) " : "Layer 0 (Inner) ") + Template.MasteringRingField, section?.Layer0MasteringRing, 0);
AddIfExists(output, (reverseOrder ? "Layer 0 (Outer) " : "Layer 0 (Inner) ") + Template.MasteringSIDField, section?.Layer0MasteringSID, 0);
AddIfExists(output, (reverseOrder ? "Layer 0 (Outer) " : "Layer 0 (Inner) ") + Template.ToolstampField, section?.Layer0ToolstampMasteringCode, 0);
AddIfExists(output, "Data Side " + Template.MouldSIDField, section?.Layer0MouldSID, 0);
AddIfExists(output, "Data Side " + Template.AdditionalMouldField, section?.Layer0AdditionalMould, 0);
AddIfExists(output, (reverseOrder ? "Layer 1 (Inner) " : "Layer 1 (Outer) ") + Template.MasteringRingField, section?.Layer1MasteringRing, 0);
AddIfExists(output, (reverseOrder ? "Layer 1 (Inner) " : "Layer 1 (Outer) ") + Template.MasteringSIDField, section?.Layer1MasteringSID, 0);
AddIfExists(output, (reverseOrder ? "Layer 1 (Inner) " : "Layer 1 (Outer) ") + Template.ToolstampField, section?.Layer1ToolstampMasteringCode, 0);
AddIfExists(output, "Label Side " + Template.MouldSIDField, section?.Layer1MouldSID, 0);
AddIfExists(output, "Label Side " + Template.AdditionalMouldField, section?.Layer1AdditionalMould, 0);
}
// If we have a single-layer disc
else
{
AddIfExists(output, "Data Side " + Template.MasteringRingField, section?.Layer0MasteringRing, 0);
AddIfExists(output, "Data Side " + Template.MasteringSIDField, section?.Layer0MasteringSID, 0);
AddIfExists(output, "Data Side " + Template.ToolstampField, section?.Layer0ToolstampMasteringCode, 0);
AddIfExists(output, "Data Side " + Template.MouldSIDField, section?.Layer0MouldSID, 0);
AddIfExists(output, "Data Side " + Template.AdditionalMouldField, section?.Layer0AdditionalMould, 0);
AddIfExists(output, "Label Side " + Template.MasteringRingField, section?.Layer1MasteringRing, 0);
AddIfExists(output, "Label Side " + Template.MasteringSIDField, section?.Layer1MasteringSID, 0);
AddIfExists(output, "Label Side " + Template.ToolstampField, section?.Layer1ToolstampMasteringCode, 0);
AddIfExists(output, "Label Side " + Template.MouldSIDField, section?.Layer1MouldSID, 0);
AddIfExists(output, "Label Side " + Template.AdditionalMouldField, section?.Layer1AdditionalMould, 0);
}
var offset = tawo?.OtherWriteOffsets;
if (int.TryParse(offset, out int i))
offset = i.ToString("+#;-#;0");
AddIfExists(output, Template.WriteOffsetField, offset, 0);
output.AppendLine();
AddIfExists(output, Template.BarcodeField, section?.Barcode, 1);
AddIfExists(output, Template.EXEDateBuildDate, section?.EXEDateBuildDate, 1);
AddIfExists(output, Template.ErrorCountField, section?.ErrorsCount, 1);
AddIfExists(output, Template.CommentsField, section?.Comments?.Trim(), 1);
AddIfExists(output, Template.ContentsField, section?.Contents?.Trim(), 1);
}
/// <summary>
/// Format a VersionAndEditionsSection
/// </summary>
internal static void FormatOutputData(StringBuilder output, VersionAndEditionsSection? section)
{
output.AppendLine("Version and Editions:");
AddIfExists(output, Template.VersionField, section?.Version, 1);
AddIfExists(output, Template.EditionField, section?.OtherEditions, 1);
}
/// <summary>
/// Format an EDCSection
/// </summary>
internal static void FormatOutputData(StringBuilder output, EDCSection? section, RedumpSystem? system)
{
// Check the section can be added
if (system != RedumpSystem.SonyPlayStation)
return;
output.AppendLine("EDC:");
AddIfExists(output, Template.PlayStationEDCField, section?.EDC.LongName(), 1);
}
/// <summary>
/// Format an ExtrasSection
/// </summary>
internal static void FormatOutputData(StringBuilder output, ExtrasSection? section)
{
// Optional sections have to exist to format
if (section == null)
return;
// Check the section can be added
if (section.PVD == null
&& section.PIC == null
&& section.BCA == null
&& section.SecuritySectorRanges == null)
{
return;
}
output.AppendLine("Extras:");
AddIfExists(output, Template.PVDField, section.PVD?.Trim(), 1);
AddIfExists(output, Template.PlayStation3WiiDiscKeyField, section.DiscKey, 1);
AddIfExists(output, Template.PlayStation3DiscIDField, section.DiscID, 1);
AddIfExists(output, Template.PICField, section.PIC, 1);
AddIfExists(output, Template.HeaderField, section.Header, 1);
AddIfExists(output, Template.GameCubeWiiBCAField, section.BCA, 1);
AddIfExists(output, Template.XBOXSSRanges, section.SecuritySectorRanges, 1);
}
/// <summary>
/// Format a CopyProtectionSection
/// </summary>
internal static void FormatOutputData(StringBuilder output,
CopyProtectionSection? section,
RedumpSystem? system)
{
// Optional sections have to exist to format
if (section == null)
return;
// Check the section can be added
if (string.IsNullOrEmpty(section.Protection)
&& (section.AntiModchip == null || section.AntiModchip == YesNo.NULL)
&& (section.LibCrypt == null || section.LibCrypt == YesNo.NULL)
&& string.IsNullOrEmpty(section.LibCryptData)
&& string.IsNullOrEmpty(section.SecuROMData))
{
return;
}
output.AppendLine("Copy Protection:");
if (system == RedumpSystem.SonyPlayStation)
{
AddIfExists(output, Template.PlayStationAntiModchipField, section.AntiModchip.LongName(), 1);
AddIfExists(output, Template.PlayStationLibCryptField, section.LibCrypt.LongName(), 1);
AddIfExists(output, Template.SubIntentionField, section.LibCryptData, 1);
}
AddIfExists(output, Template.CopyProtectionField, section.Protection, 1);
AddIfExists(output, Template.SubIntentionField, section.SecuROMData, 1);
}
/// <summary>
/// Format a TracksAndWriteOffsetsSection
/// </summary>
internal static void FormatOutputData(StringBuilder output, TracksAndWriteOffsetsSection section)
{
output.AppendLine("Tracks and Write Offsets:");
AddIfExists(output, Template.DATField, section.ClrMameProData + "\n", 1);
AddIfExists(output, Template.CuesheetField, section.Cuesheet, 1);
var offset = section.OtherWriteOffsets;
if (int.TryParse(offset, out int i))
offset = i.ToString("+#;-#;0");
AddIfExists(output, Template.WriteOffsetField, offset, 1);
}
/// <summary>
/// Format a SizeAndChecksumsSection
/// </summary>
internal static void FormatOutputData(StringBuilder output,
SizeAndChecksumsSection? section,
MediaType? mediaType,
RedumpSystem? system,
bool enableRedumpCompatibility)
{
output.AppendLine("Size & Checksum:");
// Gross hack because of automatic layerbreaks in Redump
if (!enableRedumpCompatibility
|| (mediaType != MediaType.BluRay && system.IsXGD() == false))
{
AddIfExists(output, Template.LayerbreakField, section?.Layerbreak, 1);
}
AddIfExists(output, Template.SizeField, section?.Size.ToString(), 1);
AddIfExists(output, Template.CRC32Field, section?.CRC32, 1);
AddIfExists(output, Template.MD5Field, section?.MD5, 1);
AddIfExists(output, Template.SHA1Field, section?.SHA1, 1);
}
/// <summary>
/// Format a DumpingInfoSection
/// </summary>
internal static void FormatOutputData(StringBuilder output, DumpingInfoSection? section)
{
output.AppendLine("Dumping Info:");
AddIfExists(output, Template.FrontendVersionField, section?.FrontendVersion, 1);
AddIfExists(output, Template.DumpingProgramField, section?.DumpingProgram, 1);
AddIfExists(output, Template.DumpingDateField, section?.DumpingDate, 1);
AddIfExists(output, Template.DumpingParametersField, section?.DumpingParameters, 1);
AddIfExists(output, Template.DumpingDriveManufacturer, section?.Manufacturer, 1);
AddIfExists(output, Template.DumpingDriveModel, section?.Model, 1);
AddIfExists(output, Template.DumpingDriveFirmware, section?.Firmware, 1);
AddIfExists(output, Template.ReportedDiscType, section?.ReportedDiscType, 1);
AddIfExists(output, Template.C2ErrorCountField, section?.C2ErrorsCount, 1);
}
#region Helpers
/// <summary>
/// Add the properly formatted key and value, if possible
/// </summary>
/// <param name="output">String builder representing the output</param>
/// <param name="key">Name of the output key to write</param>
/// <param name="value">Name of the output value to write</param>
/// <param name="indent">Number of tabs to indent the line</param>
private static void AddIfExists(StringBuilder output, string key, string? value, int indent)
{
// If there's no valid value to write
if (value == null)
return;
string prefix = string.Empty;
for (int i = 0; i < indent; i++)
prefix += "\t";
// Skip fields that need to keep internal whitespace intact
if (key != "Primary Volume Descriptor (PVD)"
&& key != "Header"
&& key != "Cuesheet")
{
// Convert to tabs
#if NETCOREAPP
value = value.Replace("<tab>", "\t", StringComparison.OrdinalIgnoreCase);
#else
value = value.Replace("<tab>", "\t");
value = value.Replace("<TAB>", "\t");
value = value.Replace("<Tab>", "\t");
#endif
value = value.Replace(" ", "\t");
// Sanitize whitespace around tabs
value = Regex.Replace(value, @"\s*\t\s*", "\t", RegexOptions.Compiled);
}
// If the value contains a newline
value = value.Replace("\r\n", "\n");
if (value.Contains("\n"))
{
output.AppendLine(prefix + key + ":");
output.AppendLine();
string[] values = value.Split('\n');
foreach (string val in values)
output.AppendLine(val);
output.AppendLine();
}
// For all regular values
else
{
output.AppendLine(prefix + key + ": " + value);
}
}
/// <summary>
/// Add the properly formatted key and value, if possible
/// </summary>
/// <param name="output">String builder representing the output</param>
/// <param name="key">Name of the output key to write</param>
/// <param name="value">Name of the output value to write</param>
/// <param name="indent">Number of tabs to indent the line</param>
private static void AddIfExists(StringBuilder output, string key, string?[]? value, int indent)
{
// If there's no valid value to write
if (value == null || value.Length == 0)
return;
AddIfExists(output, key, string.Join(", ", value), indent);
}
/// <summary>
/// Add the properly formatted key and value, if possible
/// </summary>
/// <param name="output">String builder representing the output</param>
/// <param name="key">Name of the output key to write</param>
/// <param name="value">Name of the output value to write</param>
/// <param name="indent">Number of tabs to indent the line</param>
private static void AddIfExists(StringBuilder output, string key, long? value, int indent)
{
// If there's no valid value to write
if (value == null || value == default(long))
return;
string prefix = string.Empty;
for (int i = 0; i < indent; i++)
prefix += "\t";
output.AppendLine(prefix + key + ": " + value);
}
/// <summary>
/// Add the properly formatted key and value, if possible
/// </summary>
/// <param name="output">String builder representing the output</param>
/// <param name="key">Name of the output key to write</param>
/// <param name="value">Name of the output value to write</param>
/// <param name="indent">Number of tabs to indent the line</param>
private static void AddIfExists(StringBuilder output, string key, List<int>? value, int indent)
{
// If there's no valid value to write
if (value == null || value.Count == 0)
return;
AddIfExists(output, key, string.Join(", ", [.. value.ConvertAll(o => o.ToString())]), indent);
}
/// <summary>
/// Format a single site tag to string
/// </summary>
/// <param name="code">Site tag to format</param>
/// <param name="value">String value to use</param>
/// <returns>String-formatted tag and value</returns>
internal static string FormatSiteTag(SiteCode code, string value)
{
// Do not format empty tags
if (value.Length == 0)
return string.Empty;
bool isMultiLine = code.IsMultiLine();
string line = $"{code.ShortName()}{(isMultiLine ? "\n" : " ")}";
// Special case for boolean fields
if (code.IsBoolean())
{
if (value != true.ToString())
return string.Empty;
return line.Trim();
}
return $"{line}{value}{(isMultiLine ? "\n" : string.Empty)}";
}
/// <summary>
/// Get the adjusted name of the media based on layers, if applicable
/// </summary>
/// <param name="mediaType">MediaType to get the proper name for</param>
/// <param name="picIdentifier">PIC identifier string (BD only)</param>
/// <param name="size">Size of the current media</param>
/// <param name="layerbreak">First layerbreak value, as applicable</param>
/// <param name="layerbreak2">Second layerbreak value, as applicable</param>
/// <param name="layerbreak3">Third layerbreak value, as applicable</param>
/// <returns>String representation of the media, including layer specification</returns>
internal static string? GetFixedMediaType(MediaType? mediaType, string? picIdentifier, long? size, long? layerbreak, long? layerbreak2, long? layerbreak3)
{
switch (mediaType)
{
case MediaType.DVD:
if (layerbreak != default && layerbreak != default(long))
return $"{mediaType.LongName()}-9";
else
return $"{mediaType.LongName()}-5";
case MediaType.BluRay:
if (layerbreak3 != default && layerbreak3 != default(long))
return $"{mediaType.LongName()}-128";
else if (layerbreak2 != default && layerbreak2 != default(long))
return $"{mediaType.LongName()}-100";
else if (layerbreak != default && layerbreak != default(long) && picIdentifier == Models.PIC.Constants.DiscTypeIdentifierROMUltra)
return $"{mediaType.LongName()}-66";
else if (layerbreak != default && layerbreak != default(long) && size > 53_687_063_712)
return $"{mediaType.LongName()}-66";
else if (layerbreak != default && layerbreak != default(long))
return $"{mediaType.LongName()}-50";
else if (picIdentifier == Models.PIC.Constants.DiscTypeIdentifierROMUltra)
return $"{mediaType.LongName()}-33";
else if (size > 26_843_531_856)
return $"{mediaType.LongName()}-33";
else
return $"{mediaType.LongName()}-25";
case MediaType.HDDVD:
if (layerbreak != default && layerbreak != default(long))
return $"{mediaType.LongName()}-DL";
else
return $"{mediaType.LongName()}-SL";
case MediaType.UMD:
if (layerbreak != default && layerbreak != default(long))
return $"{mediaType.LongName()}-DL";
else
return $"{mediaType.LongName()}-SL";
default:
return mediaType.LongName();
}
}
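// Two concrete data points for the BluRay sizing rules above (illustration only; the method is
// internal, so calls like these would live inside the assembly or in tests, and only the numeric
// suffix is noted because the prefix comes from MediaType.LongName()):
//   GetFixedMediaType(MediaType.BluRay, null, 20_000_000_000, null, null, null)
//     -> no layerbreak and size under 26,843,531,856 bytes, so the name ends in "-25"
//   GetFixedMediaType(MediaType.BluRay, null, 60_000_000_000, 30_000_000_000, null, null)
//     -> one layerbreak with size over 53,687,063,712 bytes, so the name ends in "-66"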
/// <summary>
/// Order comment code tags according to Redump requirements
/// </summary>
/// <returns>Ordered list of KeyValuePairs representing the tags and values</returns>
internal static KeyValuePair<SiteCode, string>[] OrderCommentTags(Dictionary<SiteCode, string> tags)
{
// If the input is invalid, just return an empty set
if (tags == null || tags.Count == 0)
return [];
// Loop through the ordered set of codes and add if needed
var sorted = new List<KeyValuePair<SiteCode, string>>();
foreach (var code in OrderedCommentCodes)
{
// Only add if it exists
if (!tags.ContainsKey(code))
continue;
// Get the tag value
string value = tags[code];
if (value.Length == 0)
continue;
// Add to the set
sorted.Add(new KeyValuePair<SiteCode, string>(code, value));
}
return [.. sorted];
}
/// <summary>
/// Order content code tags according to Redump requirements
/// </summary>
/// <returns>Ordered list of KeyValuePairs representing the tags and values</returns>
internal static KeyValuePair<SiteCode, string>[] OrderContentTags(Dictionary<SiteCode, string> tags)
{
// If the input is invalid, just return an empty set
if (tags == null || tags.Count == 0)
return [];
// Loop through the ordered set of codes and add if needed
var sorted = new List<KeyValuePair<SiteCode, string>>();
foreach (var code in OrderedContentCodes)
{
// Only add if it exists
if (!tags.ContainsKey(code))
continue;
// Get the tag value
string value = tags[code];
if (value.Length == 0)
continue;
// Add to the set
sorted.Add(new KeyValuePair<SiteCode, string>(code, value));
}
return [.. sorted];
}
/// <summary>
/// Make sure there aren't any instances of two blank lines in a row
/// </summary>
internal static string RemoveConsecutiveEmptyLines(string str)
{
str = Regex.Replace(str, @"(\r\n){2,}", "\r\n\r\n");
return Regex.Replace(str, @"(\n){2,}", "\n\n");
}
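// For example (within the assembly, since this helper is internal), any run of blank lines
// collapses to a single blank line:
//   RemoveConsecutiveEmptyLines("Line 1\n\n\n\nLine 2\n") == "Line 1\n\nLine 2\n"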
#endregion
}
}
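A minimal usage sketch of the two public Formatter entry points, with hypothetical field values; it assumes CommonDiscInfo is an ordinary settable property like the other section properties shown in this diff:

using System;
using SabreTools.RedumpLib;
using SabreTools.RedumpLib.Data;

var info = new SubmissionInfo
{
    CommonDiscInfo = new CommonDiscInfoSection
    {
        Title = "Example Title", // hypothetical value
    },
};

// Fold any special comment/content tags into the free-text fields first
Formatter.ProcessSpecialFields(info);

string? formatted = Formatter.FormatOutputData(info, enableRedumpCompatibility: true, out string? status);
Console.WriteLine(status); // "Formatting complete!" on success
if (formatted != null)
    Console.WriteLine(formatted);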


@@ -0,0 +1,474 @@
#if NET20 || NET35
using System;
using System.Collections.Generic;
using System.Globalization;
using System.Text;
namespace SabreTools.RedumpLib
{
/// <see href="https://github.com/dotnet/runtime/blob/main/src/libraries/System.Private.CoreLib/src/System/Net/WebUtility.cs"/>
internal static class WebUtility
{
// some consts copied from Char / CharUnicodeInfo since we don't have friend access to those types
private const char HIGH_SURROGATE_START = '\uD800';
private const char LOW_SURROGATE_START = '\uDC00';
private const char LOW_SURROGATE_END = '\uDFFF';
private const int UNICODE_PLANE00_END = 0x00FFFF;
private const int UNICODE_PLANE01_START = 0x10000;
private const int UNICODE_PLANE16_END = 0x10FFFF;
public static string? HtmlDecode(string? value)
{
if (string.IsNullOrEmpty(value))
{
return value;
}
char[] valueSpan = value!.ToCharArray();
int index = Array.IndexOf(valueSpan, '&');
if (index < 0)
{
return value;
}
// In the worst case the decoded string has the same length.
// A fixed-capacity builder is enough for small inputs.
StringBuilder sb = value.Length <= 256 ?
new StringBuilder(256) :
new StringBuilder(value.Length);
char[] take = new char[index];
Array.Copy(valueSpan, take, index);
sb.Append(take);
char[] skip = new char[valueSpan.Length - index];
Array.Copy(valueSpan, index, skip, 0, skip.Length);
HtmlDecode(skip, ref sb);
return sb.ToString();
}
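// A minimal usage sketch (within the assembly; this shim only compiles for net20/net35, where it
// stands in for System.Net.WebUtility). Numeric entities take the &# branch handled below, while
// named entities go through the HtmlEntities lookup table:
//   WebUtility.HtmlDecode("Caf&#233; &amp; bar") == "Café & bar"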
private static void HtmlDecode(char[] input, ref StringBuilder output)
{
for (int i = 0; i < input.Length; i++)
{
char ch = input[i];
if (ch == '&')
{
// We found a '&'. Now look for the next ';' or '&'. The idea is that
// if we find another '&' before finding a ';', then this is not an entity,
// and the next '&' might start a real entity (VSWhidbey 275184)
char[] inputSlice = new char[input.Length - (i + 1)];
Array.Copy(input, i + 1, inputSlice, 0, inputSlice.Length);
int semicolonPos = Array.IndexOf(inputSlice, ';');
int ampersandPos = Array.IndexOf(inputSlice, '&');
int entityLength;
if (semicolonPos > -1 && ampersandPos > -1)
entityLength = Math.Min(semicolonPos, ampersandPos);
else if (semicolonPos <= -1 && ampersandPos > -1)
entityLength = ampersandPos;
else if (semicolonPos > -1 && ampersandPos <= -1)
entityLength = semicolonPos;
else
entityLength = -1;
if (entityLength >= 0 && inputSlice[entityLength] == ';')
{
int entityEndPosition = (i + 1) + entityLength;
if (entityLength > 1 && inputSlice[0] == '#')
{
// The # syntax can be in decimal or hex, e.g.
// &#229; --> decimal
// &#xE5; --> same char in hex
// See http://www.w3.org/TR/REC-html40/charset.html#entities
int offset = inputSlice[1] == 'x' || inputSlice[1] == 'X' ? 2 : 1;
char[] inputSliceNoPrefix = new char[entityLength - offset];
Array.Copy(inputSlice, offset, inputSliceNoPrefix, 0, inputSliceNoPrefix.Length);
bool parsedSuccessfully = inputSlice[1] == 'x' || inputSlice[1] == 'X'
? uint.TryParse(new string(inputSliceNoPrefix), NumberStyles.AllowHexSpecifier, CultureInfo.InvariantCulture, out uint parsedValue)
: uint.TryParse(new string(inputSliceNoPrefix), NumberStyles.Integer, CultureInfo.InvariantCulture, out parsedValue);
if (parsedSuccessfully)
{
// decoded character must be U+0000 .. U+10FFFF, excluding surrogates
parsedSuccessfully = ((parsedValue < HIGH_SURROGATE_START) || (LOW_SURROGATE_END < parsedValue && parsedValue <= UNICODE_PLANE16_END));
}
if (parsedSuccessfully)
{
if (parsedValue <= UNICODE_PLANE00_END)
{
// single character
output.Append((char)parsedValue);
}
else
{
// multi-character
ConvertSmpToUtf16(parsedValue, out char leadingSurrogate, out char trailingSurrogate);
output.Append(leadingSurrogate);
output.Append(trailingSurrogate);
}
i = entityEndPosition; // already looked at everything until semicolon
continue;
}
}
else
{
char[] entity = new char[entityLength];
Array.Copy(inputSlice, entity, entityLength);
i = entityEndPosition; // already looked at everything until semicolon
char entityChar = HtmlEntities.Lookup(entity);
if (entityChar != (char)0)
{
ch = entityChar;
}
else
{
output.Append('&');
output.Append(entity);
output.Append(';');
continue;
}
}
}
}
output.Append(ch);
}
}
// similar to Char.ConvertFromUtf32, but doesn't check arguments or generate strings
// input is assumed to be an SMP character
private static void ConvertSmpToUtf16(uint smpChar, out char leadingSurrogate, out char trailingSurrogate)
{
int utf32 = (int)(smpChar - UNICODE_PLANE01_START);
leadingSurrogate = (char)((utf32 / 0x400) + HIGH_SURROGATE_START);
trailingSurrogate = (char)((utf32 % 0x400) + LOW_SURROGATE_START);
}
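// Worked example (illustrative, not part of the original source): for the SMP code
// point U+1F600, utf32 = 0x1F600 - 0x10000 = 0xF600, so the leading surrogate is
// 0xD800 + (0xF600 / 0x400) = 0xD83D and the trailing surrogate is
// 0xDC00 + (0xF600 % 0x400) = 0xDE00, i.e. the UTF-16 pair D83D DE00.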
// helper class for lookup of HTML encoding entities
private static class HtmlEntities
{
// The list is from http://www.w3.org/TR/REC-html40/sgml/entities.html, except for &apos;, which
// is defined in http://www.w3.org/TR/2008/REC-xml-20081126/#sec-predefined-ent.
private static Dictionary<ulong, char> InitializeLookupTable()
{
byte[] tableData =
[
0x74, 0x6F, 0x75, 0x71, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("quot")*/ 0x22, 0x00, /*'\x0022'*/
0x70, 0x6D, 0x61, 0x00, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("amp")*/ 0x26, 0x00, /*'\x0026'*/
0x73, 0x6F, 0x70, 0x61, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("apos")*/ 0x27, 0x00, /*'\x0027'*/
0x74, 0x6C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("lt")*/ 0x3C, 0x00, /*'\x003c'*/
0x74, 0x67, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("gt")*/ 0x3E, 0x00, /*'\x003e'*/
0x70, 0x73, 0x62, 0x6E, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("nbsp")*/ 0xA0, 0x00, /*'\x00a0'*/
0x6C, 0x63, 0x78, 0x65, 0x69, 0x00, 0x00, 0x00, /*ToUInt64Key("iexcl")*/ 0xA1, 0x00, /*'\x00a1'*/
0x74, 0x6E, 0x65, 0x63, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("cent")*/ 0xA2, 0x00, /*'\x00a2'*/
0x64, 0x6E, 0x75, 0x6F, 0x70, 0x00, 0x00, 0x00, /*ToUInt64Key("pound")*/ 0xA3, 0x00, /*'\x00a3'*/
0x6E, 0x65, 0x72, 0x72, 0x75, 0x63, 0x00, 0x00, /*ToUInt64Key("curren")*/ 0xA4, 0x00, /*'\x00a4'*/
0x6E, 0x65, 0x79, 0x00, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("yen")*/ 0xA5, 0x00, /*'\x00a5'*/
0x72, 0x61, 0x62, 0x76, 0x72, 0x62, 0x00, 0x00, /*ToUInt64Key("brvbar")*/ 0xA6, 0x00, /*'\x00a6'*/
0x74, 0x63, 0x65, 0x73, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("sect")*/ 0xA7, 0x00, /*'\x00a7'*/
0x6C, 0x6D, 0x75, 0x00, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("uml")*/ 0xA8, 0x00, /*'\x00a8'*/
0x79, 0x70, 0x6F, 0x63, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("copy")*/ 0xA9, 0x00, /*'\x00a9'*/
0x66, 0x64, 0x72, 0x6F, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("ordf")*/ 0xAA, 0x00, /*'\x00aa'*/
0x6F, 0x75, 0x71, 0x61, 0x6C, 0x00, 0x00, 0x00, /*ToUInt64Key("laquo")*/ 0xAB, 0x00, /*'\x00ab'*/
0x74, 0x6F, 0x6E, 0x00, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("not")*/ 0xAC, 0x00, /*'\x00ac'*/
0x79, 0x68, 0x73, 0x00, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("shy")*/ 0xAD, 0x00, /*'\x00ad'*/
0x67, 0x65, 0x72, 0x00, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("reg")*/ 0xAE, 0x00, /*'\x00ae'*/
0x72, 0x63, 0x61, 0x6D, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("macr")*/ 0xAF, 0x00, /*'\x00af'*/
0x67, 0x65, 0x64, 0x00, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("deg")*/ 0xB0, 0x00, /*'\x00b0'*/
0x6E, 0x6D, 0x73, 0x75, 0x6C, 0x70, 0x00, 0x00, /*ToUInt64Key("plusmn")*/ 0xB1, 0x00, /*'\x00b1'*/
0x32, 0x70, 0x75, 0x73, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("sup2")*/ 0xB2, 0x00, /*'\x00b2'*/
0x33, 0x70, 0x75, 0x73, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("sup3")*/ 0xB3, 0x00, /*'\x00b3'*/
0x65, 0x74, 0x75, 0x63, 0x61, 0x00, 0x00, 0x00, /*ToUInt64Key("acute")*/ 0xB4, 0x00, /*'\x00b4'*/
0x6F, 0x72, 0x63, 0x69, 0x6D, 0x00, 0x00, 0x00, /*ToUInt64Key("micro")*/ 0xB5, 0x00, /*'\x00b5'*/
0x61, 0x72, 0x61, 0x70, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("para")*/ 0xB6, 0x00, /*'\x00b6'*/
0x74, 0x6F, 0x64, 0x64, 0x69, 0x6D, 0x00, 0x00, /*ToUInt64Key("middot")*/ 0xB7, 0x00, /*'\x00b7'*/
0x6C, 0x69, 0x64, 0x65, 0x63, 0x00, 0x00, 0x00, /*ToUInt64Key("cedil")*/ 0xB8, 0x00, /*'\x00b8'*/
0x31, 0x70, 0x75, 0x73, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("sup1")*/ 0xB9, 0x00, /*'\x00b9'*/
0x6D, 0x64, 0x72, 0x6F, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("ordm")*/ 0xBA, 0x00, /*'\x00ba'*/
0x6F, 0x75, 0x71, 0x61, 0x72, 0x00, 0x00, 0x00, /*ToUInt64Key("raquo")*/ 0xBB, 0x00, /*'\x00bb'*/
0x34, 0x31, 0x63, 0x61, 0x72, 0x66, 0x00, 0x00, /*ToUInt64Key("frac14")*/ 0xBC, 0x00, /*'\x00bc'*/
0x32, 0x31, 0x63, 0x61, 0x72, 0x66, 0x00, 0x00, /*ToUInt64Key("frac12")*/ 0xBD, 0x00, /*'\x00bd'*/
0x34, 0x33, 0x63, 0x61, 0x72, 0x66, 0x00, 0x00, /*ToUInt64Key("frac34")*/ 0xBE, 0x00, /*'\x00be'*/
0x74, 0x73, 0x65, 0x75, 0x71, 0x69, 0x00, 0x00, /*ToUInt64Key("iquest")*/ 0xBF, 0x00, /*'\x00bf'*/
0x65, 0x76, 0x61, 0x72, 0x67, 0x41, 0x00, 0x00, /*ToUInt64Key("Agrave")*/ 0xC0, 0x00, /*'\x00c0'*/
0x65, 0x74, 0x75, 0x63, 0x61, 0x41, 0x00, 0x00, /*ToUInt64Key("Aacute")*/ 0xC1, 0x00, /*'\x00c1'*/
0x63, 0x72, 0x69, 0x63, 0x41, 0x00, 0x00, 0x00, /*ToUInt64Key("Acirc")*/ 0xC2, 0x00, /*'\x00c2'*/
0x65, 0x64, 0x6C, 0x69, 0x74, 0x41, 0x00, 0x00, /*ToUInt64Key("Atilde")*/ 0xC3, 0x00, /*'\x00c3'*/
0x6C, 0x6D, 0x75, 0x41, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("Auml")*/ 0xC4, 0x00, /*'\x00c4'*/
0x67, 0x6E, 0x69, 0x72, 0x41, 0x00, 0x00, 0x00, /*ToUInt64Key("Aring")*/ 0xC5, 0x00, /*'\x00c5'*/
0x67, 0x69, 0x6C, 0x45, 0x41, 0x00, 0x00, 0x00, /*ToUInt64Key("AElig")*/ 0xC6, 0x00, /*'\x00c6'*/
0x6C, 0x69, 0x64, 0x65, 0x63, 0x43, 0x00, 0x00, /*ToUInt64Key("Ccedil")*/ 0xC7, 0x00, /*'\x00c7'*/
0x65, 0x76, 0x61, 0x72, 0x67, 0x45, 0x00, 0x00, /*ToUInt64Key("Egrave")*/ 0xC8, 0x00, /*'\x00c8'*/
0x65, 0x74, 0x75, 0x63, 0x61, 0x45, 0x00, 0x00, /*ToUInt64Key("Eacute")*/ 0xC9, 0x00, /*'\x00c9'*/
0x63, 0x72, 0x69, 0x63, 0x45, 0x00, 0x00, 0x00, /*ToUInt64Key("Ecirc")*/ 0xCA, 0x00, /*'\x00ca'*/
0x6C, 0x6D, 0x75, 0x45, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("Euml")*/ 0xCB, 0x00, /*'\x00cb'*/
0x65, 0x76, 0x61, 0x72, 0x67, 0x49, 0x00, 0x00, /*ToUInt64Key("Igrave")*/ 0xCC, 0x00, /*'\x00cc'*/
0x65, 0x74, 0x75, 0x63, 0x61, 0x49, 0x00, 0x00, /*ToUInt64Key("Iacute")*/ 0xCD, 0x00, /*'\x00cd'*/
0x63, 0x72, 0x69, 0x63, 0x49, 0x00, 0x00, 0x00, /*ToUInt64Key("Icirc")*/ 0xCE, 0x00, /*'\x00ce'*/
0x6C, 0x6D, 0x75, 0x49, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("Iuml")*/ 0xCF, 0x00, /*'\x00cf'*/
0x48, 0x54, 0x45, 0x00, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("ETH")*/ 0xD0, 0x00, /*'\x00d0'*/
0x65, 0x64, 0x6C, 0x69, 0x74, 0x4E, 0x00, 0x00, /*ToUInt64Key("Ntilde")*/ 0xD1, 0x00, /*'\x00d1'*/
0x65, 0x76, 0x61, 0x72, 0x67, 0x4F, 0x00, 0x00, /*ToUInt64Key("Ograve")*/ 0xD2, 0x00, /*'\x00d2'*/
0x65, 0x74, 0x75, 0x63, 0x61, 0x4F, 0x00, 0x00, /*ToUInt64Key("Oacute")*/ 0xD3, 0x00, /*'\x00d3'*/
0x63, 0x72, 0x69, 0x63, 0x4F, 0x00, 0x00, 0x00, /*ToUInt64Key("Ocirc")*/ 0xD4, 0x00, /*'\x00d4'*/
0x65, 0x64, 0x6C, 0x69, 0x74, 0x4F, 0x00, 0x00, /*ToUInt64Key("Otilde")*/ 0xD5, 0x00, /*'\x00d5'*/
0x6C, 0x6D, 0x75, 0x4F, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("Ouml")*/ 0xD6, 0x00, /*'\x00d6'*/
0x73, 0x65, 0x6D, 0x69, 0x74, 0x00, 0x00, 0x00, /*ToUInt64Key("times")*/ 0xD7, 0x00, /*'\x00d7'*/
0x68, 0x73, 0x61, 0x6C, 0x73, 0x4F, 0x00, 0x00, /*ToUInt64Key("Oslash")*/ 0xD8, 0x00, /*'\x00d8'*/
0x65, 0x76, 0x61, 0x72, 0x67, 0x55, 0x00, 0x00, /*ToUInt64Key("Ugrave")*/ 0xD9, 0x00, /*'\x00d9'*/
0x65, 0x74, 0x75, 0x63, 0x61, 0x55, 0x00, 0x00, /*ToUInt64Key("Uacute")*/ 0xDA, 0x00, /*'\x00da'*/
0x63, 0x72, 0x69, 0x63, 0x55, 0x00, 0x00, 0x00, /*ToUInt64Key("Ucirc")*/ 0xDB, 0x00, /*'\x00db'*/
0x6C, 0x6D, 0x75, 0x55, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("Uuml")*/ 0xDC, 0x00, /*'\x00dc'*/
0x65, 0x74, 0x75, 0x63, 0x61, 0x59, 0x00, 0x00, /*ToUInt64Key("Yacute")*/ 0xDD, 0x00, /*'\x00dd'*/
0x4E, 0x52, 0x4F, 0x48, 0x54, 0x00, 0x00, 0x00, /*ToUInt64Key("THORN")*/ 0xDE, 0x00, /*'\x00de'*/
0x67, 0x69, 0x6C, 0x7A, 0x73, 0x00, 0x00, 0x00, /*ToUInt64Key("szlig")*/ 0xDF, 0x00, /*'\x00df'*/
0x65, 0x76, 0x61, 0x72, 0x67, 0x61, 0x00, 0x00, /*ToUInt64Key("agrave")*/ 0xE0, 0x00, /*'\x00e0'*/
0x65, 0x74, 0x75, 0x63, 0x61, 0x61, 0x00, 0x00, /*ToUInt64Key("aacute")*/ 0xE1, 0x00, /*'\x00e1'*/
0x63, 0x72, 0x69, 0x63, 0x61, 0x00, 0x00, 0x00, /*ToUInt64Key("acirc")*/ 0xE2, 0x00, /*'\x00e2'*/
0x65, 0x64, 0x6C, 0x69, 0x74, 0x61, 0x00, 0x00, /*ToUInt64Key("atilde")*/ 0xE3, 0x00, /*'\x00e3'*/
0x6C, 0x6D, 0x75, 0x61, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("auml")*/ 0xE4, 0x00, /*'\x00e4'*/
0x67, 0x6E, 0x69, 0x72, 0x61, 0x00, 0x00, 0x00, /*ToUInt64Key("aring")*/ 0xE5, 0x00, /*'\x00e5'*/
0x67, 0x69, 0x6C, 0x65, 0x61, 0x00, 0x00, 0x00, /*ToUInt64Key("aelig")*/ 0xE6, 0x00, /*'\x00e6'*/
0x6C, 0x69, 0x64, 0x65, 0x63, 0x63, 0x00, 0x00, /*ToUInt64Key("ccedil")*/ 0xE7, 0x00, /*'\x00e7'*/
0x65, 0x76, 0x61, 0x72, 0x67, 0x65, 0x00, 0x00, /*ToUInt64Key("egrave")*/ 0xE8, 0x00, /*'\x00e8'*/
0x65, 0x74, 0x75, 0x63, 0x61, 0x65, 0x00, 0x00, /*ToUInt64Key("eacute")*/ 0xE9, 0x00, /*'\x00e9'*/
0x63, 0x72, 0x69, 0x63, 0x65, 0x00, 0x00, 0x00, /*ToUInt64Key("ecirc")*/ 0xEA, 0x00, /*'\x00ea'*/
0x6C, 0x6D, 0x75, 0x65, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("euml")*/ 0xEB, 0x00, /*'\x00eb'*/
0x65, 0x76, 0x61, 0x72, 0x67, 0x69, 0x00, 0x00, /*ToUInt64Key("igrave")*/ 0xEC, 0x00, /*'\x00ec'*/
0x65, 0x74, 0x75, 0x63, 0x61, 0x69, 0x00, 0x00, /*ToUInt64Key("iacute")*/ 0xED, 0x00, /*'\x00ed'*/
0x63, 0x72, 0x69, 0x63, 0x69, 0x00, 0x00, 0x00, /*ToUInt64Key("icirc")*/ 0xEE, 0x00, /*'\x00ee'*/
0x6C, 0x6D, 0x75, 0x69, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("iuml")*/ 0xEF, 0x00, /*'\x00ef'*/
0x68, 0x74, 0x65, 0x00, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("eth")*/ 0xF0, 0x00, /*'\x00f0'*/
0x65, 0x64, 0x6C, 0x69, 0x74, 0x6E, 0x00, 0x00, /*ToUInt64Key("ntilde")*/ 0xF1, 0x00, /*'\x00f1'*/
0x65, 0x76, 0x61, 0x72, 0x67, 0x6F, 0x00, 0x00, /*ToUInt64Key("ograve")*/ 0xF2, 0x00, /*'\x00f2'*/
0x65, 0x74, 0x75, 0x63, 0x61, 0x6F, 0x00, 0x00, /*ToUInt64Key("oacute")*/ 0xF3, 0x00, /*'\x00f3'*/
0x63, 0x72, 0x69, 0x63, 0x6F, 0x00, 0x00, 0x00, /*ToUInt64Key("ocirc")*/ 0xF4, 0x00, /*'\x00f4'*/
0x65, 0x64, 0x6C, 0x69, 0x74, 0x6F, 0x00, 0x00, /*ToUInt64Key("otilde")*/ 0xF5, 0x00, /*'\x00f5'*/
0x6C, 0x6D, 0x75, 0x6F, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("ouml")*/ 0xF6, 0x00, /*'\x00f6'*/
0x65, 0x64, 0x69, 0x76, 0x69, 0x64, 0x00, 0x00, /*ToUInt64Key("divide")*/ 0xF7, 0x00, /*'\x00f7'*/
0x68, 0x73, 0x61, 0x6C, 0x73, 0x6F, 0x00, 0x00, /*ToUInt64Key("oslash")*/ 0xF8, 0x00, /*'\x00f8'*/
0x65, 0x76, 0x61, 0x72, 0x67, 0x75, 0x00, 0x00, /*ToUInt64Key("ugrave")*/ 0xF9, 0x00, /*'\x00f9'*/
0x65, 0x74, 0x75, 0x63, 0x61, 0x75, 0x00, 0x00, /*ToUInt64Key("uacute")*/ 0xFA, 0x00, /*'\x00fa'*/
0x63, 0x72, 0x69, 0x63, 0x75, 0x00, 0x00, 0x00, /*ToUInt64Key("ucirc")*/ 0xFB, 0x00, /*'\x00fb'*/
0x6C, 0x6D, 0x75, 0x75, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("uuml")*/ 0xFC, 0x00, /*'\x00fc'*/
0x65, 0x74, 0x75, 0x63, 0x61, 0x79, 0x00, 0x00, /*ToUInt64Key("yacute")*/ 0xFD, 0x00, /*'\x00fd'*/
0x6E, 0x72, 0x6F, 0x68, 0x74, 0x00, 0x00, 0x00, /*ToUInt64Key("thorn")*/ 0xFE, 0x00, /*'\x00fe'*/
0x6C, 0x6D, 0x75, 0x79, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("yuml")*/ 0xFF, 0x00, /*'\x00ff'*/
0x67, 0x69, 0x6C, 0x45, 0x4F, 0x00, 0x00, 0x00, /*ToUInt64Key("OElig")*/ 0x52, 0x01, /*'\x0152'*/
0x67, 0x69, 0x6C, 0x65, 0x6F, 0x00, 0x00, 0x00, /*ToUInt64Key("oelig")*/ 0x53, 0x01, /*'\x0153'*/
0x6E, 0x6F, 0x72, 0x61, 0x63, 0x53, 0x00, 0x00, /*ToUInt64Key("Scaron")*/ 0x60, 0x01, /*'\x0160'*/
0x6E, 0x6F, 0x72, 0x61, 0x63, 0x73, 0x00, 0x00, /*ToUInt64Key("scaron")*/ 0x61, 0x01, /*'\x0161'*/
0x6C, 0x6D, 0x75, 0x59, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("Yuml")*/ 0x78, 0x01, /*'\x0178'*/
0x66, 0x6F, 0x6E, 0x66, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("fnof")*/ 0x92, 0x01, /*'\x0192'*/
0x63, 0x72, 0x69, 0x63, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("circ")*/ 0xC6, 0x02, /*'\x02c6'*/
0x65, 0x64, 0x6C, 0x69, 0x74, 0x00, 0x00, 0x00, /*ToUInt64Key("tilde")*/ 0xDC, 0x02, /*'\x02dc'*/
0x61, 0x68, 0x70, 0x6C, 0x41, 0x00, 0x00, 0x00, /*ToUInt64Key("Alpha")*/ 0x91, 0x03, /*'\x0391'*/
0x61, 0x74, 0x65, 0x42, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("Beta")*/ 0x92, 0x03, /*'\x0392'*/
0x61, 0x6D, 0x6D, 0x61, 0x47, 0x00, 0x00, 0x00, /*ToUInt64Key("Gamma")*/ 0x93, 0x03, /*'\x0393'*/
0x61, 0x74, 0x6C, 0x65, 0x44, 0x00, 0x00, 0x00, /*ToUInt64Key("Delta")*/ 0x94, 0x03, /*'\x0394'*/
0x6E, 0x6F, 0x6C, 0x69, 0x73, 0x70, 0x45, 0x00, /*ToUInt64Key("Epsilon")*/ 0x95, 0x03, /*'\x0395'*/
0x61, 0x74, 0x65, 0x5A, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("Zeta")*/ 0x96, 0x03, /*'\x0396'*/
0x61, 0x74, 0x45, 0x00, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("Eta")*/ 0x97, 0x03, /*'\x0397'*/
0x61, 0x74, 0x65, 0x68, 0x54, 0x00, 0x00, 0x00, /*ToUInt64Key("Theta")*/ 0x98, 0x03, /*'\x0398'*/
0x61, 0x74, 0x6F, 0x49, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("Iota")*/ 0x99, 0x03, /*'\x0399'*/
0x61, 0x70, 0x70, 0x61, 0x4B, 0x00, 0x00, 0x00, /*ToUInt64Key("Kappa")*/ 0x9A, 0x03, /*'\x039a'*/
0x61, 0x64, 0x62, 0x6D, 0x61, 0x4C, 0x00, 0x00, /*ToUInt64Key("Lambda")*/ 0x9B, 0x03, /*'\x039b'*/
0x75, 0x4D, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("Mu")*/ 0x9C, 0x03, /*'\x039c'*/
0x75, 0x4E, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("Nu")*/ 0x9D, 0x03, /*'\x039d'*/
0x69, 0x58, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("Xi")*/ 0x9E, 0x03, /*'\x039e'*/
0x6E, 0x6F, 0x72, 0x63, 0x69, 0x6D, 0x4F, 0x00, /*ToUInt64Key("Omicron")*/ 0x9F, 0x03, /*'\x039f'*/
0x69, 0x50, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("Pi")*/ 0xA0, 0x03, /*'\x03a0'*/
0x6F, 0x68, 0x52, 0x00, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("Rho")*/ 0xA1, 0x03, /*'\x03a1'*/
0x61, 0x6D, 0x67, 0x69, 0x53, 0x00, 0x00, 0x00, /*ToUInt64Key("Sigma")*/ 0xA3, 0x03, /*'\x03a3'*/
0x75, 0x61, 0x54, 0x00, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("Tau")*/ 0xA4, 0x03, /*'\x03a4'*/
0x6E, 0x6F, 0x6C, 0x69, 0x73, 0x70, 0x55, 0x00, /*ToUInt64Key("Upsilon")*/ 0xA5, 0x03, /*'\x03a5'*/
0x69, 0x68, 0x50, 0x00, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("Phi")*/ 0xA6, 0x03, /*'\x03a6'*/
0x69, 0x68, 0x43, 0x00, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("Chi")*/ 0xA7, 0x03, /*'\x03a7'*/
0x69, 0x73, 0x50, 0x00, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("Psi")*/ 0xA8, 0x03, /*'\x03a8'*/
0x61, 0x67, 0x65, 0x6D, 0x4F, 0x00, 0x00, 0x00, /*ToUInt64Key("Omega")*/ 0xA9, 0x03, /*'\x03a9'*/
0x61, 0x68, 0x70, 0x6C, 0x61, 0x00, 0x00, 0x00, /*ToUInt64Key("alpha")*/ 0xB1, 0x03, /*'\x03b1'*/
0x61, 0x74, 0x65, 0x62, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("beta")*/ 0xB2, 0x03, /*'\x03b2'*/
0x61, 0x6D, 0x6D, 0x61, 0x67, 0x00, 0x00, 0x00, /*ToUInt64Key("gamma")*/ 0xB3, 0x03, /*'\x03b3'*/
0x61, 0x74, 0x6C, 0x65, 0x64, 0x00, 0x00, 0x00, /*ToUInt64Key("delta")*/ 0xB4, 0x03, /*'\x03b4'*/
0x6E, 0x6F, 0x6C, 0x69, 0x73, 0x70, 0x65, 0x00, /*ToUInt64Key("epsilon")*/ 0xB5, 0x03, /*'\x03b5'*/
0x61, 0x74, 0x65, 0x7A, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("zeta")*/ 0xB6, 0x03, /*'\x03b6'*/
0x61, 0x74, 0x65, 0x00, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("eta")*/ 0xB7, 0x03, /*'\x03b7'*/
0x61, 0x74, 0x65, 0x68, 0x74, 0x00, 0x00, 0x00, /*ToUInt64Key("theta")*/ 0xB8, 0x03, /*'\x03b8'*/
0x61, 0x74, 0x6F, 0x69, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("iota")*/ 0xB9, 0x03, /*'\x03b9'*/
0x61, 0x70, 0x70, 0x61, 0x6B, 0x00, 0x00, 0x00, /*ToUInt64Key("kappa")*/ 0xBA, 0x03, /*'\x03ba'*/
0x61, 0x64, 0x62, 0x6D, 0x61, 0x6C, 0x00, 0x00, /*ToUInt64Key("lambda")*/ 0xBB, 0x03, /*'\x03bb'*/
0x75, 0x6D, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("mu")*/ 0xBC, 0x03, /*'\x03bc'*/
0x75, 0x6E, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("nu")*/ 0xBD, 0x03, /*'\x03bd'*/
0x69, 0x78, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("xi")*/ 0xBE, 0x03, /*'\x03be'*/
0x6E, 0x6F, 0x72, 0x63, 0x69, 0x6D, 0x6F, 0x00, /*ToUInt64Key("omicron")*/ 0xBF, 0x03, /*'\x03bf'*/
0x69, 0x70, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("pi")*/ 0xC0, 0x03, /*'\x03c0'*/
0x6F, 0x68, 0x72, 0x00, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("rho")*/ 0xC1, 0x03, /*'\x03c1'*/
0x66, 0x61, 0x6D, 0x67, 0x69, 0x73, 0x00, 0x00, /*ToUInt64Key("sigmaf")*/ 0xC2, 0x03, /*'\x03c2'*/
0x61, 0x6D, 0x67, 0x69, 0x73, 0x00, 0x00, 0x00, /*ToUInt64Key("sigma")*/ 0xC3, 0x03, /*'\x03c3'*/
0x75, 0x61, 0x74, 0x00, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("tau")*/ 0xC4, 0x03, /*'\x03c4'*/
0x6E, 0x6F, 0x6C, 0x69, 0x73, 0x70, 0x75, 0x00, /*ToUInt64Key("upsilon")*/ 0xC5, 0x03, /*'\x03c5'*/
0x69, 0x68, 0x70, 0x00, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("phi")*/ 0xC6, 0x03, /*'\x03c6'*/
0x69, 0x68, 0x63, 0x00, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("chi")*/ 0xC7, 0x03, /*'\x03c7'*/
0x69, 0x73, 0x70, 0x00, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("psi")*/ 0xC8, 0x03, /*'\x03c8'*/
0x61, 0x67, 0x65, 0x6D, 0x6F, 0x00, 0x00, 0x00, /*ToUInt64Key("omega")*/ 0xC9, 0x03, /*'\x03c9'*/
0x6D, 0x79, 0x73, 0x61, 0x74, 0x65, 0x68, 0x74, /*ToUInt64Key("thetasym")*/0xD1, 0x03, /*'\x03d1'*/
0x68, 0x69, 0x73, 0x70, 0x75, 0x00, 0x00, 0x00, /*ToUInt64Key("upsih")*/ 0xD2, 0x03, /*'\x03d2'*/
0x76, 0x69, 0x70, 0x00, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("piv")*/ 0xD6, 0x03, /*'\x03d6'*/
0x70, 0x73, 0x6E, 0x65, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("ensp")*/ 0x02, 0x20, /*'\x2002'*/
0x70, 0x73, 0x6D, 0x65, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("emsp")*/ 0x03, 0x20, /*'\x2003'*/
0x70, 0x73, 0x6E, 0x69, 0x68, 0x74, 0x00, 0x00, /*ToUInt64Key("thinsp")*/ 0x09, 0x20, /*'\x2009'*/
0x6A, 0x6E, 0x77, 0x7A, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("zwnj")*/ 0x0C, 0x20, /*'\x200c'*/
0x6A, 0x77, 0x7A, 0x00, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("zwj")*/ 0x0D, 0x20, /*'\x200d'*/
0x6D, 0x72, 0x6C, 0x00, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("lrm")*/ 0x0E, 0x20, /*'\x200e'*/
0x6D, 0x6C, 0x72, 0x00, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("rlm")*/ 0x0F, 0x20, /*'\x200f'*/
0x68, 0x73, 0x61, 0x64, 0x6E, 0x00, 0x00, 0x00, /*ToUInt64Key("ndash")*/ 0x13, 0x20, /*'\x2013'*/
0x68, 0x73, 0x61, 0x64, 0x6D, 0x00, 0x00, 0x00, /*ToUInt64Key("mdash")*/ 0x14, 0x20, /*'\x2014'*/
0x6F, 0x75, 0x71, 0x73, 0x6C, 0x00, 0x00, 0x00, /*ToUInt64Key("lsquo")*/ 0x18, 0x20, /*'\x2018'*/
0x6F, 0x75, 0x71, 0x73, 0x72, 0x00, 0x00, 0x00, /*ToUInt64Key("rsquo")*/ 0x19, 0x20, /*'\x2019'*/
0x6F, 0x75, 0x71, 0x62, 0x73, 0x00, 0x00, 0x00, /*ToUInt64Key("sbquo")*/ 0x1A, 0x20, /*'\x201a'*/
0x6F, 0x75, 0x71, 0x64, 0x6C, 0x00, 0x00, 0x00, /*ToUInt64Key("ldquo")*/ 0x1C, 0x20, /*'\x201c'*/
0x6F, 0x75, 0x71, 0x64, 0x72, 0x00, 0x00, 0x00, /*ToUInt64Key("rdquo")*/ 0x1D, 0x20, /*'\x201d'*/
0x6F, 0x75, 0x71, 0x64, 0x62, 0x00, 0x00, 0x00, /*ToUInt64Key("bdquo")*/ 0x1E, 0x20, /*'\x201e'*/
0x72, 0x65, 0x67, 0x67, 0x61, 0x64, 0x00, 0x00, /*ToUInt64Key("dagger")*/ 0x20, 0x20, /*'\x2020'*/
0x72, 0x65, 0x67, 0x67, 0x61, 0x44, 0x00, 0x00, /*ToUInt64Key("Dagger")*/ 0x21, 0x20, /*'\x2021'*/
0x6C, 0x6C, 0x75, 0x62, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("bull")*/ 0x22, 0x20, /*'\x2022'*/
0x70, 0x69, 0x6C, 0x6C, 0x65, 0x68, 0x00, 0x00, /*ToUInt64Key("hellip")*/ 0x26, 0x20, /*'\x2026'*/
0x6C, 0x69, 0x6D, 0x72, 0x65, 0x70, 0x00, 0x00, /*ToUInt64Key("permil")*/ 0x30, 0x20, /*'\x2030'*/
0x65, 0x6D, 0x69, 0x72, 0x70, 0x00, 0x00, 0x00, /*ToUInt64Key("prime")*/ 0x32, 0x20, /*'\x2032'*/
0x65, 0x6D, 0x69, 0x72, 0x50, 0x00, 0x00, 0x00, /*ToUInt64Key("Prime")*/ 0x33, 0x20, /*'\x2033'*/
0x6F, 0x75, 0x71, 0x61, 0x73, 0x6C, 0x00, 0x00, /*ToUInt64Key("lsaquo")*/ 0x39, 0x20, /*'\x2039'*/
0x6F, 0x75, 0x71, 0x61, 0x73, 0x72, 0x00, 0x00, /*ToUInt64Key("rsaquo")*/ 0x3A, 0x20, /*'\x203a'*/
0x65, 0x6E, 0x69, 0x6C, 0x6F, 0x00, 0x00, 0x00, /*ToUInt64Key("oline")*/ 0x3E, 0x20, /*'\x203e'*/
0x6C, 0x73, 0x61, 0x72, 0x66, 0x00, 0x00, 0x00, /*ToUInt64Key("frasl")*/ 0x44, 0x20, /*'\x2044'*/
0x6F, 0x72, 0x75, 0x65, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("euro")*/ 0xAC, 0x20, /*'\x20ac'*/
0x65, 0x67, 0x61, 0x6D, 0x69, 0x00, 0x00, 0x00, /*ToUInt64Key("image")*/ 0x11, 0x21, /*'\x2111'*/
0x70, 0x72, 0x65, 0x69, 0x65, 0x77, 0x00, 0x00, /*ToUInt64Key("weierp")*/ 0x18, 0x21, /*'\x2118'*/
0x6C, 0x61, 0x65, 0x72, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("real")*/ 0x1C, 0x21, /*'\x211c'*/
0x65, 0x64, 0x61, 0x72, 0x74, 0x00, 0x00, 0x00, /*ToUInt64Key("trade")*/ 0x22, 0x21, /*'\x2122'*/
0x6D, 0x79, 0x73, 0x66, 0x65, 0x6C, 0x61, 0x00, /*ToUInt64Key("alefsym")*/ 0x35, 0x21, /*'\x2135'*/
0x72, 0x72, 0x61, 0x6C, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("larr")*/ 0x90, 0x21, /*'\x2190'*/
0x72, 0x72, 0x61, 0x75, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("uarr")*/ 0x91, 0x21, /*'\x2191'*/
0x72, 0x72, 0x61, 0x72, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("rarr")*/ 0x92, 0x21, /*'\x2192'*/
0x72, 0x72, 0x61, 0x64, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("darr")*/ 0x93, 0x21, /*'\x2193'*/
0x72, 0x72, 0x61, 0x68, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("harr")*/ 0x94, 0x21, /*'\x2194'*/
0x72, 0x72, 0x61, 0x72, 0x63, 0x00, 0x00, 0x00, /*ToUInt64Key("crarr")*/ 0xB5, 0x21, /*'\x21b5'*/
0x72, 0x72, 0x41, 0x6C, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("lArr")*/ 0xD0, 0x21, /*'\x21d0'*/
0x72, 0x72, 0x41, 0x75, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("uArr")*/ 0xD1, 0x21, /*'\x21d1'*/
0x72, 0x72, 0x41, 0x72, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("rArr")*/ 0xD2, 0x21, /*'\x21d2'*/
0x72, 0x72, 0x41, 0x64, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("dArr")*/ 0xD3, 0x21, /*'\x21d3'*/
0x72, 0x72, 0x41, 0x68, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("hArr")*/ 0xD4, 0x21, /*'\x21d4'*/
0x6C, 0x6C, 0x61, 0x72, 0x6F, 0x66, 0x00, 0x00, /*ToUInt64Key("forall")*/ 0x00, 0x22, /*'\x2200'*/
0x74, 0x72, 0x61, 0x70, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("part")*/ 0x02, 0x22, /*'\x2202'*/
0x74, 0x73, 0x69, 0x78, 0x65, 0x00, 0x00, 0x00, /*ToUInt64Key("exist")*/ 0x03, 0x22, /*'\x2203'*/
0x79, 0x74, 0x70, 0x6D, 0x65, 0x00, 0x00, 0x00, /*ToUInt64Key("empty")*/ 0x05, 0x22, /*'\x2205'*/
0x61, 0x6C, 0x62, 0x61, 0x6E, 0x00, 0x00, 0x00, /*ToUInt64Key("nabla")*/ 0x07, 0x22, /*'\x2207'*/
0x6E, 0x69, 0x73, 0x69, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("isin")*/ 0x08, 0x22, /*'\x2208'*/
0x6E, 0x69, 0x74, 0x6F, 0x6E, 0x00, 0x00, 0x00, /*ToUInt64Key("notin")*/ 0x09, 0x22, /*'\x2209'*/
0x69, 0x6E, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("ni")*/ 0x0B, 0x22, /*'\x220b'*/
0x64, 0x6F, 0x72, 0x70, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("prod")*/ 0x0F, 0x22, /*'\x220f'*/
0x6D, 0x75, 0x73, 0x00, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("sum")*/ 0x11, 0x22, /*'\x2211'*/
0x73, 0x75, 0x6E, 0x69, 0x6D, 0x00, 0x00, 0x00, /*ToUInt64Key("minus")*/ 0x12, 0x22, /*'\x2212'*/
0x74, 0x73, 0x61, 0x77, 0x6F, 0x6C, 0x00, 0x00, /*ToUInt64Key("lowast")*/ 0x17, 0x22, /*'\x2217'*/
0x63, 0x69, 0x64, 0x61, 0x72, 0x00, 0x00, 0x00, /*ToUInt64Key("radic")*/ 0x1A, 0x22, /*'\x221a'*/
0x70, 0x6F, 0x72, 0x70, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("prop")*/ 0x1D, 0x22, /*'\x221d'*/
0x6E, 0x69, 0x66, 0x6E, 0x69, 0x00, 0x00, 0x00, /*ToUInt64Key("infin")*/ 0x1E, 0x22, /*'\x221e'*/
0x67, 0x6E, 0x61, 0x00, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("ang")*/ 0x20, 0x22, /*'\x2220'*/
0x64, 0x6E, 0x61, 0x00, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("and")*/ 0x27, 0x22, /*'\x2227'*/
0x72, 0x6F, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("or")*/ 0x28, 0x22, /*'\x2228'*/
0x70, 0x61, 0x63, 0x00, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("cap")*/ 0x29, 0x22, /*'\x2229'*/
0x70, 0x75, 0x63, 0x00, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("cup")*/ 0x2A, 0x22, /*'\x222a'*/
0x74, 0x6E, 0x69, 0x00, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("int")*/ 0x2B, 0x22, /*'\x222b'*/
0x34, 0x65, 0x72, 0x65, 0x68, 0x74, 0x00, 0x00, /*ToUInt64Key("there4")*/ 0x34, 0x22, /*'\x2234'*/
0x6D, 0x69, 0x73, 0x00, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("sim")*/ 0x3C, 0x22, /*'\x223c'*/
0x67, 0x6E, 0x6F, 0x63, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("cong")*/ 0x45, 0x22, /*'\x2245'*/
0x70, 0x6D, 0x79, 0x73, 0x61, 0x00, 0x00, 0x00, /*ToUInt64Key("asymp")*/ 0x48, 0x22, /*'\x2248'*/
0x65, 0x6E, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("ne")*/ 0x60, 0x22, /*'\x2260'*/
0x76, 0x69, 0x75, 0x71, 0x65, 0x00, 0x00, 0x00, /*ToUInt64Key("equiv")*/ 0x61, 0x22, /*'\x2261'*/
0x65, 0x6C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("le")*/ 0x64, 0x22, /*'\x2264'*/
0x65, 0x67, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("ge")*/ 0x65, 0x22, /*'\x2265'*/
0x62, 0x75, 0x73, 0x00, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("sub")*/ 0x82, 0x22, /*'\x2282'*/
0x70, 0x75, 0x73, 0x00, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("sup")*/ 0x83, 0x22, /*'\x2283'*/
0x62, 0x75, 0x73, 0x6E, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("nsub")*/ 0x84, 0x22, /*'\x2284'*/
0x65, 0x62, 0x75, 0x73, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("sube")*/ 0x86, 0x22, /*'\x2286'*/
0x65, 0x70, 0x75, 0x73, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("supe")*/ 0x87, 0x22, /*'\x2287'*/
0x73, 0x75, 0x6C, 0x70, 0x6F, 0x00, 0x00, 0x00, /*ToUInt64Key("oplus")*/ 0x95, 0x22, /*'\x2295'*/
0x73, 0x65, 0x6D, 0x69, 0x74, 0x6F, 0x00, 0x00, /*ToUInt64Key("otimes")*/ 0x97, 0x22, /*'\x2297'*/
0x70, 0x72, 0x65, 0x70, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("perp")*/ 0xA5, 0x22, /*'\x22a5'*/
0x74, 0x6F, 0x64, 0x73, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("sdot")*/ 0xC5, 0x22, /*'\x22c5'*/
0x6C, 0x69, 0x65, 0x63, 0x6C, 0x00, 0x00, 0x00, /*ToUInt64Key("lceil")*/ 0x08, 0x23, /*'\x2308'*/
0x6C, 0x69, 0x65, 0x63, 0x72, 0x00, 0x00, 0x00, /*ToUInt64Key("rceil")*/ 0x09, 0x23, /*'\x2309'*/
0x72, 0x6F, 0x6F, 0x6C, 0x66, 0x6C, 0x00, 0x00, /*ToUInt64Key("lfloor")*/ 0x0A, 0x23, /*'\x230a'*/
0x72, 0x6F, 0x6F, 0x6C, 0x66, 0x72, 0x00, 0x00, /*ToUInt64Key("rfloor")*/ 0x0B, 0x23, /*'\x230b'*/
0x67, 0x6E, 0x61, 0x6C, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("lang")*/ 0x29, 0x23, /*'\x2329'*/
0x67, 0x6E, 0x61, 0x72, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("rang")*/ 0x2A, 0x23, /*'\x232a'*/
0x7A, 0x6F, 0x6C, 0x00, 0x00, 0x00, 0x00, 0x00, /*ToUInt64Key("loz")*/ 0xCA, 0x25, /*'\x25ca'*/
0x73, 0x65, 0x64, 0x61, 0x70, 0x73, 0x00, 0x00, /*ToUInt64Key("spades")*/ 0x60, 0x26, /*'\x2660'*/
0x73, 0x62, 0x75, 0x6C, 0x63, 0x00, 0x00, 0x00, /*ToUInt64Key("clubs")*/ 0x63, 0x26, /*'\x2663'*/
0x73, 0x74, 0x72, 0x61, 0x65, 0x68, 0x00, 0x00, /*ToUInt64Key("hearts")*/ 0x65, 0x26, /*'\x2665'*/
0x73, 0x6D, 0x61, 0x69, 0x64, 0x00, 0x00, 0x00, /*ToUInt64Key("diams")*/ 0x66, 0x26, /*'\x2666'*/
];
var dictionary = new Dictionary<ulong, char>(tableData.Length / (sizeof(ulong) + sizeof(char)));
while (tableData.Length > 0)
{
ulong key = BitConverter.ToUInt64(tableData, 0);
char value = (char)BitConverter.ToUInt16(tableData, sizeof(ulong));
dictionary[key] = value;
byte[] tempTableData = new byte[tableData.Length - (sizeof(ulong) + sizeof(char))];
Array.Copy(tableData, (sizeof(ulong) + sizeof(char)), tempTableData, 0, tempTableData.Length);
tableData = tempTableData;
}
return dictionary;
}
// maps entity strings => unicode chars
private static readonly Dictionary<ulong, char> s_lookupTable = InitializeLookupTable();
public static char Lookup(char[] entity)
{
// To avoid an allocation, keys of type "ulong" are used in the lookup table.
// Since all entity strings comprise 8 characters or less and are ASCII-only, they "fit" into an ulong (8 bytes).
if (entity.Length <= 8)
{
s_lookupTable.TryGetValue(ToUInt64Key(entity), out char result);
return result;
}
else
{
// Currently, there are no entities that are longer than 8 characters.
return (char)0;
}
}
private static ulong ToUInt64Key(char[] entity)
{
// The ulong key is the reversed single-byte character representation of the actual entity string.
ulong key = 0;
for (int i = 0; i < entity.Length; i++)
{
if (entity[i] > 0xFF)
{
return 0;
}
key = (key << 8) | entity[i];
}
return key;
}
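// Worked example (illustrative, not part of the original source): ToUInt64Key("amp")
// builds ('a' << 16) | ('m' << 8) | 'p' = 0x0000000000616D70, which matches the
// ulong read from the "amp" table entry bytes 0x70, 0x6D, 0x61, 0x00, ... above
// (the table layout assumes a little-endian BitConverter).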
}
}
}
#endif

View File

@@ -0,0 +1,40 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<!-- Assembly Properties -->
<TargetFrameworks>net20;net35;net40;net452;net462;net472;net48;netcoreapp3.1;net5.0;net6.0;net7.0;net8.0;net9.0</TargetFrameworks>
<IncludeSymbols>true</IncludeSymbols>
<LangVersion>latest</LangVersion>
<Nullable>enable</Nullable>
<SymbolPackageFormat>snupkg</SymbolPackageFormat>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<Version>1.6.5</Version>
<!-- Package Properties -->
<Authors>Matt Nadareski</Authors>
<Description>Code to interact with redump.org</Description>
<Copyright>Copyright (c) Matt Nadareski 2020-2025</Copyright>
<PackageProjectUrl>https://github.com/SabreTools/</PackageProjectUrl>
<PackageReadmeFile>README.md</PackageReadmeFile>
<RepositoryUrl>https://github.com/SabreTools/SabreTools.RedumpLib</RepositoryUrl>
<RepositoryType>git</RepositoryType>
<PackageTags>web client redump</PackageTags>
<PackageLicenseExpression>MIT</PackageLicenseExpression>
</PropertyGroup>
<ItemGroup>
<InternalsVisibleTo Include="SabreTools.RedumpLib.Test" />
</ItemGroup>
<ItemGroup>
<None Include="../README.md" Pack="true" PackagePath="" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="MinAsyncBridge" Version="0.12.4" Condition="$(TargetFramework.StartsWith(`net2`)) OR $(TargetFramework.StartsWith(`net3`)) OR $(TargetFramework.StartsWith(`net40`))" />
<PackageReference Include="Net35.Actions" Version="1.1.0" Condition="$(TargetFramework.StartsWith(`net2`))" />
<PackageReference Include="Newtonsoft.Json" Version="13.0.3" />
<PackageReference Include="SabreTools.Models" Version="1.5.8" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,225 @@
using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using SabreTools.RedumpLib.Data;
using SabreTools.RedumpLib.Web;
namespace SabreTools.RedumpLib
{
public static class Validator
{
/// <summary>
/// Adjust the disc type based on size and layerbreak information
/// </summary>
/// <param name="info">Existing SubmissionInfo object to fill</param>
/// <returns>Corrected disc type, if possible</returns>
public static void NormalizeDiscType(SubmissionInfo info)
{
// If we have nothing valid, do nothing
if (info?.CommonDiscInfo?.Media == null || info?.SizeAndChecksums == null)
return;
switch (info.CommonDiscInfo.Media)
{
case DiscType.DVD5:
case DiscType.DVD9:
if (info.SizeAndChecksums.Layerbreak != default)
info.CommonDiscInfo.Media = DiscType.DVD9;
else
info.CommonDiscInfo.Media = DiscType.DVD5;
break;
case DiscType.BD25:
case DiscType.BD33:
case DiscType.BD50:
case DiscType.BD66:
case DiscType.BD100:
case DiscType.BD128:
if (info.SizeAndChecksums.Layerbreak3 != default)
info.CommonDiscInfo.Media = DiscType.BD128;
else if (info.SizeAndChecksums.Layerbreak2 != default)
info.CommonDiscInfo.Media = DiscType.BD100;
else if (info.SizeAndChecksums.Layerbreak != default && info.SizeAndChecksums.PICIdentifier == Models.PIC.Constants.DiscTypeIdentifierROMUltra)
info.CommonDiscInfo.Media = DiscType.BD66;
else if (info.SizeAndChecksums.Layerbreak != default && info.SizeAndChecksums.Size > 50_050_629_632)
info.CommonDiscInfo.Media = DiscType.BD66;
else if (info.SizeAndChecksums.Layerbreak != default)
info.CommonDiscInfo.Media = DiscType.BD50;
else if (info.SizeAndChecksums.PICIdentifier == Models.PIC.Constants.DiscTypeIdentifierROMUltra)
info.CommonDiscInfo.Media = DiscType.BD33;
else if (info.SizeAndChecksums.Size > 25_025_314_816)
info.CommonDiscInfo.Media = DiscType.BD33;
else
info.CommonDiscInfo.Media = DiscType.BD25;
break;
case DiscType.HDDVDSL:
case DiscType.HDDVDDL:
if (info.SizeAndChecksums.Layerbreak != default)
info.CommonDiscInfo.Media = DiscType.HDDVDDL;
else
info.CommonDiscInfo.Media = DiscType.HDDVDSL;
break;
case DiscType.UMDSL:
case DiscType.UMDDL:
if (info.SizeAndChecksums.Layerbreak != default)
info.CommonDiscInfo.Media = DiscType.UMDDL;
else
info.CommonDiscInfo.Media = DiscType.UMDSL;
break;
// All other disc types are not processed
default:
break;
}
}
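// Illustrative example (not part of the original source): a submission reported as
// DiscType.DVD5 with a non-zero SizeAndChecksums.Layerbreak is normalized to DVD9,
// and one with no layerbreak stays DVD5; the BD branch additionally consults the
// PIC identifier and the total size to pick between the 25/33/50/66/100/128 variants.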
/// <summary>
/// List the disc IDs associated with a given quicksearch query
/// </summary>
/// <param name="rc">RedumpClient for making the connection</param>
/// <param name="query">Query string to attempt to search for</param>
/// <param name="filterForwardSlashes">True to filter forward slashes, false otherwise</param>
/// <returns>All disc IDs for the given query, null on error</returns>
public async static Task<List<int>?> ListSearchResults(RedumpClient rc, string? query, bool filterForwardSlashes = true)
{
// If there is an invalid query
if (string.IsNullOrEmpty(query))
return null;
var ids = new List<int>();
// Strip quotes
query = query!.Trim('"', '\'');
// Special characters become dashes
query = query.Replace(' ', '-');
if (filterForwardSlashes)
query = query.Replace('/', '-');
query = query.Replace('\\', '/');
// Lowercase is defined per language
query = query.ToLowerInvariant();
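// Illustrative example (not part of the original source): a query of "Sonic Adventure"
// is normalized to "sonic-adventure" before being substituted into Constants.QuickSearchUrl;
// with filterForwardSlashes left at its default of true, "A/B" becomes "a-b".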
// Keep getting quicksearch pages until there are none left
try
{
int pageNumber = 1;
while (true)
{
List<int> pageIds = await rc.CheckSingleSitePage(string.Format(Constants.QuickSearchUrl, query, pageNumber++));
ids.AddRange(pageIds);
if (pageIds.Count <= 1)
break;
}
}
catch (Exception ex)
{
Console.WriteLine($"An exception occurred while trying to log in: {ex}");
return null;
}
return ids;
}
/// <summary>
/// Validate a single track against Redump, if possible
/// </summary>
/// <param name="rc">RedumpClient for making the connection</param>
/// <param name="info">Existing SubmissionInfo object to fill</param>
/// <param name="sha1">SHA-1 hash to check against</param>
/// <returns>List of found values, if possible</returns>
public async static Task<List<int>?> ValidateSingleTrack(RedumpClient rc, SubmissionInfo info, string? sha1)
{
// Get all matching IDs for the track
var newIds = await ListSearchResults(rc, sha1);
// If we got null back, there was an error
if (newIds == null)
return null;
// If no IDs match, just return
if (newIds.Count == 0)
return null;
// Join the list of found IDs to the existing list, if possible
if (info.PartiallyMatchedIDs != null && info.PartiallyMatchedIDs.Count > 0)
info.PartiallyMatchedIDs.AddRange(newIds);
else
info.PartiallyMatchedIDs = newIds;
return newIds;
}
/// <summary>
/// Validate a universal hash against Redump, if possible
/// </summary>
/// <param name="rc">RedumpClient for making the connection</param>
/// <param name="info">Existing SubmissionInfo object to fill</param>
/// <param name="resultProgress">Optional result progress callback</param>
/// <returns>List of found values, if possible</returns>
public async static Task<List<int>?> ValidateUniversalHash(RedumpClient rc, SubmissionInfo info)
{
// If we don't have special fields
if (info.CommonDiscInfo?.CommentsSpecialFields == null)
return null;
// If we don't have a universal hash
string? universalHash = info.CommonDiscInfo.CommentsSpecialFields[SiteCode.UniversalHash];
if (string.IsNullOrEmpty(universalHash))
return null;
// Format the universal hash for finding within the comments
string universalHashQuery = $"{universalHash.Substring(0, universalHash.Length - 1)}/comments/only";
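// Illustrative example (the hash value is made up): a stored universal hash of
// "abcdef01" would yield the query "abcdef0/comments/only" (last character dropped,
// comments-only suffix appended).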
// Get all matching IDs for the hash
var newIds = await ListSearchResults(rc, universalHashQuery, filterForwardSlashes: false);
// If we got null back, there was an error
if (newIds == null)
return null;
// If no IDs match, just return
if (newIds.Count == 0)
return null;
// Join the list of found IDs to the existing list, if possible
if (info.PartiallyMatchedIDs != null && info.PartiallyMatchedIDs.Count > 0)
info.PartiallyMatchedIDs.AddRange(newIds);
else
info.PartiallyMatchedIDs = newIds;
return newIds;
}
/// <summary>
/// Validate that the current track count and remote track count match
/// </summary>
/// <param name="rc">RedumpClient for making the connection</param>
/// <param name="id">Redump disc ID to retrieve</param>
/// <param name="localCount">Local count of tracks for the current disc</param>
/// <returns>True if the track count matches, false otherwise</returns>
public async static Task<bool> ValidateTrackCount(RedumpClient rc, int id, int localCount)
{
// If we can't pull the remote data, we can't match
string? discData = await rc.DownloadSingleSiteID(id);
if (string.IsNullOrEmpty(discData))
return false;
// Discs with only 1 track don't have a track count listed
var match = Constants.TrackCountRegex.Match(discData);
if (!match.Success && localCount == 1)
return true;
else if (!match.Success)
return false;
// If the count isn't parseable, we're not taking chances
if (!int.TryParse(match.Groups[1].Value, out int remoteCount))
return false;
// Finally check to see if the counts match
return localCount == remoteCount;
}
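// Illustrative note (not part of the original source): if Constants.TrackCountRegex
// captures "12" from the disc page and localCount is 12, ValidateTrackCount returns true;
// when the regex does not match at all, only a localCount of 1 is treated as a match.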
}
}

View File

@@ -0,0 +1,43 @@
using System;
using System.Net;
#pragma warning disable SYSLIB0014 // 'WebClient.WebClient()' is obsolete
namespace SabreTools.RedumpLib.Web
{
internal class CookieWebClient : WebClient
{
// https://stackoverflow.com/questions/1777221/using-cookiecontainer-with-webclient-class
private readonly CookieContainer _container = new();
/// <summary>
/// Get the last downloaded filename, if possible
/// </summary>
public string? GetLastFilename()
{
// If the response headers are null or empty
if (ResponseHeaders == null || ResponseHeaders.Count == 0)
return null;
// If we don't have the response header we care about
string? headerValue = ResponseHeaders.Get("Content-Disposition");
if (string.IsNullOrEmpty(headerValue))
return null;
// Extract the filename from the value
return headerValue.Substring(headerValue.IndexOf("filename=") + 9).Replace("\"", "");
}
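// Illustrative example (not part of the original source): a response header of
// Content-Disposition: attachment; filename="foo.zip" yields "foo.zip".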
/// <inheritdoc/>
protected override WebRequest GetWebRequest(Uri address)
{
WebRequest request = base.GetWebRequest(address);
if (request is HttpWebRequest webRequest)
{
webRequest.Timeout = 30 * 1000; // 30 seconds
webRequest.CookieContainer = _container;
}
return request;
}
}
}

View File

@@ -0,0 +1,21 @@
using System;
using System.Threading;
namespace SabreTools.RedumpLib.Web
{
/// <summary>
/// Helper class for delaying
/// </summary>
internal static class DelayHelper
{
/// <summary>
/// Delay a random amount of time up to 5 seconds
/// </summary>
public static void DelayRandom()
{
var r = new Random();
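// Next(0, 50) yields 0-49, so the sleep below lasts between 0 and 4,900 milliseconds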
int delay = r.Next(0, 50);
Thread.Sleep(delay * 100);
}
}
}

View File

@@ -0,0 +1,65 @@
using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using SabreTools.RedumpLib.Data;
namespace SabreTools.RedumpLib.Web
{
/// <summary>
/// Contains logic for dealing with disc pages
/// </summary>
public static class Discs
{
/// <summary>
/// Download the last modified disc pages, until first failure
/// </summary>
/// <param name="rc">RedumpClient for connectivity</param>
/// <param name="outDir">Output directory to save data to</param>
/// <param name="force">Force continuation of download</param>
/// <returns>All disc IDs in last modified range, empty on error</returns>
public static async Task<List<int>> DownloadLastModified(RedumpClient rc, string? outDir, bool force)
{
List<int> ids = [];
// Keep getting last modified pages until there are none left
int pageNumber = 1;
while (true)
{
var pageIds = await rc.CheckSingleSitePage(string.Format(Constants.LastModifiedUrl, pageNumber++), outDir, !force);
ids.AddRange(pageIds);
if (pageIds.Count == 0)
break;
}
return ids;
}
/// <summary>
/// Download the specified range of site disc pages
/// </summary>
/// <param name="rc">RedumpClient for connectivity</param>
/// <param name="outDir">Output directory to save data to</param>
/// <param name="minId">Starting ID for the range</param>
/// <param name="maxId">Ending ID for the range (inclusive)</param>
/// <returns>All disc IDs in the requested range, empty on error</returns>
public static async Task<List<int>> DownloadSiteRange(RedumpClient rc, string? outDir, int minId = 0, int maxId = 0)
{
List<int> ids = [];
if (!rc.LoggedIn)
{
Console.WriteLine("Site download functionality is only available to Redump members");
return ids;
}
for (int id = minId; id <= maxId; id++)
{
ids.Add(id);
if (await rc.DownloadSingleSiteID(id, outDir, true))
DelayHelper.DelayRandom(); // Intentional sleep here so we don't flood the server
}
return ids;
}
}
}

View File

@@ -0,0 +1,71 @@
using System;
using System.Threading.Tasks;
using SabreTools.RedumpLib.Data;
namespace SabreTools.RedumpLib.Web
{
/// <summary>
/// Contains logic for dealing with packs
/// </summary>
internal static class Packs
{
/// <summary>
/// Download premade packs
/// </summary>
/// <param name="rc">RedumpClient for connectivity</param>
/// <param name="outDir">Output directory to save data to</param>
/// <param name="useSubfolders">True to use named subfolders to store downloads, false to store directly in the output directory</param>
public static async Task<bool> DownloadPacks(RedumpClient rc, string? outDir, bool useSubfolders)
{
var systems = (RedumpSystem[])Enum.GetValues(typeof(RedumpSystem));
await rc.DownloadPacks(Constants.PackCuesUrl, Array.FindAll(systems, s => s.HasCues()), "CUEs", outDir, useSubfolders ? "cue" : null);
await rc.DownloadPacks(Constants.PackDatfileUrl, Array.FindAll(systems, s => s.HasDat()), "DATs", outDir, useSubfolders ? "dat" : null);
await rc.DownloadPacks(Constants.PackDkeysUrl, Array.FindAll(systems, s => s.HasDkeys()), "Decrypted KEYS", outDir, useSubfolders ? "dkey" : null);
await rc.DownloadPacks(Constants.PackGdiUrl, Array.FindAll(systems, s => s.HasGdi()), "GDIs", outDir, useSubfolders ? "gdi" : null);
await rc.DownloadPacks(Constants.PackKeysUrl, Array.FindAll(systems, s => s.HasKeys()), "KEYS", outDir, useSubfolders ? "keys" : null);
await rc.DownloadPacks(Constants.PackLsdUrl, Array.FindAll(systems, s => s.HasLsd()), "LSD", outDir, useSubfolders ? "lsd" : null);
await rc.DownloadPacks(Constants.PackSbiUrl, Array.FindAll(systems, s => s.HasSbi()), "SBIs", outDir, useSubfolders ? "sbi" : null);
return true;
}
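// Illustrative note (assumption, not confirmed by the visible code): with useSubfolders
// set to true, each pack type is presumably written under a named subfolder of outDir
// (e.g. "cue", "dat", "dkey"); with false, everything lands directly in outDir.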
/// <summary>
/// Download premade packs for an individual system
/// </summary>
/// <param name="rc">RedumpClient for connectivity</param>
/// <param name="system">RedumpSystem to get all possible packs for</param>
/// <param name="outDir">Output directory to save data to</param>
/// <param name="useSubfolders">True to use named subfolders to store downloads, false to store directly in the output directory</param>
public static async Task<bool> DownloadPacksForSystem(RedumpClient rc, RedumpSystem? system, string? outDir, bool useSubfolders)
{
if (system == null)
return false;
var systemAsArray = new RedumpSystem[] { system.Value };
if (system.HasCues())
await rc.DownloadPacks(Constants.PackCuesUrl, systemAsArray, "CUEs", outDir, useSubfolders ? "cue" : null);
if (system.HasDat())
await rc.DownloadPacks(Constants.PackDatfileUrl, systemAsArray, "DATs", outDir, useSubfolders ? "dat" : null);
if (system.HasDkeys())
await rc.DownloadPacks(Constants.PackDkeysUrl, systemAsArray, "Decrypted KEYS", outDir, useSubfolders ? "dkey" : null);
if (system.HasGdi())
await rc.DownloadPacks(Constants.PackGdiUrl, systemAsArray, "GDIs", outDir, useSubfolders ? "gdi" : null);
if (system.HasKeys())
await rc.DownloadPacks(Constants.PackKeysUrl, systemAsArray, "KEYS", outDir, useSubfolders ? "keys" : null);
if (system.HasLsd())
await rc.DownloadPacks(Constants.PackLsdUrl, systemAsArray, "LSD", outDir, useSubfolders ? "lsd" : null);
if (system.HasSbi())
await rc.DownloadPacks(Constants.PackSbiUrl, systemAsArray, "SBIs", outDir, useSubfolders ? "sbi" : null);
return true;
}
}
}

View File

@@ -1,19 +1,20 @@
#if !NETFRAMEWORK
using System;
using System.Collections.Generic;
using System.IO;
using System.Net;
#if NETCOREAPP
using System.Net.Http;
using System.Net.Http.Headers;
#endif
using System.Text;
using System.Text.RegularExpressions;
using System.Threading;
using System.Threading.Tasks;
using SabreTools.RedumpLib.Data;
namespace SabreTools.RedumpLib.Web
{
public class RedumpHttpClient : HttpClient
public class RedumpClient
{
#region Properties
@@ -27,14 +28,44 @@ namespace SabreTools.RedumpLib.Web
/// </summary>
public bool IsStaff { get; private set; } = false;
/// <summary>
/// Maximum retry count for any operation
/// </summary>
public int RetryCount { get; private set; } = 3;
/// <summary>
/// Internal client for interaction
/// </summary>
#if NETFRAMEWORK
private CookieWebClient _internalClient;
#else
private HttpClient _internalClient;
#endif
#endregion
/// <summary>
/// Constructor
/// </summary>
public RedumpHttpClient()
: base(new HttpClientHandler { UseCookies = true })
public RedumpClient()
{
#if NETFRAMEWORK
_internalClient = new CookieWebClient();
#else
_internalClient = new HttpClient(new HttpClientHandler { UseCookies = true }) { Timeout = TimeSpan.FromSeconds(30) };
#endif
}
/// <summary>
/// Constructor
/// </summary>
public RedumpClient(int retryCount) : this()
{
// Ensure the retry count is positive; otherwise fall back to the default of 3
if (retryCount <= 0)
retryCount = 3;
RetryCount = retryCount;
}
#region Credentials
@@ -42,22 +73,22 @@ namespace SabreTools.RedumpLib.Web
/// <summary>
/// Validate supplied credentials
/// </summary>
public async static Task<(bool?, string?)> ValidateCredentials(string username, string password)
public async static Task<bool?> ValidateCredentials(string username, string password)
{
// If options are invalid or we're missing something key, just return
if (string.IsNullOrWhiteSpace(username) || string.IsNullOrWhiteSpace(password))
return (false, null);
if (string.IsNullOrEmpty(username) || string.IsNullOrEmpty(password))
return false;
// Try logging in with the supplied credentials otherwise
using RedumpHttpClient httpClient = new();
var redumpClient = new RedumpClient();
bool? loggedIn = await httpClient.Login(username, password);
bool? loggedIn = await redumpClient.Login(username, password);
if (loggedIn == true)
return (true, "Redump username and password accepted!");
return true;
else if (loggedIn == false)
return (false, "Redump username and password denied!");
return false;
else
return (null, "An error occurred validating your credentials!");
return null;
}
/// <summary>
@@ -69,23 +100,27 @@ namespace SabreTools.RedumpLib.Web
public async Task<bool?> Login(string username, string password)
{
// Credentials verification
if (!string.IsNullOrWhiteSpace(username) && !string.IsNullOrWhiteSpace(password))
if (!string.IsNullOrEmpty(username) && !string.IsNullOrEmpty(password))
{
Console.WriteLine("Credentials entered, will attempt Redump login...");
}
else if (!string.IsNullOrWhiteSpace(username) && string.IsNullOrWhiteSpace(password))
else if (!string.IsNullOrEmpty(username) && string.IsNullOrEmpty(password))
{
Console.WriteLine("Only a username was specified, will not attempt Redump login...");
return false;
}
else if (string.IsNullOrWhiteSpace(username))
else if (string.IsNullOrEmpty(username))
{
Console.WriteLine("No credentials entered, will not attempt Redump login...");
return false;
}
// URL-encode the password for inclusion in the form body
#if NET20 || NET35 || NET40
password = Uri.EscapeUriString(password);
#else
password = WebUtility.UrlEncode(password);
#endif
// Attempt to login up to 3 times
for (int i = 0; i < 3; i++)
@@ -93,25 +128,36 @@ namespace SabreTools.RedumpLib.Web
try
{
// Get the current token from the login page
var loginPage = await GetStringAsync(Constants.LoginUrl);
string token = Constants.TokenRegex.Match(loginPage).Groups[1].Value;
var loginPage = await DownloadString(Constants.LoginUrl);
string token = Constants.TokenRegex.Match(loginPage ?? string.Empty).Groups[1].Value;
#if NETFRAMEWORK
// Construct the login request
_internalClient.Headers[HttpRequestHeader.ContentType] = "application/x-www-form-urlencoded";
_internalClient.Encoding = Encoding.UTF8;
// Send the login request and get the result
string? responseContent = _internalClient.UploadString(Constants.LoginUrl, $"form_sent=1&redirect_url=&csrf_token={token}&req_username={username}&req_password={password}&save_pass=0");
#else
// Construct the login request
var postContent = new StringContent($"form_sent=1&redirect_url=&csrf_token={token}&req_username={username}&req_password={password}&save_pass=0", Encoding.UTF8);
postContent.Headers.ContentType = MediaTypeHeaderValue.Parse("application/x-www-form-urlencoded");
// Send the login request and get the result
var response = await PostAsync(Constants.LoginUrl, postContent);
var response = await _internalClient.PostAsync(Constants.LoginUrl, postContent);
string? responseContent = null;
if (response?.Content != null)
responseContent = await response.Content.ReadAsStringAsync();
#endif
if (string.IsNullOrWhiteSpace(responseContent))
// An empty response indicates an error
if (string.IsNullOrEmpty(responseContent))
{
Console.WriteLine($"An error occurred while trying to log in on attempt {i}: No response");
continue;
}
// Explicit confirmation that the login was wrong
if (responseContent.Contains("Incorrect username and/or password."))
{
Console.WriteLine("Invalid credentials entered, continuing without logging in...");
@@ -140,6 +186,108 @@ namespace SabreTools.RedumpLib.Web
#endregion
#region Generic Helpers
/// <summary>
/// Download from a URI to a byte array
/// </summary>
/// <param name="uri">Remote URI to retrieve</param>
/// <returns>Byte array from the URI, null on error</returns>
public async Task<byte[]?> DownloadData(string uri)
{
// Only retry a positive number of times
if (RetryCount <= 0)
return null;
for (int i = 0; i < RetryCount; i++)
{
try
{
#if NET40
return await Task.Factory.StartNew(() => _internalClient.DownloadData(uri));
#elif NETFRAMEWORK
return await Task.Run(() => _internalClient.DownloadData(uri));
#else
return await _internalClient.GetByteArrayAsync(uri);
#endif
}
catch { }
// Sleep for 100ms if the last attempt failed
Thread.Sleep(100);
}
return null;
}
/// <summary>
/// Download from a URI to a local file
/// </summary>
/// <param name="uri">Remote URI to retrieve</param>
/// <param name="fileName">Filename to write to</param>
/// <returns>The remote filename from the URI, null on error</returns>
public async Task<string?> DownloadFile(string uri, string fileName)
{
#if NET40
await Task.Factory.StartNew(() => { _internalClient.DownloadFile(uri, fileName); return true; });
return _internalClient.GetLastFilename();
#elif NETFRAMEWORK
await Task.Run(() => _internalClient.DownloadFile(uri, fileName));
return _internalClient.GetLastFilename();
#else
// Make the call to get the file
var response = await _internalClient.GetAsync(uri);
if (response?.Content?.Headers == null || !response.IsSuccessStatusCode)
{
Console.WriteLine($"Could not download {uri}");
return null;
}
// Copy the data to a local temp file
using (var responseStream = await response.Content.ReadAsStreamAsync())
using (var tempFileStream = File.OpenWrite(fileName))
{
responseStream.CopyTo(tempFileStream);
}
return response.Content.Headers.ContentDisposition?.FileName?.Replace("\"", "");
#endif
}
/// <summary>
/// Download from a URI to a string
/// </summary>
/// <param name="uri">Remote URI to retrieve</param>
/// <returns>String from the URI, null on error</returns>
public async Task<string?> DownloadString(string uri)
{
// Only retry a positive number of times
if (RetryCount <= 0)
return null;
for (int i = 0; i < RetryCount; i++)
{
try
{
#if NET40
return await Task.Factory.StartNew(() => _internalClient.DownloadString(uri));
#elif NETFRAMEWORK
return await Task.Run(() => _internalClient.DownloadString(uri));
#else
return await _internalClient.GetStringAsync(uri);
#endif
}
catch { }
// Sleep for 100ms if the last attempt failed
Thread.Sleep(100);
}
return null;
}
#endregion
#region Single Page Helpers
/// <summary>
@@ -149,10 +297,10 @@ namespace SabreTools.RedumpLib.Web
/// <returns>List of IDs from the page, empty on error</returns>
public async Task<List<int>> CheckSingleSitePage(string url)
{
List<int> ids = new();
List<int> ids = [];
// Try up to 3 times to retrieve the data
string? dumpsPage = await DownloadString(url, retries: 3);
// Try to retrieve the data
string? dumpsPage = await DownloadString(url);
// If we have no dumps left
if (dumpsPage == null || dumpsPage.Contains("No discs found."))
@@ -196,15 +344,17 @@ namespace SabreTools.RedumpLib.Web
/// <param name="url">Base URL to download using</param>
/// <param name="outDir">Output directory to save data to</param>
/// <param name="failOnSingle">True to return on first error, false otherwise</param>
/// <returns>True if the page could be downloaded, false otherwise</returns>
public async Task<bool> CheckSingleSitePage(string url, string? outDir, bool failOnSingle)
/// <returns>List of IDs that were found on success, empty on error</returns>
public async Task<List<int>> CheckSingleSitePage(string url, string? outDir, bool failOnSingle)
{
// Try up to 3 times to retrieve the data
string? dumpsPage = await DownloadString(url, retries: 3);
List<int> ids = [];
// Try to retrieve the data
string? dumpsPage = await DownloadString(url);
// If we have no dumps left
if (dumpsPage == null || dumpsPage.Contains("No discs found."))
return false;
return ids;
// If we have a single disc page already
if (dumpsPage.Contains("<b>Download:</b>"))
@@ -212,12 +362,13 @@ namespace SabreTools.RedumpLib.Web
var value = Regex.Match(dumpsPage, @"/disc/(\d+)/sfv/").Groups[1].Value;
if (int.TryParse(value, out int id))
{
ids.Add(id);
bool downloaded = await DownloadSingleSiteID(id, outDir, false);
if (!downloaded && failOnSingle)
return false;
return ids;
}
return false;
return ids;
}
// Otherwise, traverse each dump on the page
@@ -231,9 +382,10 @@ namespace SabreTools.RedumpLib.Web
{
if (int.TryParse(match.Groups[1].Value, out int value))
{
ids.Add(value);
bool downloaded = await DownloadSingleSiteID(value, outDir, false);
if (!downloaded && failOnSingle)
return false;
return ids;
}
}
catch (Exception ex)
@@ -243,7 +395,7 @@ namespace SabreTools.RedumpLib.Web
}
}
return true;
return ids;
}
/// <summary>
@@ -253,10 +405,10 @@ namespace SabreTools.RedumpLib.Web
/// <returns>List of IDs from the page, empty on error</returns>
public async Task<List<int>> CheckSingleWIPPage(string url)
{
List<int> ids = new();
List<int> ids = [];
// Try up to 3 times to retrieve the data
string? dumpsPage = await DownloadString(url, retries: 3);
// Try to retrieve the data
string? dumpsPage = await DownloadString(url);
// If we have no dumps left
if (dumpsPage == null || dumpsPage.Contains("No discs found."))
@@ -290,15 +442,17 @@ namespace SabreTools.RedumpLib.Web
/// <param name="wc">RedumpWebClient to access the packs</param>
/// <param name="outDir">Output directory to save data to</param>
/// <param name="failOnSingle">True to return on first error, false otherwise</param>
/// <returns>True if the page could be downloaded, false otherwise</returns>
public async Task<bool> CheckSingleWIPPage(string url, string? outDir, bool failOnSingle)
/// <returns>List of IDs that were found on success, empty on error</returns>
public async Task<List<int>> CheckSingleWIPPage(string url, string? outDir, bool failOnSingle)
{
// Try up to 3 times to retrieve the data
string? dumpsPage = await DownloadString(url, retries: 3);
List<int> ids = [];
// Try to retrieve the data
string? dumpsPage = await DownloadString(url);
// If we have no dumps left
if (dumpsPage == null || dumpsPage.Contains("No discs found."))
return false;
return ids;
// Otherwise, traverse each dump on the page
var matches = Constants.NewDiscRegex.Matches(dumpsPage);
@@ -311,9 +465,10 @@ namespace SabreTools.RedumpLib.Web
{
if (int.TryParse(match.Groups[2].Value, out int value))
{
ids.Add(value);
bool downloaded = await DownloadSingleWIPID(value, outDir, false);
if (!downloaded && failOnSingle)
return false;
return ids;
}
}
catch (Exception ex)
@@ -323,7 +478,7 @@ namespace SabreTools.RedumpLib.Web
}
}
return true;
return ids;
}
#endregion
@@ -340,7 +495,13 @@ namespace SabreTools.RedumpLib.Web
{
try
{
return await GetByteArrayAsync(string.Format(url, system.ShortName()));
#if NET40
return await Task.Factory.StartNew(() => _internalClient.DownloadData(string.Format(url, system.ShortName())));
#elif NETFRAMEWORK
return await Task.Run(() => _internalClient.DownloadData(string.Format(url, system.ShortName())));
#else
return await _internalClient.GetByteArrayAsync(string.Format(url, system.ShortName()));
#endif
}
catch (Exception ex)
{
@@ -361,7 +522,7 @@ namespace SabreTools.RedumpLib.Web
try
{
// If no output directory is defined, use the current directory instead
if (string.IsNullOrWhiteSpace(outDir))
if (string.IsNullOrEmpty(outDir))
outDir = Environment.CurrentDirectory;
string tempfile = Path.Combine(outDir, "tmp" + Guid.NewGuid().ToString());
@@ -390,9 +551,9 @@ namespace SabreTools.RedumpLib.Web
Console.WriteLine($"Processing ID: {paddedId}");
try
{
// Try up to 3 times to retrieve the data
// Try to retrieve the data
string discPageUri = string.Format(Constants.DiscPageUrl, +id);
string? discPage = await DownloadString(discPageUri, retries: 3);
string? discPage = await DownloadString(discPageUri);
if (discPage == null || discPage.Contains($"Disc with ID \"{id}\" doesn't exist"))
{
@@ -420,7 +581,7 @@ namespace SabreTools.RedumpLib.Web
public async Task<bool> DownloadSingleSiteID(int id, string? outDir, bool rename)
{
// If no output directory is defined, use the current directory instead
if (string.IsNullOrWhiteSpace(outDir))
if (string.IsNullOrEmpty(outDir))
outDir = Environment.CurrentDirectory;
string paddedId = id.ToString().PadLeft(6, '0');
@@ -428,9 +589,9 @@ namespace SabreTools.RedumpLib.Web
Console.WriteLine($"Processing ID: {paddedId}");
try
{
// Try up to 3 times to retrieve the data
// Try to retrieve the data
string discPageUri = string.Format(Constants.DiscPageUrl, +id);
string? discPage = await DownloadString(discPageUri, retries: 3);
string? discPage = await DownloadString(discPageUri);
if (discPage == null || discPage.Contains($"Disc with ID \"{id}\" doesn't exist"))
{
@@ -552,9 +713,9 @@ namespace SabreTools.RedumpLib.Web
Console.WriteLine($"Processing ID: {paddedId}");
try
{
// Try up to 3 times to retrieve the data
// Try to retrieve the data
string discPageUri = string.Format(Constants.WipDiscPageUrl, +id);
string? discPage = await DownloadString(discPageUri, retries: 3);
string? discPage = await DownloadString(discPageUri);
if (discPage == null || discPage.Contains($"WIP disc with ID \"{id}\" doesn't exist"))
{
@@ -582,7 +743,7 @@ namespace SabreTools.RedumpLib.Web
public async Task<bool> DownloadSingleWIPID(int id, string? outDir, bool rename)
{
// If no output directory is defined, use the current directory instead
if (string.IsNullOrWhiteSpace(outDir))
if (string.IsNullOrEmpty(outDir))
outDir = Environment.CurrentDirectory;
string paddedId = id.ToString().PadLeft(6, '0');
@@ -590,9 +751,9 @@ namespace SabreTools.RedumpLib.Web
Console.WriteLine($"Processing ID: {paddedId}");
try
{
// Try up to 3 times to retrieve the data
// Try to retrieve the data
string discPageUri = string.Format(Constants.WipDiscPageUrl, +id);
string? discPage = await DownloadString(discPageUri, retries: 3);
string? discPage = await DownloadString(discPageUri);
if (discPage == null || discPage.Contains($"WIP disc with ID \"{id}\" doesn't exist"))
{
@@ -666,7 +827,7 @@ namespace SabreTools.RedumpLib.Web
/// <param name="url">Base URL to download using</param>
/// <param name="system">Systems to download packs for</param>
/// <param name="title">Name of the pack that is downloading</param>
public async Task<Dictionary<RedumpSystem, byte[]>> DownloadPacks(string url, RedumpSystem?[] systems, string title)
public async Task<Dictionary<RedumpSystem, byte[]>> DownloadPacks(string url, RedumpSystem[] systems, string title)
{
var packsDictionary = new Dictionary<RedumpSystem, byte[]>();
@@ -674,7 +835,7 @@ namespace SabreTools.RedumpLib.Web
foreach (var system in systems)
{
// If the system is invalid, we can't do anything
if (system == null || !system.IsAvailable())
if (!system.IsAvailable())
continue;
// If we didn't have credentials
@@ -683,13 +844,13 @@ namespace SabreTools.RedumpLib.Web
// If the system is unknown, we can't do anything
string? longName = system.LongName();
if (string.IsNullOrWhiteSpace(longName))
if (string.IsNullOrEmpty(longName))
continue;
Console.Write($"\r{longName}{new string(' ', Console.BufferWidth - longName.Length - 1)}");
Console.Write($"\r{longName}{new string(' ', Console.BufferWidth - longName!.Length - 1)}");
byte[]? pack = await DownloadSinglePack(url, system);
if (pack != null)
packsDictionary.Add(system.Value, pack);
packsDictionary.Add(system, pack);
}
Console.Write($"\rComplete!{new string(' ', Console.BufferWidth - 10)}");
@@ -706,13 +867,13 @@ namespace SabreTools.RedumpLib.Web
/// <param name="title">Name of the pack that is downloading</param>
/// <param name="outDir">Output directory to save data to</param>
/// <param name="subfolder">Named subfolder for the pack, used optionally</param>
public async Task<bool> DownloadPacks(string url, RedumpSystem?[] systems, string title, string? outDir, string? subfolder)
public async Task<bool> DownloadPacks(string url, RedumpSystem[] systems, string title, string? outDir, string? subfolder)
{
Console.WriteLine($"Downloading {title}");
foreach (var system in systems)
{
// If the system is invalid, we can't do anything
if (system == null || !system.IsAvailable())
if (!system.IsAvailable())
continue;
// If we didn't have credentials
@@ -721,10 +882,10 @@ namespace SabreTools.RedumpLib.Web
// If the system is unknown, we can't do anything
string? longName = system.LongName();
if (string.IsNullOrWhiteSpace(longName))
if (string.IsNullOrEmpty(longName))
continue;
Console.Write($"\r{longName}{new string(' ', Console.BufferWidth - longName.Length - 1)}");
Console.Write($"\r{longName}{new string(' ', Console.BufferWidth - longName!.Length - 1)}");
await DownloadSinglePack(url, system, outDir, subfolder);
}
@@ -733,56 +894,6 @@ namespace SabreTools.RedumpLib.Web
return true;
}
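Because both DownloadPacks overloads now take RedumpSystem[] rather than RedumpSystem?[], a hypothetical caller that previously passed nullable entries would filter them up front. The variables rc, packUrl, outDir, and GetSelectedSystems below are placeholders, not part of the library:

// Hypothetical caller; rc, packUrl, outDir, and GetSelectedSystems are placeholders.
RedumpSystem?[] selected = GetSelectedSystems();
RedumpSystem[] systems = selected
    .Where(s => s.HasValue)   // drop null entries instead of relying on DownloadPacks to skip them
    .Select(s => s.Value)
    .ToArray();
bool ok = await rc.DownloadPacks(packUrl, systems, "Selected packs", outDir, subfolder: null);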
/// <summary>
/// Download from a URI to a local file
/// </summary>
/// <param name="uri">Remote URI to retrieve</param>
/// <param name="fileName">Filename to write to</param>
/// <returns>The remote filename from the URI, null on error</returns>
private async Task<string?> DownloadFile(string uri, string fileName)
{
// Make the call to get the file
var response = await GetAsync(uri);
if (response?.Content?.Headers == null || !response.IsSuccessStatusCode)
{
Console.WriteLine($"Could not download {uri}");
return null;
}
// Copy the data to a local temp file
using (var responseStream = await response.Content.ReadAsStreamAsync())
using (var tempFileStream = File.OpenWrite(fileName))
{
responseStream.CopyTo(tempFileStream);
}
return response.Content.Headers.ContentDisposition?.FileName?.Replace("\"", "");
}
/// <summary>
/// Download from a URI to a string
/// </summary>
/// <param name="uri">Remote URI to retrieve</param>
/// <param name="retries">Number of times to retry on error</param>
/// <returns>String from the URI, null on error</returns>
private async Task<string?> DownloadString(string uri, int retries = 3)
{
// Only retry a positive number of times
if (retries <= 0)
return null;
for (int i = 0; i < retries; i++)
{
try
{
return await GetStringAsync(uri);
}
catch { }
}
return null;
}
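For reference, a minimal sketch of the retry-on-failure pattern the removed DownloadString helper used, written as a standalone method over HttpClient:

// Try the request up to `retries` times, swallowing transient failures; returns null
// when every attempt fails (mirrors the helper removed above).
private static async Task<string?> GetStringWithRetries(HttpClient client, string uri, int retries = 3)
{
    for (int i = 0; i < retries; i++)
    {
        try
        {
            return await client.GetStringAsync(uri);
        }
        catch
        {
            // Swallow and retry; a short backoff here would further reduce server load.
        }
    }
    return null;
}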
/// <summary>
/// Move a tempfile to a new name unless it already exists, in which case, delete the tempfile
/// </summary>
@@ -793,14 +904,14 @@ namespace SabreTools.RedumpLib.Web
private static void MoveOrDelete(string tempfile, string? newfile, string outDir, string? subfolder)
{
// If we don't have a file to move to, just delete the temp file
if (string.IsNullOrWhiteSpace(newfile))
if (string.IsNullOrEmpty(newfile))
{
File.Delete(tempfile);
return;
}
// If we have a subfolder, create it and update the newfile name
if (!string.IsNullOrWhiteSpace(subfolder))
if (!string.IsNullOrEmpty(subfolder))
{
if (!Directory.Exists(Path.Combine(outDir, subfolder)))
Directory.CreateDirectory(Path.Combine(outDir, subfolder));
@@ -817,6 +928,4 @@ namespace SabreTools.RedumpLib.Web
#endregion
}
}
#endif
}


@@ -0,0 +1,102 @@
using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using SabreTools.RedumpLib.Data;
namespace SabreTools.RedumpLib.Web
{
/// <summary>
/// Contains logic for dealing with searches
/// </summary>
internal static class Search
{
/// <summary>
/// List the disc IDs associated with a given quicksearch query
/// </summary>
/// <param name="rc">RedumpClient for connectivity</param>
/// <param name="query">Query string to attempt to search for</param>
/// <param name="noSlash">Don't replace slashes with `-` in queries</param>
/// <returns>All disc IDs for the given query, empty on error</returns>
public static async Task<List<int>> ListSearchResults(RedumpClient rc, string? query, bool noSlash)
{
// If the query is invalid
if (string.IsNullOrEmpty(query))
return [];
List<int> ids = [];
// Strip quotes
query = query!.Trim('"', '\'');
// Special characters become dashes
query = query.Replace(' ', '-');
query = query.Replace('\\', '-');
if (!noSlash)
query = query.Replace('/', '-');
// Lowercase is defined per language
query = query.ToLowerInvariant();
// Keep getting quicksearch pages until there are none left
try
{
int pageNumber = 1;
while (true)
{
List<int> pageIds = await rc.CheckSingleSitePage(string.Format(Constants.QuickSearchUrl, query, pageNumber++));
ids.AddRange(pageIds);
if (pageIds.Count <= 1)
break;
}
}
catch (Exception ex)
{
Console.WriteLine($"An exception occurred while trying to log in: {ex}");
return [];
}
return ids;
}
/// <summary>
/// Download the disc pages associated with a given quicksearch query
/// </summary>
/// <param name="rc">RedumpClient for connectivity</param>
/// <param name="query">Query string to attempt to search for</param>
/// <param name="outDir">Output directory to save data to</param>
/// <param name="noSlash">Don't replace slashes with `-` in queries</param>
/// <returns>All disc IDs for the given query, empty on error</returns>
public static async Task<List<int>> DownloadSearchResults(RedumpClient rc, string? query, string? outDir, bool noSlash)
{
List<int> ids = [];
// If the query is invalid
if (string.IsNullOrEmpty(query))
return ids;
// Strip quotes
query = query!.Trim('"', '\'');
// Special characters become dashes
query = query.Replace(' ', '-');
query = query.Replace('\\', '-');
if (!noSlash)
query = query.Replace('/', '-');
// Lowercase is defined per language
query = query.ToLowerInvariant();
// Keep getting quicksearch pages until there are none left
int pageNumber = 1;
while (true)
{
var pageIds = await rc.CheckSingleSitePage(string.Format(Constants.QuickSearchUrl, query, pageNumber++), outDir, false);
ids.AddRange(pageIds);
if (pageIds.Count == 0)
break;
}
return ids;
}
}
}
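The query preparation above (quote stripping, dash substitution, invariant lowercasing) reads naturally as a standalone helper; this sketch only restates the logic shown in both methods:

// Normalize a quicksearch query the same way ListSearchResults and
// DownloadSearchResults do; returns null for empty input.
static string? NormalizeQuery(string? query, bool noSlash)
{
    if (string.IsNullOrEmpty(query))
        return null;

    // Strip quotes, then turn separators into dashes
    query = query!.Trim('"', '\'');
    query = query.Replace(' ', '-').Replace('\\', '-');
    if (!noSlash)
        query = query.Replace('/', '-');

    // Lowercase is defined per language, so use the invariant culture
    return query.ToLowerInvariant();
}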


@@ -0,0 +1,110 @@
using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using SabreTools.RedumpLib.Data;
namespace SabreTools.RedumpLib.Web
{
/// <summary>
/// Contains logic for dealing with users
/// </summary>
public static class User
{
/// <summary>
/// Download the disc pages associated with the given user
/// </summary>
/// <param name="rc">RedumpClient for connectivity</param>
/// <param name="username">Username to check discs for</param>
/// <param name="outDir">Output directory to save data to</param>
/// <returns>All disc IDs for the given user, empty on error</returns>
public static async Task<List<int>> DownloadUser(RedumpClient rc, string? username, string? outDir)
{
List<int> ids = [];
if (!rc.LoggedIn || string.IsNullOrEmpty(username))
{
Console.WriteLine("User download functionality is only available to Redump members");
return ids;
}
// Keep getting user pages until there are none left
int pageNumber = 1;
while (true)
{
var pageIds = await rc.CheckSingleSitePage(string.Format(Constants.UserDumpsUrl, username, pageNumber++), outDir, false);
ids.AddRange(pageIds);
if (pageIds.Count == 0)
break;
}
return ids;
}
/// <summary>
/// Download the last modified disc pages associated with the given user, until first failure
/// </summary>
/// <param name="rc">RedumpClient for connectivity</param>
/// <param name="username">Username to check discs for</param>
/// <param name="outDir">Output directory to save data to</param>
/// <returns>All disc IDs for the given user, empty on error</returns>
public static async Task<List<int>> DownloadUserLastModified(RedumpClient rc, string? username, string? outDir)
{
List<int> ids = [];
if (!rc.LoggedIn || string.IsNullOrEmpty(username))
{
Console.WriteLine("User download functionality is only available to Redump members");
return ids;
}
// Keep getting last modified user pages until there are none left
int pageNumber = 1;
while (true)
{
var pageIds = await rc.CheckSingleSitePage(string.Format(Constants.UserDumpsLastModifiedUrl, username, pageNumber++), outDir, true);
ids.AddRange(pageIds);
if (pageIds.Count == 0)
break;
}
return ids;
}
/// <summary>
/// List the disc IDs associated with the given user
/// </summary>
/// <param name="rc">RedumpClient for connectivity</param>
/// <param name="username">Username to check discs for</param>
/// <returns>All disc IDs for the given user, empty on error</returns>
public static async Task<List<int>> ListUser(RedumpClient rc, string? username)
{
List<int> ids = [];
if (!rc.LoggedIn || string.IsNullOrEmpty(username))
{
Console.WriteLine("User download functionality is only available to Redump members");
return ids;
}
// Keep getting user pages until there are none left
try
{
int pageNumber = 1;
while (true)
{
var pageIds = await rc.CheckSingleSitePage(string.Format(Constants.UserDumpsUrl, username, pageNumber++));
ids.AddRange(pageIds);
if (pageIds.Count <= 1)
break;
}
}
catch (Exception ex)
{
Console.WriteLine($"An exception occurred while trying to log in: {ex}");
return [];
}
return ids;
}
}
}
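The Search and User helpers share the same pagination loop. A minimal sketch of that pattern factored into a generic crawler; the delegate-based shape is illustrative, not part of the library:

// Keep requesting numbered pages and collecting IDs until a page comes back empty.
static async Task<List<int>> CrawlPages(Func<int, Task<List<int>>> getPage)
{
    List<int> ids = [];
    int pageNumber = 1;
    while (true)
    {
        List<int> pageIds = await getPage(pageNumber++);
        ids.AddRange(pageIds);
        if (pageIds.Count == 0)
            break;
    }
    return ids;
}

// Example: crawl a user's dump pages (rc, username, and outDir are placeholders).
// List<int> ids = await CrawlPages(p =>
//     rc.CheckSingleSitePage(string.Format(Constants.UserDumpsUrl, username, p), outDir, false));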


@@ -0,0 +1,52 @@
using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using SabreTools.RedumpLib.Data;
namespace SabreTools.RedumpLib.Web
{
/// <summary>
/// Contains logic for dealing with WIP queue
/// </summary>
public static class WIP
{
/// <summary>
/// Download the last submitted WIP disc pages
/// </summary>
/// <param name="rc">RedumpClient for connectivity</param>
/// <param name="outDir">Output directory to save data to</param>
/// <returns>All disc IDs in last submitted range, empty on error</returns>
public static async Task<List<int>> DownloadLastSubmitted(RedumpClient rc, string? outDir)
{
return await rc.CheckSingleWIPPage(Constants.WipDumpsUrl, outDir, false);
}
/// <summary>
/// Download the specified range of WIP disc pages
/// </summary>
/// <param name="rc">RedumpClient for connectivity</param>
/// <param name="outDir">Output directory to save data to</param>
/// <param name="minId">Starting ID for the range</param>
/// <param name="maxId">Ending ID for the range (inclusive)</param>
/// <returns>All disc IDs in last submitted range, empty on error</returns>
public static async Task<List<int>> DownloadWIPRange(RedumpClient rc, string? outDir, int minId = 0, int maxId = 0)
{
List<int> ids = [];
if (!rc.LoggedIn || !rc.IsStaff)
{
Console.WriteLine("WIP download functionality is only available to Redump moderators");
return ids;
}
for (int id = minId; id <= maxId; id++)
{
ids.Add(id);
if (await rc.DownloadSingleWIPID(id, outDir, true))
DelayHelper.DelayRandom(); // Intentional sleep here so we don't flood the server
}
return ids;
}
}
}
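A hedged usage sketch of the range helper; it assumes a RedumpClient that is already logged in as staff (construction and login are not shown here):

// Download WIP entries 100 through 120 inclusive into ./wip-dumps; the helper
// rate-limits between downloads via DelayHelper.DelayRandom().
List<int> attempted = await WIP.DownloadWIPRange(rc, outDir: "wip-dumps", minId: 100, maxId: 120);
Console.WriteLine($"Attempted {attempted.Count} WIP IDs");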


@@ -1,833 +0,0 @@
#if NETFRAMEWORK
using System;
using System.Collections.Generic;
using System.IO;
using System.Net;
using System.Text;
using System.Text.RegularExpressions;
using SabreTools.RedumpLib.Data;
namespace SabreTools.RedumpLib.Web
{
// https://stackoverflow.com/questions/1777221/using-cookiecontainer-with-webclient-class
public class RedumpWebClient : WebClient
{
private readonly CookieContainer m_container = new CookieContainer();
/// <summary>
/// Determines if user is logged into Redump
/// </summary>
public bool LoggedIn { get; private set; } = false;
/// <summary>
/// Determines if the user is a staff member
/// </summary>
public bool IsStaff { get; private set; } = false;
/// <summary>
/// Get the last downloaded filename, if possible
/// </summary>
/// <returns></returns>
public string? GetLastFilename()
{
// If the response headers are null or empty
if (ResponseHeaders == null || ResponseHeaders.Count == 0)
return null;
// If we don't have the response header we care about
string headerValue = ResponseHeaders.Get("Content-Disposition");
if (string.IsNullOrWhiteSpace(headerValue))
return null;
// Extract the filename from the value
return headerValue.Substring(headerValue.IndexOf("filename=") + 9).Replace("\"", "");
}
/// <inheritdoc/>
protected override WebRequest GetWebRequest(Uri address)
{
WebRequest request = base.GetWebRequest(address);
if (request is HttpWebRequest webRequest)
webRequest.CookieContainer = m_container;
return request;
}
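For comparison, a minimal sketch of how the same cookie persistence is typically achieved in the HttpClient era; this is an assumption about a replacement client, not taken from this repository:

// A shared CookieContainer attached through HttpClientHandler plays the role of the
// m_container field above: every request through this client reuses the same cookies.
var cookies = new CookieContainer();
var handler = new HttpClientHandler { CookieContainer = cookies, UseCookies = true };
var client = new HttpClient(handler);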
/// <summary>
/// Validate supplied credentials
/// </summary>
public static (bool?, string?) ValidateCredentials(string username, string password)
{
// If options are invalid or we're missing something key, just return
if (string.IsNullOrWhiteSpace(username) || string.IsNullOrWhiteSpace(password))
return (false, null);
// Try logging in with the supplied credentials otherwise
using (RedumpWebClient wc = new RedumpWebClient())
{
bool? loggedIn = wc.Login(username, password);
if (loggedIn == true)
return (true, "Redump username and password accepted!");
else if (loggedIn == false)
return (false, "Redump username and password denied!");
else
return (null, "An error occurred validating your credentials!");
}
}
/// <summary>
/// Login to Redump, if possible
/// </summary>
/// <param name="username">Redump username</param>
/// <param name="password">Redump password</param>
/// <returns>True if the user could be logged in, false otherwise, null on error</returns>
public bool? Login(string username, string password)
{
// Credentials verification
if (!string.IsNullOrWhiteSpace(username) && !string.IsNullOrWhiteSpace(password))
{
Console.WriteLine("Credentials entered, will attempt Redump login...");
}
else if (!string.IsNullOrWhiteSpace(username) && string.IsNullOrWhiteSpace(password))
{
Console.WriteLine("Only a username was specified, will not attempt Redump login...");
return false;
}
else if (string.IsNullOrWhiteSpace(username))
{
Console.WriteLine("No credentials entered, will not attempt Redump login...");
return false;
}
// HTTP encode the password
#if NET40
password = Uri.EscapeUriString(password);
#else
password = WebUtility.UrlEncode(password);
#endif
// Attempt to login up to 3 times
for (int i = 0; i < 3; i++)
{
try
{
// Get the current token from the login page
var loginPage = DownloadString(Constants.LoginUrl);
string token = Constants.TokenRegex.Match(loginPage).Groups[1].Value;
// Construct the login request
Headers[HttpRequestHeader.ContentType] = "application/x-www-form-urlencoded";
Encoding = Encoding.UTF8;
var response = UploadString(Constants.LoginUrl, $"form_sent=1&redirect_url=&csrf_token={token}&req_username={username}&req_password={password}&save_pass=0");
if (response.Contains("Incorrect username and/or password."))
{
Console.WriteLine("Invalid credentials entered, continuing without logging in...");
return false;
}
// The user was able to be logged in
Console.WriteLine("Credentials accepted! Logged into Redump...");
LoggedIn = true;
// If the user is a moderator or staff, set accordingly
if (response.Contains("http://forum.redump.org/forum/9/staff/"))
IsStaff = true;
return true;
}
catch (Exception ex)
{
Console.WriteLine($"An exception occurred while trying to log in on attempt {i}: {ex}");
}
}
Console.WriteLine("Could not login to Redump in 3 attempts, continuing without logging in...");
return false;
}
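A hedged sketch of the same CSRF-token login flow over HttpClient, not necessarily how the replacement RedumpClient implements it; the form field names and error string are taken from the request built above:

static async Task<bool> LoginSketch(HttpClient client, string username, string password)
{
    // Fetch the login page and pull the CSRF token out of it
    string loginPage = await client.GetStringAsync(Constants.LoginUrl);
    string token = Constants.TokenRegex.Match(loginPage).Groups[1].Value;

    // Post the same form fields the WebClient version sends
    var form = new FormUrlEncodedContent(new Dictionary<string, string>
    {
        ["form_sent"] = "1",
        ["redirect_url"] = string.Empty,
        ["csrf_token"] = token,
        ["req_username"] = username,
        ["req_password"] = password,
        ["save_pass"] = "0",
    });
    HttpResponseMessage response = await client.PostAsync(Constants.LoginUrl, form);

    // A failed login comes back with the same error string checked above
    string body = await response.Content.ReadAsStringAsync();
    return !body.Contains("Incorrect username and/or password.");
}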
#region Single Page Helpers
/// <summary>
/// Process a Redump site page as a list of possible IDs or disc page
/// </summary>
/// <param name="url">Base URL to download using</param>
/// <returns>List of IDs from the page, empty on error</returns>
public List<int> CheckSingleSitePage(string url)
{
List<int> ids = new List<int>();
string dumpsPage = string.Empty;
// Try up to 3 times to retrieve the data
for (int i = 0; i < 3; i++)
{
try
{
dumpsPage = DownloadString(url);
break;
}
catch { }
}
// If we have no dumps left
if (dumpsPage.Contains("No discs found."))
return ids;
// If we have a single disc page already
if (dumpsPage.Contains("<b>Download:</b>"))
{
var value = Regex.Match(dumpsPage, @"/disc/(\d+)/sfv/").Groups[1].Value;
if (int.TryParse(value, out int id))
ids.Add(id);
return ids;
}
// Otherwise, traverse each dump on the page
var matches = Constants.DiscRegex.Matches(dumpsPage);
foreach (Match match in matches)
{
try
{
if (int.TryParse(match.Groups[1].Value, out int value))
ids.Add(value);
}
catch (Exception ex)
{
Console.WriteLine($"An exception has occurred: {ex}");
continue;
}
}
return ids;
}
/// <summary>
/// Process a Redump site page as a list of possible IDs or disc page
/// </summary>
/// <param name="url">Base URL to download using</param>
/// <param name="outDir">Output directory to save data to</param>
/// <param name="failOnSingle">True to return on first error, false otherwise</param>
/// <returns>True if the page could be downloaded, false otherwise</returns>
public bool CheckSingleSitePage(string url, string? outDir, bool failOnSingle)
{
string dumpsPage = string.Empty;
// Try up to 3 times to retrieve the data
for (int i = 0; i < 3; i++)
{
try
{
dumpsPage = DownloadString(url);
break;
}
catch { }
}
// If we have no dumps left
if (dumpsPage.Contains("No discs found."))
return false;
// If we have a single disc page already
if (dumpsPage.Contains("<b>Download:</b>"))
{
var value = Regex.Match(dumpsPage, @"/disc/(\d+)/sfv/").Groups[1].Value;
if (int.TryParse(value, out int id))
{
bool downloaded = DownloadSingleSiteID(id, outDir, false);
if (!downloaded && failOnSingle)
return false;
}
return false;
}
// Otherwise, traverse each dump on the page
var matches = Constants.DiscRegex.Matches(dumpsPage);
foreach (Match match in matches)
{
try
{
if (int.TryParse(match.Groups[1].Value, out int value))
{
bool downloaded = DownloadSingleSiteID(value, outDir, false);
if (!downloaded && failOnSingle)
return false;
}
}
catch (Exception ex)
{
Console.WriteLine($"An exception has occurred: {ex}");
continue;
}
}
return true;
}
/// <summary>
/// Process a Redump WIP page as a list of possible IDs or disc page
/// </summary>
/// <param name="wc">RedumpWebClient to access the packs</param>
/// <returns>List of IDs from the page, empty on error</returns>
public List<int> CheckSingleWIPPage(string url)
{
List<int> ids = new List<int>();
string dumpsPage = string.Empty;
// Try up to 3 times to retrieve the data
for (int i = 0; i < 3; i++)
{
try
{
dumpsPage = DownloadString(url);
break;
}
catch { }
}
// If we have no dumps left
if (dumpsPage.Contains("No discs found."))
return ids;
// Otherwise, traverse each dump on the page
var matches = Constants.NewDiscRegex.Matches(dumpsPage);
foreach (Match match in matches)
{
try
{
if (int.TryParse(match.Groups[2].Value, out int value))
ids.Add(value);
}
catch (Exception ex)
{
Console.WriteLine($"An exception has occurred: {ex}");
continue;
}
}
return ids;
}
/// <summary>
/// Process a Redump WIP page as a list of possible IDs or disc page
/// </summary>
/// <param name="wc">RedumpWebClient to access the packs</param>
/// <param name="outDir">Output directory to save data to</param>
/// <param name="failOnSingle">True to return on first error, false otherwise</param>
/// <returns>True if the page could be downloaded, false otherwise</returns>
public bool CheckSingleWIPPage(string url, string? outDir, bool failOnSingle)
{
string dumpsPage = string.Empty;
// Try up to 3 times to retrieve the data
for (int i = 0; i < 3; i++)
{
try
{
dumpsPage = DownloadString(url);
break;
}
catch { }
}
// If we have no dumps left
if (dumpsPage.Contains("No discs found."))
return false;
// Otherwise, traverse each dump on the page
var matches = Constants.NewDiscRegex.Matches(dumpsPage);
foreach (Match match in matches)
{
try
{
if (int.TryParse(match.Groups[2].Value, out int value))
{
bool downloaded = DownloadSingleWIPID(value, outDir, false);
if (!downloaded && failOnSingle)
return false;
}
}
catch (Exception ex)
{
Console.WriteLine($"An exception has occurred: {ex}");
continue;
}
}
return true;
}
#endregion
#region Download Helpers
/// <summary>
/// Download a single pack
/// </summary>
/// <param name="url">Base URL to download using</param>
/// <param name="system">System to download packs for</param>
/// <returns>Byte array containing the downloaded pack, null on error</returns>
public byte[]? DownloadSinglePack(string url, RedumpSystem? system)
{
try
{
return DownloadData(string.Format(url, system.ShortName()));
}
catch (Exception ex)
{
Console.WriteLine($"An exception has occurred: {ex}");
return null;
}
}
/// <summary>
/// Download a single pack
/// </summary>
/// <param name="url">Base URL to download using</param>
/// <param name="system">System to download packs for</param>
/// <param name="outDir">Output directory to save data to</param>
/// <param name="subfolder">Named subfolder for the pack, used optionally</param>
public void DownloadSinglePack(string url, RedumpSystem? system, string? outDir, string? subfolder)
{
try
{
// If no output directory is defined, use the current directory instead
if (string.IsNullOrWhiteSpace(outDir))
outDir = Environment.CurrentDirectory;
string tempfile = Path.Combine(outDir, "tmp" + Guid.NewGuid().ToString());
DownloadFile(string.Format(url, system.ShortName()), tempfile);
MoveOrDelete(tempfile, GetLastFilename(), outDir!, subfolder);
}
catch (Exception ex)
{
Console.WriteLine($"An exception has occurred: {ex}");
}
}
/// <summary>
/// Download an individual site ID data, if possible
/// </summary>
/// <param name="id">Redump disc ID to retrieve</param>
/// <returns>String containing the page contents if successful, null on error</returns>
public string? DownloadSingleSiteID(int id)
{
string paddedId = id.ToString().PadLeft(6, '0');
Console.WriteLine($"Processing ID: {paddedId}");
try
{
string discPage = string.Empty;
// Try up to 3 times to retrieve the data
for (int i = 0; i < 3; i++)
{
try
{
discPage = DownloadString(string.Format(Constants.DiscPageUrl, +id));
break;
}
catch { }
}
if (discPage.Contains($"Disc with ID \"{id}\" doesn't exist"))
{
Console.WriteLine($"ID {paddedId} could not be found!");
return null;
}
Console.WriteLine($"ID {paddedId} has been successfully downloaded");
return discPage;
}
catch (Exception ex)
{
Console.WriteLine($"An exception has occurred: {ex}");
return null;
}
}
/// <summary>
/// Download an individual site ID data, if possible
/// </summary>
/// <param name="id">Redump disc ID to retrieve</param>
/// <param name="outDir">Output directory to save data to</param>
/// <param name="rename">True to rename deleted entries, false otherwise</param>
/// <returns>True if all data was downloaded, false otherwise</returns>
public bool DownloadSingleSiteID(int id, string? outDir, bool rename)
{
// If no output directory is defined, use the current directory instead
if (string.IsNullOrWhiteSpace(outDir))
outDir = Environment.CurrentDirectory;
string paddedId = id.ToString().PadLeft(6, '0');
string paddedIdDir = Path.Combine(outDir, paddedId);
Console.WriteLine($"Processing ID: {paddedId}");
try
{
string discPage = string.Empty;
// Try up to 3 times to retrieve the data
for (int i = 0; i < 3; i++)
{
try
{
discPage = DownloadString(string.Format(Constants.DiscPageUrl, +id));
break;
}
catch { }
}
if (discPage.Contains($"Disc with ID \"{id}\" doesn't exist"))
{
try
{
if (rename)
{
if (Directory.Exists(paddedIdDir) && rename)
Directory.Move(paddedIdDir, paddedIdDir + "-deleted");
else
Directory.CreateDirectory(paddedIdDir + "-deleted");
}
}
catch { }
Console.WriteLine($"ID {paddedId} could not be found!");
return false;
}
// Check if the page has been updated since the last time it was downloaded, if possible
if (File.Exists(Path.Combine(paddedIdDir, "disc.html")))
{
// Read in the cached file
var oldDiscPage = File.ReadAllText(Path.Combine(paddedIdDir, "disc.html"));
// Check for the last modified date in both pages
var oldResult = Constants.LastModifiedRegex.Match(oldDiscPage);
var newResult = Constants.LastModifiedRegex.Match(discPage);
// If both pages contain the same modified date, skip it
if (oldResult.Success && newResult.Success && oldResult.Groups[1].Value == newResult.Groups[1].Value)
{
Console.WriteLine($"ID {paddedId} has not been changed since last download");
return false;
}
// If neither page contains a modified date, skip it
else if (!oldResult.Success && !newResult.Success)
{
Console.WriteLine($"ID {paddedId} has not been changed since last download");
return false;
}
}
// Create ID subdirectory
Directory.CreateDirectory(paddedIdDir);
// View Edit History
if (discPage.Contains($"<a href=\"/disc/{id}/changes/\""))
DownloadFile(string.Format(Constants.DiscPageUrl, +id) + Constants.ChangesExt, Path.Combine(paddedIdDir, "changes.html"));
// CUE
if (discPage.Contains($"<a href=\"/disc/{id}/cue/\""))
DownloadFile(string.Format(Constants.DiscPageUrl, +id) + Constants.CueExt, Path.Combine(paddedIdDir, paddedId + ".cue"));
// Edit disc
if (discPage.Contains($"<a href=\"/disc/{id}/edit/\""))
DownloadFile(string.Format(Constants.DiscPageUrl, +id) + Constants.EditExt, Path.Combine(paddedIdDir, "edit.html"));
// GDI
if (discPage.Contains($"<a href=\"/disc/{id}/gdi/\""))
DownloadFile(string.Format(Constants.DiscPageUrl, +id) + Constants.GdiExt, Path.Combine(paddedIdDir, paddedId + ".gdi"));
// KEYS
if (discPage.Contains($"<a href=\"/disc/{id}/key/\""))
DownloadFile(string.Format(Constants.DiscPageUrl, +id) + Constants.KeyExt, Path.Combine(paddedIdDir, paddedId + ".key"));
// LSD
if (discPage.Contains($"<a href=\"/disc/{id}/lsd/\""))
DownloadFile(string.Format(Constants.DiscPageUrl, +id) + Constants.LsdExt, Path.Combine(paddedIdDir, paddedId + ".lsd"));
// MD5
if (discPage.Contains($"<a href=\"/disc/{id}/md5/\""))
DownloadFile(string.Format(Constants.DiscPageUrl, +id) + Constants.Md5Ext, Path.Combine(paddedIdDir, paddedId + ".md5"));
// Review WIP entry
if (Constants.NewDiscRegex.IsMatch(discPage))
{
var match = Constants.NewDiscRegex.Match(discPage);
DownloadFile(string.Format(Constants.WipDiscPageUrl, match.Groups[2].Value), Path.Combine(paddedIdDir, "newdisc.html"));
}
// SBI
if (discPage.Contains($"<a href=\"/disc/{id}/sbi/\""))
DownloadFile(string.Format(Constants.DiscPageUrl, +id) + Constants.SbiExt, Path.Combine(paddedIdDir, paddedId + ".sbi"));
// SFV
if (discPage.Contains($"<a href=\"/disc/{id}/sfv/\""))
DownloadFile(string.Format(Constants.DiscPageUrl, +id) + Constants.SfvExt, Path.Combine(paddedIdDir, paddedId + ".sfv"));
// SHA1
if (discPage.Contains($"<a href=\"/disc/{id}/sha1/\""))
DownloadFile(string.Format(Constants.DiscPageUrl, +id) + Constants.Sha1Ext, Path.Combine(paddedIdDir, paddedId + ".sha1"));
// HTML (Last in case of errors)
using (var discStreamWriter = File.CreateText(Path.Combine(paddedIdDir, "disc.html")))
{
discStreamWriter.Write(discPage);
}
Console.WriteLine($"ID {paddedId} has been successfully downloaded");
return true;
}
catch (Exception ex)
{
Console.WriteLine($"An exception has occurred: {ex}");
return false;
}
}
/// <summary>
/// Download an individual WIP ID data, if possible
/// </summary>
/// <param name="id">Redump WIP disc ID to retrieve</param>
/// <returns>String containing the page contents if successful, null on error</returns>
public string? DownloadSingleWIPID(int id)
{
string paddedId = id.ToString().PadLeft(6, '0');
Console.WriteLine($"Processing ID: {paddedId}");
try
{
string discPage = string.Empty;
// Try up to 3 times to retrieve the data
for (int i = 0; i < 3; i++)
{
try
{
discPage = DownloadString(string.Format(Constants.WipDiscPageUrl, +id));
break;
}
catch { }
}
if (discPage.Contains($"WIP disc with ID \"{id}\" doesn't exist"))
{
Console.WriteLine($"ID {paddedId} could not be found!");
return null;
}
Console.WriteLine($"ID {paddedId} has been successfully downloaded");
return discPage;
}
catch (Exception ex)
{
Console.WriteLine($"An exception has occurred: {ex}");
return null;
}
}
/// <summary>
/// Download an individual WIP ID data, if possible
/// </summary>
/// <param name="id">Redump WIP disc ID to retrieve</param>
/// <param name="outDir">Output directory to save data to</param>
/// <param name="rename">True to rename deleted entries, false otherwise</param>
/// <returns>True if all data was downloaded, false otherwise</returns>
public bool DownloadSingleWIPID(int id, string? outDir, bool rename)
{
// If no output directory is defined, use the current directory instead
if (string.IsNullOrWhiteSpace(outDir))
outDir = Environment.CurrentDirectory;
string paddedId = id.ToString().PadLeft(6, '0');
string paddedIdDir = Path.Combine(outDir, paddedId);
Console.WriteLine($"Processing ID: {paddedId}");
try
{
string discPage = string.Empty;
// Try up to 3 times to retrieve the data
for (int i = 0; i < 3; i++)
{
try
{
discPage = DownloadString(string.Format(Constants.WipDiscPageUrl, +id));
break;
}
catch { }
}
if (discPage.Contains($"WIP disc with ID \"{id}\" doesn't exist"))
{
try
{
if (rename)
{
if (Directory.Exists(paddedIdDir) && rename)
Directory.Move(paddedIdDir, paddedIdDir + "-deleted");
else
Directory.CreateDirectory(paddedIdDir + "-deleted");
}
}
catch { }
Console.WriteLine($"ID {paddedId} could not be found!");
return false;
}
// Check if the page has been updated since the last time it was downloaded, if possible
if (File.Exists(Path.Combine(paddedIdDir, "disc.html")))
{
// Read in the cached file
var oldDiscPage = File.ReadAllText(Path.Combine(paddedIdDir, "disc.html"));
// Check for the full match ID in both pages
var oldResult = Constants.FullMatchRegex.Match(oldDiscPage);
var newResult = Constants.FullMatchRegex.Match(discPage);
// If both pages contain the same ID, skip it
if (oldResult.Success && newResult.Success && oldResult.Groups[1].Value == newResult.Groups[1].Value)
{
Console.WriteLine($"ID {paddedId} has not been changed since last download");
return false;
}
// If neither page contains an ID, skip it
else if (!oldResult.Success && !newResult.Success)
{
Console.WriteLine($"ID {paddedId} has not been changed since last download");
return false;
}
}
// Create ID subdirectory
Directory.CreateDirectory(paddedIdDir);
// HTML
using (var discStreamWriter = File.CreateText(Path.Combine(paddedIdDir, "disc.html")))
{
discStreamWriter.Write(discPage);
}
Console.WriteLine($"ID {paddedId} has been successfully downloaded");
return true;
}
catch (Exception ex)
{
Console.WriteLine($"An exception has occurred: {ex}");
return false;
}
}
#endregion
#region Helpers
/// <summary>
/// Download a set of packs
/// </summary>
/// <param name="url">Base URL to download using</param>
/// <param name="system">Systems to download packs for</param>
/// <param name="title">Name of the pack that is downloading</param>
public Dictionary<RedumpSystem, byte[]> DownloadPacks(string url, RedumpSystem?[] systems, string title)
{
var packsDictionary = new Dictionary<RedumpSystem, byte[]>();
Console.WriteLine($"Downloading {title}");
foreach (var system in systems)
{
// If the system is invalid, we can't do anything
if (system == null || !system.IsAvailable())
continue;
// If we didn't have credentials
if (!LoggedIn && system.IsBanned())
continue;
// If the system is unknown, we can't do anything
string? longName = system.LongName();
if (string.IsNullOrWhiteSpace(longName))
continue;
Console.Write($"\r{longName}{new string(' ', Console.BufferWidth - longName!.Length - 1)}");
byte[]? pack = DownloadSinglePack(url, system);
if (pack != null)
packsDictionary.Add(system.Value, pack);
}
Console.Write($"\rComplete!{new string(' ', Console.BufferWidth - 10)}");
Console.WriteLine();
return packsDictionary;
}
/// <summary>
/// Download a set of packs
/// </summary>
/// <param name="url">Base URL to download using</param>
/// <param name="system">Systems to download packs for</param>
/// <param name="title">Name of the pack that is downloading</param>
/// <param name="outDir">Output directory to save data to</param>
/// <param name="subfolder">Named subfolder for the pack, used optionally</param>
public void DownloadPacks(string url, RedumpSystem?[] systems, string title, string? outDir, string? subfolder)
{
Console.WriteLine($"Downloading {title}");
foreach (var system in systems)
{
// If the system is invalid, we can't do anything
if (system == null || !system.IsAvailable())
continue;
// If we didn't have credentials
if (!LoggedIn && system.IsBanned())
continue;
// If the system is unknown, we can't do anything
string? longName = system.LongName();
if (string.IsNullOrWhiteSpace(longName))
continue;
Console.Write($"\r{longName}{new string(' ', Console.BufferWidth - longName!.Length - 1)}");
DownloadSinglePack(url, system, outDir, subfolder);
}
Console.Write($"\rComplete!{new string(' ', Console.BufferWidth - 10)}");
Console.WriteLine();
}
/// <summary>
/// Move a tempfile to a new name unless it already exists, in which case, delete the tempfile
/// </summary>
/// <param name="tempfile">Path to existing temporary file</param>
/// <param name="newfile">Path to new output file</param>
/// <param name="outDir">Output directory to save data to</param>
/// <param name="subfolder">Optional subfolder to append to the path</param>
private static void MoveOrDelete(string tempfile, string? newfile, string outDir, string? subfolder)
{
if (!string.IsNullOrWhiteSpace(newfile))
{
if (!string.IsNullOrWhiteSpace(subfolder))
{
if (!Directory.Exists(Path.Combine(outDir, subfolder)))
Directory.CreateDirectory(Path.Combine(outDir, subfolder));
newfile = Path.Combine(subfolder, newfile);
}
if (File.Exists(Path.Combine(outDir, newfile)))
File.Delete(tempfile);
else
File.Move(tempfile, Path.Combine(outDir, newfile));
}
else
{
File.Delete(tempfile);
}
}
#endregion
}
}
#endif

publish-nix.sh Executable file

@@ -0,0 +1,151 @@
#! /bin/bash
# This shell script assumes the following:
# - .NET 9.0 (or newer) SDK is installed and in PATH
#
# If any of these are not satisfied, the operation may fail
# in an unpredictable way and result in an incomplete output.
# Optional parameters
USE_ALL=false
INCLUDE_DEBUG=false
NO_BUILD=false
NO_ARCHIVE=false
while getopts "udba" OPTION; do
case $OPTION in
u)
USE_ALL=true
;;
d)
INCLUDE_DEBUG=true
;;
b)
NO_BUILD=true
;;
a)
NO_ARCHIVE=true
;;
*)
echo "Invalid option provided"
exit 1
;;
esac
done
# Set the current directory as a variable
BUILD_FOLDER=$PWD
# Set the current commit hash
COMMIT=`git log --pretty=%H -1`
# Output the selected options
echo "Selected Options:"
echo " Use all frameworks (-u) $USE_ALL"
echo " Include debug builds (-d) $INCLUDE_DEBUG"
echo " No build (-b) $NO_BUILD"
echo " No archive (-a) $NO_ARCHIVE"
echo " "
# Create the build matrix arrays
FRAMEWORKS=("net9.0")
RUNTIMES=("win-x86" "win-x64" "win-arm64" "linux-x64" "linux-arm64" "osx-x64" "osx-arm64")
# Use expanded lists, if requested
if [ $USE_ALL = true ]
then
FRAMEWORKS=("net20" "net35" "net40" "net452" "net462" "net472" "net48" "netcoreapp3.1" "net5.0" "net6.0" "net7.0" "net8.0" "net9.0")
fi
# Create the filter arrays
SINGLE_FILE_CAPABLE=("net5.0" "net6.0" "net7.0" "net8.0" "net9.0")
VALID_APPLE_FRAMEWORKS=("net6.0" "net7.0" "net8.0" "net9.0")
VALID_CROSS_PLATFORM_FRAMEWORKS=("netcoreapp3.1" "net5.0" "net6.0" "net7.0" "net8.0" "net9.0")
VALID_CROSS_PLATFORM_RUNTIMES=("win-arm64" "linux-x64" "linux-arm64" "osx-x64" "osx-arm64")
# Only build if requested
if [ $NO_BUILD = false ]
then
# Restore Nuget packages for all builds
echo "Restoring Nuget packages"
dotnet restore
# Create Nuget Package
dotnet pack SabreTools.RedumpLib/SabreTools.RedumpLib.csproj --output $BUILD_FOLDER
# Build Tool
for FRAMEWORK in "${FRAMEWORKS[@]}"; do
for RUNTIME in "${RUNTIMES[@]}"; do
# Output the current build
echo "===== Build Tool - $FRAMEWORK, $RUNTIME ====="
# If we have an invalid combination of framework and runtime
if [[ ! $(echo ${VALID_CROSS_PLATFORM_FRAMEWORKS[@]} | fgrep -w $FRAMEWORK) ]]; then
if [[ $(echo ${VALID_CROSS_PLATFORM_RUNTIMES[@]} | fgrep -w $RUNTIME) ]]; then
echo "Skipped due to invalid combination"
continue
fi
fi
# If we have Apple silicon but an unsupported framework
if [[ ! $(echo ${VALID_APPLE_FRAMEWORKS[@]} | fgrep -w $FRAMEWORK) ]]; then
if [ $RUNTIME = "osx-arm64" ]; then
echo "Skipped due to no Apple Silicon support"
continue
fi
fi
# Only .NET 5 and above can publish to a single file
if [[ $(echo ${SINGLE_FILE_CAPABLE[@]} | fgrep -w $FRAMEWORK) ]]; then
# Only include Debug if set
if [ $INCLUDE_DEBUG = true ]; then
dotnet publish RedumpTool/RedumpTool.csproj -f $FRAMEWORK -r $RUNTIME -c Debug --self-contained true --version-suffix $COMMIT -p:PublishSingleFile=true
fi
dotnet publish RedumpTool/RedumpTool.csproj -f $FRAMEWORK -r $RUNTIME -c Release --self-contained true --version-suffix $COMMIT -p:PublishSingleFile=true -p:DebugType=None -p:DebugSymbols=false
else
# Only include Debug if set
if [ $INCLUDE_DEBUG = true ]; then
dotnet publish RedumpTool/RedumpTool.csproj -f $FRAMEWORK -r $RUNTIME -c Debug --self-contained true --version-suffix $COMMIT
fi
dotnet publish RedumpTool/RedumpTool.csproj -f $FRAMEWORK -r $RUNTIME -c Release --self-contained true --version-suffix $COMMIT -p:DebugType=None -p:DebugSymbols=false
fi
done
done
fi
# Only create archives if requested
if [ $NO_ARCHIVE = false ]; then
# Create Tool archives
for FRAMEWORK in "${FRAMEWORKS[@]}"; do
for RUNTIME in "${RUNTIMES[@]}"; do
# Output the current build
echo "===== Archive Tool - $FRAMEWORK, $RUNTIME ====="
# If we have an invalid combination of framework and runtime
if [[ ! $(echo ${VALID_CROSS_PLATFORM_FRAMEWORKS[@]} | fgrep -w $FRAMEWORK) ]]; then
if [[ $(echo ${VALID_CROSS_PLATFORM_RUNTIMES[@]} | fgrep -w $RUNTIME) ]]; then
echo "Skipped due to invalid combination"
continue
fi
fi
# If we have Apple silicon but an unsupported framework
if [[ ! $(echo ${VALID_APPLE_FRAMEWORKS[@]} | fgrep -w $FRAMEWORK) ]]; then
if [ $RUNTIME = "osx-arm64" ]; then
echo "Skipped due to no Apple Silicon support"
continue
fi
fi
# Only include Debug if set
if [ $INCLUDE_DEBUG = true ]; then
cd $BUILD_FOLDER/RedumpTool/bin/Debug/${FRAMEWORK}/${RUNTIME}/publish/
zip -r $BUILD_FOLDER/RedumpTool_${FRAMEWORK}_${RUNTIME}_debug.zip .
fi
cd $BUILD_FOLDER/RedumpTool/bin/Release/${FRAMEWORK}/${RUNTIME}/publish/
zip -r $BUILD_FOLDER/RedumpTool_${FRAMEWORK}_${RUNTIME}_release.zip .
done
done
# Reset the directory
cd $BUILD_FOLDER
fi

publish-win.ps1 Normal file

@@ -0,0 +1,135 @@
# This PowerShell script assumes the following:
# - .NET 9.0 (or newer) SDK is installed and in PATH
#
# If any of these are not satisfied, the operation may fail
# in an unpredictable way and result in an incomplete output.
# Optional parameters
param(
[Parameter(Mandatory = $false)]
[Alias("UseAll")]
[switch]$USE_ALL,
[Parameter(Mandatory = $false)]
[Alias("IncludeDebug")]
[switch]$INCLUDE_DEBUG,
[Parameter(Mandatory = $false)]
[Alias("NoBuild")]
[switch]$NO_BUILD,
[Parameter(Mandatory = $false)]
[Alias("NoArchive")]
[switch]$NO_ARCHIVE
)
# Set the current directory as a variable
$BUILD_FOLDER = $PSScriptRoot
# Set the current commit hash
$COMMIT = git log --pretty=format:"%H" -1
# Output the selected options
Write-Host "Selected Options:"
Write-Host " Use all frameworks (-UseAll) $USE_ALL"
Write-Host " Include debug builds (-IncludeDebug) $INCLUDE_DEBUG"
Write-Host " No build (-NoBuild) $NO_BUILD"
Write-Host " No archive (-NoArchive) $NO_ARCHIVE"
Write-Host " "
# Create the build matrix arrays
$FRAMEWORKS = @('net9.0')
$RUNTIMES = @('win-x86', 'win-x64', 'win-arm64', 'linux-x64', 'linux-arm64', 'osx-x64', 'osx-arm64')
# Use expanded lists, if requested
if ($USE_ALL.IsPresent)
{
$FRAMEWORKS = @('net20', 'net35', 'net40', 'net452', 'net462', 'net472', 'net48', 'netcoreapp3.1', 'net5.0', 'net6.0', 'net7.0', 'net8.0', 'net9.0')
}
# Create the filter arrays
$SINGLE_FILE_CAPABLE = @('net5.0', 'net6.0', 'net7.0', 'net8.0', 'net9.0')
$VALID_APPLE_FRAMEWORKS = @('net6.0', 'net7.0', 'net8.0', 'net9.0')
$VALID_CROSS_PLATFORM_FRAMEWORKS = @('netcoreapp3.1', 'net5.0', 'net6.0', 'net7.0', 'net8.0', 'net9.0')
$VALID_CROSS_PLATFORM_RUNTIMES = @('win-arm64', 'linux-x64', 'linux-arm64', 'osx-x64', 'osx-arm64')
# Only build if requested
if (!$NO_BUILD.IsPresent)
{
# Restore Nuget packages for all builds
Write-Host "Restoring Nuget packages"
dotnet restore
# Create Nuget Package
dotnet pack SabreTools.RedumpLib\SabreTools.RedumpLib.csproj --output $BUILD_FOLDER
# Build Tool
foreach ($FRAMEWORK in $FRAMEWORKS) {
foreach ($RUNTIME in $RUNTIMES) {
# Output the current build
Write-Host "===== Build Tool - $FRAMEWORK, $RUNTIME ====="
# If we have an invalid combination of framework and runtime
if ($VALID_CROSS_PLATFORM_FRAMEWORKS -notcontains $FRAMEWORK -and $VALID_CROSS_PLATFORM_RUNTIMES -contains $RUNTIME) {
Write-Host "Skipped due to invalid combination"
continue
}
# If we have Apple silicon but an unsupported framework
if ($VALID_APPLE_FRAMEWORKS -notcontains $FRAMEWORK -and $RUNTIME -eq 'osx-arm64') {
Write-Host "Skipped due to no Apple Silicon support"
continue
}
# Only .NET 5 and above can publish to a single file
if ($SINGLE_FILE_CAPABLE -contains $FRAMEWORK) {
# Only include Debug if set
if ($INCLUDE_DEBUG.IsPresent) {
dotnet publish RedumpTool\RedumpTool.csproj -f $FRAMEWORK -r $RUNTIME -c Debug --self-contained true --version-suffix $COMMIT -p:PublishSingleFile=true
}
dotnet publish RedumpTool\RedumpTool.csproj -f $FRAMEWORK -r $RUNTIME -c Release --self-contained true --version-suffix $COMMIT -p:PublishSingleFile=true -p:DebugType=None -p:DebugSymbols=false
}
else {
# Only include Debug if set
if ($INCLUDE_DEBUG.IsPresent) {
dotnet publish RedumpTool\RedumpTool.csproj -f $FRAMEWORK -r $RUNTIME -c Debug --self-contained true --version-suffix $COMMIT
}
dotnet publish RedumpTool\RedumpTool.csproj -f $FRAMEWORK -r $RUNTIME -c Release --self-contained true --version-suffix $COMMIT -p:DebugType=None -p:DebugSymbols=false
}
}
}
}
# Only create archives if requested
if (!$NO_ARCHIVE.IsPresent) {
# Create Tool archives
foreach ($FRAMEWORK in $FRAMEWORKS) {
foreach ($RUNTIME in $RUNTIMES) {
# Output the current build
Write-Host "===== Archive Tool - $FRAMEWORK, $RUNTIME ====="
# If we have an invalid combination of framework and runtime
if ($VALID_CROSS_PLATFORM_FRAMEWORKS -notcontains $FRAMEWORK -and $VALID_CROSS_PLATFORM_RUNTIMES -contains $RUNTIME) {
Write-Host "Skipped due to invalid combination"
continue
}
# If we have Apple silicon but an unsupported framework
if ($VALID_APPLE_FRAMEWORKS -notcontains $FRAMEWORK -and $RUNTIME -eq 'osx-arm64') {
Write-Host "Skipped due to no Apple Silicon support"
continue
}
# Only include Debug if set
if ($INCLUDE_DEBUG.IsPresent) {
Set-Location -Path $BUILD_FOLDER\RedumpTool\bin\Debug\${FRAMEWORK}\${RUNTIME}\publish\
7z a -tzip $BUILD_FOLDER\RedumpTool_${FRAMEWORK}_${RUNTIME}_debug.zip *
}
Set-Location -Path $BUILD_FOLDER\RedumpTool\bin\Release\${FRAMEWORK}\${RUNTIME}\publish\
7z a -tzip $BUILD_FOLDER\RedumpTool_${FRAMEWORK}_${RUNTIME}_release.zip *
}
}
# Reset the directory
Set-Location -Path $PSScriptRoot
}